Mirror of https://github.com/ziglang/zig.git

Merge remote-tracking branch 'origin/master' into llvm7

commit f804310d9f

README.md (38 lines changed)
@@ -74,44 +74,6 @@ that counts as "freestanding" for the purposes of this table.
* Reddit: [/r/zig](https://www.reddit.com/r/zig)
* Email list: [ziglang@googlegroups.com](https://groups.google.com/forum/#!forum/ziglang)

### Wanted: Windows Developers

Flesh out the standard library for Windows, streamline Zig installation and
distribution for Windows. Work with LLVM and LLD teams to improve
PDB/CodeView/MSVC debugging. Implement stack traces for Windows in the MinGW
environment and the MSVC environment.

### Wanted: MacOS and iOS Developers

Flesh out the standard library for MacOS. Improve the MACH-O linker. Implement
stack traces for MacOS. Streamline the process of using Zig to build for
iOS.

### Wanted: Android Developers

Flesh out the standard library for Android. Streamline the process of using
Zig to build for Android and for depending on Zig code on Android.

### Wanted: Web Developers

Figure out what the use cases are for compiling Zig to WebAssembly. Create demo
projects with it and streamline the experience for users trying to output
WebAssembly. Work on the documentation generator outputting useful searchable HTML
documentation. Create Zig modules for common web tasks such as WebSockets and gzip.

### Wanted: Embedded Developers

Flesh out the standard library for uncommon CPU architectures and OS targets.
Drive issue discussion for cross compiling and using Zig in constrained
or unusual environments.

### Wanted: Game Developers

Create cross platform Zig modules to compete with SDL and GLFW. Create an
OpenGL library that does not depend on libc. Drive the usability of Zig
for video games. Create a general purpose allocator that does not depend on
libc. Create demo games using Zig.

## Building

[![Build Status](https://travis-ci.org/ziglang/zig.svg?branch=master)](https://travis-ci.org/ziglang/zig)
@@ -45,6 +45,7 @@ pub fn build(b: *Builder) !void {
        .c_header_files = nextValue(&index, build_info),
        .dia_guids_lib = nextValue(&index, build_info),
        .llvm = undefined,
        .no_rosegment = b.option(bool, "no-rosegment", "Workaround to enable valgrind builds") orelse false,
    };
    ctx.llvm = try findLLVM(b, ctx.llvm_config_exe);

@@ -228,6 +229,8 @@ fn configureStage2(b: *Builder, exe: var, ctx: Context) !void {
    // TODO turn this into -Dextra-lib-path=/lib option
    exe.addLibPath("/lib");

    exe.setNoRoSegment(ctx.no_rosegment);

    exe.addIncludeDir("src");
    exe.addIncludeDir(ctx.cmake_binary_dir);
    addCppLib(b, exe, ctx.cmake_binary_dir, "zig_cpp");

@@ -286,4 +289,5 @@ const Context = struct {
    c_header_files: []const u8,
    dia_guids_lib: []const u8,
    llvm: LibraryDep,
    no_rosegment: bool,
};
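As an aside, a minimal sketch (not part of this commit) of the `b.option` pattern the first hunk relies on, assuming the contemporaneous `std.build.Builder` API; the executable name and root source path are hypothetical:

```zig
const Builder = @import("std").build.Builder;

pub fn build(b: *Builder) void {
    // Declares a boolean -Dno-rosegment=... flag for `zig build`;
    // `orelse false` supplies the default when the flag is not passed.
    const no_rosegment = b.option(bool, "no-rosegment", "Workaround to enable valgrind builds") orelse false;

    const exe = b.addExecutable("demo", "src/main.zig");
    // Forward the flag to the compile step, as configureStage2 does above.
    exe.setNoRoSegment(no_rosegment);
}
```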
deps/lld/COFF/Driver.cpp (vendored, 3 lines changed)

@@ -72,6 +72,9 @@ bool link(ArrayRef<const char *> Args, bool CanExitEarly, raw_ostream &Diag) {
    exitLld(errorCount() ? 1 : 0);

  freeArena();
  ObjFile::Instances.clear();
  ImportFile::Instances.clear();
  BitcodeFile::Instances.clear();
  return !errorCount();
}
@@ -134,6 +134,58 @@ pub fn main() void {
</p>
{#see_also|Values|@import|Errors|Root Source File#}
{#header_close#}
{#header_open|Comments#}
{#code_begin|test|comments#}
const assert = @import("std").debug.assert;

test "comments" {
    // Comments in Zig start with "//" and end at the next LF byte (end of line).
    // The below line is a comment, and won't be executed.

    //assert(false);

    const x = true; // another comment
    assert(x);
}
{#code_end#}
<p>
There are no multiline comments in Zig (e.g. like <code>/* */</code>
comments in C). This allows Zig to have the property that each line
of code can be tokenized out of context.
</p>
{#header_open|Doc comments#}
<p>
A doc comment is one that begins with exactly three slashes (i.e.
<code class="zig">///</code> but not <code class="zig">////</code>);
multiple doc comments in a row are merged together to form a multiline
doc comment. The doc comment documents whatever immediately follows it.
</p>
{#code_begin|syntax|doc_comments#}
/// A structure for storing a timestamp, with nanosecond precision (this is a
/// multiline doc comment).
const Timestamp = struct {
    /// The number of seconds since the epoch (this is also a doc comment).
    seconds: i64, // signed so we can represent pre-1970 (not a doc comment)
    /// The number of nanoseconds past the second (doc comment again).
    nanos: u32,

    /// Returns a `Timestamp` struct representing the Unix epoch; that is, the
    /// moment of 1970 Jan 1 00:00:00 UTC (this is a doc comment too).
    pub fn unixEpoch() Timestamp {
        return Timestamp{
            .seconds = 0,
            .nanos = 0,
        };
    }
};
{#code_end#}
<p>
Doc comments are only allowed in certain places; eventually, it will
become a compile error to have a doc comment in an unexpected place, such as
in the middle of an expression, or just before a non-doc comment.
</p>
{#header_close#}
{#header_close#}
{#header_open|Values#}
{#code_begin|exe|values#}
const std = @import("std");
@@ -4665,24 +4717,24 @@ async fn testSuspendBlock() void {
block, while the old thread continued executing the suspend block.
</p>
<p>
However, if you use labeled <code>break</code> on the suspend block, the coroutine
However, the coroutine can be directly resumed from the suspend block, in which case it
never returns to its resumer and continues executing.
</p>
{#code_begin|test#}
const std = @import("std");
const assert = std.debug.assert;

test "break from suspend" {
test "resume from suspend" {
    var buf: [500]u8 = undefined;
    var a = &std.heap.FixedBufferAllocator.init(buf[0..]).allocator;
    var my_result: i32 = 1;
    const p = try async<a> testBreakFromSuspend(&my_result);
    const p = try async<a> testResumeFromSuspend(&my_result);
    cancel p;
    std.debug.assert(my_result == 2);
}
async fn testBreakFromSuspend(my_result: *i32) void {
    s: suspend |p| {
        break :s;
async fn testResumeFromSuspend(my_result: *i32) void {
    suspend |p| {
        resume p;
    }
    my_result.* += 1;
    suspend;

@@ -7336,7 +7388,7 @@ Defer(body) = ("defer" | "deferror") body

IfExpression(body) = "if" "(" Expression ")" body option("else" BlockExpression(body))

SuspendExpression(body) = option(Symbol ":") "suspend" option(("|" Symbol "|" body))
SuspendExpression(body) = "suspend" option(("|" Symbol "|" body))

IfErrorExpression(body) = "if" "(" Expression ")" option("|" option("*") Symbol "|") body "else" "|" Symbol "|" BlockExpression(body)
@@ -6,6 +6,7 @@ const c = @import("c.zig");
const ir = @import("ir.zig");
const Value = @import("value.zig").Value;
const Type = @import("type.zig").Type;
const Scope = @import("scope.zig").Scope;
const event = std.event;
const assert = std.debug.assert;
const DW = std.dwarf;

@@ -156,7 +157,7 @@ pub fn renderToLlvmModule(ofile: *ObjectFile, fn_val: *Value.Fn, code: *ir.Code)
        llvm_fn_type,
    ) orelse return error.OutOfMemory;

    const want_fn_safety = fn_val.block_scope.safety.get(ofile.comp);
    const want_fn_safety = fn_val.block_scope.?.safety.get(ofile.comp);
    if (want_fn_safety and ofile.comp.haveLibC()) {
        try addLLVMFnAttr(ofile, llvm_fn, "sspstrong");
        try addLLVMFnAttrStr(ofile, llvm_fn, "stack-protector-buffer-size", "4");

@@ -168,6 +169,7 @@ pub fn renderToLlvmModule(ofile: *ObjectFile, fn_val: *Value.Fn, code: *ir.Code)
    //}

    const fn_type = fn_val.base.typ.cast(Type.Fn).?;
    const fn_type_normal = &fn_type.key.data.Normal;

    try addLLVMFnAttr(ofile, llvm_fn, "nounwind");
    //add_uwtable_attr(g, fn_table_entry->llvm_value);

@@ -209,7 +211,7 @@ pub fn renderToLlvmModule(ofile: *ObjectFile, fn_val: *Value.Fn, code: *ir.Code)
    // addLLVMArgAttr(fn_table_entry->llvm_value, (unsigned)err_ret_trace_arg_index, "nonnull");
    //}

    const cur_ret_ptr = if (fn_type.return_type.handleIsPtr()) llvm.GetParam(llvm_fn, 0) else null;
    const cur_ret_ptr = if (fn_type_normal.return_type.handleIsPtr()) llvm.GetParam(llvm_fn, 0) else null;

    // build all basic blocks
    for (code.basic_block_list.toSlice()) |bb| {

@@ -226,9 +228,86 @@ pub fn renderToLlvmModule(ofile: *ObjectFile, fn_val: *Value.Fn, code: *ir.Code)

    // TODO set up error return tracing
    // TODO allocate temporary stack values
    // TODO create debug variable declarations for variables and allocate all local variables

    const var_list = fn_type.non_key.Normal.variable_list.toSliceConst();
    // create debug variable declarations for variables and allocate all local variables
    for (var_list) |var_scope, i| {
        const var_type = switch (var_scope.data) {
            Scope.Var.Data.Const => unreachable,
            Scope.Var.Data.Param => |param| param.typ,
        };
        // if (!type_has_bits(var->value->type)) {
        //     continue;
        // }
        // if (ir_get_var_is_comptime(var))
        //     continue;
        // if (type_requires_comptime(var->value->type))
        //     continue;
        // if (var->src_arg_index == SIZE_MAX) {
        //     var->value_ref = build_alloca(g, var->value->type, buf_ptr(&var->name), var->align_bytes);

        //     var->di_loc_var = ZigLLVMCreateAutoVariable(g->dbuilder, get_di_scope(g, var->parent_scope),
        //         buf_ptr(&var->name), import->di_file, (unsigned)(var->decl_node->line + 1),
        //         var->value->type->di_type, !g->strip_debug_symbols, 0);

        // } else {
        //     it's a parameter
        //     assert(var->gen_arg_index != SIZE_MAX);
        //     TypeTableEntry *gen_type;
        //     FnGenParamInfo *gen_info = &fn_table_entry->type_entry->data.fn.gen_param_info[var->src_arg_index];

        if (var_type.handleIsPtr()) {
            // if (gen_info->is_byval) {
            //     gen_type = var->value->type;
            // } else {
            //     gen_type = gen_info->type;
            // }
            var_scope.data.Param.llvm_value = llvm.GetParam(llvm_fn, @intCast(c_uint, i));
        } else {
            // gen_type = var->value->type;
            var_scope.data.Param.llvm_value = try renderAlloca(ofile, var_type, var_scope.name, Type.Pointer.Align.Abi);
        }
        // if (var->decl_node) {
        //     var->di_loc_var = ZigLLVMCreateParameterVariable(g->dbuilder, get_di_scope(g, var->parent_scope),
        //         buf_ptr(&var->name), import->di_file,
        //         (unsigned)(var->decl_node->line + 1),
        //         gen_type->di_type, !g->strip_debug_symbols, 0, (unsigned)(var->gen_arg_index + 1));
        // }

        // }
    }

    // TODO finishing error return trace setup. we have to do this after all the allocas.
    // TODO create debug variable declarations for parameters

    // create debug variable declarations for parameters
    // rely on the first variables in the variable_list being parameters.
    //size_t next_var_i = 0;
    for (fn_type.key.data.Normal.params) |param, i| {
        //FnGenParamInfo *info = &fn_table_entry->type_entry->data.fn.gen_param_info[param_i];
        //if (info->gen_index == SIZE_MAX)
        //    continue;
        const scope_var = var_list[i];
        //assert(variable->src_arg_index != SIZE_MAX);
        //next_var_i += 1;
        //assert(variable);
        //assert(variable->value_ref);

        if (!param.typ.handleIsPtr()) {
            //clear_debug_source_node(g);
            const llvm_param = llvm.GetParam(llvm_fn, @intCast(c_uint, i));
            _ = renderStoreUntyped(
                ofile,
                llvm_param,
                scope_var.data.Param.llvm_value,
                Type.Pointer.Align.Abi,
                Type.Pointer.Vol.Non,
            );
        }

        //if (variable->decl_node) {
        //    gen_var_debug_decl(g, variable);
        //}
    }

    for (code.basic_block_list.toSlice()) |current_block| {
        llvm.PositionBuilderAtEnd(ofile.builder, current_block.llvm_block);

@@ -293,3 +372,79 @@ fn addLLVMFnAttrStr(ofile: *ObjectFile, fn_val: llvm.ValueRef, attr_name: []cons
fn addLLVMFnAttrInt(ofile: *ObjectFile, fn_val: llvm.ValueRef, attr_name: []const u8, attr_val: u64) !void {
    return addLLVMAttrInt(ofile, fn_val, @maxValue(llvm.AttributeIndex), attr_name, attr_val);
}

fn renderLoadUntyped(
    ofile: *ObjectFile,
    ptr: llvm.ValueRef,
    alignment: Type.Pointer.Align,
    vol: Type.Pointer.Vol,
    name: [*]const u8,
) !llvm.ValueRef {
    const result = llvm.BuildLoad(ofile.builder, ptr, name) orelse return error.OutOfMemory;
    switch (vol) {
        Type.Pointer.Vol.Non => {},
        Type.Pointer.Vol.Volatile => llvm.SetVolatile(result, 1),
    }
    llvm.SetAlignment(result, resolveAlign(ofile, alignment, llvm.GetElementType(llvm.TypeOf(ptr))));
    return result;
}

fn renderLoad(ofile: *ObjectFile, ptr: llvm.ValueRef, ptr_type: *Type.Pointer, name: [*]const u8) !llvm.ValueRef {
    return renderLoadUntyped(ofile, ptr, ptr_type.key.alignment, ptr_type.key.vol, name);
}

pub fn getHandleValue(ofile: *ObjectFile, ptr: llvm.ValueRef, ptr_type: *Type.Pointer) !?llvm.ValueRef {
    const child_type = ptr_type.key.child_type;
    if (!child_type.hasBits()) {
        return null;
    }
    if (child_type.handleIsPtr()) {
        return ptr;
    }
    return try renderLoad(ofile, ptr, ptr_type, c"");
}

pub fn renderStoreUntyped(
    ofile: *ObjectFile,
    value: llvm.ValueRef,
    ptr: llvm.ValueRef,
    alignment: Type.Pointer.Align,
    vol: Type.Pointer.Vol,
) !llvm.ValueRef {
    const result = llvm.BuildStore(ofile.builder, value, ptr) orelse return error.OutOfMemory;
    switch (vol) {
        Type.Pointer.Vol.Non => {},
        Type.Pointer.Vol.Volatile => llvm.SetVolatile(result, 1),
    }
    llvm.SetAlignment(result, resolveAlign(ofile, alignment, llvm.TypeOf(value)));
    return result;
}

pub fn renderStore(
    ofile: *ObjectFile,
    value: llvm.ValueRef,
    ptr: llvm.ValueRef,
    ptr_type: *Type.Pointer,
) !llvm.ValueRef {
    return renderStoreUntyped(ofile, value, ptr, ptr_type.key.alignment, ptr_type.key.vol);
}

pub fn renderAlloca(
    ofile: *ObjectFile,
    var_type: *Type,
    name: []const u8,
    alignment: Type.Pointer.Align,
) !llvm.ValueRef {
    const llvm_var_type = try var_type.getLlvmType(ofile.arena, ofile.context);
    const name_with_null = try std.cstr.addNullByte(ofile.arena, name);
    const result = llvm.BuildAlloca(ofile.builder, llvm_var_type, name_with_null.ptr) orelse return error.OutOfMemory;
    llvm.SetAlignment(result, resolveAlign(ofile, alignment, llvm_var_type));
    return result;
}

pub fn resolveAlign(ofile: *ObjectFile, alignment: Type.Pointer.Align, llvm_type: llvm.TypeRef) u32 {
    return switch (alignment) {
        Type.Pointer.Align.Abi => return llvm.ABIAlignmentOfType(ofile.comp.target_data_ref, llvm_type),
        Type.Pointer.Align.Override => |a| a,
    };
}
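For orientation, a hedged sketch (not in the commit) of how the helpers above compose: a parameter whose type is not handled as a pointer gets an ABI-aligned stack slot via `renderAlloca` and is spilled into it with `renderStoreUntyped`, mirroring the loop in `renderToLlvmModule`; `spillParam` is a hypothetical name:

```zig
fn spillParam(ofile: *ObjectFile, llvm_fn: llvm.ValueRef, index: usize, typ: *Type) !llvm.ValueRef {
    // Allocate an ABI-aligned stack slot for the parameter...
    const slot = try renderAlloca(ofile, typ, "param", Type.Pointer.Align.Abi);
    // ...then copy the SSA argument value into it so later code can
    // address the parameter like any other local variable.
    const llvm_param = llvm.GetParam(llvm_fn, @intCast(c_uint, index));
    _ = try renderStoreUntyped(ofile, llvm_param, slot, Type.Pointer.Align.Abi, Type.Pointer.Vol.Non);
    return slot;
}
```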
@@ -35,6 +35,7 @@ const CInt = @import("c_int.zig").CInt;
pub const EventLoopLocal = struct {
    loop: *event.Loop,
    llvm_handle_pool: std.atomic.Stack(llvm.ContextRef),
    lld_lock: event.Lock,

    /// TODO pool these so that it doesn't have to lock
    prng: event.Locked(std.rand.DefaultPrng),

@@ -55,6 +56,7 @@ pub const EventLoopLocal = struct {

        return EventLoopLocal{
            .loop = loop,
            .lld_lock = event.Lock.init(loop),
            .llvm_handle_pool = std.atomic.Stack(llvm.ContextRef).init(),
            .prng = event.Locked(std.rand.DefaultPrng).init(loop, std.rand.DefaultPrng.init(seed)),
            .native_libc = event.Future(LibCInstallation).init(loop),

@@ -63,6 +65,7 @@ pub const EventLoopLocal = struct {

    /// Must be called only after EventLoop.run completes.
    fn deinit(self: *EventLoopLocal) void {
        self.lld_lock.deinit();
        while (self.llvm_handle_pool.pop()) |node| {
            c.LLVMContextDispose(node.data);
            self.loop.allocator.destroy(node);

@@ -220,12 +223,14 @@ pub const Compilation = struct {
    int_type_table: event.Locked(IntTypeTable),
    array_type_table: event.Locked(ArrayTypeTable),
    ptr_type_table: event.Locked(PtrTypeTable),
    fn_type_table: event.Locked(FnTypeTable),

    c_int_types: [CInt.list.len]*Type.Int,

    const IntTypeTable = std.HashMap(*const Type.Int.Key, *Type.Int, Type.Int.Key.hash, Type.Int.Key.eql);
    const ArrayTypeTable = std.HashMap(*const Type.Array.Key, *Type.Array, Type.Array.Key.hash, Type.Array.Key.eql);
    const PtrTypeTable = std.HashMap(*const Type.Pointer.Key, *Type.Pointer, Type.Pointer.Key.hash, Type.Pointer.Key.eql);
    const FnTypeTable = std.HashMap(*const Type.Fn.Key, *Type.Fn, Type.Fn.Key.hash, Type.Fn.Key.eql);
    const TypeTable = std.HashMap([]const u8, *Type, mem.hash_slice_u8, mem.eql_slice_u8);

    const CompileErrList = std.ArrayList(*Msg);

@@ -384,6 +389,7 @@ pub const Compilation = struct {
            .int_type_table = event.Locked(IntTypeTable).init(loop, IntTypeTable.init(loop.allocator)),
            .array_type_table = event.Locked(ArrayTypeTable).init(loop, ArrayTypeTable.init(loop.allocator)),
            .ptr_type_table = event.Locked(PtrTypeTable).init(loop, PtrTypeTable.init(loop.allocator)),
            .fn_type_table = event.Locked(FnTypeTable).init(loop, FnTypeTable.init(loop.allocator)),
            .c_int_types = undefined,

            .meta_type = undefined,

@@ -414,6 +420,7 @@ pub const Compilation = struct {
        comp.int_type_table.private_data.deinit();
        comp.array_type_table.private_data.deinit();
        comp.ptr_type_table.private_data.deinit();
        comp.fn_type_table.private_data.deinit();
        comp.arena_allocator.deinit();
        comp.loop.allocator.destroy(comp);
    }

@@ -1160,13 +1167,48 @@ async fn generateDeclFn(comp: *Compilation, fn_decl: *Decl.Fn) !void {
    fn_decl.value = Decl.Fn.Val{ .Fn = fn_val };
    symbol_name_consumed = true;

    // Define local parameter variables
    const root_scope = fn_decl.base.findRootScope();
    for (fn_type.key.data.Normal.params) |param, i| {
        //AstNode *param_decl_node = get_param_decl_node(fn_table_entry, i);
        const param_decl = @fieldParentPtr(ast.Node.ParamDecl, "base", fn_decl.fn_proto.params.at(i).*);
        const name_token = param_decl.name_token orelse {
            try comp.addCompileError(root_scope, Span{
                .first = param_decl.firstToken(),
                .last = param_decl.type_node.firstToken(),
            }, "missing parameter name");
            return error.SemanticAnalysisFailed;
        };
        const param_name = root_scope.tree.tokenSlice(name_token);

        // if (is_noalias && get_codegen_ptr_type(param_type) == nullptr) {
        //     add_node_error(g, param_decl_node, buf_sprintf("noalias on non-pointer parameter"));
        // }

        // TODO check for shadowing

        const var_scope = try Scope.Var.createParam(
            comp,
            fn_val.child_scope,
            param_name,
            &param_decl.base,
            i,
            param.typ,
        );
        fn_val.child_scope = &var_scope.base;

        try fn_type.non_key.Normal.variable_list.append(var_scope);
    }

    const analyzed_code = try await (async comp.genAndAnalyzeCode(
        &fndef_scope.base,
        fn_val.child_scope,
        body_node,
        fn_type.return_type,
        fn_type.key.data.Normal.return_type,
    ) catch unreachable);
    errdefer analyzed_code.destroy(comp.gpa());

    assert(fn_val.block_scope != null);

    // Kick off rendering to LLVM module, but it doesn't block the fn decl
    // analysis from being complete.
    try comp.prelink_group.call(codegen.renderToLlvm, comp, fn_val, analyzed_code);

@@ -1199,14 +1241,13 @@ async fn analyzeFnType(comp: *Compilation, scope: *Scope, fn_proto: *ast.Node.Fn

    var params = ArrayList(Type.Fn.Param).init(comp.gpa());
    var params_consumed = false;
    defer if (params_consumed) {
    defer if (!params_consumed) {
        for (params.toSliceConst()) |param| {
            param.typ.base.deref(comp);
        }
        params.deinit();
    };

    const is_var_args = false;
    {
        var it = fn_proto.params.iterator(0);
        while (it.next()) |param_node_ptr| {

@@ -1219,8 +1260,29 @@ async fn analyzeFnType(comp: *Compilation, scope: *Scope, fn_proto: *ast.Node.Fn
            });
        }
    }
    const fn_type = try Type.Fn.create(comp, return_type, params.toOwnedSlice(), is_var_args);

    const key = Type.Fn.Key{
        .alignment = null,
        .data = Type.Fn.Key.Data{
            .Normal = Type.Fn.Key.Normal{
                .return_type = return_type,
                .params = params.toOwnedSlice(),
                .is_var_args = false, // TODO
                .cc = Type.Fn.CallingConvention.Auto, // TODO
            },
        },
    };
    params_consumed = true;
    var key_consumed = false;
    defer if (!key_consumed) {
        for (key.data.Normal.params) |param| {
            param.typ.base.deref(comp);
        }
        comp.gpa().free(key.data.Normal.params);
    };

    const fn_type = try await (async Type.Fn.get(comp, key) catch unreachable);
    key_consumed = true;
    errdefer fn_type.base.base.deref(comp);

    return fn_type;
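The `params_consumed`/`key_consumed` flags above implement a hand-off pattern worth naming: a `defer` owns cleanup only until ownership moves elsewhere, at which point flipping the flag turns the cleanup into a no-op. A minimal sketch under 2018-era std APIs (`std.debug.global_allocator` is used purely for illustration):

```zig
const std = @import("std");

test "ownership flag guards cleanup until hand-off" {
    var list = std.ArrayList(i32).init(std.debug.global_allocator);
    var consumed = false;
    defer if (!consumed) list.deinit(); // runs only if we never hand off

    try list.append(42);
    const owned = list.toOwnedSlice(); // ownership moves to `owned`
    consumed = true; // from here on, the defer is a no-op
    defer std.debug.global_allocator.free(owned);
}
```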
@@ -10,8 +10,10 @@ const assert = std.debug.assert;
const Token = std.zig.Token;
const Span = @import("errmsg.zig").Span;
const llvm = @import("llvm.zig");
const ObjectFile = @import("codegen.zig").ObjectFile;
const codegen = @import("codegen.zig");
const ObjectFile = codegen.ObjectFile;
const Decl = @import("decl.zig").Decl;
const mem = std.mem;

pub const LVal = enum {
    None,

@@ -122,6 +124,8 @@ pub const Inst = struct {
            Id.Br => return @fieldParentPtr(Br, "base", base).analyze(ira),
            Id.AddImplicitReturnType => return @fieldParentPtr(AddImplicitReturnType, "base", base).analyze(ira),
            Id.PtrType => return await (async @fieldParentPtr(PtrType, "base", base).analyze(ira) catch unreachable),
            Id.VarPtr => return await (async @fieldParentPtr(VarPtr, "base", base).analyze(ira) catch unreachable),
            Id.LoadPtr => return await (async @fieldParentPtr(LoadPtr, "base", base).analyze(ira) catch unreachable),
        }
    }

@@ -130,6 +134,8 @@ pub const Inst = struct {
            Id.Return => return @fieldParentPtr(Return, "base", base).render(ofile, fn_val),
            Id.Const => return @fieldParentPtr(Const, "base", base).render(ofile, fn_val),
            Id.Call => return @fieldParentPtr(Call, "base", base).render(ofile, fn_val),
            Id.VarPtr => return @fieldParentPtr(VarPtr, "base", base).render(ofile, fn_val),
            Id.LoadPtr => return @fieldParentPtr(LoadPtr, "base", base).render(ofile, fn_val),
            Id.DeclRef => unreachable,
            Id.PtrType => unreachable,
            Id.Ref => @panic("TODO"),

@@ -248,6 +254,8 @@ pub const Inst = struct {
        Call,
        DeclRef,
        PtrType,
        VarPtr,
        LoadPtr,
    };

    pub const Call = struct {

@@ -281,11 +289,13 @@ pub const Inst = struct {
                return error.SemanticAnalysisFailed;
            };

            if (fn_type.params.len != self.params.args.len) {
            const fn_type_param_count = fn_type.paramCount();

            if (fn_type_param_count != self.params.args.len) {
                try ira.addCompileError(
                    self.base.span,
                    "expected {} arguments, found {}",
                    fn_type.params.len,
                    fn_type_param_count,
                    self.params.args.len,
                );
                return error.SemanticAnalysisFailed;

@@ -299,7 +309,7 @@ pub const Inst = struct {
                .fn_ref = fn_ref,
                .args = args,
            });
            new_inst.val = IrVal{ .KnownType = fn_type.return_type };
            new_inst.val = IrVal{ .KnownType = fn_type.key.data.Normal.return_type };
            return new_inst;
        }

@@ -489,6 +499,133 @@ pub const Inst = struct {
        }
    };

    pub const VarPtr = struct {
        base: Inst,
        params: Params,

        const Params = struct {
            var_scope: *Scope.Var,
        };

        const ir_val_init = IrVal.Init.Unknown;

        pub fn dump(inst: *const VarPtr) void {
            std.debug.warn("{}", inst.params.var_scope.name);
        }

        pub fn hasSideEffects(inst: *const VarPtr) bool {
            return false;
        }

        pub async fn analyze(self: *const VarPtr, ira: *Analyze) !*Inst {
            switch (self.params.var_scope.data) {
                Scope.Var.Data.Const => @panic("TODO"),
                Scope.Var.Data.Param => |param| {
                    const new_inst = try ira.irb.build(
                        Inst.VarPtr,
                        self.base.scope,
                        self.base.span,
                        Inst.VarPtr.Params{ .var_scope = self.params.var_scope },
                    );
                    const ptr_type = try await (async Type.Pointer.get(ira.irb.comp, Type.Pointer.Key{
                        .child_type = param.typ,
                        .mut = Type.Pointer.Mut.Const,
                        .vol = Type.Pointer.Vol.Non,
                        .size = Type.Pointer.Size.One,
                        .alignment = Type.Pointer.Align.Abi,
                    }) catch unreachable);
                    new_inst.val = IrVal{ .KnownType = &ptr_type.base };
                    return new_inst;
                },
            }
        }

        pub fn render(self: *VarPtr, ofile: *ObjectFile, fn_val: *Value.Fn) llvm.ValueRef {
            switch (self.params.var_scope.data) {
                Scope.Var.Data.Const => unreachable, // turned into Inst.Const in analyze pass
                Scope.Var.Data.Param => |param| return param.llvm_value,
            }
        }
    };

    pub const LoadPtr = struct {
        base: Inst,
        params: Params,

        const Params = struct {
            target: *Inst,
        };

        const ir_val_init = IrVal.Init.Unknown;

        pub fn dump(inst: *const LoadPtr) void {}

        pub fn hasSideEffects(inst: *const LoadPtr) bool {
            return false;
        }

        pub async fn analyze(self: *const LoadPtr, ira: *Analyze) !*Inst {
            const target = try self.params.target.getAsParam();
            const target_type = target.getKnownType();
            if (target_type.id != Type.Id.Pointer) {
                try ira.addCompileError(self.base.span, "dereference of non pointer type '{}'", target_type.name);
                return error.SemanticAnalysisFailed;
            }
            const ptr_type = @fieldParentPtr(Type.Pointer, "base", target_type);
            // if (instr_is_comptime(ptr)) {
            //     if (ptr->value.data.x_ptr.mut == ConstPtrMutComptimeConst ||
            //         ptr->value.data.x_ptr.mut == ConstPtrMutComptimeVar)
            //     {
            //         ConstExprValue *pointee = const_ptr_pointee(ira->codegen, &ptr->value);
            //         if (pointee->special != ConstValSpecialRuntime) {
            //             IrInstruction *result = ir_create_const(&ira->new_irb, source_instruction->scope,
            //                 source_instruction->source_node, child_type);
            //             copy_const_val(&result->value, pointee, ptr->value.data.x_ptr.mut == ConstPtrMutComptimeConst);
            //             result->value.type = child_type;
            //             return result;
            //         }
            //     }
            // }
            const new_inst = try ira.irb.build(
                Inst.LoadPtr,
                self.base.scope,
                self.base.span,
                Inst.LoadPtr.Params{ .target = target },
            );
            new_inst.val = IrVal{ .KnownType = ptr_type.key.child_type };
            return new_inst;
        }

        pub fn render(self: *LoadPtr, ofile: *ObjectFile, fn_val: *Value.Fn) !?llvm.ValueRef {
            const child_type = self.base.getKnownType();
            if (!child_type.hasBits()) {
                return null;
            }
            const ptr = self.params.target.llvm_value.?;
            const ptr_type = self.params.target.getKnownType().cast(Type.Pointer).?;

            return try codegen.getHandleValue(ofile, ptr, ptr_type);

            //uint32_t unaligned_bit_count = ptr_type->data.pointer.unaligned_bit_count;
            //if (unaligned_bit_count == 0)
            //    return get_handle_value(g, ptr, child_type, ptr_type);

            //bool big_endian = g->is_big_endian;

            //assert(!handle_is_ptr(child_type));
            //LLVMValueRef containing_int = gen_load(g, ptr, ptr_type, "");

            //uint32_t bit_offset = ptr_type->data.pointer.bit_offset;
            //uint32_t host_bit_count = LLVMGetIntTypeWidth(LLVMTypeOf(containing_int));
            //uint32_t shift_amt = big_endian ? host_bit_count - bit_offset - unaligned_bit_count : bit_offset;

            //LLVMValueRef shift_amt_val = LLVMConstInt(LLVMTypeOf(containing_int), shift_amt, false);
            //LLVMValueRef shifted_value = LLVMBuildLShr(g->builder, containing_int, shift_amt_val, "");

            //return LLVMBuildTrunc(g->builder, shifted_value, child_type->type_ref, "");
        }
    };

    pub const PtrType = struct {
        base: Inst,
        params: Params,

@@ -1158,6 +1295,7 @@ pub const Builder = struct {
                Scope.Id.Block,
                Scope.Id.Defer,
                Scope.Id.DeferExpr,
                Scope.Id.Var,
                => scope = scope.parent.?,
            }
        }

@@ -1259,8 +1397,8 @@ pub const Builder = struct {
        var child_scope = outer_block_scope;

        if (parent_scope.findFnDef()) |fndef_scope| {
            if (fndef_scope.fn_val.child_scope == parent_scope) {
                fndef_scope.fn_val.block_scope = block_scope;
            if (fndef_scope.fn_val.?.block_scope == null) {
                fndef_scope.fn_val.?.block_scope = block_scope;
            }
        }

@@ -1490,20 +1628,23 @@ pub const Builder = struct {
            error.OutOfMemory => return error.OutOfMemory,
        }

        //VariableTableEntry *var = find_variable(irb->codegen, scope, variable_name);
        //if (var) {
        //    IrInstruction *var_ptr = ir_build_var_ptr(irb, scope, node, var);
        //    if (lval == LValPtr)
        //        return var_ptr;
        //    else
        //        return ir_build_load_ptr(irb, scope, node, var_ptr);
        //}

        if (await (async irb.findDecl(scope, name) catch unreachable)) |decl| {
            return irb.build(Inst.DeclRef, scope, src_span, Inst.DeclRef.Params{
                .decl = decl,
                .lval = lval,
            });
        switch (await (async irb.findIdent(scope, name) catch unreachable)) {
            Ident.Decl => |decl| {
                return irb.build(Inst.DeclRef, scope, src_span, Inst.DeclRef.Params{
                    .decl = decl,
                    .lval = lval,
                });
            },
            Ident.VarScope => |var_scope| {
                const var_ptr = try irb.build(Inst.VarPtr, scope, src_span, Inst.VarPtr.Params{ .var_scope = var_scope });
                switch (lval) {
                    LVal.Ptr => return var_ptr,
                    LVal.None => {
                        return irb.build(Inst.LoadPtr, scope, src_span, Inst.LoadPtr.Params{ .target = var_ptr });
                    },
                }
            },
            Ident.NotFound => {},
        }

        //if (node->owner->any_imports_failed) {

@@ -1544,6 +1685,7 @@ pub const Builder = struct {
                Scope.Id.Block,
                Scope.Id.Decls,
                Scope.Id.Root,
                Scope.Id.Var,
                => scope = scope.parent orelse break,

                Scope.Id.DeferExpr => unreachable,

@@ -1594,6 +1736,7 @@ pub const Builder = struct {

                Scope.Id.CompTime,
                Scope.Id.Block,
                Scope.Id.Var,
                => scope = scope.parent orelse return is_noreturn,

                Scope.Id.DeferExpr => unreachable,

@@ -1672,8 +1815,10 @@ pub const Builder = struct {
                    Type.Pointer.Size,
                    LVal,
                    *Decl,
                    *Scope.Var,
                    => {},
                    // it's ok to add more types here, just make sure any instructions are ref'd appropriately
                    // it's ok to add more types here, just make sure that
                    // any instructions and basic blocks are ref'd appropriately
                    else => @compileError("unrecognized type in Params: " ++ @typeName(FieldType)),
                }
            }

@@ -1771,18 +1916,30 @@ pub const Builder = struct {
        //// the above blocks are rendered by ir_gen after the rest of codegen
    }

    async fn findDecl(irb: *Builder, scope: *Scope, name: []const u8) ?*Decl {
    const Ident = union(enum) {
        NotFound,
        Decl: *Decl,
        VarScope: *Scope.Var,
    };

    async fn findIdent(irb: *Builder, scope: *Scope, name: []const u8) Ident {
        var s = scope;
        while (true) {
            switch (s.id) {
                Scope.Id.Root => return Ident.NotFound,
                Scope.Id.Decls => {
                    const decls = @fieldParentPtr(Scope.Decls, "base", s);
                    const table = await (async decls.getTableReadOnly() catch unreachable);
                    if (table.get(name)) |entry| {
                        return entry.value;
                        return Ident{ .Decl = entry.value };
                    }
                },
                Scope.Id.Var => {
                    const var_scope = @fieldParentPtr(Scope.Var, "base", s);
                    if (mem.eql(u8, var_scope.name, name)) {
                        return Ident{ .VarScope = var_scope };
                    }
                },
                Scope.Id.Root => return null,
                else => {},
            }
            s = s.parent.?;
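`findIdent` replaces `findDecl` so identifier lookup can stop at the innermost matching `Var` scope before falling back to container-level declarations. A toy model (not from the commit) of that innermost-first walk, with one name per scope for simplicity:

```zig
const std = @import("std");

// Each entry is one scope's (single) name, ordered outermost..innermost.
fn resolve(scopes: []const []const u8, name: []const u8) ?usize {
    var i = scopes.len;
    while (i > 0) {
        i -= 1; // walk from the innermost scope outward
        if (std.mem.eql(u8, scopes[i], name)) return i;
    }
    return null;
}

test "inner scopes shadow outer ones" {
    const scopes = [][]const u8{ "print", "x", "x" };
    std.debug.assert(resolve(scopes[0..], "x").? == 2);
    std.debug.assert(resolve(scopes[0..], "print").? == 0);
    std.debug.assert(resolve(scopes[0..], "y") == null);
}
```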
@@ -80,15 +80,22 @@ pub async fn link(comp: *Compilation) !void {

    const extern_ofmt = toExternObjectFormatType(comp.target.getObjectFormat());
    const args_slice = ctx.args.toSlice();

    // Not evented I/O. LLD does its own multithreading internally.
    if (!ZigLLDLink(extern_ofmt, args_slice.ptr, args_slice.len, linkDiagCallback, @ptrCast(*c_void, &ctx))) {
        if (!ctx.link_msg.isNull()) {
            // TODO capture these messages and pass them through the system, reporting them through the
            // event system instead of printing them directly here.
            // perhaps try to parse and understand them.
            std.debug.warn("{}\n", ctx.link_msg.toSliceConst());

    {
        // LLD is not thread-safe, so we grab a global lock.
        const held = await (async comp.event_loop_local.lld_lock.acquire() catch unreachable);
        defer held.release();

        // Not evented I/O. LLD does its own multithreading internally.
        if (!ZigLLDLink(extern_ofmt, args_slice.ptr, args_slice.len, linkDiagCallback, @ptrCast(*c_void, &ctx))) {
            if (!ctx.link_msg.isNull()) {
                // TODO capture these messages and pass them through the system, reporting them through the
                // event system instead of printing them directly here.
                // perhaps try to parse and understand them.
                std.debug.warn("{}\n", ctx.link_msg.toSliceConst());
            }
            return error.LinkFailed;
        }
        return error.LinkFailed;
    }
}
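The new block serializes all entry into LLD. A hedged distillation of the pattern, using the same `event.Lock` API the hunk itself uses (`withLldLock` is a hypothetical name):

```zig
// Wrap every call into a non-thread-safe library in acquire/release of one
// global event.Lock, so at most one coroutine is inside the library at a time.
async fn withLldLock(comp: *Compilation) !void {
    const held = await (async comp.event_loop_local.lld_lock.acquire() catch unreachable);
    defer held.release(); // released on success and error paths alike

    // ... call into LLD here ...
}
```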
@@ -672,7 +679,13 @@ const DarwinPlatform = struct {
        };

        var had_extra: bool = undefined;
        try darwinGetReleaseVersion(ver_str, &result.major, &result.minor, &result.micro, &had_extra,);
        try darwinGetReleaseVersion(
            ver_str,
            &result.major,
            &result.minor,
            &result.micro,
            &had_extra,
        );
        if (had_extra or result.major != 10 or result.minor >= 100 or result.micro >= 100) {
            return error.InvalidDarwinVersionString;
        }

@@ -713,7 +726,7 @@ fn darwinGetReleaseVersion(str: []const u8, major: *u32, minor: *u32, micro: *u3
        return error.InvalidDarwinVersionString;

    var start_pos: usize = 0;
    for ([]*u32{major, minor, micro}) |v| {
    for ([]*u32{ major, minor, micro }) |v| {
        const dot_pos = mem.indexOfScalarPos(u8, str, start_pos, '.');
        const end_pos = dot_pos orelse str.len;
        v.* = std.fmt.parseUnsigned(u32, str[start_pos..end_pos], 10) catch return error.InvalidDarwinVersionString;
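A hedged usage sketch for the reformatted parser, assuming the signature implied by the call site above (`micro: *u32`, `had_extra: *bool`); the exact error behavior for malformed strings is defined by the surrounding file:

```zig
test "parse a Darwin release triple" {
    var major: u32 = undefined;
    var minor: u32 = undefined;
    var micro: u32 = undefined;
    var had_extra: bool = undefined;
    try darwinGetReleaseVersion("10.13.6", &major, &minor, &micro, &had_extra);
    std.debug.assert(major == 10 and minor == 13 and micro == 6);
}
```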
@@ -30,6 +30,7 @@ pub const AddGlobal = c.LLVMAddGlobal;
pub const AddModuleCodeViewFlag = c.ZigLLVMAddModuleCodeViewFlag;
pub const AddModuleDebugInfoFlag = c.ZigLLVMAddModuleDebugInfoFlag;
pub const ArrayType = c.LLVMArrayType;
pub const BuildLoad = c.LLVMBuildLoad;
pub const ClearCurrentDebugLocation = c.ZigLLVMClearCurrentDebugLocation;
pub const ConstAllOnes = c.LLVMConstAllOnes;
pub const ConstArray = c.LLVMConstArray;

@@ -95,13 +96,25 @@ pub const SetInitializer = c.LLVMSetInitializer;
pub const SetLinkage = c.LLVMSetLinkage;
pub const SetTarget = c.LLVMSetTarget;
pub const SetUnnamedAddr = c.LLVMSetUnnamedAddr;
pub const SetVolatile = c.LLVMSetVolatile;
pub const StructTypeInContext = c.LLVMStructTypeInContext;
pub const TokenTypeInContext = c.LLVMTokenTypeInContext;
pub const TypeOf = c.LLVMTypeOf;
pub const VoidTypeInContext = c.LLVMVoidTypeInContext;
pub const X86FP80TypeInContext = c.LLVMX86FP80TypeInContext;
pub const X86MMXTypeInContext = c.LLVMX86MMXTypeInContext;

pub const GetElementType = LLVMGetElementType;
extern fn LLVMGetElementType(Ty: TypeRef) TypeRef;

pub const TypeOf = LLVMTypeOf;
extern fn LLVMTypeOf(Val: ValueRef) TypeRef;

pub const BuildStore = LLVMBuildStore;
extern fn LLVMBuildStore(arg0: BuilderRef, Val: ValueRef, Ptr: ValueRef) ?ValueRef;

pub const BuildAlloca = LLVMBuildAlloca;
extern fn LLVMBuildAlloca(arg0: BuilderRef, Ty: TypeRef, Name: ?[*]const u8) ?ValueRef;

pub const ConstInBoundsGEP = LLVMConstInBoundsGEP;
pub extern fn LLVMConstInBoundsGEP(ConstantVal: ValueRef, ConstantIndices: [*]ValueRef, NumIndices: c_uint) ?ValueRef;
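The tail of this hunk re-declares a few prototypes by hand instead of aliasing the auto-translated `c` module, so the Zig side can state precise pointer and optional types. A sketch of the same pattern on another C API function (`LLVMConstNull` is a real LLVM-C function, but its use here is illustrative only):

```zig
// Publish the binding under a short name...
pub const ConstNull = LLVMConstNull;
// ...and declare the C prototype with exact Zig types: an optional return
// models a possibly-null ValueRef coming back from C, as done for Build*.
extern fn LLVMConstNull(Ty: TypeRef) ?ValueRef;
```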
@@ -6,23 +6,26 @@ const Compilation = @import("compilation.zig").Compilation;
const mem = std.mem;
const ast = std.zig.ast;
const Value = @import("value.zig").Value;
const Type = @import("type.zig").Type;
const ir = @import("ir.zig");
const Span = @import("errmsg.zig").Span;
const assert = std.debug.assert;
const event = std.event;
const llvm = @import("llvm.zig");

pub const Scope = struct {
    id: Id,
    parent: ?*Scope,
    ref_count: usize,
    ref_count: std.atomic.Int(usize),

    /// Thread-safe
    pub fn ref(base: *Scope) void {
        base.ref_count += 1;
        _ = base.ref_count.incr();
    }

    /// Thread-safe
    pub fn deref(base: *Scope, comp: *Compilation) void {
        base.ref_count -= 1;
        if (base.ref_count == 0) {
        if (base.ref_count.decr() == 1) {
            if (base.parent) |parent| parent.deref(comp);
            switch (base.id) {
                Id.Root => @fieldParentPtr(Root, "base", base).destroy(comp),

@@ -32,6 +35,7 @@ pub const Scope = struct {
                Id.CompTime => @fieldParentPtr(CompTime, "base", base).destroy(comp),
                Id.Defer => @fieldParentPtr(Defer, "base", base).destroy(comp),
                Id.DeferExpr => @fieldParentPtr(DeferExpr, "base", base).destroy(comp),
                Id.Var => @fieldParentPtr(Var, "base", base).destroy(comp),
            }
        }
    }
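The hunks above change `ref_count` from a plain integer to `std.atomic.Int(usize)` so scopes can be shared across event-loop threads; note that `decr()` returns the value before the decrement, hence the `== 1` test. A self-contained sketch of the same idiom:

```zig
const std = @import("std");

const Counted = struct {
    ref_count: std.atomic.Int(usize),

    fn ref(self: *Counted) void {
        _ = self.ref_count.incr();
    }

    /// Returns true when the caller just dropped the last reference
    /// and must destroy the object.
    fn deref(self: *Counted) bool {
        return self.ref_count.decr() == 1;
    }
};

test "last deref reports destruction" {
    var c = Counted{ .ref_count = std.atomic.Int(usize).init(1) };
    c.ref();
    std.debug.assert(!c.deref());
    std.debug.assert(c.deref());
}
```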
@@ -49,15 +53,15 @@ pub const Scope = struct {
        var scope = base;
        while (true) {
            switch (scope.id) {
                Id.FnDef => return @fieldParentPtr(FnDef, "base", base),
                Id.Decls => return null,
                Id.FnDef => return @fieldParentPtr(FnDef, "base", scope),
                Id.Root, Id.Decls => return null,

                Id.Block,
                Id.Defer,
                Id.DeferExpr,
                Id.CompTime,
                Id.Root,
                => scope = scope.parent orelse return null,
                Id.Var,
                => scope = scope.parent.?,
            }
        }
    }

@@ -66,7 +70,7 @@ pub const Scope = struct {
        var scope = base;
        while (true) {
            switch (scope.id) {
                Id.DeferExpr => return @fieldParentPtr(DeferExpr, "base", base),
                Id.DeferExpr => return @fieldParentPtr(DeferExpr, "base", scope),

                Id.FnDef,
                Id.Decls,

@@ -76,11 +80,21 @@ pub const Scope = struct {
                Id.Defer,
                Id.CompTime,
                Id.Root,
                Id.Var,
                => scope = scope.parent orelse return null,
            }
        }
    }

    fn init(base: *Scope, id: Id, parent: *Scope) void {
        base.* = Scope{
            .id = id,
            .parent = parent,
            .ref_count = std.atomic.Int(usize).init(1),
        };
        parent.ref();
    }

    pub const Id = enum {
        Root,
        Decls,

@@ -89,6 +103,7 @@ pub const Scope = struct {
        CompTime,
        Defer,
        DeferExpr,
        Var,
    };

    pub const Root = struct {

@@ -100,16 +115,16 @@ pub const Scope = struct {
        /// Takes ownership of realpath
        /// Takes ownership of tree, will deinit and destroy when done.
        pub fn create(comp: *Compilation, tree: *ast.Tree, realpath: []u8) !*Root {
            const self = try comp.gpa().create(Root{
            const self = try comp.gpa().createOne(Root);
            self.* = Root{
                .base = Scope{
                    .id = Id.Root,
                    .parent = null,
                    .ref_count = 1,
                    .ref_count = std.atomic.Int(usize).init(1),
                },
                .tree = tree,
                .realpath = realpath,
            });
            errdefer comp.gpa().destroy(self);
            };

            return self;
        }

@@ -137,16 +152,13 @@ pub const Scope = struct {

        /// Creates a Decls scope with 1 reference
        pub fn create(comp: *Compilation, parent: *Scope) !*Decls {
            const self = try comp.gpa().create(Decls{
                .base = Scope{
                    .id = Id.Decls,
                    .parent = parent,
                    .ref_count = 1,
                },
            const self = try comp.gpa().createOne(Decls);
            self.* = Decls{
                .base = undefined,
                .table = event.Locked(Decl.Table).init(comp.loop, Decl.Table.init(comp.gpa())),
                .name_future = event.Future(void).init(comp.loop),
            });
            parent.ref();
            };
            self.base.init(Id.Decls, parent);
            return self;
        }

@@ -199,21 +211,16 @@ pub const Scope = struct {

        /// Creates a Block scope with 1 reference
        pub fn create(comp: *Compilation, parent: *Scope) !*Block {
            const self = try comp.gpa().create(Block{
                .base = Scope{
                    .id = Id.Block,
                    .parent = parent,
                    .ref_count = 1,
                },
            const self = try comp.gpa().createOne(Block);
            self.* = Block{
                .base = undefined,
                .incoming_values = undefined,
                .incoming_blocks = undefined,
                .end_block = undefined,
                .is_comptime = undefined,
                .safety = Safety.Auto,
            });
            errdefer comp.gpa().destroy(self);

            parent.ref();
            };
            self.base.init(Id.Block, parent);
            return self;
        }

@@ -226,22 +233,17 @@ pub const Scope = struct {
        base: Scope,

        /// This reference is not counted so that the scope can get destroyed with the function
        fn_val: *Value.Fn,
        fn_val: ?*Value.Fn,

        /// Creates a FnDef scope with 1 reference
        /// Must set the fn_val later
        pub fn create(comp: *Compilation, parent: *Scope) !*FnDef {
            const self = try comp.gpa().create(FnDef{
                .base = Scope{
                    .id = Id.FnDef,
                    .parent = parent,
                    .ref_count = 1,
                },
                .fn_val = undefined,
            });

            parent.ref();

            const self = try comp.gpa().createOne(FnDef);
            self.* = FnDef{
                .base = undefined,
                .fn_val = null,
            };
            self.base.init(Id.FnDef, parent);
            return self;
        }

@@ -255,15 +257,9 @@ pub const Scope = struct {

        /// Creates a CompTime scope with 1 reference
        pub fn create(comp: *Compilation, parent: *Scope) !*CompTime {
            const self = try comp.gpa().create(CompTime{
                .base = Scope{
                    .id = Id.CompTime,
                    .parent = parent,
                    .ref_count = 1,
                },
            });

            parent.ref();
            const self = try comp.gpa().createOne(CompTime);
            self.* = CompTime{ .base = undefined };
            self.base.init(Id.CompTime, parent);
            return self;
        }

@@ -289,20 +285,14 @@ pub const Scope = struct {
            kind: Kind,
            defer_expr_scope: *DeferExpr,
        ) !*Defer {
            const self = try comp.gpa().create(Defer{
                .base = Scope{
                    .id = Id.Defer,
                    .parent = parent,
                    .ref_count = 1,
                },
            const self = try comp.gpa().createOne(Defer);
            self.* = Defer{
                .base = undefined,
                .defer_expr_scope = defer_expr_scope,
                .kind = kind,
            });
            errdefer comp.gpa().destroy(self);

            };
            self.base.init(Id.Defer, parent);
            defer_expr_scope.base.ref();

            parent.ref();
            return self;
        }

@@ -319,18 +309,13 @@ pub const Scope = struct {

        /// Creates a DeferExpr scope with 1 reference
        pub fn create(comp: *Compilation, parent: *Scope, expr_node: *ast.Node) !*DeferExpr {
            const self = try comp.gpa().create(DeferExpr{
                .base = Scope{
                    .id = Id.DeferExpr,
                    .parent = parent,
                    .ref_count = 1,
                },
            const self = try comp.gpa().createOne(DeferExpr);
            self.* = DeferExpr{
                .base = undefined,
                .expr_node = expr_node,
                .reported_err = false,
            });
            errdefer comp.gpa().destroy(self);

            parent.ref();
            };
            self.base.init(Id.DeferExpr, parent);
            return self;
        }

@@ -338,4 +323,74 @@ pub const Scope = struct {
            comp.gpa().destroy(self);
        }
    };

    pub const Var = struct {
        base: Scope,
        name: []const u8,
        src_node: *ast.Node,
        data: Data,

        pub const Data = union(enum) {
            Param: Param,
            Const: *Value,
        };

        pub const Param = struct {
            index: usize,
            typ: *Type,
            llvm_value: llvm.ValueRef,
        };

        pub fn createParam(
            comp: *Compilation,
            parent: *Scope,
            name: []const u8,
            src_node: *ast.Node,
            param_index: usize,
            param_type: *Type,
        ) !*Var {
            const self = try create(comp, parent, name, src_node);
            self.data = Data{
                .Param = Param{
                    .index = param_index,
                    .typ = param_type,
                    .llvm_value = undefined,
                },
            };
            return self;
        }

        pub fn createConst(
            comp: *Compilation,
            parent: *Scope,
            name: []const u8,
            src_node: *ast.Node,
            value: *Value,
        ) !*Var {
            const self = try create(comp, parent, name, src_node);
            self.data = Data{ .Const = value };
            value.ref();
            return self;
        }

        fn create(comp: *Compilation, parent: *Scope, name: []const u8, src_node: *ast.Node) !*Var {
            const self = try comp.gpa().createOne(Var);
            self.* = Var{
                .base = undefined,
                .name = name,
                .src_node = src_node,
                .data = undefined,
            };
            self.base.init(Id.Var, parent);
            return self;
        }

        pub fn destroy(self: *Var, comp: *Compilation) void {
            switch (self.data) {
                Data.Param => {},
                Data.Const => |value| value.deref(comp),
            }
            comp.gpa().destroy(self);
        }
    };
};
@@ -141,9 +141,13 @@ pub const Type = struct {
            Id.Promise,
            => return true,

            Id.Pointer => {
                const ptr_type = @fieldParentPtr(Pointer, "base", base);
                return ptr_type.key.child_type.hasBits();
            },

            Id.ErrorSet => @panic("TODO"),
            Id.Enum => @panic("TODO"),
            Id.Pointer => @panic("TODO"),
            Id.Struct => @panic("TODO"),
            Id.Array => @panic("TODO"),
            Id.Optional => @panic("TODO"),

@@ -221,57 +225,294 @@ pub const Type = struct {

    pub const Fn = struct {
        base: Type,
        return_type: *Type,
        params: []Param,
        is_var_args: bool,
        key: Key,
        non_key: NonKey,
        garbage_node: std.atomic.Stack(*Fn).Node,

        pub const Kind = enum {
            Normal,
            Generic,
        };

        pub const NonKey = union {
            Normal: Normal,
            Generic: void,

            pub const Normal = struct {
                variable_list: std.ArrayList(*Scope.Var),
            };
        };

        pub const Key = struct {
            data: Data,
            alignment: ?u32,

            pub const Data = union(Kind) {
                Generic: Generic,
                Normal: Normal,
            };

            pub const Normal = struct {
                params: []Param,
                return_type: *Type,
                is_var_args: bool,
                cc: CallingConvention,
            };

            pub const Generic = struct {
                param_count: usize,
                cc: CC,

                pub const CC = union(CallingConvention) {
                    Auto,
                    C,
                    Cold,
                    Naked,
                    Stdcall,
                    Async: *Type, // allocator type
                };
            };

            pub fn hash(self: *const Key) u32 {
                var result: u32 = 0;
                result +%= hashAny(self.alignment, 0);
                switch (self.data) {
                    Kind.Generic => |generic| {
                        result +%= hashAny(generic.param_count, 1);
                        switch (generic.cc) {
                            CallingConvention.Async => |allocator_type| result +%= hashAny(allocator_type, 2),
                            else => result +%= hashAny(CallingConvention(generic.cc), 3),
                        }
                    },
                    Kind.Normal => |normal| {
                        result +%= hashAny(normal.return_type, 4);
                        result +%= hashAny(normal.is_var_args, 5);
                        result +%= hashAny(normal.cc, 6);
                        for (normal.params) |param| {
                            result +%= hashAny(param.is_noalias, 7);
                            result +%= hashAny(param.typ, 8);
                        }
                    },
                }
                return result;
            }

            pub fn eql(self: *const Key, other: *const Key) bool {
                if ((self.alignment == null) != (other.alignment == null)) return false;
                if (self.alignment) |self_align| {
                    if (self_align != other.alignment.?) return false;
                }
                if (@TagType(Data)(self.data) != @TagType(Data)(other.data)) return false;
                switch (self.data) {
                    Kind.Generic => |*self_generic| {
                        const other_generic = &other.data.Generic;
                        if (self_generic.param_count != other_generic.param_count) return false;
                        if (CallingConvention(self_generic.cc) != CallingConvention(other_generic.cc)) return false;
                        switch (self_generic.cc) {
                            CallingConvention.Async => |self_allocator_type| {
                                const other_allocator_type = other_generic.cc.Async;
                                if (self_allocator_type != other_allocator_type) return false;
                            },
                            else => {},
                        }
                    },
                    Kind.Normal => |*self_normal| {
                        const other_normal = &other.data.Normal;
                        if (self_normal.cc != other_normal.cc) return false;
                        if (self_normal.is_var_args != other_normal.is_var_args) return false;
                        if (self_normal.return_type != other_normal.return_type) return false;
                        for (self_normal.params) |*self_param, i| {
                            const other_param = &other_normal.params[i];
                            if (self_param.is_noalias != other_param.is_noalias) return false;
                            if (self_param.typ != other_param.typ) return false;
                        }
                    },
                }
                return true;
            }

            pub fn deref(key: Key, comp: *Compilation) void {
                switch (key.data) {
                    Kind.Generic => |generic| {
                        switch (generic.cc) {
                            CallingConvention.Async => |allocator_type| allocator_type.base.deref(comp),
                            else => {},
                        }
                    },
                    Kind.Normal => |normal| {
                        normal.return_type.base.deref(comp);
                        for (normal.params) |param| {
                            param.typ.base.deref(comp);
                        }
                    },
                }
            }

            pub fn ref(key: Key) void {
                switch (key.data) {
                    Kind.Generic => |generic| {
                        switch (generic.cc) {
                            CallingConvention.Async => |allocator_type| allocator_type.base.ref(),
                            else => {},
                        }
                    },
                    Kind.Normal => |normal| {
                        normal.return_type.base.ref();
                        for (normal.params) |param| {
                            param.typ.base.ref();
                        }
                    },
                }
            }
        };

        pub const CallingConvention = enum {
            Auto,
            C,
            Cold,
            Naked,
            Stdcall,
            Async,
        };

        pub const Param = struct {
            is_noalias: bool,
            typ: *Type,
        };

        pub fn create(comp: *Compilation, return_type: *Type, params: []Param, is_var_args: bool) !*Fn {
            const result = try comp.gpa().create(Fn{
                .base = undefined,
                .return_type = return_type,
                .params = params,
                .is_var_args = is_var_args,
            });
            errdefer comp.gpa().destroy(result);
        fn ccFnTypeStr(cc: CallingConvention) []const u8 {
            return switch (cc) {
                CallingConvention.Auto => "",
                CallingConvention.C => "extern ",
                CallingConvention.Cold => "coldcc ",
                CallingConvention.Naked => "nakedcc ",
                CallingConvention.Stdcall => "stdcallcc ",
                CallingConvention.Async => unreachable,
            };
        }

            result.base.init(comp, Id.Fn, "TODO fn type name");
        pub fn paramCount(self: *Fn) usize {
            return switch (self.key.data) {
                Kind.Generic => |generic| generic.param_count,
                Kind.Normal => |normal| normal.params.len,
            };
        }

            result.return_type.base.ref();
            for (result.params) |param| {
                param.typ.base.ref();
        /// takes ownership of key.Normal.params on success
        pub async fn get(comp: *Compilation, key: Key) !*Fn {
            {
                const held = await (async comp.fn_type_table.acquire() catch unreachable);
                defer held.release();

                if (held.value.get(&key)) |entry| {
                    entry.value.base.base.ref();
                    return entry.value;
                }
            }
            return result;

            key.ref();
            errdefer key.deref(comp);

            const self = try comp.gpa().createOne(Fn);
            self.* = Fn{
                .base = undefined,
                .key = key,
                .non_key = undefined,
                .garbage_node = undefined,
            };
            errdefer comp.gpa().destroy(self);

            var name_buf = try std.Buffer.initSize(comp.gpa(), 0);
            defer name_buf.deinit();

            const name_stream = &std.io.BufferOutStream.init(&name_buf).stream;

            switch (key.data) {
                Kind.Generic => |generic| {
                    self.non_key = NonKey{ .Generic = {} };
                    switch (generic.cc) {
                        CallingConvention.Async => |async_allocator_type| {
                            try name_stream.print("async<{}> ", async_allocator_type.name);
                        },
                        else => {
                            const cc_str = ccFnTypeStr(generic.cc);
                            try name_stream.write(cc_str);
                        },
                    }
                    try name_stream.write("fn(");
                    var param_i: usize = 0;
                    while (param_i < generic.param_count) : (param_i += 1) {
                        const arg = if (param_i == 0) "var" else ", var";
                        try name_stream.write(arg);
                    }
                    try name_stream.write(")");
                    if (key.alignment) |alignment| {
                        try name_stream.print(" align<{}>", alignment);
                    }
                    try name_stream.write(" var");
                },
                Kind.Normal => |normal| {
                    self.non_key = NonKey{
                        .Normal = NonKey.Normal{ .variable_list = std.ArrayList(*Scope.Var).init(comp.gpa()) },
                    };
                    const cc_str = ccFnTypeStr(normal.cc);
                    try name_stream.print("{}fn(", cc_str);
                    for (normal.params) |param, i| {
                        if (i != 0) try name_stream.write(", ");
                        if (param.is_noalias) try name_stream.write("noalias ");
                        try name_stream.write(param.typ.name);
                    }
                    if (normal.is_var_args) {
                        if (normal.params.len != 0) try name_stream.write(", ");
                        try name_stream.write("...");
                    }
                    try name_stream.write(")");
                    if (key.alignment) |alignment| {
                        try name_stream.print(" align<{}>", alignment);
                    }
                    try name_stream.print(" {}", normal.return_type.name);
                },
            }

            self.base.init(comp, Id.Fn, name_buf.toOwnedSlice());

            {
                const held = await (async comp.fn_type_table.acquire() catch unreachable);
                defer held.release();

                _ = try held.value.put(&self.key, self);
            }
            return self;
        }

        pub fn destroy(self: *Fn, comp: *Compilation) void {
            self.return_type.base.deref(comp);
            for (self.params) |param| {
                param.typ.base.deref(comp);
            self.key.deref(comp);
            switch (self.key.data) {
                Kind.Generic => {},
                Kind.Normal => {
                    self.non_key.Normal.variable_list.deinit();
                },
            }
            comp.gpa().destroy(self);
        }

        pub fn getLlvmType(self: *Fn, allocator: *Allocator, llvm_context: llvm.ContextRef) !llvm.TypeRef {
            const llvm_return_type = switch (self.return_type.id) {
            const normal = &self.key.data.Normal;
            const llvm_return_type = switch (normal.return_type.id) {
                Type.Id.Void => llvm.VoidTypeInContext(llvm_context) orelse return error.OutOfMemory,
                else => try self.return_type.getLlvmType(allocator, llvm_context),
                else => try normal.return_type.getLlvmType(allocator, llvm_context),
            };
            const llvm_param_types = try allocator.alloc(llvm.TypeRef, self.params.len);
            const llvm_param_types = try allocator.alloc(llvm.TypeRef, normal.params.len);
            defer allocator.free(llvm_param_types);
            for (llvm_param_types) |*llvm_param_type, i| {
                llvm_param_type.* = try self.params[i].typ.getLlvmType(allocator, llvm_context);
                llvm_param_type.* = try normal.params[i].typ.getLlvmType(allocator, llvm_context);
            }

            return llvm.FunctionType(
                llvm_return_type,
                llvm_param_types.ptr,
                @intCast(c_uint, llvm_param_types.len),
                @boolToInt(self.is_var_args),
                @boolToInt(normal.is_var_args),
            ) orelse error.OutOfMemory;
        }
    };
@ -347,8 +588,10 @@ pub const Type = struct {
        is_signed: bool,

        pub fn hash(self: *const Key) u32 {
            const rands = [2]u32{ 0xa4ba6498, 0x75fc5af7 };
            return rands[@boolToInt(self.is_signed)] *% self.bit_count;
            var result: u32 = 0;
            result +%= hashAny(self.is_signed, 0);
            result +%= hashAny(self.bit_count, 1);
            return result;
        }

        pub fn eql(self: *const Key, other: *const Key) bool {
@ -443,15 +686,16 @@ pub const Type = struct {
        alignment: Align,

        pub fn hash(self: *const Key) u32 {
            const align_hash = switch (self.alignment) {
            var result: u32 = 0;
            result +%= switch (self.alignment) {
                Align.Abi => 0xf201c090,
                Align.Override => |x| x,
                Align.Override => |x| hashAny(x, 0),
            };
            return hash_usize(@ptrToInt(self.child_type)) *%
                hash_enum(self.mut) *%
                hash_enum(self.vol) *%
                hash_enum(self.size) *%
                align_hash;
            result +%= hashAny(self.child_type, 1);
            result +%= hashAny(self.mut, 2);
            result +%= hashAny(self.vol, 3);
            result +%= hashAny(self.size, 4);
            return result;
        }

        pub fn eql(self: *const Key, other: *const Key) bool {
@ -605,7 +849,10 @@ pub const Type = struct {
        len: usize,

        pub fn hash(self: *const Key) u32 {
            return hash_usize(@ptrToInt(self.elem_type)) *% hash_usize(self.len);
            var result: u32 = 0;
            result +%= hashAny(self.elem_type, 0);
            result +%= hashAny(self.len, 1);
            return result;
        }

        pub fn eql(self: *const Key, other: *const Key) bool {
@ -818,27 +1065,37 @@ pub const Type = struct {
    };
};

fn hash_usize(x: usize) u32 {
    return switch (@sizeOf(usize)) {
        4 => x,
        8 => @truncate(u32, x *% 0xad44ee2d8e3fc13d),
        else => @compileError("implement this hash function"),
    };
}

fn hash_enum(x: var) u32 {
    const rands = []u32{
        0x85ebf64f,
        0x3fcb3211,
        0x240a4e8e,
        0x40bb0e3c,
        0x78be45af,
        0x1ca98e37,
        0xec56053a,
        0x906adc48,
        0xd4fe9763,
        0x54c80dac,
    };
    comptime assert(@memberCount(@typeOf(x)) < rands.len);
    return rands[@enumToInt(x)];
fn hashAny(x: var, comptime seed: u64) u32 {
    switch (@typeInfo(@typeOf(x))) {
        builtin.TypeId.Int => |info| {
            comptime var rng = comptime std.rand.DefaultPrng.init(seed);
            const unsigned_x = @bitCast(@IntType(false, info.bits), x);
            if (info.bits <= 32) {
                return u32(unsigned_x) *% comptime rng.random.scalar(u32);
            } else {
                return @truncate(u32, unsigned_x *% comptime rng.random.scalar(@typeOf(unsigned_x)));
            }
        },
        builtin.TypeId.Pointer => |info| {
            switch (info.size) {
                builtin.TypeInfo.Pointer.Size.One => return hashAny(@ptrToInt(x), seed),
                builtin.TypeInfo.Pointer.Size.Many => @compileError("implement hash function"),
                builtin.TypeInfo.Pointer.Size.Slice => @compileError("implement hash function"),
            }
        },
        builtin.TypeId.Enum => return hashAny(@enumToInt(x), seed),
        builtin.TypeId.Bool => {
            comptime var rng = comptime std.rand.DefaultPrng.init(seed);
            const vals = comptime [2]u32{ rng.random.scalar(u32), rng.random.scalar(u32) };
            return vals[@boolToInt(x)];
        },
        builtin.TypeId.Optional => {
            if (x) |non_opt| {
                return hashAny(non_opt, seed);
            } else {
                return hashAny(u32(1), seed);
            }
        },
        else => @compileError("implement hash function for " ++ @typeName(@typeOf(x))),
    }
}
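The hashAny rewrite above replaces the ad-hoc multiplier tables with one generic helper: every field is hashed with a multiplier derived at compile time from its field index, and the per-field hashes are combined with wrapping addition. A rough C++ sketch of the same idea (illustrative only, not the compiler's code; seeded_mult stands in for Zig's comptime PRNG):

    #include <cstdint>

    // Derive a per-field multiplier deterministically from a seed (a
    // splitmix64 step); stands in for `comptime rng.random.scalar(u32)`.
    constexpr uint32_t seeded_mult(uint64_t seed) {
        uint64_t z = seed + 0x9e3779b97f4a7c15ull;
        z = (z ^ (z >> 30)) * 0xbf58476d1ce4e5b9ull;
        z = (z ^ (z >> 27)) * 0x94d049bb133111ebull;
        return static_cast<uint32_t>(z ^ (z >> 31)) | 1u; // odd, so invertible mod 2^32
    }

    // hashAny(x, seed) for integers: wrapping multiply by the seeded constant.
    constexpr uint32_t hash_field(uint64_t x, uint64_t seed) {
        return static_cast<uint32_t>(x) * seeded_mult(seed);
    }

    // Key.hash for the Int key: result +%= hashAny(field, field_index).
    constexpr uint32_t hash_int_key(bool is_signed, uint32_t bit_count) {
        return hash_field(is_signed ? 1 : 0, 0) + hash_field(bit_count, 1);
    }

    static_assert(hash_int_key(true, 32) != hash_int_key(false, 32),
                  "the sign field must influence the hash");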
@ -60,7 +60,7 @@ pub const Value = struct {
    pub fn getLlvmConst(base: *Value, ofile: *ObjectFile) (error{OutOfMemory}!?llvm.ValueRef) {
        switch (base.id) {
            Id.Type => unreachable,
            Id.Fn => @panic("TODO"),
            Id.Fn => return @fieldParentPtr(Fn, "base", base).getLlvmConst(ofile),
            Id.FnProto => return @fieldParentPtr(FnProto, "base", base).getLlvmConst(ofile),
            Id.Void => return null,
            Id.Bool => return @fieldParentPtr(Bool, "base", base).getLlvmConst(ofile),
@ -180,7 +180,7 @@ pub const Value = struct {
        child_scope: *Scope,

        /// parent is child_scope
        block_scope: *Scope.Block,
        block_scope: ?*Scope.Block,

        /// Path to the object file that contains this function
        containing_object: Buffer,
@ -205,7 +205,7 @@ pub const Value = struct {
            },
            .fndef_scope = fndef_scope,
            .child_scope = &fndef_scope.base,
            .block_scope = undefined,
            .block_scope = null,
            .symbol_name = symbol_name,
            .containing_object = Buffer.initNull(comp.gpa()),
            .link_set_node = link_set_node,
@ -231,6 +231,22 @@ pub const Value = struct {
            self.symbol_name.deinit();
            comp.gpa().destroy(self);
        }

        /// We know that the function definition will end up in an .o file somewhere.
        /// Here, all we have to do is generate a global prototype.
        /// TODO cache the prototype per ObjectFile
        pub fn getLlvmConst(self: *Fn, ofile: *ObjectFile) !?llvm.ValueRef {
            const llvm_fn_type = try self.base.typ.getLlvmType(ofile.arena, ofile.context);
            const llvm_fn = llvm.AddFunction(
                ofile.module,
                self.symbol_name.ptr(),
                llvm_fn_type,
            ) orelse return error.OutOfMemory;

            // TODO port more logic from codegen.cpp:fn_llvm_value

            return llvm_fn;
        }
    };

    pub const Void = struct {

@ -60,7 +60,7 @@ struct IrExecutable {
    ZigList<Tld *> tld_list;

    IrInstruction *coro_handle;
    IrInstruction *coro_awaiter_field_ptr; // this one is shared and in the promise
    IrInstruction *atomic_state_field_ptr; // this one is shared and in the promise
    IrInstruction *coro_result_ptr_field_ptr;
    IrInstruction *coro_result_field_ptr;
    IrInstruction *await_handle_var_ptr; // this one is where we put the one we extracted from the promise
@ -898,7 +898,6 @@ struct AstNodeAwaitExpr {
};

struct AstNodeSuspend {
    Buf *name;
    AstNode *block;
    AstNode *promise_symbol;
};
@ -1927,7 +1926,6 @@ struct ScopeLoop {
struct ScopeSuspend {
    Scope base;

    Buf *name;
    IrBasicBlock *resume_block;
    bool reported_err;
};
@ -3243,7 +3241,7 @@ static const size_t stack_trace_ptr_count = 30;
#define RESULT_FIELD_NAME "result"
#define ASYNC_ALLOC_FIELD_NAME "allocFn"
#define ASYNC_FREE_FIELD_NAME "freeFn"
#define AWAITER_HANDLE_FIELD_NAME "awaiter_handle"
#define ATOMIC_STATE_FIELD_NAME "atomic_state"
// these point to data belonging to the awaiter
#define ERR_RET_TRACE_PTR_FIELD_NAME "err_ret_trace_ptr"
#define RESULT_PTR_FIELD_NAME "result_ptr"
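The switch from an optional awaiter_handle field to a single atomic_state usize is the heart of this commit. The masks used throughout the diff (0b001 canceled, 0b010 suspended, 0b100 awaited, and the inverted 0b111 pointer mask) all operate on one word that packs three flag bits into the low bits of an awaiter handle. A hedged C++ illustration of that layout; the type and member names here are invented for the example:

    #include <atomic>
    #include <cstdint>

    // One usize-sized word carries the whole coroutine handshake. Because the
    // promise frame is 8-byte aligned, the low 3 bits of a handle address are free.
    constexpr uintptr_t IS_CANCELED  = 0x1;             // 0b001
    constexpr uintptr_t IS_SUSPENDED = 0x2;             // 0b010
    constexpr uintptr_t IS_AWAITED   = 0x4;             // 0b100
    constexpr uintptr_t PTR_MASK     = ~uintptr_t(0x7); // 0b111...000

    struct CoroAtomicState {
        std::atomic<uintptr_t> word{0};

        // An awaiter publishes itself with one atomic OR. The previous value
        // tells it what the target was doing at that instant; all pointer bits
        // set (== PTR_MASK) is the "already returned" sentinel used by the
        // return path later in this diff.
        uintptr_t publish_awaiter(void *awaiter_handle) {
            uintptr_t bits = reinterpret_cast<uintptr_t>(awaiter_handle) | IS_AWAITED;
            return word.fetch_or(bits, std::memory_order_seq_cst);
        }
    };

    inline void *handle_bits(uintptr_t prev) {
        return reinterpret_cast<void *>(prev & PTR_MASK);
    }

The atomic xchg on an optional promise that this replaces could only hand over one pointer; folding the flags into the same word is what lets cancel, suspend, and await coordinate without extra fields.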
@ -161,7 +161,6 @@ ScopeSuspend *create_suspend_scope(AstNode *node, Scope *parent) {
    assert(node->type == NodeTypeSuspend);
    ScopeSuspend *scope = allocate<ScopeSuspend>(1);
    init_scope(&scope->base, ScopeIdSuspend, node, parent);
    scope->name = node->data.suspend.name;
    return scope;
}

@ -519,11 +518,11 @@ TypeTableEntry *get_promise_frame_type(CodeGen *g, TypeTableEntry *return_type)
        return return_type->promise_frame_parent;
    }

    TypeTableEntry *awaiter_handle_type = get_optional_type(g, g->builtin_types.entry_promise);
    TypeTableEntry *atomic_state_type = g->builtin_types.entry_usize;
    TypeTableEntry *result_ptr_type = get_pointer_to_type(g, return_type, false);

    ZigList<const char *> field_names = {};
    field_names.append(AWAITER_HANDLE_FIELD_NAME);
    field_names.append(ATOMIC_STATE_FIELD_NAME);
    field_names.append(RESULT_FIELD_NAME);
    field_names.append(RESULT_PTR_FIELD_NAME);
    if (g->have_err_ret_tracing) {
@ -533,7 +532,7 @@ TypeTableEntry *get_promise_frame_type(CodeGen *g, TypeTableEntry *return_type)
    }

    ZigList<TypeTableEntry *> field_types = {};
    field_types.append(awaiter_handle_type);
    field_types.append(atomic_state_type);
    field_types.append(return_type);
    field_types.append(result_ptr_type);
    if (g->have_err_ret_tracing) {
@ -1585,10 +1584,6 @@ static TypeTableEntry *analyze_fn_type(CodeGen *g, AstNode *proto_node, Scope *c
        case TypeTableEntryIdBlock:
        case TypeTableEntryIdBoundFn:
        case TypeTableEntryIdMetaType:
            add_node_error(g, param_node->data.param_decl.type,
                buf_sprintf("parameter of type '%s' must be declared comptime",
                    buf_ptr(&type_entry->name)));
            return g->builtin_types.entry_invalid;
        case TypeTableEntryIdVoid:
        case TypeTableEntryIdBool:
        case TypeTableEntryIdInt:
@ -1603,6 +1598,13 @@ static TypeTableEntry *analyze_fn_type(CodeGen *g, AstNode *proto_node, Scope *c
        case TypeTableEntryIdUnion:
        case TypeTableEntryIdFn:
        case TypeTableEntryIdPromise:
            type_ensure_zero_bits_known(g, type_entry);
            if (type_requires_comptime(type_entry)) {
                add_node_error(g, param_node->data.param_decl.type,
                    buf_sprintf("parameter of type '%s' must be declared comptime",
                        buf_ptr(&type_entry->name)));
                return g->builtin_types.entry_invalid;
            }
            break;
    }
    FnTypeParamInfo *param_info = &fn_type_id.param_info[fn_type_id.next_param_index];
@ -3938,7 +3940,7 @@ AstNode *get_param_decl_node(FnTableEntry *fn_entry, size_t index) {
    return nullptr;
}

static void define_local_param_variables(CodeGen *g, FnTableEntry *fn_table_entry, VariableTableEntry **arg_vars) {
static void define_local_param_variables(CodeGen *g, FnTableEntry *fn_table_entry) {
    TypeTableEntry *fn_type = fn_table_entry->type_entry;
    assert(!fn_type->data.fn.is_generic);
    FnTypeId *fn_type_id = &fn_type->data.fn.fn_type_id;
@ -3976,10 +3978,6 @@ static void define_local_param_variables(CodeGen *g, FnTableEntry *fn_table_entr
        if (fn_type->data.fn.gen_param_info) {
            var->gen_arg_index = fn_type->data.fn.gen_param_info[i].gen_index;
        }

        if (arg_vars) {
            arg_vars[i] = var;
        }
    }
}

@ -4057,7 +4055,7 @@ void analyze_fn_ir(CodeGen *g, FnTableEntry *fn_table_entry, AstNode *return_typ
    }

    if (g->verbose_ir) {
        fprintf(stderr, "{ // (analyzed)\n");
        fprintf(stderr, "fn %s() { // (analyzed)\n", buf_ptr(&fn_table_entry->symbol_name));
        ir_print(g, stderr, &fn_table_entry->analyzed_executable, 4);
        fprintf(stderr, "}\n");
    }
@ -4079,7 +4077,7 @@ static void analyze_fn_body(CodeGen *g, FnTableEntry *fn_table_entry) {
    if (!fn_table_entry->child_scope)
        fn_table_entry->child_scope = &fn_table_entry->fndef_scope->base;

    define_local_param_variables(g, fn_table_entry, nullptr);
    define_local_param_variables(g, fn_table_entry);

    TypeTableEntry *fn_type = fn_table_entry->type_entry;
    assert(!fn_type->data.fn.is_generic);
@ -5019,9 +5017,10 @@ bool type_requires_comptime(TypeTableEntry *type_entry) {
            } else {
                return type_requires_comptime(type_entry->data.pointer.child_type);
            }
        case TypeTableEntryIdFn:
            return type_entry->data.fn.is_generic;
        case TypeTableEntryIdEnum:
        case TypeTableEntryIdErrorSet:
        case TypeTableEntryIdFn:
        case TypeTableEntryIdBool:
        case TypeTableEntryIdInt:
        case TypeTableEntryIdFloat:
@ -6228,7 +6227,12 @@ uint32_t get_abi_alignment(CodeGen *g, TypeTableEntry *type_entry) {
    } else if (type_entry->id == TypeTableEntryIdOpaque) {
        return 1;
    } else {
        return LLVMABIAlignmentOfType(g->target_data_ref, type_entry->type_ref);
        uint32_t llvm_alignment = LLVMABIAlignmentOfType(g->target_data_ref, type_entry->type_ref);
        // promises have at least alignment 8 so that we can have 3 extra bits when doing atomicrmw
        if (type_entry->id == TypeTableEntryIdPromise && llvm_alignment < 8) {
            return 8;
        }
        return llvm_alignment;
    }
}
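The get_abi_alignment change is what makes the 3-bit tagging sound: once every promise is aligned to at least 8 bytes, the low 3 bits of any frame address are guaranteed zero and can carry the flags. A minimal sketch, assuming the frame is modeled as an ordinary struct:

    #include <cstdint>

    struct alignas(8) PromiseFrame {
        uintptr_t atomic_state; // awaiter handle + 3 flag bits
        // ... result, result_ptr, error-return-trace fields ...
    };

    static_assert(alignof(PromiseFrame) >= 8,
                  "low 3 bits of a frame address must be zero so they can hold flags");

    inline uintptr_t tag(PromiseFrame *p, uintptr_t flags) {
        return reinterpret_cast<uintptr_t>(p) | (flags & 0x7); // never clobbers the address
    }

    inline PromiseFrame *untag(uintptr_t word) {
        return reinterpret_cast<PromiseFrame *>(word & ~uintptr_t(0x7));
    }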
464
src/ir.cpp
464
src/ir.cpp
@ -3097,20 +3097,47 @@ static IrInstruction *ir_gen_async_return(IrBuilder *irb, Scope *scope, AstNode
        return return_inst;
    }

    ir_build_store_ptr(irb, scope, node, irb->exec->coro_result_field_ptr, return_value);
    IrInstruction *promise_type_val = ir_build_const_type(irb, scope, node,
        get_optional_type(irb->codegen, irb->codegen->builtin_types.entry_promise));
    // TODO replace replacement_value with @intToPtr(?promise, 0x1) when it doesn't crash zig
    IrInstruction *replacement_value = irb->exec->coro_handle;
    IrInstruction *maybe_await_handle = ir_build_atomic_rmw(irb, scope, node,
        promise_type_val, irb->exec->coro_awaiter_field_ptr, nullptr, replacement_value, nullptr,
        AtomicRmwOp_xchg, AtomicOrderSeqCst);
    ir_build_store_ptr(irb, scope, node, irb->exec->await_handle_var_ptr, maybe_await_handle);
    IrInstruction *is_non_null = ir_build_test_nonnull(irb, scope, node, maybe_await_handle);
    IrBasicBlock *suspended_block = ir_create_basic_block(irb, scope, "Suspended");
    IrBasicBlock *not_suspended_block = ir_create_basic_block(irb, scope, "NotSuspended");
    IrBasicBlock *store_awaiter_block = ir_create_basic_block(irb, scope, "StoreAwaiter");
    IrBasicBlock *check_canceled_block = ir_create_basic_block(irb, scope, "CheckCanceled");

    IrInstruction *inverted_ptr_mask = ir_build_const_usize(irb, scope, node, 0x7); // 0b111
    IrInstruction *ptr_mask = ir_build_un_op(irb, scope, node, IrUnOpBinNot, inverted_ptr_mask); // 0b111...000
    IrInstruction *is_canceled_mask = ir_build_const_usize(irb, scope, node, 0x1); // 0b001
    IrInstruction *is_suspended_mask = ir_build_const_usize(irb, scope, node, 0x2); // 0b010
    IrInstruction *promise_type_val = ir_build_const_type(irb, scope, node, irb->codegen->builtin_types.entry_promise);
    IrInstruction *is_comptime = ir_build_const_bool(irb, scope, node, false);
    return ir_build_cond_br(irb, scope, node, is_non_null, irb->exec->coro_normal_final, irb->exec->coro_early_final,
        is_comptime);
    // the above blocks are rendered by ir_gen after the rest of codegen
    IrInstruction *zero = ir_build_const_usize(irb, scope, node, 0);

    ir_build_store_ptr(irb, scope, node, irb->exec->coro_result_field_ptr, return_value);
    IrInstruction *usize_type_val = ir_build_const_type(irb, scope, node, irb->codegen->builtin_types.entry_usize);
    IrInstruction *prev_atomic_value = ir_build_atomic_rmw(irb, scope, node,
        usize_type_val, irb->exec->atomic_state_field_ptr, nullptr, ptr_mask, nullptr,
        AtomicRmwOp_or, AtomicOrderSeqCst);

    IrInstruction *is_suspended_value = ir_build_bin_op(irb, scope, node, IrBinOpBinAnd, prev_atomic_value, is_suspended_mask, false);
    IrInstruction *is_suspended_bool = ir_build_bin_op(irb, scope, node, IrBinOpCmpNotEq, is_suspended_value, zero, false);
    ir_build_cond_br(irb, scope, node, is_suspended_bool, suspended_block, not_suspended_block, is_comptime);

    ir_set_cursor_at_end_and_append_block(irb, suspended_block);
    ir_build_unreachable(irb, scope, node);

    ir_set_cursor_at_end_and_append_block(irb, not_suspended_block);
    IrInstruction *await_handle_addr = ir_build_bin_op(irb, scope, node, IrBinOpBinAnd, prev_atomic_value, ptr_mask, false);
    // if we ever add null checking safety to the ptrtoint instruction, it needs to be disabled here
    IrInstruction *have_await_handle = ir_build_bin_op(irb, scope, node, IrBinOpCmpNotEq, await_handle_addr, zero, false);
    ir_build_cond_br(irb, scope, node, have_await_handle, store_awaiter_block, check_canceled_block, is_comptime);

    ir_set_cursor_at_end_and_append_block(irb, store_awaiter_block);
    IrInstruction *await_handle = ir_build_int_to_ptr(irb, scope, node, promise_type_val, await_handle_addr);
    ir_build_store_ptr(irb, scope, node, irb->exec->await_handle_var_ptr, await_handle);
    ir_build_br(irb, scope, node, irb->exec->coro_normal_final, is_comptime);

    ir_set_cursor_at_end_and_append_block(irb, check_canceled_block);
    IrInstruction *is_canceled_value = ir_build_bin_op(irb, scope, node, IrBinOpBinAnd, prev_atomic_value, is_canceled_mask, false);
    IrInstruction *is_canceled_bool = ir_build_bin_op(irb, scope, node, IrBinOpCmpNotEq, is_canceled_value, zero, false);
    return ir_build_cond_br(irb, scope, node, is_canceled_bool, irb->exec->coro_final_cleanup_block, irb->exec->coro_early_final, is_comptime);
}
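The rewritten ir_gen_async_return performs the whole return-side handshake with a single atomicrmw: ORing in the full pointer mask marks the coroutine as returned, and the previous value of the word distinguishes every other case. A condensed C++ sketch of that decision tree (the helper and enum names are hypothetical):

    #include <atomic>
    #include <cstdint>
    #include <cstdlib>

    enum class ReturnAction { ResumeAwaiter, CleanupSelf, SuspendForever };

    ReturnAction on_async_return(std::atomic<uintptr_t> &state, uintptr_t ptr_mask,
                                 void **awaiter_out) {
        // Setting every pointer bit marks "returned"; prev is the old word.
        uintptr_t prev = state.fetch_or(ptr_mask, std::memory_order_seq_cst);
        if (prev & 0x2)                       // suspended while returning: impossible
            std::abort();                     // mirrors ir_build_unreachable
        uintptr_t handle = prev & ptr_mask;
        if (handle != 0) {                    // an awaiter registered first
            *awaiter_out = reinterpret_cast<void *>(handle);
            return ReturnAction::ResumeAwaiter;   // coro_normal_final
        }
        if (prev & 0x1)                       // canceled and nobody awaits
            return ReturnAction::CleanupSelf;     // coro_final_cleanup_block
        return ReturnAction::SuspendForever;      // coro_early_final
    }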
static IrInstruction *ir_gen_return(IrBuilder *irb, Scope *scope, AstNode *node, LVal lval) {
@ -5251,8 +5278,10 @@ static IrInstruction *ir_gen_while_expr(IrBuilder *irb, Scope *scope, AstNode *n
    if (body_result == irb->codegen->invalid_instruction)
        return body_result;

    if (!instr_is_unreachable(body_result))
    if (!instr_is_unreachable(body_result)) {
        ir_mark_gen(ir_build_check_statement_is_void(irb, payload_scope, node->data.while_expr.body, body_result));
        ir_mark_gen(ir_build_br(irb, payload_scope, node, continue_block, is_comptime));
    }

    if (continue_expr_node) {
        ir_set_cursor_at_end_and_append_block(irb, continue_block);
@ -5331,8 +5360,10 @@ static IrInstruction *ir_gen_while_expr(IrBuilder *irb, Scope *scope, AstNode *n
    if (body_result == irb->codegen->invalid_instruction)
        return body_result;

    if (!instr_is_unreachable(body_result))
    if (!instr_is_unreachable(body_result)) {
        ir_mark_gen(ir_build_check_statement_is_void(irb, child_scope, node->data.while_expr.body, body_result));
        ir_mark_gen(ir_build_br(irb, child_scope, node, continue_block, is_comptime));
    }

    if (continue_expr_node) {
        ir_set_cursor_at_end_and_append_block(irb, continue_block);
@ -5392,8 +5423,10 @@ static IrInstruction *ir_gen_while_expr(IrBuilder *irb, Scope *scope, AstNode *n
    if (body_result == irb->codegen->invalid_instruction)
        return body_result;

    if (!instr_is_unreachable(body_result))
    if (!instr_is_unreachable(body_result)) {
        ir_mark_gen(ir_build_check_statement_is_void(irb, scope, node->data.while_expr.body, body_result));
        ir_mark_gen(ir_build_br(irb, scope, node, continue_block, is_comptime));
    }

    if (continue_expr_node) {
        ir_set_cursor_at_end_and_append_block(irb, continue_block);
@ -6153,15 +6186,6 @@ static IrInstruction *ir_gen_return_from_block(IrBuilder *irb, Scope *break_scop
    return ir_build_br(irb, break_scope, node, dest_block, is_comptime);
}

static IrInstruction *ir_gen_break_from_suspend(IrBuilder *irb, Scope *break_scope, AstNode *node, ScopeSuspend *suspend_scope) {
    IrInstruction *is_comptime = ir_build_const_bool(irb, break_scope, node, false);

    IrBasicBlock *dest_block = suspend_scope->resume_block;
    ir_gen_defers_for_block(irb, break_scope, dest_block->scope, false);

    return ir_build_br(irb, break_scope, node, dest_block, is_comptime);
}

static IrInstruction *ir_gen_break(IrBuilder *irb, Scope *break_scope, AstNode *node) {
    assert(node->type == NodeTypeBreak);

@ -6202,12 +6226,8 @@ static IrInstruction *ir_gen_break(IrBuilder *irb, Scope *break_scope, AstNode *
                return ir_gen_return_from_block(irb, break_scope, node, this_block_scope);
            }
        } else if (search_scope->id == ScopeIdSuspend) {
            ScopeSuspend *this_suspend_scope = (ScopeSuspend *)search_scope;
            if (node->data.break_expr.name != nullptr &&
                (this_suspend_scope->name != nullptr && buf_eql_buf(node->data.break_expr.name, this_suspend_scope->name)))
            {
                return ir_gen_break_from_suspend(irb, break_scope, node, this_suspend_scope);
            }
            add_node_error(irb->codegen, node, buf_sprintf("cannot break out of suspend block"));
            return irb->codegen->invalid_instruction;
        }
        search_scope = search_scope->parent;
    }
@ -6643,30 +6663,150 @@ static IrInstruction *ir_gen_fn_proto(IrBuilder *irb, Scope *parent_scope, AstNo
        async_allocator_type_value, is_var_args);
}
static IrInstruction *ir_gen_cancel(IrBuilder *irb, Scope *parent_scope, AstNode *node) {
static IrInstruction *ir_gen_cancel_target(IrBuilder *irb, Scope *scope, AstNode *node,
        IrInstruction *target_inst, bool cancel_non_suspended, bool cancel_awaited)
{
    IrBasicBlock *done_block = ir_create_basic_block(irb, scope, "CancelDone");
    IrBasicBlock *not_canceled_block = ir_create_basic_block(irb, scope, "NotCanceled");
    IrBasicBlock *pre_return_block = ir_create_basic_block(irb, scope, "PreReturn");
    IrBasicBlock *post_return_block = ir_create_basic_block(irb, scope, "PostReturn");
    IrBasicBlock *do_cancel_block = ir_create_basic_block(irb, scope, "DoCancel");

    IrInstruction *zero = ir_build_const_usize(irb, scope, node, 0);
    IrInstruction *usize_type_val = ir_build_const_type(irb, scope, node, irb->codegen->builtin_types.entry_usize);
    IrInstruction *is_comptime = ir_build_const_bool(irb, scope, node, false);
    IrInstruction *is_canceled_mask = ir_build_const_usize(irb, scope, node, 0x1); // 0b001
    IrInstruction *promise_T_type_val = ir_build_const_type(irb, scope, node,
        get_promise_type(irb->codegen, irb->codegen->builtin_types.entry_void));
    IrInstruction *inverted_ptr_mask = ir_build_const_usize(irb, scope, node, 0x7); // 0b111
    IrInstruction *ptr_mask = ir_build_un_op(irb, scope, node, IrUnOpBinNot, inverted_ptr_mask); // 0b111...000
    IrInstruction *await_mask = ir_build_const_usize(irb, scope, node, 0x4); // 0b100
    IrInstruction *is_suspended_mask = ir_build_const_usize(irb, scope, node, 0x2); // 0b010

    // TODO relies on Zig not re-ordering fields
    IrInstruction *casted_target_inst = ir_build_ptr_cast(irb, scope, node, promise_T_type_val, target_inst);
    IrInstruction *coro_promise_ptr = ir_build_coro_promise(irb, scope, node, casted_target_inst);
    Buf *atomic_state_field_name = buf_create_from_str(ATOMIC_STATE_FIELD_NAME);
    IrInstruction *atomic_state_ptr = ir_build_field_ptr(irb, scope, node, coro_promise_ptr,
        atomic_state_field_name);

    // set the is_canceled bit
    IrInstruction *prev_atomic_value = ir_build_atomic_rmw(irb, scope, node,
        usize_type_val, atomic_state_ptr, nullptr, is_canceled_mask, nullptr,
        AtomicRmwOp_or, AtomicOrderSeqCst);

    IrInstruction *is_canceled_value = ir_build_bin_op(irb, scope, node, IrBinOpBinAnd, prev_atomic_value, is_canceled_mask, false);
    IrInstruction *is_canceled_bool = ir_build_bin_op(irb, scope, node, IrBinOpCmpNotEq, is_canceled_value, zero, false);
    ir_build_cond_br(irb, scope, node, is_canceled_bool, done_block, not_canceled_block, is_comptime);

    ir_set_cursor_at_end_and_append_block(irb, not_canceled_block);
    IrInstruction *awaiter_addr = ir_build_bin_op(irb, scope, node, IrBinOpBinAnd, prev_atomic_value, ptr_mask, false);
    IrInstruction *is_returned_bool = ir_build_bin_op(irb, scope, node, IrBinOpCmpEq, awaiter_addr, ptr_mask, false);
    ir_build_cond_br(irb, scope, node, is_returned_bool, post_return_block, pre_return_block, is_comptime);

    ir_set_cursor_at_end_and_append_block(irb, post_return_block);
    if (cancel_awaited) {
        ir_build_br(irb, scope, node, do_cancel_block, is_comptime);
    } else {
        IrInstruction *is_awaited_value = ir_build_bin_op(irb, scope, node, IrBinOpBinAnd, prev_atomic_value, await_mask, false);
        IrInstruction *is_awaited_bool = ir_build_bin_op(irb, scope, node, IrBinOpCmpNotEq, is_awaited_value, zero, false);
        ir_build_cond_br(irb, scope, node, is_awaited_bool, done_block, do_cancel_block, is_comptime);
    }

    ir_set_cursor_at_end_and_append_block(irb, pre_return_block);
    if (cancel_awaited) {
        if (cancel_non_suspended) {
            ir_build_br(irb, scope, node, do_cancel_block, is_comptime);
        } else {
            IrInstruction *is_suspended_value = ir_build_bin_op(irb, scope, node, IrBinOpBinAnd, prev_atomic_value, is_suspended_mask, false);
            IrInstruction *is_suspended_bool = ir_build_bin_op(irb, scope, node, IrBinOpCmpNotEq, is_suspended_value, zero, false);
            ir_build_cond_br(irb, scope, node, is_suspended_bool, do_cancel_block, done_block, is_comptime);
        }
    } else {
        ir_build_br(irb, scope, node, done_block, is_comptime);
    }

    ir_set_cursor_at_end_and_append_block(irb, do_cancel_block);
    ir_build_cancel(irb, scope, node, target_inst);
    ir_build_br(irb, scope, node, done_block, is_comptime);

    ir_set_cursor_at_end_and_append_block(irb, done_block);
    return ir_build_const_void(irb, scope, node);
}

static IrInstruction *ir_gen_cancel(IrBuilder *irb, Scope *scope, AstNode *node) {
    assert(node->type == NodeTypeCancel);

    IrInstruction *target_inst = ir_gen_node(irb, node->data.cancel_expr.expr, parent_scope);
    IrInstruction *target_inst = ir_gen_node(irb, node->data.cancel_expr.expr, scope);
    if (target_inst == irb->codegen->invalid_instruction)
        return irb->codegen->invalid_instruction;

    return ir_build_cancel(irb, parent_scope, node, target_inst);
    return ir_gen_cancel_target(irb, scope, node, target_inst, false, true);
}
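ir_gen_cancel_target follows the same one-atomic-op pattern: it sets the is_canceled bit with a fetch-or and then branches purely on the previous value, so two racing cancels can never both run the cleanup path. The predicate it computes, restated as a C++ sketch (illustrative names, same bit layout as above):

    #include <atomic>
    #include <cstdint>

    // Returns true when this caller won the race and must run the real cancel.
    bool should_cancel(std::atomic<uintptr_t> &state, uintptr_t ptr_mask,
                       bool cancel_non_suspended, bool cancel_awaited) {
        uintptr_t prev = state.fetch_or(0x1, std::memory_order_seq_cst); // is_canceled
        if (prev & 0x1)
            return false;                                // someone already canceled
        bool returned = (prev & ptr_mask) == ptr_mask;   // "returned" sentinel
        if (returned)
            return cancel_awaited || !(prev & 0x4);      // else leave it to the awaiter
        if (cancel_awaited)
            return cancel_non_suspended || (prev & 0x2) != 0; // only mid-suspend
        return false;                                    // still pre-return: not ours
    }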
static IrInstruction *ir_gen_resume(IrBuilder *irb, Scope *parent_scope, AstNode *node) {
static IrInstruction *ir_gen_resume_target(IrBuilder *irb, Scope *scope, AstNode *node,
        IrInstruction *target_inst)
{
    IrBasicBlock *done_block = ir_create_basic_block(irb, scope, "ResumeDone");
    IrBasicBlock *not_canceled_block = ir_create_basic_block(irb, scope, "NotCanceled");
    IrBasicBlock *suspended_block = ir_create_basic_block(irb, scope, "IsSuspended");
    IrBasicBlock *not_suspended_block = ir_create_basic_block(irb, scope, "IsNotSuspended");

    IrInstruction *zero = ir_build_const_usize(irb, scope, node, 0);
    IrInstruction *is_canceled_mask = ir_build_const_usize(irb, scope, node, 0x1); // 0b001
    IrInstruction *is_suspended_mask = ir_build_const_usize(irb, scope, node, 0x2); // 0b010
    IrInstruction *and_mask = ir_build_un_op(irb, scope, node, IrUnOpBinNot, is_suspended_mask);
    IrInstruction *is_comptime = ir_build_const_bool(irb, scope, node, false);
    IrInstruction *usize_type_val = ir_build_const_type(irb, scope, node, irb->codegen->builtin_types.entry_usize);
    IrInstruction *promise_T_type_val = ir_build_const_type(irb, scope, node,
        get_promise_type(irb->codegen, irb->codegen->builtin_types.entry_void));

    // TODO relies on Zig not re-ordering fields
    IrInstruction *casted_target_inst = ir_build_ptr_cast(irb, scope, node, promise_T_type_val, target_inst);
    IrInstruction *coro_promise_ptr = ir_build_coro_promise(irb, scope, node, casted_target_inst);
    Buf *atomic_state_field_name = buf_create_from_str(ATOMIC_STATE_FIELD_NAME);
    IrInstruction *atomic_state_ptr = ir_build_field_ptr(irb, scope, node, coro_promise_ptr,
        atomic_state_field_name);

    // clear the is_suspended bit
    IrInstruction *prev_atomic_value = ir_build_atomic_rmw(irb, scope, node,
        usize_type_val, atomic_state_ptr, nullptr, and_mask, nullptr,
        AtomicRmwOp_and, AtomicOrderSeqCst);

    IrInstruction *is_canceled_value = ir_build_bin_op(irb, scope, node, IrBinOpBinAnd, prev_atomic_value, is_canceled_mask, false);
    IrInstruction *is_canceled_bool = ir_build_bin_op(irb, scope, node, IrBinOpCmpNotEq, is_canceled_value, zero, false);
    ir_build_cond_br(irb, scope, node, is_canceled_bool, done_block, not_canceled_block, is_comptime);

    ir_set_cursor_at_end_and_append_block(irb, not_canceled_block);
    IrInstruction *is_suspended_value = ir_build_bin_op(irb, scope, node, IrBinOpBinAnd, prev_atomic_value, is_suspended_mask, false);
    IrInstruction *is_suspended_bool = ir_build_bin_op(irb, scope, node, IrBinOpCmpNotEq, is_suspended_value, zero, false);
    ir_build_cond_br(irb, scope, node, is_suspended_bool, suspended_block, not_suspended_block, is_comptime);

    ir_set_cursor_at_end_and_append_block(irb, not_suspended_block);
    ir_build_unreachable(irb, scope, node);

    ir_set_cursor_at_end_and_append_block(irb, suspended_block);
    ir_build_coro_resume(irb, scope, node, target_inst);
    ir_build_br(irb, scope, node, done_block, is_comptime);

    ir_set_cursor_at_end_and_append_block(irb, done_block);
    return ir_build_const_void(irb, scope, node);
}

static IrInstruction *ir_gen_resume(IrBuilder *irb, Scope *scope, AstNode *node) {
    assert(node->type == NodeTypeResume);

    IrInstruction *target_inst = ir_gen_node(irb, node->data.resume_expr.expr, parent_scope);
    IrInstruction *target_inst = ir_gen_node(irb, node->data.resume_expr.expr, scope);
    if (target_inst == irb->codegen->invalid_instruction)
        return irb->codegen->invalid_instruction;

    return ir_build_coro_resume(irb, parent_scope, node, target_inst);
    return ir_gen_resume_target(irb, scope, node, target_inst);
}
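ir_gen_resume_target likewise guards coro_resume behind the state word: it clears the is_suspended bit with an atomic AND and inspects the previous value, so resuming a coroutine that was never suspended, or racing against a cancel, is detected rather than silently miscompiled. In C++ form (names illustrative):

    #include <atomic>
    #include <cstdint>
    #include <cstdlib>

    void on_resume(std::atomic<uintptr_t> &state, void (*coro_resume)(void)) {
        // Clear is_suspended (0b010); prev reports the target's prior state.
        uintptr_t prev = state.fetch_and(~uintptr_t(0x2), std::memory_order_seq_cst);
        if (prev & 0x1)
            return;            // canceled in the meantime: resume becomes a no-op
        if (!(prev & 0x2))
            std::abort();      // resuming a non-suspended coroutine is illegal
        coro_resume();         // safe: we are the one that cleared the bit
    }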
static IrInstruction *ir_gen_await_expr(IrBuilder *irb, Scope *parent_scope, AstNode *node) {
static IrInstruction *ir_gen_await_expr(IrBuilder *irb, Scope *scope, AstNode *node) {
    assert(node->type == NodeTypeAwaitExpr);

    IrInstruction *target_inst = ir_gen_node(irb, node->data.await_expr.expr, parent_scope);
    IrInstruction *target_inst = ir_gen_node(irb, node->data.await_expr.expr, scope);
    if (target_inst == irb->codegen->invalid_instruction)
        return irb->codegen->invalid_instruction;

@ -6680,7 +6820,7 @@ static IrInstruction *ir_gen_await_expr(IrBuilder *irb, Scope *parent_scope, Ast
        return irb->codegen->invalid_instruction;
    }

    ScopeDeferExpr *scope_defer_expr = get_scope_defer_expr(parent_scope);
    ScopeDeferExpr *scope_defer_expr = get_scope_defer_expr(scope);
    if (scope_defer_expr) {
        if (!scope_defer_expr->reported_err) {
            add_node_error(irb->codegen, node, buf_sprintf("cannot await inside defer expression"));
@ -6691,81 +6831,157 @@ static IrInstruction *ir_gen_await_expr(IrBuilder *irb, Scope *parent_scope, Ast

    Scope *outer_scope = irb->exec->begin_scope;

    IrInstruction *coro_promise_ptr = ir_build_coro_promise(irb, parent_scope, node, target_inst);
    IrInstruction *coro_promise_ptr = ir_build_coro_promise(irb, scope, node, target_inst);
    Buf *result_ptr_field_name = buf_create_from_str(RESULT_PTR_FIELD_NAME);
    IrInstruction *result_ptr_field_ptr = ir_build_field_ptr(irb, parent_scope, node, coro_promise_ptr, result_ptr_field_name);
    IrInstruction *result_ptr_field_ptr = ir_build_field_ptr(irb, scope, node, coro_promise_ptr, result_ptr_field_name);

    if (irb->codegen->have_err_ret_tracing) {
        IrInstruction *err_ret_trace_ptr = ir_build_error_return_trace(irb, parent_scope, node, IrInstructionErrorReturnTrace::NonNull);
        IrInstruction *err_ret_trace_ptr = ir_build_error_return_trace(irb, scope, node, IrInstructionErrorReturnTrace::NonNull);
        Buf *err_ret_trace_ptr_field_name = buf_create_from_str(ERR_RET_TRACE_PTR_FIELD_NAME);
        IrInstruction *err_ret_trace_ptr_field_ptr = ir_build_field_ptr(irb, parent_scope, node, coro_promise_ptr, err_ret_trace_ptr_field_name);
        ir_build_store_ptr(irb, parent_scope, node, err_ret_trace_ptr_field_ptr, err_ret_trace_ptr);
        IrInstruction *err_ret_trace_ptr_field_ptr = ir_build_field_ptr(irb, scope, node, coro_promise_ptr, err_ret_trace_ptr_field_name);
        ir_build_store_ptr(irb, scope, node, err_ret_trace_ptr_field_ptr, err_ret_trace_ptr);
    }

    Buf *awaiter_handle_field_name = buf_create_from_str(AWAITER_HANDLE_FIELD_NAME);
    IrInstruction *awaiter_field_ptr = ir_build_field_ptr(irb, parent_scope, node, coro_promise_ptr,
        awaiter_handle_field_name);
    IrBasicBlock *already_awaited_block = ir_create_basic_block(irb, scope, "AlreadyAwaited");
    IrBasicBlock *not_awaited_block = ir_create_basic_block(irb, scope, "NotAwaited");
    IrBasicBlock *not_canceled_block = ir_create_basic_block(irb, scope, "NotCanceled");
    IrBasicBlock *yes_suspend_block = ir_create_basic_block(irb, scope, "YesSuspend");
    IrBasicBlock *no_suspend_block = ir_create_basic_block(irb, scope, "NoSuspend");
    IrBasicBlock *merge_block = ir_create_basic_block(irb, scope, "MergeSuspend");
    IrBasicBlock *cleanup_block = ir_create_basic_block(irb, scope, "SuspendCleanup");
    IrBasicBlock *resume_block = ir_create_basic_block(irb, scope, "SuspendResume");
    IrBasicBlock *cancel_target_block = ir_create_basic_block(irb, scope, "CancelTarget");
    IrBasicBlock *do_cancel_block = ir_create_basic_block(irb, scope, "DoCancel");
    IrBasicBlock *do_defers_block = ir_create_basic_block(irb, scope, "DoDefers");
    IrBasicBlock *destroy_block = ir_create_basic_block(irb, scope, "DestroyBlock");
    IrBasicBlock *my_suspended_block = ir_create_basic_block(irb, scope, "AlreadySuspended");
    IrBasicBlock *my_not_suspended_block = ir_create_basic_block(irb, scope, "NotAlreadySuspended");
    IrBasicBlock *do_suspend_block = ir_create_basic_block(irb, scope, "DoSuspend");

    IrInstruction *const_bool_false = ir_build_const_bool(irb, parent_scope, node, false);
    VariableTableEntry *result_var = ir_create_var(irb, node, parent_scope, nullptr,
    Buf *atomic_state_field_name = buf_create_from_str(ATOMIC_STATE_FIELD_NAME);
    IrInstruction *atomic_state_ptr = ir_build_field_ptr(irb, scope, node, coro_promise_ptr,
        atomic_state_field_name);

    IrInstruction *promise_type_val = ir_build_const_type(irb, scope, node, irb->codegen->builtin_types.entry_promise);
    IrInstruction *const_bool_false = ir_build_const_bool(irb, scope, node, false);
    IrInstruction *undefined_value = ir_build_const_undefined(irb, scope, node);
    IrInstruction *usize_type_val = ir_build_const_type(irb, scope, node, irb->codegen->builtin_types.entry_usize);
    IrInstruction *zero = ir_build_const_usize(irb, scope, node, 0);
    IrInstruction *inverted_ptr_mask = ir_build_const_usize(irb, scope, node, 0x7); // 0b111
    IrInstruction *ptr_mask = ir_build_un_op(irb, scope, node, IrUnOpBinNot, inverted_ptr_mask); // 0b111...000
    IrInstruction *await_mask = ir_build_const_usize(irb, scope, node, 0x4); // 0b100
    IrInstruction *is_canceled_mask = ir_build_const_usize(irb, scope, node, 0x1); // 0b001
    IrInstruction *is_suspended_mask = ir_build_const_usize(irb, scope, node, 0x2); // 0b010

    VariableTableEntry *result_var = ir_create_var(irb, node, scope, nullptr,
        false, false, true, const_bool_false);
    IrInstruction *undefined_value = ir_build_const_undefined(irb, parent_scope, node);
    IrInstruction *target_promise_type = ir_build_typeof(irb, parent_scope, node, target_inst);
    IrInstruction *promise_result_type = ir_build_promise_result_type(irb, parent_scope, node, target_promise_type);
    ir_build_await_bookkeeping(irb, parent_scope, node, promise_result_type);
    ir_build_var_decl(irb, parent_scope, node, result_var, promise_result_type, nullptr, undefined_value);
    IrInstruction *my_result_var_ptr = ir_build_var_ptr(irb, parent_scope, node, result_var);
    ir_build_store_ptr(irb, parent_scope, node, result_ptr_field_ptr, my_result_var_ptr);
    IrInstruction *save_token = ir_build_coro_save(irb, parent_scope, node, irb->exec->coro_handle);
    IrInstruction *promise_type_val = ir_build_const_type(irb, parent_scope, node,
        get_optional_type(irb->codegen, irb->codegen->builtin_types.entry_promise));
    IrInstruction *maybe_await_handle = ir_build_atomic_rmw(irb, parent_scope, node,
        promise_type_val, awaiter_field_ptr, nullptr, irb->exec->coro_handle, nullptr,
        AtomicRmwOp_xchg, AtomicOrderSeqCst);
    IrInstruction *is_non_null = ir_build_test_nonnull(irb, parent_scope, node, maybe_await_handle);
    IrBasicBlock *yes_suspend_block = ir_create_basic_block(irb, parent_scope, "YesSuspend");
    IrBasicBlock *no_suspend_block = ir_create_basic_block(irb, parent_scope, "NoSuspend");
    IrBasicBlock *merge_block = ir_create_basic_block(irb, parent_scope, "MergeSuspend");
    ir_build_cond_br(irb, parent_scope, node, is_non_null, no_suspend_block, yes_suspend_block, const_bool_false);
    IrInstruction *target_promise_type = ir_build_typeof(irb, scope, node, target_inst);
    IrInstruction *promise_result_type = ir_build_promise_result_type(irb, scope, node, target_promise_type);
    ir_build_await_bookkeeping(irb, scope, node, promise_result_type);
    ir_build_var_decl(irb, scope, node, result_var, promise_result_type, nullptr, undefined_value);
    IrInstruction *my_result_var_ptr = ir_build_var_ptr(irb, scope, node, result_var);
    ir_build_store_ptr(irb, scope, node, result_ptr_field_ptr, my_result_var_ptr);
    IrInstruction *save_token = ir_build_coro_save(irb, scope, node, irb->exec->coro_handle);

    IrInstruction *coro_handle_addr = ir_build_ptr_to_int(irb, scope, node, irb->exec->coro_handle);
    IrInstruction *mask_bits = ir_build_bin_op(irb, scope, node, IrBinOpBinOr, coro_handle_addr, await_mask, false);
    IrInstruction *prev_atomic_value = ir_build_atomic_rmw(irb, scope, node,
        usize_type_val, atomic_state_ptr, nullptr, mask_bits, nullptr,
        AtomicRmwOp_or, AtomicOrderSeqCst);

    IrInstruction *is_awaited_value = ir_build_bin_op(irb, scope, node, IrBinOpBinAnd, prev_atomic_value, await_mask, false);
    IrInstruction *is_awaited_bool = ir_build_bin_op(irb, scope, node, IrBinOpCmpNotEq, is_awaited_value, zero, false);
    ir_build_cond_br(irb, scope, node, is_awaited_bool, already_awaited_block, not_awaited_block, const_bool_false);

    ir_set_cursor_at_end_and_append_block(irb, already_awaited_block);
    ir_build_unreachable(irb, scope, node);

    ir_set_cursor_at_end_and_append_block(irb, not_awaited_block);
    IrInstruction *await_handle_addr = ir_build_bin_op(irb, scope, node, IrBinOpBinAnd, prev_atomic_value, ptr_mask, false);
    IrInstruction *is_non_null = ir_build_bin_op(irb, scope, node, IrBinOpCmpNotEq, await_handle_addr, zero, false);
    IrInstruction *is_canceled_value = ir_build_bin_op(irb, scope, node, IrBinOpBinAnd, prev_atomic_value, is_canceled_mask, false);
    IrInstruction *is_canceled_bool = ir_build_bin_op(irb, scope, node, IrBinOpCmpNotEq, is_canceled_value, zero, false);
    ir_build_cond_br(irb, scope, node, is_canceled_bool, cancel_target_block, not_canceled_block, const_bool_false);

    ir_set_cursor_at_end_and_append_block(irb, not_canceled_block);
    ir_build_cond_br(irb, scope, node, is_non_null, no_suspend_block, yes_suspend_block, const_bool_false);

    ir_set_cursor_at_end_and_append_block(irb, cancel_target_block);
    ir_build_cancel(irb, scope, node, target_inst);
    ir_mark_gen(ir_build_br(irb, scope, node, cleanup_block, const_bool_false));

    ir_set_cursor_at_end_and_append_block(irb, no_suspend_block);
    if (irb->codegen->have_err_ret_tracing) {
        Buf *err_ret_trace_field_name = buf_create_from_str(ERR_RET_TRACE_FIELD_NAME);
        IrInstruction *src_err_ret_trace_ptr = ir_build_field_ptr(irb, parent_scope, node, coro_promise_ptr, err_ret_trace_field_name);
        IrInstruction *dest_err_ret_trace_ptr = ir_build_error_return_trace(irb, parent_scope, node, IrInstructionErrorReturnTrace::NonNull);
        ir_build_merge_err_ret_traces(irb, parent_scope, node, coro_promise_ptr, src_err_ret_trace_ptr, dest_err_ret_trace_ptr);
        IrInstruction *src_err_ret_trace_ptr = ir_build_field_ptr(irb, scope, node, coro_promise_ptr, err_ret_trace_field_name);
        IrInstruction *dest_err_ret_trace_ptr = ir_build_error_return_trace(irb, scope, node, IrInstructionErrorReturnTrace::NonNull);
        ir_build_merge_err_ret_traces(irb, scope, node, coro_promise_ptr, src_err_ret_trace_ptr, dest_err_ret_trace_ptr);
    }
    Buf *result_field_name = buf_create_from_str(RESULT_FIELD_NAME);
    IrInstruction *promise_result_ptr = ir_build_field_ptr(irb, parent_scope, node, coro_promise_ptr, result_field_name);
    IrInstruction *promise_result_ptr = ir_build_field_ptr(irb, scope, node, coro_promise_ptr, result_field_name);
    // If the type of the result handle_is_ptr then this does not actually perform a load. But we need it to,
    // because we're about to destroy the memory. So we store it into our result variable.
    IrInstruction *no_suspend_result = ir_build_load_ptr(irb, parent_scope, node, promise_result_ptr);
    ir_build_store_ptr(irb, parent_scope, node, my_result_var_ptr, no_suspend_result);
    ir_build_cancel(irb, parent_scope, node, target_inst);
    ir_build_br(irb, parent_scope, node, merge_block, const_bool_false);
    IrInstruction *no_suspend_result = ir_build_load_ptr(irb, scope, node, promise_result_ptr);
    ir_build_store_ptr(irb, scope, node, my_result_var_ptr, no_suspend_result);
    ir_build_cancel(irb, scope, node, target_inst);
    ir_build_br(irb, scope, node, merge_block, const_bool_false);

    ir_set_cursor_at_end_and_append_block(irb, yes_suspend_block);
    IrInstruction *suspend_code = ir_build_coro_suspend(irb, parent_scope, node, save_token, const_bool_false);
    IrBasicBlock *cleanup_block = ir_create_basic_block(irb, parent_scope, "SuspendCleanup");
    IrBasicBlock *resume_block = ir_create_basic_block(irb, parent_scope, "SuspendResume");
    IrInstruction *my_prev_atomic_value = ir_build_atomic_rmw(irb, scope, node,
        usize_type_val, irb->exec->atomic_state_field_ptr, nullptr, is_suspended_mask, nullptr,
        AtomicRmwOp_or, AtomicOrderSeqCst);
    IrInstruction *my_is_suspended_value = ir_build_bin_op(irb, scope, node, IrBinOpBinAnd, my_prev_atomic_value, is_suspended_mask, false);
    IrInstruction *my_is_suspended_bool = ir_build_bin_op(irb, scope, node, IrBinOpCmpNotEq, my_is_suspended_value, zero, false);
    ir_build_cond_br(irb, scope, node, my_is_suspended_bool, my_suspended_block, my_not_suspended_block, const_bool_false);

    ir_set_cursor_at_end_and_append_block(irb, my_suspended_block);
    ir_build_unreachable(irb, scope, node);

    ir_set_cursor_at_end_and_append_block(irb, my_not_suspended_block);
    IrInstruction *my_is_canceled_value = ir_build_bin_op(irb, scope, node, IrBinOpBinAnd, my_prev_atomic_value, is_canceled_mask, false);
    IrInstruction *my_is_canceled_bool = ir_build_bin_op(irb, scope, node, IrBinOpCmpNotEq, my_is_canceled_value, zero, false);
    ir_build_cond_br(irb, scope, node, my_is_canceled_bool, cleanup_block, do_suspend_block, const_bool_false);

    ir_set_cursor_at_end_and_append_block(irb, do_suspend_block);
    IrInstruction *suspend_code = ir_build_coro_suspend(irb, scope, node, save_token, const_bool_false);

    IrInstructionSwitchBrCase *cases = allocate<IrInstructionSwitchBrCase>(2);
    cases[0].value = ir_build_const_u8(irb, parent_scope, node, 0);
    cases[0].value = ir_build_const_u8(irb, scope, node, 0);
    cases[0].block = resume_block;
    cases[1].value = ir_build_const_u8(irb, parent_scope, node, 1);
    cases[1].block = cleanup_block;
    ir_build_switch_br(irb, parent_scope, node, suspend_code, irb->exec->coro_suspend_block,
    cases[1].value = ir_build_const_u8(irb, scope, node, 1);
    cases[1].block = destroy_block;
    ir_build_switch_br(irb, scope, node, suspend_code, irb->exec->coro_suspend_block,
        2, cases, const_bool_false, nullptr);

    ir_set_cursor_at_end_and_append_block(irb, destroy_block);
    ir_gen_cancel_target(irb, scope, node, target_inst, false, true);
    ir_mark_gen(ir_build_br(irb, scope, node, cleanup_block, const_bool_false));

    ir_set_cursor_at_end_and_append_block(irb, cleanup_block);
    ir_gen_defers_for_block(irb, parent_scope, outer_scope, true);
    ir_mark_gen(ir_build_br(irb, parent_scope, node, irb->exec->coro_final_cleanup_block, const_bool_false));
    IrInstruction *my_mask_bits = ir_build_bin_op(irb, scope, node, IrBinOpBinOr, ptr_mask, is_canceled_mask, false);
    IrInstruction *b_my_prev_atomic_value = ir_build_atomic_rmw(irb, scope, node,
        usize_type_val, irb->exec->atomic_state_field_ptr, nullptr, my_mask_bits, nullptr,
        AtomicRmwOp_or, AtomicOrderSeqCst);
    IrInstruction *my_await_handle_addr = ir_build_bin_op(irb, scope, node, IrBinOpBinAnd, b_my_prev_atomic_value, ptr_mask, false);
    IrInstruction *dont_have_my_await_handle = ir_build_bin_op(irb, scope, node, IrBinOpCmpEq, my_await_handle_addr, zero, false);
    IrInstruction *dont_destroy_ourselves = ir_build_bin_op(irb, scope, node, IrBinOpBoolAnd, dont_have_my_await_handle, is_canceled_bool, false);
    ir_build_cond_br(irb, scope, node, dont_have_my_await_handle, do_defers_block, do_cancel_block, const_bool_false);

    ir_set_cursor_at_end_and_append_block(irb, do_cancel_block);
    IrInstruction *my_await_handle = ir_build_int_to_ptr(irb, scope, node, promise_type_val, my_await_handle_addr);
    ir_gen_cancel_target(irb, scope, node, my_await_handle, true, false);
    ir_mark_gen(ir_build_br(irb, scope, node, do_defers_block, const_bool_false));

    ir_set_cursor_at_end_and_append_block(irb, do_defers_block);
    ir_gen_defers_for_block(irb, scope, outer_scope, true);
    ir_mark_gen(ir_build_cond_br(irb, scope, node, dont_destroy_ourselves, irb->exec->coro_early_final, irb->exec->coro_final_cleanup_block, const_bool_false));

    ir_set_cursor_at_end_and_append_block(irb, resume_block);
    ir_build_br(irb, parent_scope, node, merge_block, const_bool_false);
    ir_build_br(irb, scope, node, merge_block, const_bool_false);

    ir_set_cursor_at_end_and_append_block(irb, merge_block);
    return ir_build_load_ptr(irb, parent_scope, node, my_result_var_ptr);
    return ir_build_load_ptr(irb, scope, node, my_result_var_ptr);
}
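On the await side, the awaiter registers its own handle and the await bit in one fetch-or, which simultaneously answers whether the target already returned, was canceled, or is still running. A condensed sketch under the same assumed bit layout:

    #include <atomic>
    #include <cstdint>
    #include <cstdlib>

    enum class AwaitAction { ResultReady, CancelTarget, Suspend };

    AwaitAction on_await(std::atomic<uintptr_t> &state, void *my_handle,
                         uintptr_t ptr_mask) {
        uintptr_t bits = reinterpret_cast<uintptr_t>(my_handle) | 0x4; // handle + await bit
        uintptr_t prev = state.fetch_or(bits, std::memory_order_seq_cst);
        if (prev & 0x4)
            std::abort();                       // double await: AlreadyAwaited block
        if (prev & 0x1)
            return AwaitAction::CancelTarget;   // target canceled before we awaited
        if (prev & ptr_mask)
            return AwaitAction::ResultReady;    // target already returned (NoSuspend)
        return AwaitAction::Suspend;            // registered; wait to be resumed
    }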
static IrInstruction *ir_gen_suspend(IrBuilder *irb, Scope *parent_scope, AstNode *node) {
@ -6804,9 +7020,52 @@ static IrInstruction *ir_gen_suspend(IrBuilder *irb, Scope *parent_scope, AstNod

    IrBasicBlock *cleanup_block = ir_create_basic_block(irb, parent_scope, "SuspendCleanup");
    IrBasicBlock *resume_block = ir_create_basic_block(irb, parent_scope, "SuspendResume");
    IrBasicBlock *suspended_block = ir_create_basic_block(irb, parent_scope, "AlreadySuspended");
    IrBasicBlock *canceled_block = ir_create_basic_block(irb, parent_scope, "IsCanceled");
    IrBasicBlock *not_canceled_block = ir_create_basic_block(irb, parent_scope, "NotCanceled");
    IrBasicBlock *not_suspended_block = ir_create_basic_block(irb, parent_scope, "NotAlreadySuspended");
    IrBasicBlock *cancel_awaiter_block = ir_create_basic_block(irb, parent_scope, "CancelAwaiter");

    IrInstruction *suspend_code;
    IrInstruction *promise_type_val = ir_build_const_type(irb, parent_scope, node, irb->codegen->builtin_types.entry_promise);
    IrInstruction *const_bool_true = ir_build_const_bool(irb, parent_scope, node, true);
    IrInstruction *const_bool_false = ir_build_const_bool(irb, parent_scope, node, false);
    IrInstruction *usize_type_val = ir_build_const_type(irb, parent_scope, node, irb->codegen->builtin_types.entry_usize);
    IrInstruction *is_canceled_mask = ir_build_const_usize(irb, parent_scope, node, 0x1); // 0b001
    IrInstruction *is_suspended_mask = ir_build_const_usize(irb, parent_scope, node, 0x2); // 0b010
    IrInstruction *zero = ir_build_const_usize(irb, parent_scope, node, 0);
    IrInstruction *inverted_ptr_mask = ir_build_const_usize(irb, parent_scope, node, 0x7); // 0b111
    IrInstruction *ptr_mask = ir_build_un_op(irb, parent_scope, node, IrUnOpBinNot, inverted_ptr_mask); // 0b111...000

    IrInstruction *prev_atomic_value = ir_build_atomic_rmw(irb, parent_scope, node,
        usize_type_val, irb->exec->atomic_state_field_ptr, nullptr, is_suspended_mask, nullptr,
        AtomicRmwOp_or, AtomicOrderSeqCst);

    IrInstruction *is_canceled_value = ir_build_bin_op(irb, parent_scope, node, IrBinOpBinAnd, prev_atomic_value, is_canceled_mask, false);
    IrInstruction *is_canceled_bool = ir_build_bin_op(irb, parent_scope, node, IrBinOpCmpNotEq, is_canceled_value, zero, false);
    ir_build_cond_br(irb, parent_scope, node, is_canceled_bool, canceled_block, not_canceled_block, const_bool_false);

    ir_set_cursor_at_end_and_append_block(irb, canceled_block);
    IrInstruction *await_handle_addr = ir_build_bin_op(irb, parent_scope, node, IrBinOpBinAnd, prev_atomic_value, ptr_mask, false);
    IrInstruction *have_await_handle = ir_build_bin_op(irb, parent_scope, node, IrBinOpCmpNotEq, await_handle_addr, zero, false);
    IrBasicBlock *post_canceled_block = irb->current_basic_block;
    ir_build_cond_br(irb, parent_scope, node, have_await_handle, cancel_awaiter_block, cleanup_block, const_bool_false);

    ir_set_cursor_at_end_and_append_block(irb, cancel_awaiter_block);
    IrInstruction *await_handle = ir_build_int_to_ptr(irb, parent_scope, node, promise_type_val, await_handle_addr);
    ir_gen_cancel_target(irb, parent_scope, node, await_handle, true, false);
    IrBasicBlock *post_cancel_awaiter_block = irb->current_basic_block;
    ir_build_br(irb, parent_scope, node, cleanup_block, const_bool_false);

    ir_set_cursor_at_end_and_append_block(irb, not_canceled_block);
    IrInstruction *is_suspended_value = ir_build_bin_op(irb, parent_scope, node, IrBinOpBinAnd, prev_atomic_value, is_suspended_mask, false);
    IrInstruction *is_suspended_bool = ir_build_bin_op(irb, parent_scope, node, IrBinOpCmpNotEq, is_suspended_value, zero, false);
    ir_build_cond_br(irb, parent_scope, node, is_suspended_bool, suspended_block, not_suspended_block, const_bool_false);

    ir_set_cursor_at_end_and_append_block(irb, suspended_block);
    ir_build_unreachable(irb, parent_scope, node);

    ir_set_cursor_at_end_and_append_block(irb, not_suspended_block);
    IrInstruction *suspend_code;
    if (node->data.suspend.block == nullptr) {
        suspend_code = ir_build_coro_suspend(irb, parent_scope, node, nullptr, const_bool_false);
    } else {
@ -6834,13 +7093,20 @@ static IrInstruction *ir_gen_suspend(IrBuilder *irb, Scope *parent_scope, AstNod
    cases[0].value = ir_mark_gen(ir_build_const_u8(irb, parent_scope, node, 0));
    cases[0].block = resume_block;
    cases[1].value = ir_mark_gen(ir_build_const_u8(irb, parent_scope, node, 1));
    cases[1].block = cleanup_block;
    cases[1].block = canceled_block;
    ir_mark_gen(ir_build_switch_br(irb, parent_scope, node, suspend_code, irb->exec->coro_suspend_block,
        2, cases, const_bool_false, nullptr));

    ir_set_cursor_at_end_and_append_block(irb, cleanup_block);
    IrBasicBlock **incoming_blocks = allocate<IrBasicBlock *>(2);
    IrInstruction **incoming_values = allocate<IrInstruction *>(2);
    incoming_blocks[0] = post_canceled_block;
    incoming_values[0] = const_bool_true;
    incoming_blocks[1] = post_cancel_awaiter_block;
    incoming_values[1] = const_bool_false;
    IrInstruction *destroy_ourselves = ir_build_phi(irb, parent_scope, node, 2, incoming_blocks, incoming_values);
    ir_gen_defers_for_block(irb, parent_scope, outer_scope, true);
    ir_mark_gen(ir_build_br(irb, parent_scope, node, irb->exec->coro_final_cleanup_block, const_bool_false));
    ir_mark_gen(ir_build_cond_br(irb, parent_scope, node, destroy_ourselves, irb->exec->coro_final_cleanup_block, irb->exec->coro_early_final, const_bool_false));

    ir_set_cursor_at_end_and_append_block(irb, resume_block);
    return ir_mark_gen(ir_build_const_void(irb, parent_scope, node));
@ -7081,10 +7347,11 @@ bool ir_gen(CodeGen *codegen, AstNode *node, Scope *scope, IrExecutable *ir_exec
|
||||
IrInstruction *coro_mem_ptr = ir_build_ptr_cast(irb, coro_scope, node, u8_ptr_type, maybe_coro_mem_ptr);
|
||||
irb->exec->coro_handle = ir_build_coro_begin(irb, coro_scope, node, coro_id, coro_mem_ptr);
|
||||
|
||||
Buf *awaiter_handle_field_name = buf_create_from_str(AWAITER_HANDLE_FIELD_NAME);
|
||||
irb->exec->coro_awaiter_field_ptr = ir_build_field_ptr(irb, scope, node, coro_promise_ptr,
|
||||
awaiter_handle_field_name);
|
||||
ir_build_store_ptr(irb, scope, node, irb->exec->coro_awaiter_field_ptr, null_value);
|
||||
Buf *atomic_state_field_name = buf_create_from_str(ATOMIC_STATE_FIELD_NAME);
|
||||
irb->exec->atomic_state_field_ptr = ir_build_field_ptr(irb, scope, node, coro_promise_ptr,
|
||||
atomic_state_field_name);
|
||||
IrInstruction *zero = ir_build_const_usize(irb, scope, node, 0);
|
||||
ir_build_store_ptr(irb, scope, node, irb->exec->atomic_state_field_ptr, zero);
|
||||
Buf *result_field_name = buf_create_from_str(RESULT_FIELD_NAME);
|
||||
irb->exec->coro_result_field_ptr = ir_build_field_ptr(irb, scope, node, coro_promise_ptr, result_field_name);
|
||||
result_ptr_field_name = buf_create_from_str(RESULT_PTR_FIELD_NAME);
|
||||
@ -7102,7 +7369,6 @@ bool ir_gen(CodeGen *codegen, AstNode *node, Scope *scope, IrExecutable *ir_exec
|
||||
// coordinate with builtin.zig
|
||||
Buf *index_name = buf_create_from_str("index");
|
||||
IrInstruction *index_ptr = ir_build_field_ptr(irb, scope, node, err_ret_trace_ptr, index_name);
|
||||
IrInstruction *zero = ir_build_const_usize(irb, scope, node, 0);
|
||||
ir_build_store_ptr(irb, scope, node, index_ptr, zero);
|
||||
|
||||
Buf *instruction_addresses_name = buf_create_from_str("instruction_addresses");
|
||||
@ -7225,7 +7491,7 @@ bool ir_gen(CodeGen *codegen, AstNode *node, Scope *scope, IrExecutable *ir_exec
|
||||
ir_build_cond_br(irb, scope, node, resume_awaiter, resume_block, irb->exec->coro_suspend_block, const_bool_false);
|
||||
|
||||
ir_set_cursor_at_end_and_append_block(irb, resume_block);
|
||||
ir_build_coro_resume(irb, scope, node, awaiter_handle);
|
||||
ir_gen_resume_target(irb, scope, node, awaiter_handle);
|
||||
ir_build_br(irb, scope, node, irb->exec->coro_suspend_block, const_bool_false);
|
||||
}
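At the language level, the canceled_block case added above corresponds to suspend code 1: a coroutine that is resumed by cancel rather than resume runs its defers and then frees itself. A minimal sketch of the resulting behavior (hypothetical names; it mirrors the test/cases/cancel.zig file added later in this commit):

const std = @import("std");

var cleaned_up: bool = false;

async fn worker() void {
    // runs even when the coroutine is destroyed via cancel
    defer cleaned_up = true;
    suspend;
}

test "cancel runs defers" {
    var da = std.heap.DirectAllocator.init();
    defer da.deinit();

    const p = async<&da.allocator> worker() catch unreachable;
    cancel p;
    std.debug.assert(cleaned_up);
}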
@@ -12142,7 +12408,7 @@ static TypeTableEntry *ir_analyze_instruction_decl_var(IrAnalyze *ira, IrInstruc
        result_type = ira->codegen->builtin_types.entry_invalid;
    } else if (type_requires_comptime(result_type)) {
        var_class_requires_const = true;
        if (!var->src_is_const && !is_comptime_var) {
        if (!var->gen_is_const && !is_comptime_var) {
            ir_add_error_node(ira, source_node,
                buf_sprintf("variable of type '%s' must be const or comptime",
                    buf_ptr(&result_type->name)));
@@ -12591,6 +12857,7 @@ static bool ir_analyze_fn_call_generic_arg(IrAnalyze *ira, AstNode *fn_proto_nod
    }

    Buf *param_name = param_decl_node->data.param_decl.name;
    if (!param_name) return false;
    if (!is_var_args) {
        VariableTableEntry *var = add_variable(ira->codegen, param_decl_node,
            *child_scope, param_name, true, arg_val, nullptr);
@@ -18991,6 +19258,9 @@ static TypeTableEntry *ir_analyze_instruction_unwrap_err_payload(IrAnalyze *ira,
        return ira->codegen->builtin_types.entry_invalid;
    } else if (type_entry->id == TypeTableEntryIdErrorUnion) {
        TypeTableEntry *payload_type = type_entry->data.error_union.payload_type;
        if (type_is_invalid(payload_type)) {
            return ira->codegen->builtin_types.entry_invalid;
        }
        TypeTableEntry *result_type = get_pointer_to_type_extra(ira->codegen, payload_type,
            ptr_type->data.pointer.is_const, ptr_type->data.pointer.is_volatile,
            PtrLenSingle,
@@ -45,6 +45,10 @@ static void ir_print_var_instruction(IrPrint *irp, IrInstruction *instruction) {
}

static void ir_print_other_instruction(IrPrint *irp, IrInstruction *instruction) {
    if (instruction == nullptr) {
        fprintf(irp->f, "(null)");
        return;
    }
    if (instruction->value.special != ConstValSpecialRuntime) {
        ir_print_const_value(irp, &instruction->value);
    } else {
@@ -648,30 +648,12 @@ static AstNode *ast_parse_asm_expr(ParseContext *pc, size_t *token_index, bool m
}

/*
SuspendExpression(body) = option(Symbol ":") "suspend" option(("|" Symbol "|" body))
SuspendExpression(body) = "suspend" option(("|" Symbol "|" body))
*/
static AstNode *ast_parse_suspend_block(ParseContext *pc, size_t *token_index, bool mandatory) {
    size_t orig_token_index = *token_index;

    Token *name_token = nullptr;
    Token *token = &pc->tokens->at(*token_index);
    if (token->id == TokenIdSymbol) {
        *token_index += 1;
        Token *colon_token = &pc->tokens->at(*token_index);
        if (colon_token->id == TokenIdColon) {
            *token_index += 1;
            name_token = token;
            token = &pc->tokens->at(*token_index);
        } else if (mandatory) {
            ast_expect_token(pc, colon_token, TokenIdColon);
            zig_unreachable();
        } else {
            *token_index = orig_token_index;
            return nullptr;
        }
    }

    Token *suspend_token = token;
    Token *suspend_token = &pc->tokens->at(*token_index);
    if (suspend_token->id == TokenIdKeywordSuspend) {
        *token_index += 1;
    } else if (mandatory) {
@@ -693,9 +675,6 @@ static AstNode *ast_parse_suspend_block(ParseContext *pc, size_t *token_index, b
    }

    AstNode *node = ast_create_node(pc, NodeTypeSuspend, suspend_token);
    if (name_token != nullptr) {
        node->data.suspend.name = token_buf(name_token);
    }
    node->data.suspend.promise_symbol = ast_parse_symbol(pc, token_index);
    ast_eat_token(pc, token_index, TokenIdBinOr);
    node->data.suspend.block = ast_parse_block(pc, token_index, true);
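This grammar change drops the optional label on suspend, so a suspend block can no longer be the target of a labeled break; the surviving forms are a bare suspend and a suspend with a promise capture. A minimal sketch of the accepted syntax after this change (names are illustrative; it matches the updated coroutine test later in this diff):

async fn bare() void {
    // suspend with no body
    suspend;
}

async fn withHandle() void {
    // the |p| capture receives the coroutine's promise handle;
    // the block body runs before control actually leaves the coroutine
    suspend |p| {
        resume p;
    }
}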
@@ -807,6 +807,7 @@ pub const LibExeObjStep = struct {
    disable_libc: bool,
    frameworks: BufSet,
    verbose_link: bool,
    no_rosegment: bool,

    // zig only stuff
    root_src: ?[]const u8,
@@ -874,6 +875,7 @@ pub const LibExeObjStep = struct {

    fn initExtraArgs(builder: *Builder, name: []const u8, root_src: ?[]const u8, kind: Kind, static: bool, ver: *const Version) LibExeObjStep {
        var self = LibExeObjStep{
            .no_rosegment = false,
            .strip = false,
            .builder = builder,
            .verbose_link = false,
@@ -914,6 +916,7 @@ pub const LibExeObjStep = struct {

    fn initC(builder: *Builder, name: []const u8, kind: Kind, version: *const Version, static: bool) LibExeObjStep {
        var self = LibExeObjStep{
            .no_rosegment = false,
            .builder = builder,
            .name = name,
            .kind = kind,
@@ -953,6 +956,10 @@ pub const LibExeObjStep = struct {
        return self;
    }

    pub fn setNoRoSegment(self: *LibExeObjStep, value: bool) void {
        self.no_rosegment = value;
    }

    fn computeOutFileNames(self: *LibExeObjStep) void {
        switch (self.kind) {
            Kind.Obj => {
@@ -1306,6 +1313,10 @@ pub const LibExeObjStep = struct {
            }
        }

        if (self.no_rosegment) {
            try zig_args.append("--no-rosegment");
        }

        try builder.spawnChild(zig_args.toSliceConst());

        if (self.kind == Kind.Lib and !self.static and self.target.wantSharedLibSymLinks()) {
@@ -1598,6 +1609,7 @@ pub const TestStep = struct {
    include_dirs: ArrayList([]const u8),
    lib_paths: ArrayList([]const u8),
    object_files: ArrayList([]const u8),
    no_rosegment: bool,

    pub fn init(builder: *Builder, root_src: []const u8) TestStep {
        const step_name = builder.fmt("test {}", root_src);
@@ -1615,9 +1627,14 @@ pub const TestStep = struct {
            .include_dirs = ArrayList([]const u8).init(builder.allocator),
            .lib_paths = ArrayList([]const u8).init(builder.allocator),
            .object_files = ArrayList([]const u8).init(builder.allocator),
            .no_rosegment = false,
        };
    }

    pub fn setNoRoSegment(self: *TestStep, value: bool) void {
        self.no_rosegment = value;
    }

    pub fn addLibPath(self: *TestStep, path: []const u8) void {
        self.lib_paths.append(path) catch unreachable;
    }
@@ -1761,6 +1778,10 @@ pub const TestStep = struct {
            try zig_args.append(lib_path);
        }

        if (self.no_rosegment) {
            try zig_args.append("--no-rosegment");
        }

        try builder.spawnChild(zig_args.toSliceConst());
    }
};
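These hunks thread the new no_rosegment flag from the build script down to a --no-rosegment argument on the zig command line (per the build.zig change earlier in this diff, a workaround to enable valgrind builds). A sketch of a user build script driving it (assuming the usual Builder API of this era; the -Dno-rosegment option name is illustrative):

const Builder = @import("std").build.Builder;

pub fn build(b: *Builder) void {
    const exe = b.addExecutable("app", "src/main.zig");

    // forward `zig build -Dno-rosegment=true` to the compile step
    const no_rosegment = b.option(bool, "no-rosegment", "Workaround to enable valgrind builds") orelse false;
    exe.setNoRoSegment(no_rosegment);

    b.default_step.dependOn(&exe.step);
}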
@@ -27,7 +27,7 @@ pub fn warn(comptime fmt: []const u8, args: ...) void {
    const stderr = getStderrStream() catch return;
    stderr.print(fmt, args) catch return;
}
fn getStderrStream() !*io.OutStream(io.FileOutStream.Error) {
pub fn getStderrStream() !*io.OutStream(io.FileOutStream.Error) {
    if (stderr_stream) |st| {
        return st;
    } else {
@@ -172,6 +172,16 @@ pub fn writeStackTrace(stack_trace: *const builtin.StackTrace, out_stream: var,
    }
}

pub inline fn getReturnAddress(frame_count: usize) usize {
    var fp = @ptrToInt(@frameAddress());
    var i: usize = 0;
    while (fp != 0 and i < frame_count) {
        fp = @intToPtr(*const usize, fp).*;
        i += 1;
    }
    return @intToPtr(*const usize, fp + @sizeOf(usize)).*;
}
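getReturnAddress walks frame_count saved frame pointers starting from @frameAddress() and then loads the word just above the final frame pointer, which is where the return address lives in the conventional frame layout; a frame_count of 0 therefore yields the address the current function will return to. A usage sketch (the caller is hypothetical):

const std = @import("std");

fn reportCallSite() void {
    // address in the caller that control returns to; useful for
    // lightweight logging without a full stack trace
    const ret_addr = std.debug.getReturnAddress(0);
    std.debug.warn("called from 0x{x}\n", ret_addr);
}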

pub fn writeCurrentStackTrace(out_stream: var, allocator: *mem.Allocator, debug_info: *ElfStackTrace, tty_color: bool, start_addr: ?usize) !void {
    const AddressState = union(enum) {
        NotLookingForStartAddress,
@@ -205,7 +215,7 @@ pub fn writeCurrentStackTrace(out_stream: var, allocator: *mem.Allocator, debug_
    }
}

fn printSourceAtAddress(debug_info: *ElfStackTrace, out_stream: var, address: usize, tty_color: bool) !void {
pub fn printSourceAtAddress(debug_info: *ElfStackTrace, out_stream: var, address: usize, tty_color: bool) !void {
    switch (builtin.os) {
        builtin.Os.windows => return error.UnsupportedDebugInfo,
        builtin.Os.macosx => {
@@ -71,11 +71,6 @@ pub fn Channel(comptime T: type) type {
        /// puts a data item in the channel. The promise completes when the value has been added to the
        /// buffer, or in the case of a zero size buffer, when the item has been retrieved by a getter.
        pub async fn put(self: *SelfChannel, data: T) void {
            // TODO should be able to group memory allocation failure before first suspend point
            // so that the async invocation catches it
            var dispatch_tick_node_ptr: *Loop.NextTickNode = undefined;
            _ = async self.dispatch(&dispatch_tick_node_ptr) catch unreachable;

            suspend |handle| {
                var my_tick_node = Loop.NextTickNode{
                    .next = undefined,
@@ -91,18 +86,13 @@ pub fn Channel(comptime T: type) type {
                self.putters.put(&queue_node);
                _ = @atomicRmw(usize, &self.put_count, AtomicRmwOp.Add, 1, AtomicOrder.SeqCst);

                self.loop.onNextTick(dispatch_tick_node_ptr);
                self.dispatch();
            }
        }

        /// await this function to get an item from the channel. If the buffer is empty, the promise will
        /// complete when the next item is put in the channel.
        pub async fn get(self: *SelfChannel) T {
            // TODO should be able to group memory allocation failure before first suspend point
            // so that the async invocation catches it
            var dispatch_tick_node_ptr: *Loop.NextTickNode = undefined;
            _ = async self.dispatch(&dispatch_tick_node_ptr) catch unreachable;

            // TODO integrate this function with named return values
            // so we can get rid of this extra result copy
            var result: T = undefined;
@@ -121,21 +111,12 @@ pub fn Channel(comptime T: type) type {
                self.getters.put(&queue_node);
                _ = @atomicRmw(usize, &self.get_count, AtomicRmwOp.Add, 1, AtomicOrder.SeqCst);

                self.loop.onNextTick(dispatch_tick_node_ptr);
                self.dispatch();
            }
            return result;
        }

        async fn dispatch(self: *SelfChannel, tick_node_ptr: **Loop.NextTickNode) void {
            // resumed by onNextTick
            suspend |handle| {
                var tick_node = Loop.NextTickNode{
                    .data = handle,
                    .next = undefined,
                };
                tick_node_ptr.* = &tick_node;
            }

        fn dispatch(self: *SelfChannel) void {
            // set the "need dispatch" flag
            _ = @atomicRmw(u8, &self.need_dispatch, AtomicRmwOp.Xchg, 1, AtomicOrder.SeqCst);
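The dispatch refactor above replaces a helper coroutine (woken via onNextTick) with a plain function: putters and getters set a need_dispatch flag with an atomic exchange and call dispatch() directly, and the rest of dispatch (not shown in this hunk) presumably loops while the flag keeps getting re-set. The flag idiom in isolation (a sketch; need_dispatch and the Xchg are per the new Channel code, the surrounding function is illustrative):

const builtin = @import("builtin");
const AtomicRmwOp = builtin.AtomicRmwOp;
const AtomicOrder = builtin.AtomicOrder;

var need_dispatch: u8 = 0;

fn requestDispatch() void {
    // Xchg returns the previous value; a dispatcher that observes the
    // flag already set knows another pass has been requested
    const prev = @atomicRmw(u8, &need_dispatch, AtomicRmwOp.Xchg, 1, AtomicOrder.SeqCst);
    _ = prev;
}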

@@ -55,7 +55,7 @@ pub const Loop = struct {
    /// After initialization, call run().
    /// TODO copy elision / named return values so that the threads referencing *Loop
    /// have the correct pointer value.
    fn initSingleThreaded(self: *Loop, allocator: *mem.Allocator) !void {
    pub fn initSingleThreaded(self: *Loop, allocator: *mem.Allocator) !void {
        return self.initInternal(allocator, 1);
    }

@@ -64,7 +64,7 @@ pub const Loop = struct {
    /// After initialization, call run().
    /// TODO copy elision / named return values so that the threads referencing *Loop
    /// have the correct pointer value.
    fn initMultiThreaded(self: *Loop, allocator: *mem.Allocator) !void {
    pub fn initMultiThreaded(self: *Loop, allocator: *mem.Allocator) !void {
        const core_count = try std.os.cpuCount(allocator);
        return self.initInternal(allocator, core_count);
    }
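With initSingleThreaded and initMultiThreaded now pub, user code can construct an event loop directly. A minimal sketch (assuming Loop is reached as std.event.Loop and initialized in place, as the doc comments above describe):

const std = @import("std");

fn startLoop(allocator: *std.mem.Allocator) !void {
    var loop: std.event.Loop = undefined;
    try loop.initSingleThreaded(allocator);
    loop.run();
}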

@@ -18,6 +18,7 @@ pub fn format(context: var, comptime Errors: type, output: fn (@typeOf(context),
        OpenBrace,
        CloseBrace,
        FormatString,
        Pointer,
    };

    comptime var start_index = 0;
@@ -54,6 +55,7 @@ pub fn format(context: var, comptime Errors: type, output: fn (@typeOf(context),
                    state = State.Start;
                    start_index = i + 1;
                },
                '*' => state = State.Pointer,
                else => {
                    state = State.FormatString;
                },
@@ -75,6 +77,17 @@ pub fn format(context: var, comptime Errors: type, output: fn (@typeOf(context),
                },
                else => {},
            },
            State.Pointer => switch (c) {
                '}' => {
                    try output(context, @typeName(@typeOf(args[next_arg]).Child));
                    try output(context, "@");
                    try formatInt(@ptrToInt(args[next_arg]), 16, false, 0, context, Errors, output);
                    next_arg += 1;
                    state = State.Start;
                    start_index = i + 1;
                },
                else => @compileError("Unexpected format character after '*'"),
            },
        }
    }
    comptime {
@@ -861,6 +874,27 @@ test "fmt.format" {
        const value: u8 = 'a';
        try testFmt("u8: a\n", "u8: {c}\n", value);
    }
    {
        const value: [3]u8 = "abc";
        try testFmt("array: abc\n", "array: {}\n", value);
        try testFmt("array: abc\n", "array: {}\n", &value);

        var buf: [100]u8 = undefined;
        try testFmt(
            try bufPrint(buf[0..], "array: [3]u8@{x}\n", @ptrToInt(&value)),
            "array: {*}\n",
            &value,
        );
    }
    {
        const value: []const u8 = "abc";
        try testFmt("slice: abc\n", "slice: {}\n", value);
    }
    {
        const value = @intToPtr(*i32, 0xdeadbeef);
        try testFmt("pointer: i32@deadbeef\n", "pointer: {}\n", value);
        try testFmt("pointer: i32@deadbeef\n", "pointer: {*}\n", value);
    }
    try testFmt("buf: Test \n", "buf: {s5}\n", "Test");
    try testFmt("buf: Test\n Other text", "buf: {s}\n Other text", "Test");
    try testFmt("cstr: Test C\n", "cstr: {s}\n", c"Test C");

165
std/io.zig
@@ -200,6 +200,13 @@ pub fn InStream(comptime ReadError: type) type {
            try self.readNoEof(input_slice);
            return mem.readInt(input_slice, T, endian);
        }

        pub fn skipBytes(self: *Self, num_bytes: usize) !void {
            var i: usize = 0;
            while (i < num_bytes) : (i += 1) {
                _ = try self.readByte();
            }
        }
    };
}
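skipBytes discards num_bytes from the stream one readByte() at a time, so it fails with the stream's usual read error if the data runs out early. A usage sketch against the SliceInStream added later in this file (the test name and data are illustrative):

const std = @import("std");
const io = std.io;

test "skipBytes past a two-byte header" {
    const bytes = []const u8{ 1, 2, 3, 4, 5 };
    var ss = io.SliceInStream.init(bytes);

    // discard the first two bytes, then read the third
    try ss.stream.skipBytes(2);
    std.debug.assert((try ss.stream.readByte()) == 3);
}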

@@ -230,6 +237,20 @@ pub fn OutStream(comptime WriteError: type) type {
                try self.writeFn(self, slice);
            }
        }

        pub fn writeIntLe(self: *Self, comptime T: type, value: T) !void {
            return self.writeInt(builtin.Endian.Little, T, value);
        }

        pub fn writeIntBe(self: *Self, comptime T: type, value: T) !void {
            return self.writeInt(builtin.Endian.Big, T, value);
        }

        pub fn writeInt(self: *Self, endian: builtin.Endian, comptime T: type, value: T) !void {
            var bytes: [@sizeOf(T)]u8 = undefined;
            mem.writeInt(bytes[0..], value, endian);
            return self.writeFn(self, bytes);
        }
    };
}
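The three new writers serialize an integer into a stack buffer with mem.writeInt and hand it to writeFn, so any OutStream gains fixed-endian integer output. A usage sketch with the SliceOutStream added later in this file (test name and values are illustrative):

const std = @import("std");
const io = std.io;

test "writeIntLe emits little-endian bytes" {
    var buf: [2]u8 = undefined;
    var sos = io.SliceOutStream.init(buf[0..]);

    // 0x1234 in little-endian order is { 0x34, 0x12 }
    try sos.stream.writeIntLe(u16, 0x1234);
    std.debug.assert(buf[0] == 0x34 and buf[1] == 0x12);
}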

@@ -331,6 +352,150 @@ pub fn BufferedInStreamCustom(comptime buffer_size: usize, comptime Error: type)
    };
}

/// Creates a stream which supports 'un-reading' data, so that it can be read again.
/// This makes look-ahead style parsing much easier.
pub fn PeekStream(comptime buffer_size: usize, comptime InStreamError: type) type {
    return struct {
        const Self = this;
        pub const Error = InStreamError;
        pub const Stream = InStream(Error);

        pub stream: Stream,
        base: *Stream,

        // Right now the look-ahead space is statically allocated, but a version with dynamic allocation
        // is not too difficult to derive from this.
        buffer: [buffer_size]u8,
        index: usize,
        at_end: bool,

        pub fn init(base: *Stream) Self {
            return Self{
                .base = base,
                .buffer = undefined,
                .index = 0,
                .at_end = false,
                .stream = Stream{ .readFn = readFn },
            };
        }

        pub fn putBackByte(self: *Self, byte: u8) void {
            self.buffer[self.index] = byte;
            self.index += 1;
        }

        pub fn putBack(self: *Self, bytes: []const u8) void {
            var pos = bytes.len;
            while (pos != 0) {
                pos -= 1;
                self.putBackByte(bytes[pos]);
            }
        }

        fn readFn(in_stream: *Stream, dest: []u8) Error!usize {
            const self = @fieldParentPtr(Self, "stream", in_stream);

            // copy over anything putBack()'d
            var pos: usize = 0;
            while (pos < dest.len and self.index != 0) {
                dest[pos] = self.buffer[self.index - 1];
                self.index -= 1;
                pos += 1;
            }

            if (pos == dest.len or self.at_end) {
                return pos;
            }

            // ask the backing stream for more
            const left = dest.len - pos;
            const read = try self.base.read(dest[pos..]);
            assert(read <= left);

            self.at_end = (read < left);
            return pos + read;
        }
    };
}

pub const SliceInStream = struct {
    const Self = this;
    pub const Error = error { };
    pub const Stream = InStream(Error);

    pub stream: Stream,

    pos: usize,
    slice: []const u8,

    pub fn init(slice: []const u8) Self {
        return Self{
            .slice = slice,
            .pos = 0,
            .stream = Stream{ .readFn = readFn },
        };
    }

    fn readFn(in_stream: *Stream, dest: []u8) Error!usize {
        const self = @fieldParentPtr(Self, "stream", in_stream);
        const size = math.min(dest.len, self.slice.len - self.pos);
        const end = self.pos + size;

        mem.copy(u8, dest[0..size], self.slice[self.pos..end]);
        self.pos = end;

        return size;
    }
};

/// This is a simple OutStream that writes to a slice, and returns an error
/// when it runs out of space.
pub const SliceOutStream = struct {
    pub const Error = error{OutOfSpace};
    pub const Stream = OutStream(Error);

    pub stream: Stream,

    pos: usize,
    slice: []u8,

    pub fn init(slice: []u8) SliceOutStream {
        return SliceOutStream{
            .slice = slice,
            .pos = 0,
            .stream = Stream{ .writeFn = writeFn },
        };
    }

    pub fn getWritten(self: *const SliceOutStream) []const u8 {
        return self.slice[0..self.pos];
    }

    pub fn reset(self: *SliceOutStream) void {
        self.pos = 0;
    }

    fn writeFn(out_stream: *Stream, bytes: []const u8) Error!void {
        const self = @fieldParentPtr(SliceOutStream, "stream", out_stream);

        assert(self.pos <= self.slice.len);

        const n =
            if (self.pos + bytes.len <= self.slice.len)
                bytes.len
            else
                self.slice.len - self.pos;

        std.mem.copy(u8, self.slice[self.pos..self.pos + n], bytes[0..n]);
        self.pos += n;

        if (n < bytes.len) {
            return Error.OutOfSpace;
        }
    }
};

pub fn BufferedOutStream(comptime Error: type) type {
    return BufferedOutStreamCustom(os.page_size, Error);
}
@@ -2,6 +2,7 @@ const std = @import("index.zig");
const io = std.io;
const DefaultPrng = std.rand.DefaultPrng;
const assert = std.debug.assert;
const assertError = std.debug.assertError;
const mem = std.mem;
const os = std.os;
const builtin = @import("builtin");
@@ -60,3 +61,74 @@ test "BufferOutStream" {

    assert(mem.eql(u8, buffer.toSlice(), "x: 42\ny: 1234\n"));
}

test "SliceInStream" {
    const bytes = []const u8 { 1, 2, 3, 4, 5, 6, 7 };
    var ss = io.SliceInStream.init(bytes);

    var dest: [4]u8 = undefined;

    var read = try ss.stream.read(dest[0..4]);
    assert(read == 4);
    assert(mem.eql(u8, dest[0..4], bytes[0..4]));

    read = try ss.stream.read(dest[0..4]);
    assert(read == 3);
    assert(mem.eql(u8, dest[0..3], bytes[4..7]));

    read = try ss.stream.read(dest[0..4]);
    assert(read == 0);
}

test "PeekStream" {
    const bytes = []const u8 { 1, 2, 3, 4, 5, 6, 7, 8 };
    var ss = io.SliceInStream.init(bytes);
    var ps = io.PeekStream(2, io.SliceInStream.Error).init(&ss.stream);

    var dest: [4]u8 = undefined;

    ps.putBackByte(9);
    ps.putBackByte(10);

    var read = try ps.stream.read(dest[0..4]);
    assert(read == 4);
    assert(dest[0] == 10);
    assert(dest[1] == 9);
    assert(mem.eql(u8, dest[2..4], bytes[0..2]));

    read = try ps.stream.read(dest[0..4]);
    assert(read == 4);
    assert(mem.eql(u8, dest[0..4], bytes[2..6]));

    read = try ps.stream.read(dest[0..4]);
    assert(read == 2);
    assert(mem.eql(u8, dest[0..2], bytes[6..8]));

    ps.putBackByte(11);
    ps.putBackByte(12);

    read = try ps.stream.read(dest[0..4]);
    assert(read == 2);
    assert(dest[0] == 12);
    assert(dest[1] == 11);
}

test "SliceOutStream" {
    var buffer: [10]u8 = undefined;
    var ss = io.SliceOutStream.init(buffer[0..]);

    try ss.stream.write("Hello");
    assert(mem.eql(u8, ss.getWritten(), "Hello"));

    try ss.stream.write("world");
    assert(mem.eql(u8, ss.getWritten(), "Helloworld"));

    assertError(ss.stream.write("!"), error.OutOfSpace);
    assert(mem.eql(u8, ss.getWritten(), "Helloworld"));

    ss.reset();
    assert(ss.getWritten().len == 0);

    assertError(ss.stream.write("Hello world!"), error.OutOfSpace);
    assert(mem.eql(u8, ss.getWritten(), "Hello worl"));
}
@@ -16,6 +16,7 @@ comptime {
    _ = @import("cases/bugs/828.zig");
    _ = @import("cases/bugs/920.zig");
    _ = @import("cases/byval_arg_var.zig");
    _ = @import("cases/cancel.zig");
    _ = @import("cases/cast.zig");
    _ = @import("cases/const_slice_child.zig");
    _ = @import("cases/coroutine_await_struct.zig");

92
test/cases/cancel.zig
Normal file
@@ -0,0 +1,92 @@
const std = @import("std");

var defer_f1: bool = false;
var defer_f2: bool = false;
var defer_f3: bool = false;

test "cancel forwards" {
    var da = std.heap.DirectAllocator.init();
    defer da.deinit();

    const p = async<&da.allocator> f1() catch unreachable;
    cancel p;
    std.debug.assert(defer_f1);
    std.debug.assert(defer_f2);
    std.debug.assert(defer_f3);
}

async fn f1() void {
    defer {
        defer_f1 = true;
    }
    await (async f2() catch unreachable);
}

async fn f2() void {
    defer {
        defer_f2 = true;
    }
    await (async f3() catch unreachable);
}

async fn f3() void {
    defer {
        defer_f3 = true;
    }
    suspend;
}

var defer_b1: bool = false;
var defer_b2: bool = false;
var defer_b3: bool = false;
var defer_b4: bool = false;

test "cancel backwards" {
    var da = std.heap.DirectAllocator.init();
    defer da.deinit();

    const p = async<&da.allocator> b1() catch unreachable;
    cancel p;
    std.debug.assert(defer_b1);
    std.debug.assert(defer_b2);
    std.debug.assert(defer_b3);
    std.debug.assert(defer_b4);
}

async fn b1() void {
    defer {
        defer_b1 = true;
    }
    await (async b2() catch unreachable);
}

var b4_handle: promise = undefined;

async fn b2() void {
    const b3_handle = async b3() catch unreachable;
    resume b4_handle;
    cancel b4_handle;
    defer {
        defer_b2 = true;
    }
    const value = await b3_handle;
    @panic("unreachable");
}

async fn b3() i32 {
    defer {
        defer_b3 = true;
    }
    await (async b4() catch unreachable);
    return 1234;
}

async fn b4() void {
    defer {
        defer_b4 = true;
    }
    suspend |p| {
        b4_handle = p;
    }
    suspend;
}
@@ -244,8 +244,8 @@ test "break from suspend" {
    std.debug.assert(my_result == 2);
}
async fn testBreakFromSuspend(my_result: *i32) void {
    s: suspend |p| {
        break :s;
    suspend |p| {
        resume p;
    }
    my_result.* += 1;
    suspend;
@@ -1,6 +1,64 @@
const tests = @import("tests.zig");

pub fn addCases(cases: *tests.CompileErrorContext) void {
    cases.add(
        "while loop body expression ignored",
        \\fn returns() usize {
        \\    return 2;
        \\}
        \\export fn f1() void {
        \\    while (true) returns();
        \\}
        \\export fn f2() void {
        \\    var x: ?i32 = null;
        \\    while (x) |_| returns();
        \\}
        \\export fn f3() void {
        \\    var x: error!i32 = error.Bad;
        \\    while (x) |_| returns() else |_| unreachable;
        \\}
    ,
        ".tmp_source.zig:5:25: error: expression value is ignored",
        ".tmp_source.zig:9:26: error: expression value is ignored",
        ".tmp_source.zig:13:26: error: expression value is ignored",
    );

    cases.add(
        "missing parameter name of generic function",
        \\fn dump(var) void {}
        \\export fn entry() void {
        \\    var a: u8 = 9;
        \\    dump(a);
        \\}
    ,
        ".tmp_source.zig:1:9: error: missing parameter name",
    );

    cases.add(
        "non-inline for loop on a type that requires comptime",
        \\const Foo = struct {
        \\    name: []const u8,
        \\    T: type,
        \\};
        \\export fn entry() void {
        \\    const xx: [2]Foo = undefined;
        \\    for (xx) |f| {}
        \\}
    ,
        ".tmp_source.zig:7:15: error: variable of type 'Foo' must be const or comptime",
    );

    cases.add(
        "generic fn as parameter without comptime keyword",
        \\fn f(_: fn (var) void) void {}
        \\fn g(_: var) void {}
        \\export fn entry() void {
        \\    f(g);
        \\}
    ,
        ".tmp_source.zig:1:9: error: parameter of type 'fn(var)var' must be declared comptime",
    );

    cases.add(
        "optional pointer to void in extern struct",
        \\comptime {
@@ -2,6 +2,7 @@ const std = @import("std");
const TestContext = @import("../../src-self-hosted/test.zig").TestContext;

pub fn addCases(ctx: *TestContext) !void {
    // hello world
    try ctx.testCompareOutputLibC(
        \\extern fn puts([*]const u8) void;
        \\export fn main() c_int {
@@ -9,4 +10,16 @@ pub fn addCases(ctx: *TestContext) !void {
        \\    return 0;
        \\}
    , "Hello, world!" ++ std.cstr.line_sep);

    // function calling another function
    try ctx.testCompareOutputLibC(
        \\extern fn puts(s: [*]const u8) void;
        \\export fn main() c_int {
        \\    return foo(c"OK");
        \\}
        \\fn foo(s: [*]const u8) c_int {
        \\    puts(s);
        \\    return 0;
        \\}
    , "OK" ++ std.cstr.line_sep);
}