Sema: better source location for function call args

Veikka Tuominen 2022-07-20 00:02:20 +03:00 committed by Andrew Kelley
parent 79ef0cdf30
commit 821e4063f9
18 changed files with 301 additions and 153 deletions
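The pattern this change introduces throughout Sema: argument-analysis helpers are first invoked with a cheap `.unneeded` source location, and only when an error actually must be reported (signalled by `error.NeededSourceLocation`) is the precise per-argument location computed with the new `Module.argSrc` and the helper re-run, so the error points at the offending argument rather than at the call as a whole. A minimal sketch of that retry pattern, using names taken from this diff (simplified, not the literal committed code):

args[i] = sema.analyzeCallArg(block, .unneeded, param_ty, uncasted_arg) catch |err| switch (err) {
    error.NeededSourceLocation => {
        // Only now pay the cost of loading the AST and locating argument `i`.
        const decl = sema.mod.declPtr(block.src_decl);
        const arg_src = Module.argSrc(call_src.node_offset.x, sema.gpa, decl, i);
        // Re-run the analysis with the precise location; it fails again,
        // this time reporting the error at the argument's own node.
        _ = try sema.analyzeCallArg(block, arg_src, param_ty, uncasted_arg);
        return error.AnalysisFail;
    },
    else => |e| return e,
};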


@@ -5911,6 +5911,31 @@ pub fn paramSrc(
}
}
pub fn argSrc(
call_node_offset: i32,
gpa: Allocator,
decl: *Decl,
arg_i: usize,
) LazySrcLoc {
@setCold(true);
const tree = decl.getFileScope().getTree(gpa) catch |err| {
// In this case we emit a warning + a less precise source location.
log.warn("unable to load {s}: {s}", .{
decl.getFileScope().sub_file_path, @errorName(err),
});
return LazySrcLoc.nodeOffset(0);
};
const node_tags = tree.nodes.items(.tag);
const node = decl.relativeToNodeIndex(call_node_offset);
var args: [1]Ast.Node.Index = undefined;
const full = switch (node_tags[node]) {
.call_one, .call_one_comma, .async_call_one, .async_call_one_comma => tree.callOne(&args, node),
.call, .call_comma, .async_call, .async_call_comma => tree.callFull(node),
else => unreachable,
};
return LazySrcLoc.nodeOffset(decl.nodeIndexToRelative(full.ast.params[arg_i]));
}
/// Called from `performAllTheWork`, after all AstGen workers have finished,
/// and before the main semantic analysis loop begins.
pub fn processOutdatedAndDeletedDecls(mod: *Module) !void {


@@ -5613,84 +5613,39 @@ fn analyzeCall(
// which means its parameter type expressions must be resolved in order and used
// to successively coerce the arguments.
const fn_info = sema.code.getFnInfo(module_fn.zir_body_inst);
const zir_tags = sema.code.instructions.items(.tag);
var arg_i: usize = 0;
for (fn_info.param_body) |inst| switch (zir_tags[inst]) {
.param, .param_comptime => {
// Evaluate the parameter type expression now that previous ones have
// been mapped, and coerce the corresponding argument to it.
const pl_tok = sema.code.instructions.items(.data)[inst].pl_tok;
const param_src = pl_tok.src();
const extra = sema.code.extraData(Zir.Inst.Param, pl_tok.payload_index);
const param_body = sema.code.extra[extra.end..][0..extra.data.body_len];
const param_ty_inst = try sema.resolveBody(&child_block, param_body, inst);
const param_ty = try sema.analyzeAsType(&child_block, param_src, param_ty_inst);
new_fn_info.param_types[arg_i] = param_ty;
const arg_src = call_src; // TODO: better source location
const casted_arg = try sema.coerce(&child_block, param_ty, uncasted_args[arg_i], arg_src);
try sema.inst_map.putNoClobber(gpa, inst, casted_arg);
if (is_comptime_call) {
// TODO explain why function is being called at comptime
const arg_val = try sema.resolveConstMaybeUndefVal(&child_block, arg_src, casted_arg, "argument to function being called at comptime must be comptime known");
switch (arg_val.tag()) {
.generic_poison, .generic_poison_type => {
// This function is currently evaluated as part of an as-of-yet unresolvable
// parameter or return type.
return error.GenericPoison;
},
else => {
// Needed so that lazy values do not trigger
// assertion due to type not being resolved
// when the hash function is called.
try sema.resolveLazyValue(&child_block, arg_src, arg_val);
},
}
should_memoize = should_memoize and !arg_val.canMutateComptimeVarState();
memoized_call_key.args[arg_i] = .{
.ty = param_ty,
.val = arg_val,
};
}
arg_i += 1;
continue;
},
.param_anytype, .param_anytype_comptime => {
// No coercion needed.
const uncasted_arg = uncasted_args[arg_i];
new_fn_info.param_types[arg_i] = sema.typeOf(uncasted_arg);
try sema.inst_map.putNoClobber(gpa, inst, uncasted_arg);
if (is_comptime_call) {
const arg_src = call_src; // TODO: better source location
// TODO explain why function is being called at comptime
const arg_val = try sema.resolveConstMaybeUndefVal(&child_block, arg_src, uncasted_arg, "argument to function being called at comptime must be comptime known");
switch (arg_val.tag()) {
.generic_poison, .generic_poison_type => {
// This function is currently evaluated as part of an as-of-yet unresolvable
// parameter or return type.
return error.GenericPoison;
},
else => {
// Needed so that lazy values do not trigger
// assertion due to type not being resolved
// when the hash function is called.
try sema.resolveLazyValue(&child_block, arg_src, arg_val);
},
}
should_memoize = should_memoize and !arg_val.canMutateComptimeVarState();
memoized_call_key.args[arg_i] = .{
.ty = sema.typeOf(uncasted_arg),
.val = arg_val,
};
}
arg_i += 1;
continue;
},
else => continue,
};
for (fn_info.param_body) |inst| {
sema.analyzeInlineCallArg(
&child_block,
.unneeded,
inst,
new_fn_info,
&arg_i,
uncasted_args,
is_comptime_call,
&should_memoize,
memoized_call_key,
) catch |err| switch (err) {
error.NeededSourceLocation => {
const decl = sema.mod.declPtr(block.src_decl);
try sema.analyzeInlineCallArg(
// Intentionally use the wrong block here since we know it's
// going to fail and `argSrc` is relative to `block.src_decl`.
block,
Module.argSrc(call_src.node_offset.x, sema.gpa, decl, arg_i),
inst,
new_fn_info,
&arg_i,
uncasted_args,
is_comptime_call,
&should_memoize,
memoized_call_key,
);
return error.AnalysisFail;
},
else => |e| return e,
};
}
// In case it is a generic function with an expression for the return type that depends
// on parameters, we must now do the same for the return type as we just did with
@@ -5746,6 +5701,7 @@ fn analyzeCall(
if (!is_comptime_call) {
try sema.emitDbgInline(block, parent_func.?, module_fn, new_func_resolved_ty, .dbg_inline_begin);
const zir_tags = sema.code.instructions.items(.tag);
for (fn_info.param_body) |param| switch (zir_tags[param]) {
.param, .param_comptime => {
const inst_data = sema.code.instructions.items(.data)[param].pl_tok;
@@ -5826,11 +5782,26 @@ fn analyzeCall(
const args = try sema.arena.alloc(Air.Inst.Ref, uncasted_args.len);
for (uncasted_args) |uncasted_arg, i| {
const arg_src = call_src; // TODO: better source location
if (i < fn_params_len) {
const param_ty = func_ty.fnParamType(i);
try sema.resolveTypeFully(block, arg_src, param_ty);
args[i] = try sema.coerce(block, param_ty, uncasted_arg, arg_src);
args[i] = sema.analyzeCallArg(
block,
.unneeded,
param_ty,
uncasted_arg,
) catch |err| switch (err) {
error.NeededSourceLocation => {
const decl = sema.mod.declPtr(block.src_decl);
_ = try sema.analyzeCallArg(
block,
Module.argSrc(call_src.node_offset.x, sema.gpa, decl, i),
param_ty,
uncasted_arg,
);
return error.AnalysisFail;
},
else => |e| return e,
};
} else {
args[i] = uncasted_arg;
}
@@ -5862,6 +5833,136 @@ fn analyzeCall(
return result;
}
fn analyzeInlineCallArg(
sema: *Sema,
block: *Block,
arg_src: LazySrcLoc,
inst: Zir.Inst.Index,
new_fn_info: Type.Payload.Function.Data,
arg_i: *usize,
uncasted_args: []const Air.Inst.Ref,
is_comptime_call: bool,
should_memoize: *bool,
memoized_call_key: Module.MemoizedCall.Key,
) !void {
const zir_tags = sema.code.instructions.items(.tag);
switch (zir_tags[inst]) {
.param, .param_comptime => {
// Evaluate the parameter type expression now that previous ones have
// been mapped, and coerce the corresponding argument to it.
const pl_tok = sema.code.instructions.items(.data)[inst].pl_tok;
const param_src = pl_tok.src();
const extra = sema.code.extraData(Zir.Inst.Param, pl_tok.payload_index);
const param_body = sema.code.extra[extra.end..][0..extra.data.body_len];
const param_ty_inst = try sema.resolveBody(block, param_body, inst);
const param_ty = try sema.analyzeAsType(block, param_src, param_ty_inst);
new_fn_info.param_types[arg_i.*] = param_ty;
const uncasted_arg = uncasted_args[arg_i.*];
if (try sema.typeRequiresComptime(block, arg_src, param_ty)) {
_ = try sema.resolveConstMaybeUndefVal(block, arg_src, uncasted_arg, "argument to parameter with comptime only type must be comptime known");
}
const casted_arg = try sema.coerce(block, param_ty, uncasted_arg, arg_src);
try sema.inst_map.putNoClobber(sema.gpa, inst, casted_arg);
if (is_comptime_call) {
// TODO explain why function is being called at comptime
const arg_val = try sema.resolveConstMaybeUndefVal(block, arg_src, casted_arg, "argument to function being called at comptime must be comptime known");
switch (arg_val.tag()) {
.generic_poison, .generic_poison_type => {
// This function is currently evaluated as part of an as-of-yet unresolvable
// parameter or return type.
return error.GenericPoison;
},
else => {
// Needed so that lazy values do not trigger
// assertion due to type not being resolved
// when the hash function is called.
try sema.resolveLazyValue(block, arg_src, arg_val);
},
}
should_memoize.* = should_memoize.* and !arg_val.canMutateComptimeVarState();
memoized_call_key.args[arg_i.*] = .{
.ty = param_ty,
.val = arg_val,
};
}
arg_i.* += 1;
},
.param_anytype, .param_anytype_comptime => {
// No coercion needed.
const uncasted_arg = uncasted_args[arg_i.*];
new_fn_info.param_types[arg_i.*] = sema.typeOf(uncasted_arg);
try sema.inst_map.putNoClobber(sema.gpa, inst, uncasted_arg);
if (is_comptime_call) {
// TODO explain why function is being called at comptime
const arg_val = try sema.resolveConstMaybeUndefVal(block, arg_src, uncasted_arg, "argument to function being called at comptime must be comptime known");
switch (arg_val.tag()) {
.generic_poison, .generic_poison_type => {
// This function is currently evaluated as part of an as-of-yet unresolvable
// parameter or return type.
return error.GenericPoison;
},
else => {
// Needed so that lazy values do not trigger
// assertion due to type not being resolved
// when the hash function is called.
try sema.resolveLazyValue(block, arg_src, arg_val);
},
}
should_memoize.* = should_memoize.* and !arg_val.canMutateComptimeVarState();
memoized_call_key.args[arg_i.*] = .{
.ty = sema.typeOf(uncasted_arg),
.val = arg_val,
};
}
arg_i.* += 1;
},
else => {},
}
}
fn analyzeCallArg(
sema: *Sema,
block: *Block,
arg_src: LazySrcLoc,
param_ty: Type,
uncasted_arg: Air.Inst.Ref,
) !Air.Inst.Ref {
try sema.resolveTypeFully(block, arg_src, param_ty);
return sema.coerce(block, param_ty, uncasted_arg, arg_src);
}
fn analyzeGenericCallArg(
sema: *Sema,
block: *Block,
arg_src: LazySrcLoc,
uncasted_arg: Air.Inst.Ref,
comptime_arg: TypedValue,
runtime_args: []Air.Inst.Ref,
new_fn_info: Type.Payload.Function.Data,
runtime_i: *u32,
) !void {
const is_runtime = comptime_arg.val.tag() == .generic_poison and
comptime_arg.ty.hasRuntimeBits() and
!(try sema.typeRequiresComptime(block, arg_src, comptime_arg.ty));
if (is_runtime) {
const param_ty = new_fn_info.param_types[runtime_i.*];
const casted_arg = try sema.coerce(block, param_ty, uncasted_arg, arg_src);
try sema.queueFullTypeResolution(param_ty);
runtime_args[runtime_i.*] = casted_arg;
runtime_i.* += 1;
}
}
fn analyzeGenericCallArgVal(sema: *Sema, block: *Block, arg_src: LazySrcLoc, uncasted_arg: Air.Inst.Ref) !Value {
const arg_val = try sema.resolveValue(block, arg_src, uncasted_arg, "parameter is comptime");
try sema.resolveLazyValue(block, arg_src, arg_val);
return arg_val;
}
fn instantiateGenericCall(
sema: *Sema,
block: *Block,
@@ -5927,10 +6028,16 @@ fn instantiateGenericCall(
}
if (is_comptime) {
const arg_src = call_src; // TODO better source location
const arg_ty = sema.typeOf(uncasted_args[i]);
const arg_val = try sema.resolveValue(block, arg_src, uncasted_args[i], "parameter is comptime");
try sema.resolveLazyValue(block, arg_src, arg_val);
const arg_val = sema.analyzeGenericCallArgVal(block, .unneeded, uncasted_args[i]) catch |err| switch (err) {
error.NeededSourceLocation => {
const decl = sema.mod.declPtr(block.src_decl);
const arg_src = Module.argSrc(call_src.node_offset.x, sema.gpa, decl, i);
_ = try sema.analyzeGenericCallArgVal(block, arg_src, uncasted_args[i]);
return error.AnalysisFail;
},
else => |e| return e,
};
arg_val.hash(arg_ty, &hasher, mod);
if (is_anytype) {
arg_ty.hashWithHasher(&hasher, mod);
@@ -6086,19 +6193,18 @@ fn instantiateGenericCall(
},
else => continue,
}
const arg_src = call_src; // TODO: better source location
const arg = uncasted_args[arg_i];
if (is_comptime) {
if (try sema.resolveMaybeUndefVal(block, arg_src, arg)) |arg_val| {
if (try sema.resolveMaybeUndefVal(block, .unneeded, arg)) |arg_val| {
const child_arg = try child_sema.addConstant(sema.typeOf(arg), arg_val);
child_sema.inst_map.putAssumeCapacityNoClobber(inst, child_arg);
} else {
return sema.failWithNeededComptime(block, arg_src, "parameter is comptime");
return sema.failWithNeededComptime(block, .unneeded, undefined);
}
} else if (is_anytype) {
const arg_ty = sema.typeOf(arg);
if (try sema.typeRequiresComptime(block, arg_src, arg_ty)) {
const arg_val = try sema.resolveConstValue(block, arg_src, arg, "type of anytype parameter requires comptime");
if (try sema.typeRequiresComptime(block, .unneeded, arg_ty)) {
const arg_val = try sema.resolveConstValue(block, .unneeded, arg, undefined);
const child_arg = try child_sema.addConstant(arg_ty, arg_val);
child_sema.inst_map.putAssumeCapacityNoClobber(inst, child_arg);
} else {
@@ -6156,8 +6262,7 @@ fn instantiateGenericCall(
const copied_arg_ty = try child_sema.typeOf(arg).copy(new_decl_arena_allocator);
anytype_args[arg_i] = is_anytype;
const arg_src = call_src; // TODO: better source location
if (try sema.typeRequiresComptime(block, arg_src, copied_arg_ty)) {
if (try sema.typeRequiresComptime(block, .unneeded, copied_arg_ty)) {
is_comptime = true;
}
@@ -6236,18 +6341,30 @@ fn instantiateGenericCall(
.param_comptime, .param_anytype_comptime, .param, .param_anytype => {},
else => continue,
}
const arg_src = call_src; // TODO: better source location
const is_runtime = comptime_args[total_i].val.tag() == .generic_poison and
comptime_args[total_i].ty.hasRuntimeBits() and
!(try sema.typeRequiresComptime(block, arg_src, comptime_args[total_i].ty));
if (is_runtime) {
const param_ty = new_fn_info.param_types[runtime_i];
const uncasted_arg = uncasted_args[total_i];
const casted_arg = try sema.coerce(block, param_ty, uncasted_arg, arg_src);
try sema.queueFullTypeResolution(param_ty);
runtime_args[runtime_i] = casted_arg;
runtime_i += 1;
}
sema.analyzeGenericCallArg(
block,
.unneeded,
uncasted_args[total_i],
comptime_args[total_i],
runtime_args,
new_fn_info,
&runtime_i,
) catch |err| switch (err) {
error.NeededSourceLocation => {
const decl = sema.mod.declPtr(block.src_decl);
_ = try sema.analyzeGenericCallArg(
block,
Module.argSrc(call_src.node_offset.x, sema.gpa, decl, total_i),
uncasted_args[total_i],
comptime_args[total_i],
runtime_args,
new_fn_info,
&runtime_i,
);
return error.AnalysisFail;
},
else => |e| return e,
};
total_i += 1;
}


@@ -7,4 +7,4 @@ pub extern fn foo(format: *const u8, ...) void;
// backend=stage2
// target=native
//
// :2:8: error: expected type '*const u8', found '[5:0]u8'
// :2:16: error: expected type '*const u8', found '[5:0]u8'


@@ -18,6 +18,6 @@ export fn entry() usize { return @sizeOf(@TypeOf(&foo)); }
// backend=stage2
// target=native
//
// :8:15: error: expected type '*const u3', found '*align(0:3:1) const u3'
// :8:15: note: pointer host size '1' cannot cast into pointer host size '0'
// :8:15: note: pointer bit offset '3' cannot cast into pointer bit offset '0'
// :8:16: error: expected type '*const u3', found '*align(0:3:1) const u3'
// :8:16: note: pointer host size '1' cannot cast into pointer host size '0'
// :8:16: note: pointer bit offset '3' cannot cast into pointer bit offset '0'


@@ -1,12 +1,13 @@
extern fn puts(s: [*:0]const u8) c_int;
pub fn main() void {
pub export fn entry() void {
const no_zero_array = [_]u8{'h', 'e', 'l', 'l', 'o'};
const no_zero_ptr: [*]const u8 = &no_zero_array;
_ = puts(no_zero_ptr);
}
// error
// backend=stage1
// backend=stage2
// target=native
//
// tmp.zig:5:14: error: expected type '[*:0]const u8', found '[*]const u8'
// :5:14: error: expected type '[*:0]const u8', found '[*]const u8'
// :5:14: note: destination pointer requires '0' sentinel


@@ -8,6 +8,6 @@ export fn entry() void {
// backend=stage2
// target=native
//
// :4:17: error: expected type '*const fn(i32) void', found '*const fn(bool) void'
// :4:17: note: pointer type child 'fn(bool) void' cannot cast into pointer type child 'fn(i32) void'
// :4:17: note: parameter 0 'bool' cannot cast into 'i32'
// :4:18: error: expected type '*const fn(i32) void', found '*const fn(bool) void'
// :4:18: note: pointer type child 'fn(bool) void' cannot cast into pointer type child 'fn(i32) void'
// :4:18: note: parameter 0 'bool' cannot cast into 'i32'


@@ -27,5 +27,4 @@ export fn entry4() void {
// :1:17: note: opaque declared here
// :8:28: error: parameter of type '@TypeOf(null)' not allowed
// :12:8: error: parameter of opaque type 'tmp.FooType' not allowed
// :1:17: note: opaque declared here
// :17:8: error: parameter of type '@TypeOf(null)' not allowed


@@ -0,0 +1,13 @@
fn foo(comptime x: i32, y: i32) i32 { return x + y; }
fn test1(a: i32, b: i32) i32 {
return foo(a, b);
}
export fn entry() usize { return @sizeOf(@TypeOf(&test1)); }
// error
// backend=stage2
// target=native
//
// :3:16: error: unable to resolve comptime value
// :3:16: note: parameter is comptime


@@ -5,7 +5,7 @@ export fn entry() void {
}
// error
// backend=stage1
// backend=stage2
// target=native
//
// tmp.zig:4:9: error: expected type '[]i32', found '[10]i32'
// :4:9: error: array literal requires address-of operator (&) to coerce to slice type '[]i32'


@@ -13,7 +13,8 @@ fn bar(x: *u32) void {
}
// error
// backend=stage1
// backend=stage2
// target=native
//
// tmp.zig:8:13: error: expected type '*u32', found '*align(1) u32'
// :8:9: error: expected type '*u32', found '*align(1) u32'
// :8:9: note: pointer alignment '1' cannot cast into pointer alignment '4'


@@ -8,10 +8,11 @@ fn ptrEql(a: *[]const u8, b: *[]const u8) bool {
return true;
}
export fn entry() usize { return @sizeOf(@TypeOf(foo)); }
export fn entry() usize { return @sizeOf(@TypeOf(&foo)); }
// error
// backend=stage1
// backend=stage2
// target=native
//
// tmp.zig:4:19: error: expected type '*[]const u8', found '*const []const u8'
// :4:19: error: expected type '*[]const u8', found '*const []const u8'
// :4:19: note: cast discards const qualifier


@@ -1,12 +0,0 @@
fn foo(comptime x: i32, y: i32) i32 { return x + y; }
fn test1(a: i32, b: i32) i32 {
return foo(a, b);
}
export fn entry() usize { return @sizeOf(@TypeOf(test1)); }
// error
// backend=stage1
// target=native
//
// tmp.zig:3:16: error: runtime value cannot be passed to comptime arg


@@ -1,12 +0,0 @@
const Derp = opaque {};
extern fn bar(d: *Derp) void;
export fn foo() void {
var x = @as(u8, 1);
bar(@ptrCast(*anyopaque, &x));
}
// error
// backend=stage1
// target=native
//
// tmp.zig:5:9: error: expected type '*Derp', found '*anyopaque'


@@ -10,6 +10,6 @@ export fn entry() void {
// backend=stage2
// target=native
//
// :6:6: error: expected type '*const fn(*const u8) void', found '*const fn(u8) void'
// :6:6: note: pointer type child 'fn(u8) void' cannot cast into pointer type child 'fn(*const u8) void'
// :6:6: note: parameter 0 'u8' cannot cast into '*const u8'
// :6:7: error: expected type '*const fn(*const u8) void', found '*const fn(u8) void'
// :6:7: note: pointer type child 'fn(u8) void' cannot cast into pointer type child 'fn(*const u8) void'
// :6:7: note: parameter 0 'u8' cannot cast into '*const u8'


@@ -2,7 +2,7 @@ fn f(a: noreturn) void { _ = a; }
export fn entry() void { f(); }
// error
// backend=stage1
// backend=stage2
// target=native
//
// tmp.zig:1:9: error: parameter of type 'noreturn' not allowed
// :1:6: error: parameter of type 'noreturn' not allowed


@@ -4,7 +4,7 @@ export fn entry() void {
}
// error
// backend=stage1
// backend=stage2
// target=native
//
// tmp.zig:2:20: error: return type cannot be opaque
// :2:20: error: opaque return type 'anyopaque' not allowed


@@ -10,7 +10,8 @@ export fn entry() void {
}
// error
// backend=stage1
// backend=stage2
// target=native
//
// tmp.zig:9:8: error: use of undefined value here causes undefined behavior
// :9:14: error: expected type 'type', found 'tmp.U'
// :1:11: note: union declared here


@@ -0,0 +1,14 @@
const Derp = opaque {};
extern fn bar(d: *Derp) void;
export fn foo() void {
var x = @as(u8, 1);
bar(@ptrCast(*anyopaque, &x));
}
// error
// backend=stage2
// target=native
//
// :5:9: error: expected type '*tmp.Derp', found '*anyopaque'
// :5:9: note: pointer type child 'anyopaque' cannot cast into pointer type child 'tmp.Derp'
// :1:14: note: opaque declared here