forExpr

Parameters
- parent_gz: *GenZir
- scope: *Scope
- ri: ResultInfo
- node: Ast.Node.Index
- for_full: Ast.full.For
- is_statement: bool

Implementation
/// Lowers a `for` expression (`for_full`, rooted at `node`) into ZIR,
/// emitting into `parent_gz`.
///
/// Handles the multi-object form: each input is either an indexable
/// expression or a `for_range` counter (`start..` / `start..end`). A single
/// zero-based `usize` counter drives all inputs, and a dedicated `for_len`
/// instruction asserts that every bounded input agrees on the iteration
/// length. Returns the ref of the emitted loop block, passed through
/// `rvalue` when the result location requires a by-value result.
///
/// - `ri`: result location/context for the loop's value.
/// - `is_statement`: when true, `ensure_result_used` is emitted on the
///   result (and on the `else` branch's result).
fn forExpr(
    parent_gz: *GenZir,
    scope: *Scope,
    ri: ResultInfo,
    node: Ast.Node.Index,
    for_full: Ast.full.For,
    is_statement: bool,
) InnerError!Zir.Inst.Ref {
    const astgen = parent_gz.astgen;

    // A labeled loop must not shadow an existing label in scope.
    if (for_full.label_token) |label_token| {
        try astgen.checkLabelRedefinition(scope, label_token);
    }

    // If this node does not require a result location, strip pointer-based
    // result locations so breaks produce plain values; the `rvalue` call at
    // the end then writes through the original pointer if one existed.
    const need_rl = astgen.nodes_need_rl.contains(node);
    const block_ri: ResultInfo = if (need_rl) ri else .{
        .rl = switch (ri.rl) {
            .ptr => .{ .ty = (try ri.rl.resultType(parent_gz, node)).? },
            .inferred_ptr => .none,
            else => ri.rl,
        },
        .ctx = ri.ctx,
    };
    // We need to call `rvalue` to write through to the pointer only if we had a
    // result pointer and aren't forwarding it.
    const LocTag = @typeInfo(ResultInfo.Loc).@"union".tag_type.?;
    const need_result_rvalue = @as(LocTag, block_ri.rl) != @as(LocTag, ri.rl);

    const is_inline = for_full.inline_token != null;
    if (parent_gz.is_comptime and is_inline) {
        // `inline` adds nothing inside an already-comptime scope; report it.
        try astgen.appendErrorTok(for_full.inline_token.?, "redundant inline keyword in comptime scope", .{});
    }
    const tree = astgen.tree;
    const gpa = astgen.gpa;

    // For counters, this is the start value; for indexables, this is the base
    // pointer that can be used with elem_ptr and similar instructions.
    // Special value `none` means that this is a counter and its start value is
    // zero, indicating that the main index counter can be used directly.
    const indexables = try gpa.alloc(Zir.Inst.Ref, for_full.ast.inputs.len);
    defer gpa.free(indexables);
    // elements of this array can be `none`, indicating no length check.
    const lens = try gpa.alloc([2]Zir.Inst.Ref, for_full.ast.inputs.len);
    defer gpa.free(lens);

    // We will use a single zero-based counter no matter how many indexables there are.
    const index_ptr = blk: {
        const alloc_tag: Zir.Inst.Tag = if (is_inline) .alloc_comptime_mut else .alloc;
        const index_ptr = try parent_gz.addUnNode(alloc_tag, .usize_type, node);
        // initialize to zero
        _ = try parent_gz.addPlNode(.store_node, node, Zir.Inst.Bin{
            .lhs = index_ptr,
            .rhs = .zero_usize,
        });
        break :blk index_ptr;
    };

    // First pass over the inputs: evaluate each indexable / range bound and
    // record what the length check must look at.
    var any_len_checks = false;
    {
        var capture_token = for_full.payload_token;
        for (for_full.ast.inputs, indexables, lens) |input, *indexable_ref, *len_refs| {
            // A leading `*` makes the capture a pointer to the element.
            const capture_is_ref = tree.tokenTag(capture_token) == .asterisk;
            const ident_tok = capture_token + @intFromBool(capture_is_ref);
            const is_discard = mem.eql(u8, tree.tokenSlice(ident_tok), "_");

            if (is_discard and capture_is_ref) {
                return astgen.failTok(capture_token, "pointer modifier invalid on discard", .{});
            }
            // Skip over the comma, and on to the next capture (or the ending pipe character).
            capture_token = ident_tok + 2;

            try emitDbgNode(parent_gz, input);
            if (tree.nodeTag(input) == .for_range) {
                // Counter input: `start..` or `start..end`.
                if (capture_is_ref) {
                    return astgen.failTok(ident_tok, "cannot capture reference to range", .{});
                }
                const start_node, const end_node = tree.nodeData(input).node_and_opt_node;
                const start_val = try expr(parent_gz, scope, .{ .rl = .{ .ty = .usize_type } }, start_node);

                const end_val = if (end_node.unwrap()) |end|
                    try expr(parent_gz, scope, .{ .rl = .{ .ty = .usize_type } }, end)
                else
                    .none;

                if (end_val == .none and is_discard) {
                    // A counter that neither bounds the loop nor is captured is useless.
                    try astgen.appendErrorTok(ident_tok, "discard of unbounded counter", .{});
                }

                if (end_val == .none) {
                    // Unbounded counter: imposes no length constraint.
                    len_refs.* = .{ .none, .none };
                } else {
                    any_len_checks = true;
                    len_refs.* = .{ start_val, end_val };
                }

                // A counter starting at zero coincides with the main index
                // (signalled by `none`; see comment on `indexables` above).
                const start_is_zero = nodeIsTriviallyZero(tree, start_node);
                indexable_ref.* = if (start_is_zero) .none else start_val;
            } else {
                // Indexable input: its length always participates in the check.
                const indexable = try expr(parent_gz, scope, .{ .rl = .none }, input);

                any_len_checks = true;
                indexable_ref.* = indexable;
                len_refs.* = .{ indexable, .none };
            }
        }
    }

    // At least one input must bound the loop.
    if (!any_len_checks) {
        return astgen.failNode(node, "unbounded for loop", .{});
    }

    // We use a dedicated ZIR instruction to assert the lengths to assist with
    // nicer error reporting as well as fewer ZIR bytes emitted.
    const len: Zir.Inst.Ref = len: {
        // Reinterpret the `[2]Ref` pairs as one flat operand list for MultiOp.
        const all_lens = @as([*]Zir.Inst.Ref, @ptrCast(lens))[0 .. lens.len * 2];
        const lens_len: u32 = @intCast(all_lens.len);
        try astgen.extra.ensureUnusedCapacity(gpa, @typeInfo(Zir.Inst.MultiOp).@"struct".fields.len + lens_len);
        const len = try parent_gz.addPlNode(.for_len, node, Zir.Inst.MultiOp{
            .operands_len = lens_len,
        });
        appendRefsAssumeCapacity(astgen, all_lens);
        break :len len;
    };

    // Outer loop instruction; breaks targeting it terminate the `for`.
    const loop_tag: Zir.Inst.Tag = if (is_inline) .block_inline else .loop;
    const loop_block = try parent_gz.makeBlockInst(loop_tag, node);
    try parent_gz.instructions.append(gpa, loop_block);

    var loop_scope = parent_gz.makeSubBlock(scope);
    loop_scope.is_inline = is_inline;
    loop_scope.setBreakResultInfo(block_ri);
    defer loop_scope.unstack();

    // We need to finish loop_scope later once we have the deferred refs from then_scope. However, the
    // load must be removed from instructions in the meantime or it appears to be part of parent_gz.
    const index = try loop_scope.addUnNode(.load, index_ptr, node);
    _ = loop_scope.instructions.pop();

    var cond_scope = parent_gz.makeSubBlock(&loop_scope.base);
    defer cond_scope.unstack();

    // Check the condition: keep iterating while `index < len`.
    const cond = try cond_scope.addPlNode(.cmp_lt, node, Zir.Inst.Bin{
        .lhs = index,
        .rhs = len,
    });
    const condbr_tag: Zir.Inst.Tag = if (is_inline) .condbr_inline else .condbr;
    const condbr = try cond_scope.addCondBr(condbr_tag, node);
    const block_tag: Zir.Inst.Tag = if (is_inline) .block_inline else .block;
    const cond_block = try loop_scope.makeBlockInst(block_tag, node);
    try cond_scope.setBlockBody(cond_block);

    // `break` targets the loop instruction; `continue` re-enters the condition block.
    loop_scope.break_block = loop_block.toOptional();
    loop_scope.continue_block = cond_block.toOptional();
    if (for_full.label_token) |label_token| {
        loop_scope.label = .{
            .token = label_token,
            .block_inst = loop_block,
        };
    }

    const then_node = for_full.ast.then_expr;
    var then_scope = parent_gz.makeSubBlock(&cond_scope.base);
    defer then_scope.unstack();

    const capture_scopes = try gpa.alloc(Scope.LocalVal, for_full.ast.inputs.len);
    defer gpa.free(capture_scopes);

    // Second pass over the inputs: materialize each non-discard capture as a
    // `Scope.LocalVal`, chained so later captures see earlier ones.
    const then_sub_scope = blk: {
        var capture_token = for_full.payload_token;
        var capture_sub_scope: *Scope = &then_scope.base;
        for (for_full.ast.inputs, indexables, capture_scopes) |input, indexable_ref, *capture_scope| {
            // Same token walk as the first pass.
            const capture_is_ref = tree.tokenTag(capture_token) == .asterisk;
            const ident_tok = capture_token + @intFromBool(capture_is_ref);
            const capture_name = tree.tokenSlice(ident_tok);
            // Skip over the comma, and on to the next capture (or the ending pipe character).
            capture_token = ident_tok + 2;

            if (mem.eql(u8, capture_name, "_")) continue;

            const name_str_index = try astgen.identAsString(ident_tok);
            try astgen.detectLocalShadowing(capture_sub_scope, name_str_index, ident_tok, capture_name, .capture);

            const capture_inst = inst: {
                const is_counter = tree.nodeTag(input) == .for_range;

                if (indexable_ref == .none) {
                    // Special case: the main index can be used directly.
                    assert(is_counter);
                    assert(!capture_is_ref);
                    break :inst index;
                }

                // For counters, we add the index variable to the start value; for
                // indexables, we use it as an element index. This is so similar
                // that they can share the same code paths, branching only on the
                // ZIR tag.
                const switch_cond = (@as(u2, @intFromBool(capture_is_ref)) << 1) | @intFromBool(is_counter);
                const tag: Zir.Inst.Tag = switch (switch_cond) {
                    0b00 => .elem_val, // by-value element of an indexable
                    0b01 => .add, // counter: start + index
                    0b10 => .elem_ptr, // by-ref element of an indexable
                    0b11 => unreachable, // compile error emitted already
                };
                break :inst try then_scope.addPlNode(tag, input, Zir.Inst.Bin{
                    .lhs = indexable_ref,
                    .rhs = index,
                });
            };

            capture_scope.* = .{
                .parent = capture_sub_scope,
                .gen_zir = &then_scope,
                .name = name_str_index,
                .inst = capture_inst,
                .token_src = ident_tok,
                .id_cat = .capture,
            };

            try then_scope.addDbgVar(.dbg_var_val, name_str_index, capture_inst);
            capture_sub_scope = &capture_scope.base;
        }

        break :blk capture_sub_scope;
    };

    const then_result = try fullBodyExpr(&then_scope, then_sub_scope, .{ .rl = .none }, then_node, .allow_branch_hint);
    _ = try addEnsureResult(&then_scope, then_result, then_node);

    try checkUsed(parent_gz, &then_scope.base, then_sub_scope);

    // Record a debug statement at the source position of the body's last
    // token, followed by an empty debug statement marker.
    astgen.advanceSourceCursor(tree.tokenStart(tree.lastToken(then_node)));
    try emitDbgStmt(parent_gz, .{ astgen.source_line - parent_gz.decl_line, astgen.source_column });
    _ = try parent_gz.add(.{
        .tag = .extended,
        .data = .{ .extended = .{
            .opcode = .dbg_empty_stmt,
            .small = undefined,
            .operand = undefined,
        } },
    });

    const break_tag: Zir.Inst.Tag = if (is_inline) .break_inline else .@"break";
    // Falling off the end of the body continues the loop via cond_block.
    _ = try then_scope.addBreak(break_tag, cond_block, .void_value);

    var else_scope = parent_gz.makeSubBlock(&cond_scope.base);
    defer else_scope.unstack();

    if (for_full.ast.else_expr.unwrap()) |else_node| {
        const sub_scope = &else_scope.base;
        // Remove the continue block and break block so that `continue` and `break`
        // control flow apply to outer loops; not this one.
        loop_scope.continue_block = .none;
        loop_scope.break_block = .none;
        const else_result = try fullBodyExpr(&else_scope, sub_scope, loop_scope.break_result_info, else_node, .allow_branch_hint);
        if (is_statement) {
            _ = try addEnsureResult(&else_scope, else_result, else_node);
        }
        if (!else_scope.endsWithNoReturn()) {
            _ = try else_scope.addBreakWithSrcNode(break_tag, loop_block, else_result, else_node);
        }
    } else {
        // No `else`: the loop's value is void.
        const result = try rvalue(&else_scope, ri, .void_value, node);
        _ = try else_scope.addBreak(break_tag, loop_block, result);
    }

    if (loop_scope.label) |some| {
        if (!some.used) {
            try astgen.appendErrorTok(some.token, "unused for loop label", .{});
        }
    }

    try setCondBrPayload(condbr, cond, &then_scope, &else_scope);

    // then_block and else_block unstacked now, can resurrect loop_scope to finally finish it
    {
        loop_scope.instructions_top = loop_scope.instructions.items.len;
        // Re-attach the `load` popped earlier, then the condition block.
        try loop_scope.instructions.appendSlice(gpa, &.{ index.toIndex().?, cond_block });

        // Increment the index variable.
        const index_plus_one = try loop_scope.addPlNode(.add_unsafe, node, Zir.Inst.Bin{
            .lhs = index,
            .rhs = .one_usize,
        });
        _ = try loop_scope.addPlNode(.store_node, node, Zir.Inst.Bin{
            .lhs = index_ptr,
            .rhs = index_plus_one,
        });
        const repeat_tag: Zir.Inst.Tag = if (is_inline) .repeat_inline else .repeat;
        _ = try loop_scope.addNode(repeat_tag, node);

        try loop_scope.setBlockBody(loop_block);
    }

    const result = if (need_result_rvalue)
        try rvalue(parent_gz, ri, loop_block.toRef(), node)
    else
        loop_block.toRef();

    if (is_statement) {
        // Statement position: the loop's value must not be silently discarded.
        _ = try parent_gz.addUnNode(.ensure_result_used, result, node);
    }

    return result;
}