structDeclInner
Parameters
- gz: *GenZir
- scope: *Scope
- node: Ast.Node.Index
- container_decl: Ast.full.ContainerDecl
- layout: std.builtin.Type.ContainerLayout
- backing_int_node: Ast.Node.OptionalIndex
- name_strat: Zir.Inst.NameStrategy
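For orientation, a call might look like the sketch below. This is illustrative only: the surrounding values (`tree`, `gz`, `scope`, `node`) are assumed to come from an AstGen context, and the literal arguments are not taken from the real caller.

var buf: [2]Ast.Node.Index = undefined;
const container_decl = tree.fullContainerDecl(&buf, node).?;
// Lower the declaration with auto layout, no backing integer, and an
// anonymous name strategy (all hypothetical choices for this sketch).
const struct_ref = try structDeclInner(
    gz,
    scope,
    node,
    container_decl,
    .auto,
    .none,
    .anon,
);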
Source
fn structDeclInner(
    gz: *GenZir,
    scope: *Scope,
    node: Ast.Node.Index,
    container_decl: Ast.full.ContainerDecl,
    layout: std.builtin.Type.ContainerLayout,
    backing_int_node: Ast.Node.OptionalIndex,
    name_strat: Zir.Inst.NameStrategy,
) InnerError!Zir.Inst.Ref {
    const astgen = gz.astgen;
    const gpa = astgen.gpa;
    const tree = astgen.tree;
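    // A struct declaration containing any tuple-like (unnamed) field is lowered
    // as a tuple instead; a source file itself, however, can never be a tuple.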
    is_tuple: {
        const tuple_field_node = for (container_decl.ast.members) |member_node| {
            const container_field = tree.fullContainerField(member_node) orelse continue;
            if (container_field.ast.tuple_like) break member_node;
        } else break :is_tuple;

        if (node == .root) {
            return astgen.failNode(tuple_field_node, "file cannot be a tuple", .{});
        } else {
            return tupleDecl(gz, scope, node, container_decl, layout, backing_int_node);
        }
    }
    const decl_inst = try gz.reserveInstructionIndex();
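    // Fast path: a struct with no members and no backing integer needs no
    // namespace and no field bodies, so the instruction can be finalized
    // immediately with zeroed lengths.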
    if (container_decl.ast.members.len == 0 and backing_int_node == .none) {
        try gz.setStruct(decl_inst, .{
            .src_node = node,
            .layout = layout,
            .captures_len = 0,
            .fields_len = 0,
            .decls_len = 0,
            .has_backing_int = false,
            .known_non_opv = false,
            .known_comptime_only = false,
            .any_comptime_fields = false,
            .any_default_inits = false,
            .any_aligned_fields = false,
            .fields_hash = std.zig.hashSrc(@tagName(layout)),
            .name_strat = name_strat,
        });
        return decl_inst.toRef();
    }
    var namespace: Scope.Namespace = .{
        .parent = scope,
        .node = node,
        .inst = decl_inst,
        .declaring_gz = gz,
        .maybe_generic = astgen.within_fn,
    };
    defer namespace.deinit(gpa);
    // The struct_decl instruction introduces a scope in which the decls of the struct
    // are in scope, so that field types, alignments, and default value expressions
    // can refer to decls within the struct itself.
    astgen.advanceSourceCursorToNode(node);
    var block_scope: GenZir = .{
        .parent = &namespace.base,
        .decl_node_index = node,
        .decl_line = gz.decl_line,
        .astgen = astgen,
        .is_comptime = true,
        .instructions = gz.instructions,
        .instructions_top = gz.instructions.items.len,
    };
    defer block_scope.unstack();

    const scratch_top = astgen.scratch.items.len;
    defer astgen.scratch.items.len = scratch_top;
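    // From here on, scratch temporarily holds instruction bodies (the backing
    // integer body, then field type, alignment, and init bodies) until they
    // are copied into `extra` at the end of this function.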
    var backing_int_body_len: usize = 0;
    const backing_int_ref: Zir.Inst.Ref = blk: {
        if (backing_int_node.unwrap()) |arg| {
            if (layout != .@"packed") {
                return astgen.failNode(arg, "non-packed struct does not support backing integer type", .{});
            } else {
                const backing_int_ref = try typeExpr(&block_scope, &namespace.base, arg);
                if (!block_scope.isEmpty()) {
                    if (!block_scope.endsWithNoReturn()) {
                        _ = try block_scope.addBreak(.break_inline, decl_inst, backing_int_ref);
                    }

                    const body = block_scope.instructionsSlice();
                    const old_scratch_len = astgen.scratch.items.len;
                    try astgen.scratch.ensureUnusedCapacity(gpa, countBodyLenAfterFixups(astgen, body));
                    appendBodyWithFixupsArrayList(astgen, &astgen.scratch, body);
                    backing_int_body_len = astgen.scratch.items.len - old_scratch_len;
                    block_scope.instructions.items.len = block_scope.instructions_top;
                }
                break :blk backing_int_ref;
            }
        } else {
            break :blk .none;
        }
    };
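    // Every member is either a declaration or a field; scanContainer counts and
    // registers the declarations, and the remainder are fields.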
    const decl_count = try astgen.scanContainer(&namespace, container_decl.ast.members, .@"struct");
    const field_count: u32 = @intCast(container_decl.ast.members.len - decl_count);
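    // Per field, WipMembers tracks 4 flag bits (have_align, have_value,
    // is_comptime, have_type_body) and at most `max_field_size` u32s of payload.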
    const bits_per_field = 4;
    const max_field_size = 5;
    var wip_members = try WipMembers.init(gpa, &astgen.scratch, decl_count, field_count, bits_per_field, max_field_size);
    defer wip_members.deinit();
    // We will use the scratch buffer, starting here, for the bodies:
    //    bodies: { // for every fields_len
    //        field_type_body_inst: Inst, // for each field_type_body_len
    //        align_body_inst: Inst, // for each align_body_len
    //        init_body_inst: Inst, // for each init_body_len
    //    }
    // Note that the scratch buffer is simultaneously being used by WipMembers;
    // however, it will not access any elements beyond this point in the ArrayList.
    // It also accesses via the ArrayList items field so it can handle the scratch
    // buffer being reallocated.
    // No defer needed here because it is handled by `wip_members.deinit()` above.
    const bodies_start = astgen.scratch.items.len;
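    // The fields hash covers the layout tag, the backing integer source if any,
    // and the source bytes of every field below, so the hash changes whenever
    // any of those change.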
    const old_hasher = astgen.src_hasher;
    defer astgen.src_hasher = old_hasher;
    astgen.src_hasher = std.zig.SrcHasher.init(.{});
    astgen.src_hasher.update(@tagName(layout));
    if (backing_int_node.unwrap()) |arg| {
        astgen.src_hasher.update(tree.getNodeSource(arg));
    }

    var known_non_opv = false;
    var known_comptime_only = false;
    var any_comptime_fields = false;
    var any_aligned_fields = false;
    var any_default_inits = false;
    for (container_decl.ast.members) |member_node| {
        var member = switch (try containerMember(&block_scope, &namespace.base, &wip_members, member_node)) {
            .decl => continue,
            .field => |field| field,
        };
        astgen.src_hasher.update(tree.getNodeSource(member_node));

        const field_name = try astgen.identAsString(member.ast.main_token);
        member.convertToNonTupleLike(astgen.tree);
        assert(!member.ast.tuple_like);
        wip_members.appendToField(@intFromEnum(field_name));

        const type_expr = member.ast.type_expr.unwrap() orelse {
            return astgen.failTok(member.ast.main_token, "struct field missing type", .{});
        };

        const field_type = try typeExpr(&block_scope, &namespace.base, type_expr);
        const have_type_body = !block_scope.isEmpty();
        const have_align = member.ast.align_expr != .none;
        const have_value = member.ast.value_expr != .none;
        const is_comptime = member.comptime_token != null;

        if (is_comptime) {
            switch (layout) {
                .@"packed", .@"extern" => return astgen.failTok(member.comptime_token.?, "{s} struct fields cannot be marked comptime", .{@tagName(layout)}),
                .auto => any_comptime_fields = true,
            }
        } else {
            known_non_opv = known_non_opv or
                nodeImpliesMoreThanOnePossibleValue(tree, type_expr);
            known_comptime_only = known_comptime_only or
                nodeImpliesComptimeOnly(tree, type_expr);
        }
        wip_members.nextField(bits_per_field, .{ have_align, have_value, is_comptime, have_type_body });
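        // If evaluating the type expression emitted instructions, store that body
        // in scratch and record its length; otherwise store the resolved type ref.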
        if (have_type_body) {
            if (!block_scope.endsWithNoReturn()) {
                _ = try block_scope.addBreak(.break_inline, decl_inst, field_type);
            }
            const body = block_scope.instructionsSlice();
            const old_scratch_len = astgen.scratch.items.len;
            try astgen.scratch.ensureUnusedCapacity(gpa, countBodyLenAfterFixups(astgen, body));
            appendBodyWithFixupsArrayList(astgen, &astgen.scratch, body);
            wip_members.appendToField(@intCast(astgen.scratch.items.len - old_scratch_len));
            block_scope.instructions.items.len = block_scope.instructions_top;
        } else {
            wip_members.appendToField(@intFromEnum(field_type));
        }
        if (member.ast.align_expr.unwrap()) |align_expr| {
            if (layout == .@"packed") {
                return astgen.failNode(align_expr, "unable to override alignment of packed struct fields", .{});
            }
            any_aligned_fields = true;
            const align_ref = try expr(&block_scope, &namespace.base, coerced_align_ri, align_expr);
            if (!block_scope.endsWithNoReturn()) {
                _ = try block_scope.addBreak(.break_inline, decl_inst, align_ref);
            }
            const body = block_scope.instructionsSlice();
            const old_scratch_len = astgen.scratch.items.len;
            try astgen.scratch.ensureUnusedCapacity(gpa, countBodyLenAfterFixups(astgen, body));
            appendBodyWithFixupsArrayList(astgen, &astgen.scratch, body);
            wip_members.appendToField(@intCast(astgen.scratch.items.len - old_scratch_len));
            block_scope.instructions.items.len = block_scope.instructions_top;
        }
        if (member.ast.value_expr.unwrap()) |value_expr| {
            any_default_inits = true;

            // The decl_inst is used here so that we can easily reconstruct a mapping
            // between it and the field type when the field inits are analyzed.
            const ri: ResultInfo = .{ .rl = if (field_type == .none) .none else .{ .coerced_ty = decl_inst.toRef() } };

            const default_inst = try expr(&block_scope, &namespace.base, ri, value_expr);
            if (!block_scope.endsWithNoReturn()) {
                _ = try block_scope.addBreak(.break_inline, decl_inst, default_inst);
            }
            const body = block_scope.instructionsSlice();
            const old_scratch_len = astgen.scratch.items.len;
            try astgen.scratch.ensureUnusedCapacity(gpa, countBodyLenAfterFixups(astgen, body));
            appendBodyWithFixupsArrayList(astgen, &astgen.scratch, body);
            wip_members.appendToField(@intCast(astgen.scratch.items.len - old_scratch_len));
            block_scope.instructions.items.len = block_scope.instructions_top;
        } else if (member.comptime_token) |comptime_token| {
            return astgen.failTok(comptime_token, "comptime field without default initialization value", .{});
        }
    }
    var fields_hash: std.zig.SrcHash = undefined;
    astgen.src_hasher.final(&fields_hash);

    try gz.setStruct(decl_inst, .{
        .src_node = node,
        .layout = layout,
        .captures_len = @intCast(namespace.captures.count()),
        .fields_len = field_count,
        .decls_len = decl_count,
        .has_backing_int = backing_int_ref != .none,
        .known_non_opv = known_non_opv,
        .known_comptime_only = known_comptime_only,
        .any_comptime_fields = any_comptime_fields,
        .any_default_inits = any_default_inits,
        .any_aligned_fields = any_aligned_fields,
        .fields_hash = fields_hash,
        .name_strat = name_strat,
    });
    wip_members.finishBits(bits_per_field);
    const decls_slice = wip_members.declsSlice();
    const fields_slice = wip_members.fieldsSlice();
    const bodies_slice = astgen.scratch.items[bodies_start..];
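    // Trailing data, in order: capture keys and values, the backing integer
    // body (or its ref), then decls, fields, and the field bodies gathered above.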
    try astgen.extra.ensureUnusedCapacity(gpa, backing_int_body_len + 2 +
        decls_slice.len + namespace.captures.count() * 2 + fields_slice.len + bodies_slice.len);
    astgen.extra.appendSliceAssumeCapacity(@ptrCast(namespace.captures.keys()));
    astgen.extra.appendSliceAssumeCapacity(@ptrCast(namespace.captures.values()));
    if (backing_int_ref != .none) {
        astgen.extra.appendAssumeCapacity(@intCast(backing_int_body_len));
        if (backing_int_body_len == 0) {
            astgen.extra.appendAssumeCapacity(@intFromEnum(backing_int_ref));
        } else {
            astgen.extra.appendSliceAssumeCapacity(astgen.scratch.items[scratch_top..][0..backing_int_body_len]);
        }
    }
    astgen.extra.appendSliceAssumeCapacity(decls_slice);
    astgen.extra.appendSliceAssumeCapacity(fields_slice);
    astgen.extra.appendSliceAssumeCapacity(bodies_slice);
    block_scope.unstack();
    return decl_inst.toRef();
}
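To make the main paths concrete, the following sketch shows Zig inputs and the route each takes through this function (illustrative only; the resulting ZIR is not shown):

// A tuple-like (unnamed) field routes the whole declaration to tupleDecl.
const Pair = struct { u32, u32 };

// No members and no backing integer: the fast path finalizes the
// struct_decl instruction immediately with zeroed lengths.
const Empty = struct {};

// A packed struct with an explicit backing integer: `u8` is evaluated in
// block_scope and stored as the backing integer ref or body.
const Flags = packed struct(u8) { a: bool, b: bool, _pad: u6 = 0 };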