Source
pub fn generate(gpa: Allocator, tree: Ast) Allocator.Error!Zir {
    assert(tree.mode == .zig);

    var arena = std.heap.ArenaAllocator.init(gpa);
    defer arena.deinit();

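    // `AstRlAnnotate` pre-computes which AST nodes will need a result
    // location when lowered to ZIR.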
    var nodes_need_rl = try AstRlAnnotate.annotate(gpa, arena.allocator(), tree);
    defer nodes_need_rl.deinit(gpa);

    var astgen: AstGen = .{
        .gpa = gpa,
        .arena = arena.allocator(),
        .tree = &tree,
        .nodes_need_rl = &nodes_need_rl,
        .src_hasher = undefined, // `structDeclInner` for the root struct will set this
    };
    defer astgen.deinit(gpa);

    // String table index 0 is reserved for `NullTerminatedString.empty`.
    try astgen.string_bytes.append(gpa, 0);

    // We expect at least as many ZIR instructions and extra data items
    // as AST nodes.
    try astgen.instructions.ensureTotalCapacity(gpa, tree.nodes.len);

    // First few indexes of extra are reserved and set at the end.
    const reserved_count = @typeInfo(Zir.ExtraIndex).@"enum".fields.len;
    try astgen.extra.ensureTotalCapacity(gpa, tree.nodes.len + reserved_count);
    astgen.extra.items.len += reserved_count;

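    // The root struct decl is lowered inside a comptime `GenZir` scope whose
    // parent chain ends at `top_scope`.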
    var top_scope: Scope.Top = .{};

    var gz_instructions: std.ArrayListUnmanaged(Zir.Inst.Index) = .empty;
    var gen_scope: GenZir = .{
        .is_comptime = true,
        .parent = &top_scope.base,
        .anon_name_strategy = .parent,
        .decl_node_index = .root,
        .decl_line = 0,
        .astgen = &astgen,
        .instructions = &gz_instructions,
        .instructions_top = 0,
    };
    defer gz_instructions.deinit(gpa);

    // The AST -> ZIR lowering process assumes an AST that does not have any parse errors.
    // Parse errors, or AstGen errors in the root struct, are considered "fatal", so we emit no ZIR.
    const fatal = if (tree.errors.len == 0) fatal: {
        if (AstGen.structDeclInner(
            &gen_scope,
            &gen_scope.base,
            .root,
            tree.containerDeclRoot(),
            .auto,
            .none,
        )) |struct_decl_ref| {
            assert(struct_decl_ref.toIndex().? == .main_struct_inst);
            break :fatal false;
        } else |err| switch (err) {
            error.OutOfMemory => return error.OutOfMemory,
            error.AnalysisFail => break :fatal true, // Handled via compile_errors below.
        }
    } else fatal: {
        try lowerAstErrors(&astgen);
        break :fatal true;
    };

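    // Back-fill the reserved `extra` slots; writing 0 into a reserved slot
    // means there is no payload for that index.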
    const err_index = @intFromEnum(Zir.ExtraIndex.compile_errors);
    if (astgen.compile_errors.items.len == 0) {
        astgen.extra.items[err_index] = 0;
    } else {
        try astgen.extra.ensureUnusedCapacity(gpa, 1 + astgen.compile_errors.items.len *
            @typeInfo(Zir.Inst.CompileErrors.Item).@"struct".fields.len);

        astgen.extra.items[err_index] = astgen.addExtraAssumeCapacity(Zir.Inst.CompileErrors{
            .items_len = @intCast(astgen.compile_errors.items.len),
        });

        for (astgen.compile_errors.items) |item| {
            _ = astgen.addExtraAssumeCapacity(item);
        }
    }

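    // `@import` operands discovered during lowering are serialized the same way.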
    const imports_index = @intFromEnum(Zir.ExtraIndex.imports);
    if (astgen.imports.count() == 0) {
        astgen.extra.items[imports_index] = 0;
    } else {
        try astgen.extra.ensureUnusedCapacity(gpa, @typeInfo(Zir.Inst.Imports).@"struct".fields.len +
            astgen.imports.count() * @typeInfo(Zir.Inst.Imports.Item).@"struct".fields.len);

        astgen.extra.items[imports_index] = astgen.addExtraAssumeCapacity(Zir.Inst.Imports{
            .imports_len = @intCast(astgen.imports.count()),
        });

        var it = astgen.imports.iterator();
        while (it.next()) |entry| {
            _ = astgen.addExtraAssumeCapacity(Zir.Inst.Imports.Item{
                .name = entry.key_ptr.*,
                .token = entry.value_ptr.*,
            });
        }
    }

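    // On a fatal error no instructions are returned, but `string_bytes` and
    // `extra` still carry the compile errors recorded above.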
    return .{
        .instructions = if (fatal) .empty else astgen.instructions.toOwnedSlice(),
        .string_bytes = try astgen.string_bytes.toOwnedSlice(gpa),
        .extra = try astgen.extra.toOwnedSlice(gpa),
    };
}
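
A minimal sketch of how a caller might drive generate(), assuming AstGen.zig and Zir.zig from the compiler tree are importable as shown; the lower() helper and the import paths are illustrative, not the compiler's actual call site:

const std = @import("std");
const Ast = std.zig.Ast;
const AstGen = @import("AstGen.zig"); // assumed path
const Zir = @import("Zir.zig"); // assumed path

/// Hypothetical helper: parse `source`, then lower it to ZIR.
fn lower(gpa: std.mem.Allocator, source: [:0]const u8) !Zir {
    // Parse in `.zig` mode so that `assert(tree.mode == .zig)` above holds.
    var tree = try Ast.parse(gpa, source, .zig);
    defer tree.deinit(gpa);
    // Even in the "fatal" case a Zir is returned: its `instructions` are
    // empty, while `extra` and `string_bytes` describe the compile errors.
    return AstGen.generate(gpa, tree);
}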