@@ -205,7 +205,7 @@ pub fn generate(gpa: Allocator, tree: Ast) Allocator.Error!Zir {
     };
 }
 
-pub fn deinit(astgen: *AstGen, gpa: Allocator) void {
+fn deinit(astgen: *AstGen, gpa: Allocator) void {
     astgen.instructions.deinit(gpa);
     astgen.extra.deinit(gpa);
     astgen.string_table.deinit(gpa);
@@ -216,7 +216,7 @@ pub fn deinit(astgen: *AstGen, gpa: Allocator) void {
     astgen.ref_table.deinit(gpa);
 }
 
-pub const ResultInfo = struct {
+const ResultInfo = struct {
     /// The semantics requested for the result location
     rl: Loc,
 
@@ -245,7 +245,7 @@ pub const ResultInfo = struct {
         }
     }
 
-    pub const Loc = union(enum) {
+    const Loc = union(enum) {
         /// The expression is the right-hand side of assignment to `_`. Only the side-effects of the
         /// expression should be generated. The result instruction from the expression must
         /// be ignored.
@@ -277,11 +277,11 @@ pub const ResultInfo = struct {
         src_node: ?Ast.Node.Index = null,
     };
 
-    pub const Strategy = struct {
+    const Strategy = struct {
         elide_store_to_block_ptr_instructions: bool,
         tag: Tag,
 
-        pub const Tag = enum {
+        const Tag = enum {
             /// Both branches will use break_void; result location is used to communicate the
             /// result instruction.
             break_void,
@@ -331,7 +331,7 @@ pub const ResultInfo = struct {
         }
     };
 
-    pub const Context = enum {
+    const Context = enum {
         /// The expression is the operand to a return expression.
         @"return",
         /// The expression is the input to an error-handling operator (if-else, try, or catch).
@@ -349,11 +349,11 @@ pub const ResultInfo = struct {
     };
 };
 
-pub const align_ri: ResultInfo = .{ .rl = .{ .ty = .u29_type } };
-pub const coerced_align_ri: ResultInfo = .{ .rl = .{ .coerced_ty = .u29_type } };
-pub const bool_ri: ResultInfo = .{ .rl = .{ .ty = .bool_type } };
-pub const type_ri: ResultInfo = .{ .rl = .{ .ty = .type_type } };
-pub const coerced_type_ri: ResultInfo = .{ .rl = .{ .coerced_ty = .type_type } };
+const align_ri: ResultInfo = .{ .rl = .{ .ty = .u29_type } };
+const coerced_align_ri: ResultInfo = .{ .rl = .{ .coerced_ty = .u29_type } };
+const bool_ri: ResultInfo = .{ .rl = .{ .ty = .bool_type } };
+const type_ri: ResultInfo = .{ .rl = .{ .ty = .type_type } };
+const coerced_type_ri: ResultInfo = .{ .rl = .{ .coerced_ty = .type_type } };
 
 fn typeExpr(gz: *GenZir, scope: *Scope, type_node: Ast.Node.Index) InnerError!Zir.Inst.Ref {
     const prev_force_comptime = gz.force_comptime;
@@ -3507,7 +3507,7 @@ const WipMembers = struct {
     /// (4 for src_hash + line + name + value + doc_comment + align + link_section + address_space)
     const max_decl_size = 11;
 
-    pub fn init(gpa: Allocator, payload: *ArrayListUnmanaged(u32), decl_count: u32, field_count: u32, comptime bits_per_field: u32, comptime max_field_size: u32) Allocator.Error!Self {
+    fn init(gpa: Allocator, payload: *ArrayListUnmanaged(u32), decl_count: u32, field_count: u32, comptime bits_per_field: u32, comptime max_field_size: u32) Allocator.Error!Self {
         const payload_top = @intCast(u32, payload.items.len);
         const decls_start = payload_top + (decl_count + decls_per_u32 - 1) / decls_per_u32;
         const field_bits_start = decls_start + decl_count * max_decl_size;
@@ -3528,7 +3528,7 @@ const WipMembers = struct {
         };
     }
 
-    pub fn nextDecl(self: *Self, is_pub: bool, is_export: bool, has_align: bool, has_section_or_addrspace: bool) void {
+    fn nextDecl(self: *Self, is_pub: bool, is_export: bool, has_align: bool, has_section_or_addrspace: bool) void {
         const index = self.payload_top + self.decl_index / decls_per_u32;
         assert(index < self.decls_start);
         const bit_bag: u32 = if (self.decl_index % decls_per_u32 == 0) 0 else self.payload.items[index];
@@ -3540,7 +3540,7 @@ const WipMembers = struct {
         self.decl_index += 1;
     }
 
-    pub fn nextField(self: *Self, comptime bits_per_field: u32, bits: [bits_per_field]bool) void {
+    fn nextField(self: *Self, comptime bits_per_field: u32, bits: [bits_per_field]bool) void {
         const fields_per_u32 = 32 / bits_per_field;
         const index = self.field_bits_start + self.field_index / fields_per_u32;
         assert(index < self.fields_start);
@@ -3554,25 +3554,25 @@ const WipMembers = struct {
         self.field_index += 1;
     }
 
-    pub fn appendToDecl(self: *Self, data: u32) void {
+    fn appendToDecl(self: *Self, data: u32) void {
         assert(self.decls_end < self.field_bits_start);
         self.payload.items[self.decls_end] = data;
         self.decls_end += 1;
     }
 
-    pub fn appendToDeclSlice(self: *Self, data: []const u32) void {
+    fn appendToDeclSlice(self: *Self, data: []const u32) void {
         assert(self.decls_end + data.len <= self.field_bits_start);
         mem.copy(u32, self.payload.items[self.decls_end..], data);
         self.decls_end += @intCast(u32, data.len);
     }
 
-    pub fn appendToField(self: *Self, data: u32) void {
+    fn appendToField(self: *Self, data: u32) void {
         assert(self.fields_end < self.payload.items.len);
         self.payload.items[self.fields_end] = data;
         self.fields_end += 1;
     }
 
-    pub fn finishBits(self: *Self, comptime bits_per_field: u32) void {
+    fn finishBits(self: *Self, comptime bits_per_field: u32) void {
         const empty_decl_slots = decls_per_u32 - (self.decl_index % decls_per_u32);
         if (self.decl_index > 0 and empty_decl_slots < decls_per_u32) {
             const index = self.payload_top + self.decl_index / decls_per_u32;
@@ -3588,15 +3588,15 @@ const WipMembers = struct {
         }
     }
 
-    pub fn declsSlice(self: *Self) []u32 {
+    fn declsSlice(self: *Self) []u32 {
         return self.payload.items[self.payload_top..self.decls_end];
     }
 
-    pub fn fieldsSlice(self: *Self) []u32 {
+    fn fieldsSlice(self: *Self) []u32 {
         return self.payload.items[self.field_bits_start..self.fields_end];
     }
 
-    pub fn deinit(self: *Self) void {
+    fn deinit(self: *Self) void {
         self.payload.items.len = self.payload_top;
     }
 };
@@ -10803,7 +10803,7 @@ const Scope = struct {
         /// ref of the capture for decls in this namespace
         captures: std.AutoArrayHashMapUnmanaged(Zir.Inst.Index, Zir.Inst.Index) = .{},
 
-        pub fn deinit(self: *Namespace, gpa: Allocator) void {
+        fn deinit(self: *Namespace, gpa: Allocator) void {
            self.decls.deinit(gpa);
            self.captures.deinit(gpa);
            self.* = undefined;