45
45
46
46
#![ allow( unsigned_negate) ]
47
47
48
- use libc:: c_ulonglong;
49
48
use std:: collections:: Map ;
50
49
use std:: num:: Int ;
51
50
use std:: rc:: Rc ;
@@ -132,7 +131,7 @@ pub struct Struct {
132
131
// If the struct is DST, then the size and alignment do not take into
133
132
// account the unsized fields of the struct.
134
133
pub size : u64 ,
135
- pub align : u64 ,
134
+ pub align : u32 ,
136
135
pub sized : bool ,
137
136
pub packed : bool ,
138
137
pub fields : Vec < ty:: t >
@@ -164,7 +163,7 @@ pub fn represent_type(cx: &CrateContext, t: ty::t) -> Rc<Repr> {
164
163
fn represent_type_uncached ( cx : & CrateContext , t : ty:: t ) -> Repr {
165
164
match ty:: get ( t) . sty {
166
165
ty:: ty_tup( ref elems) => {
167
- return Univariant ( mk_struct ( cx, elems. as_slice ( ) , false ) , false )
166
+ return Univariant ( mk_struct ( cx, elems. as_slice ( ) , false , t ) , false )
168
167
}
169
168
ty:: ty_struct( def_id, ref substs) => {
170
169
let fields = ty:: lookup_struct_fields ( cx. tcx ( ) , def_id) ;
@@ -175,12 +174,12 @@ fn represent_type_uncached(cx: &CrateContext, t: ty::t) -> Repr {
175
174
let dtor = ty:: ty_dtor ( cx. tcx ( ) , def_id) . has_drop_flag ( ) ;
176
175
if dtor { ftys. push ( ty:: mk_bool ( ) ) ; }
177
176
178
- return Univariant ( mk_struct ( cx, ftys. as_slice ( ) , packed) , dtor)
177
+ return Univariant ( mk_struct ( cx, ftys. as_slice ( ) , packed, t ) , dtor)
179
178
}
180
179
ty:: ty_unboxed_closure( def_id, _) => {
181
180
let upvars = ty:: unboxed_closure_upvars ( cx. tcx ( ) , def_id) ;
182
181
let upvar_types = upvars. iter ( ) . map ( |u| u. ty ) . collect :: < Vec < _ > > ( ) ;
183
- return Univariant ( mk_struct ( cx, upvar_types. as_slice ( ) , false ) ,
182
+ return Univariant ( mk_struct ( cx, upvar_types. as_slice ( ) , false , t ) ,
184
183
false )
185
184
}
186
185
ty:: ty_enum( def_id, ref substs) => {
@@ -195,7 +194,8 @@ fn represent_type_uncached(cx: &CrateContext, t: ty::t) -> Repr {
195
194
// (Typechecking will reject discriminant-sizing attrs.)
196
195
assert_eq ! ( hint, attr:: ReprAny ) ;
197
196
let ftys = if dtor { vec ! ( ty:: mk_bool( ) ) } else { vec ! ( ) } ;
198
- return Univariant ( mk_struct ( cx, ftys. as_slice ( ) , false ) , dtor) ;
197
+ return Univariant ( mk_struct ( cx, ftys. as_slice ( ) , false , t) ,
198
+ dtor) ;
199
199
}
200
200
201
201
if !dtor && cases. iter ( ) . all ( |c| c. tys . len ( ) == 0 ) {
@@ -226,15 +226,17 @@ fn represent_type_uncached(cx: &CrateContext, t: ty::t) -> Repr {
226
226
assert_eq ! ( hint, attr:: ReprAny ) ;
227
227
let mut ftys = cases. get ( 0 ) . tys . clone ( ) ;
228
228
if dtor { ftys. push ( ty:: mk_bool ( ) ) ; }
229
- return Univariant ( mk_struct ( cx, ftys. as_slice ( ) , false ) , dtor) ;
229
+ return Univariant ( mk_struct ( cx, ftys. as_slice ( ) , false , t) ,
230
+ dtor) ;
230
231
}
231
232
232
233
if !dtor && cases. len ( ) == 2 && hint == attr:: ReprAny {
233
234
// Nullable pointer optimization
234
235
let mut discr = 0 ;
235
236
while discr < 2 {
236
- if cases. get ( 1 - discr) . is_zerolen ( cx) {
237
- let st = mk_struct ( cx, cases. get ( discr) . tys . as_slice ( ) , false ) ;
237
+ if cases. get ( 1 - discr) . is_zerolen ( cx, t) {
238
+ let st = mk_struct ( cx, cases. get ( discr) . tys . as_slice ( ) ,
239
+ false , t) ;
238
240
match cases. get ( discr) . find_ptr ( ) {
239
241
Some ( ThinPointer ( _) ) if st. fields . len ( ) == 1 => {
240
242
return RawNullablePointer {
@@ -264,11 +266,15 @@ fn represent_type_uncached(cx: &CrateContext, t: ty::t) -> Repr {
264
266
slo : 0 , shi : ( cases. len ( ) - 1 ) as i64 } ;
265
267
let ity = range_to_inttype ( cx, hint, & bounds) ;
266
268
267
- return General ( ity , cases. iter ( ) . map ( |c| {
269
+ let fields : Vec < _ > = cases. iter ( ) . map ( |c| {
268
270
let mut ftys = vec ! ( ty_of_inttype( ity) ) . append ( c. tys . as_slice ( ) ) ;
269
271
if dtor { ftys. push ( ty:: mk_bool ( ) ) ; }
270
- mk_struct ( cx, ftys. as_slice ( ) , false )
271
- } ) . collect ( ) , dtor) ;
272
+ mk_struct ( cx, ftys. as_slice ( ) , false , t)
273
+ } ) . collect ( ) ;
274
+
275
+ ensure_enum_fits_in_address_space ( cx, ity, fields. as_slice ( ) , t) ;
276
+
277
+ General ( ity, fields, dtor)
272
278
}
273
279
_ => cx. sess ( ) . bug ( format ! ( "adt::represent_type called on non-ADT type: {}" ,
274
280
ty_to_string( cx. tcx( ) , t) ) . as_slice ( ) )
@@ -289,8 +295,8 @@ pub enum PointerField {
289
295
}
290
296
291
297
impl Case {
292
// NOTE(review): scraped diff residue — "-" lines are the pre-patch
// is_zerolen, "+" lines the post-patch one; bare numbers are diff-gutter
// noise. The post-patch version threads `scapegoat` (the ADT's ty::t)
// through to mk_struct, which passes it on to
// ensure_struct_fits_in_address_space solely for error reporting
// (ccx.report_overbig_object(scapegoat)).
// Returns true when a struct built from this case's field types has size 0.
- fn is_zerolen ( & self , cx : & CrateContext ) -> bool {
293
- mk_struct ( cx, self . tys . as_slice ( ) , false ) . size == 0
298
+ fn is_zerolen ( & self , cx : & CrateContext , scapegoat : ty :: t ) -> bool {
299
+ mk_struct ( cx, self . tys . as_slice ( ) , false , scapegoat ) . size == 0
294
300
}
295
301
296
302
fn find_ptr ( & self ) -> Option < PointerField > {
@@ -345,29 +351,25 @@ fn get_cases(tcx: &ty::ctxt, def_id: ast::DefId, substs: &subst::Substs) -> Vec<
345
351
} ) . collect ( )
346
352
}
347
353
348
// NOTE(review): scraped diff residue, not compilable source. Lines prefixed
// "-" are the pre-patch mk_struct, lines prefixed "+" the post-patch one;
// the bare numeric lines are line-number gutter noise from the diff viewer.
//
// Post-patch mk_struct(cx, tys, packed, scapegoat) -> Struct:
//   * `sized` = every field type is statically sized;
//   * builds the LLVM sizing types, filtering out any dynamically sized
//     fields when the struct is a DST (the old code duplicated the whole
//     Struct construction in both branches; the new code shares it);
//   * calls ensure_struct_fits_in_address_space before constructing the
//     LLVM record type — `scapegoat` is the ADT's ty::t, used only to name
//     the offending type in the over-big-object error;
//   * fills Struct { size, align, sized, packed, fields } from the record.
- fn mk_struct ( cx : & CrateContext , tys : & [ ty:: t ] , packed : bool ) -> Struct {
349
- if tys. iter ( ) . all ( |& ty| ty:: type_is_sized ( cx. tcx ( ) , ty) ) {
350
- let lltys = tys. iter ( ) . map ( |& ty| type_of:: sizing_type_of ( cx, ty) ) . collect :: < Vec < _ > > ( ) ;
351
- let llty_rec = Type :: struct_ ( cx, lltys. as_slice ( ) , packed) ;
352
- Struct {
353
- size : machine:: llsize_of_alloc ( cx, llty_rec) ,
354
- align : machine:: llalign_of_min ( cx, llty_rec) ,
355
- sized : true ,
356
- packed : packed,
357
- fields : Vec :: from_slice ( tys) ,
358
- }
354
+ fn mk_struct ( cx : & CrateContext , tys : & [ ty:: t ] , packed : bool , scapegoat : ty:: t ) -> Struct {
355
+ let sized = tys. iter ( ) . all ( |& ty| ty:: type_is_sized ( cx. tcx ( ) , ty) ) ;
356
+ let lltys : Vec < Type > = if sized {
357
+ tys. iter ( )
358
+ . map ( |& ty| type_of:: sizing_type_of ( cx, ty) ) . collect ( )
359
359
} else {
360
- // Ignore any dynamically sized fields.
361
- let lltys = tys. iter ( ) . filter ( |& ty| ty:: type_is_sized ( cx. tcx ( ) , * ty) )
362
- . map ( |& ty| type_of:: sizing_type_of ( cx, ty) ) . collect :: < Vec < _ > > ( ) ;
363
- let llty_rec = Type :: struct_ ( cx, lltys. as_slice ( ) , packed) ;
364
- Struct {
365
- size : machine:: llsize_of_alloc ( cx, llty_rec) ,
366
- align : machine:: llalign_of_min ( cx, llty_rec) ,
367
- sized : false ,
368
- packed : packed,
369
- fields : Vec :: from_slice ( tys) ,
370
- }
360
+ tys. iter ( ) . filter ( |& ty| ty:: type_is_sized ( cx. tcx ( ) , * ty) )
361
+ . map ( |& ty| type_of:: sizing_type_of ( cx, ty) ) . collect ( )
362
+ } ;
363
+
364
+ ensure_struct_fits_in_address_space ( cx, lltys. as_slice ( ) , packed, scapegoat) ;
365
+
366
+ let llty_rec = Type :: struct_ ( cx, lltys. as_slice ( ) , packed) ;
367
+ Struct {
368
+ size : machine:: llsize_of_alloc ( cx, llty_rec) ,
369
+ align : machine:: llalign_of_min ( cx, llty_rec) ,
370
+ sized : sized,
371
+ packed : packed,
372
+ fields : Vec :: from_slice ( tys) ,
371
373
}
372
374
}
373
375
@@ -463,6 +465,51 @@ pub fn ty_of_inttype(ity: IntType) -> ty::t {
463
465
}
464
466
}
465
467
468
// NOTE(review): scraped diff residue — every code line below is an added
// ("+") line; the bare numbers are diff-gutter noise.
//
// ensure_struct_fits_in_address_space: walks the fields in declaration
// order, mimicking struct layout — unless the struct is packed, the running
// offset is first rounded up to each field's minimum alignment, then the
// field's allocation size is added — and reports an over-big object against
// `scapegoat` (the ADT's ty::t, used only for the error message) as soon as
// the running offset reaches ccx.max_obj_size(). The inline overflow
// comments justify why `offset` cannot wrap: each individual size/offset is
// assumed to stay below 1<<61.
+ // LLVM doesn't like types that don't fit in the address space
469
+ fn ensure_struct_fits_in_address_space ( ccx : & CrateContext ,
470
+ fields : & [ Type ] ,
471
+ packed : bool ,
472
+ scapegoat : ty:: t ) {
473
+ let mut offset = 0 ;
474
+ for & llty in fields. iter ( ) {
475
+ // Invariant: offset < ccx.max_obj_size() <= 1<<61
476
+ if !packed {
477
+ let type_align = machine:: llalign_of_min ( ccx, llty) ;
478
+ offset = roundup ( offset, type_align) ;
479
+ }
480
+ // type_align is a power-of-2, so still offset < ccx.max_obj_size()
481
+ // llsize_of_alloc(ccx, llty) is also less than ccx.max_obj_size()
482
+ // so the sum is less than 1<<62 (and therefore can't overflow).
483
+ offset += machine:: llsize_of_alloc ( ccx, llty) ;
484
+
485
+ if offset >= ccx. max_obj_size ( ) {
486
+ ccx. report_overbig_object ( scapegoat) ;
487
+ }
488
+ }
489
+ }
490
+
491
// NOTE(review): scraped diff residue — added ("+") lines plus diff-gutter
// numbers.
// union_size_and_align: layout of a union of the given structs — size is
// the maximum member size, alignment is that of the most-aligned member.
// `max_by` here is the pre-1.0 key-based maximum (today's `max_by_key`).
// Panics via `unwrap()` on an empty slice, so callers must supply at least
// one variant.
+ fn union_size_and_align ( sts : & [ Struct ] ) -> ( machine:: llsize , machine:: llalign ) {
492
+ let size = sts. iter ( ) . map ( |st| st. size ) . max ( ) . unwrap ( ) ;
493
+ let most_aligned = sts. iter ( ) . max_by ( |st| st. align ) . unwrap ( ) ;
494
+ ( size, most_aligned. align )
495
+ }
496
+
497
// NOTE(review): scraped diff residue — added ("+") lines plus diff-gutter
// numbers.
// ensure_enum_fits_in_address_space: conservatively bounds the size of a
// General-representation enum as the discriminant's allocation size, rounded
// up to the alignment of the union of all variant structs, plus that union's
// size; reports an over-big object against `scapegoat` when the bound
// reaches ccx.max_obj_size(). The inline comment justifies why the sum
// cannot overflow u64.
+ fn ensure_enum_fits_in_address_space ( ccx : & CrateContext ,
498
+ discr : IntType ,
499
+ fields : & [ Struct ] ,
500
+ scapegoat : ty:: t ) {
501
+ let discr_size = machine:: llsize_of_alloc ( ccx, ll_inttype ( ccx, discr) ) ;
502
+ let ( field_size, field_align) = union_size_and_align ( fields) ;
503
+
504
+ // field_align < 1<<32, discr_size <= 8, field_size < MAX_OBJ_SIZE <= 1<<61
505
+ // so the sum is less than 1<<62 (and can't overflow).
506
+ let total_size = roundup ( discr_size, field_align) + field_size;
507
+
508
+ if total_size >= ccx. max_obj_size ( ) {
509
+ ccx. report_overbig_object ( scapegoat) ;
510
+ }
511
+ }
512
+
466
513
467
514
/**
468
515
* LLVM-level types are a little complicated.
@@ -525,13 +572,12 @@ fn generic_type_of(cx: &CrateContext,
525
572
// of the size.
526
573
//
527
574
// FIXME #10604: this breaks when vector types are present.
528
- let size = sts. iter ( ) . map ( |st| st. size ) . max ( ) . unwrap ( ) ;
529
- let most_aligned = sts. iter ( ) . max_by ( |st| st. align ) . unwrap ( ) ;
530
- let align = most_aligned. align ;
575
+ let ( size, align) = union_size_and_align ( sts. as_slice ( ) ) ;
576
+ let align_s = align as u64 ;
531
577
let discr_ty = ll_inttype ( cx, ity) ;
532
- let discr_size = machine:: llsize_of_alloc ( cx, discr_ty) as u64 ;
533
- let align_units = ( size + align - 1 ) / align - 1 ;
534
- let pad_ty = match align {
578
+ let discr_size = machine:: llsize_of_alloc ( cx, discr_ty) ;
579
+ let align_units = ( size + align_s - 1 ) / align_s - 1 ;
580
+ let pad_ty = match align_s {
535
581
1 => Type :: array ( & Type :: i8 ( cx) , align_units) ,
536
582
2 => Type :: array ( & Type :: i16 ( cx) , align_units) ,
537
583
4 => Type :: array ( & Type :: i32 ( cx) , align_units) ,
@@ -541,10 +587,10 @@ fn generic_type_of(cx: &CrateContext,
541
587
align_units) ,
542
588
_ => fail ! ( "unsupported enum alignment: {}" , align)
543
589
} ;
544
- assert_eq ! ( machine:: llalign_of_min( cx, pad_ty) as u64 , align) ;
545
- assert_eq ! ( align % discr_size, 0 ) ;
590
+ assert_eq ! ( machine:: llalign_of_min( cx, pad_ty) , align) ;
591
+ assert_eq ! ( align_s % discr_size, 0 ) ;
546
592
let fields = vec ! ( discr_ty,
547
- Type :: array( & discr_ty, align / discr_size - 1 ) ,
593
+ Type :: array( & discr_ty, align_s / discr_size - 1 ) ,
548
594
pad_ty) ;
549
595
match name {
550
596
None => Type :: struct_ ( cx, fields. as_slice ( ) , false ) ,
@@ -653,9 +699,7 @@ fn load_discr(bcx: Block, ity: IntType, ptr: ValueRef, min: Disr, max: Disr)
653
699
} else {
654
700
// llvm::ConstantRange can deal with ranges that wrap around,
655
701
// so an overflow on (max + 1) is fine.
656
- LoadRangeAssert ( bcx, ptr, min as c_ulonglong ,
657
- ( max + 1 ) as c_ulonglong ,
658
- /* signed: */ True )
702
+ LoadRangeAssert ( bcx, ptr, min, ( max+1 ) , /* signed: */ True )
659
703
}
660
704
}
661
705
@@ -974,11 +1018,11 @@ fn compute_struct_field_offsets(ccx: &CrateContext, st: &Struct) -> Vec<u64> {
974
1018
for & ty in st. fields . iter ( ) {
975
1019
let llty = type_of:: sizing_type_of ( ccx, ty) ;
976
1020
if !st. packed {
977
- let type_align = type_of:: align_of ( ccx, ty) as u64 ;
1021
+ let type_align = type_of:: align_of ( ccx, ty) ;
978
1022
offset = roundup ( offset, type_align) ;
979
1023
}
980
1024
offsets. push ( offset) ;
981
- offset += machine:: llsize_of_alloc ( ccx, llty) as u64 ;
1025
+ offset += machine:: llsize_of_alloc ( ccx, llty) ;
982
1026
}
983
1027
assert_eq ! ( st. fields. len( ) , offsets. len( ) ) ;
984
1028
offsets
@@ -1005,8 +1049,7 @@ fn build_const_struct(ccx: &CrateContext, st: &Struct, vals: &[ValueRef])
1005
1049
let mut cfields = Vec :: new ( ) ;
1006
1050
for ( & val, & target_offset) in vals. iter ( ) . zip ( target_offsets. iter ( ) ) {
1007
1051
if !st. packed {
1008
- let val_align = machine:: llalign_of_min ( ccx, val_ty ( val) )
1009
- /*bad*/ as u64 ;
1052
+ let val_align = machine:: llalign_of_min ( ccx, val_ty ( val) ) ;
1010
1053
offset = roundup ( offset, val_align) ;
1011
1054
}
1012
1055
if offset != target_offset {
@@ -1015,7 +1058,7 @@ fn build_const_struct(ccx: &CrateContext, st: &Struct, vals: &[ValueRef])
1015
1058
}
1016
1059
assert ! ( !is_undef( val) ) ;
1017
1060
cfields. push ( val) ;
1018
- offset += machine:: llsize_of_alloc ( ccx, val_ty ( val) ) as u64 ;
1061
+ offset += machine:: llsize_of_alloc ( ccx, val_ty ( val) ) ;
1019
1062
}
1020
1063
1021
1064
assert ! ( st. sized && offset <= st. size) ;
@@ -1032,7 +1075,7 @@ fn padding(ccx: &CrateContext, size: u64) -> ValueRef {
1032
1075
1033
1076
// FIXME this utility routine should be somewhere more general
1034
1077
// Round `x` up to the next multiple of `a`.
//
// `a` is an alignment: it must be non-zero, and the alignments fed in by
// this file are powers of two (see the invariant comments in
// ensure_struct_fits_in_address_space). The widening `a as u64` is what the
// post-patch version introduced so callers can pass the u32 alignments that
// machine::llalign_of_min now returns.
//
// NOTE(review): the intermediate `x + (a - 1)` assumes x stays well below
// u64::MAX; that holds here because every size passed in is bounded by
// max_obj_size <= 1<<61 per the comments above.
#[inline]
fn roundup(x: u64, a: u32) -> u64 {
    let a = a as u64;
    ((x + (a - 1)) / a) * a
}
1036
1079
1037
1080
/// Get the discriminant of a constant value. (Not currently used.)
1038
1081
pub fn const_get_discrim ( ccx : & CrateContext , r : & Repr , val : ValueRef )
0 commit comments