@@ -289,32 +289,25 @@ impl<'tcx> LayoutCx<'tcx, TyCtxt<'tcx>> {
         let optimize = !repr.inhibit_struct_field_reordering_opt();
         if optimize {
+            let end =
+                if let StructKind::MaybeUnsized = kind { fields.len() - 1 } else { fields.len() };
+            let optimizing = &mut inverse_memory_index[..end];
             let field_align = |f: &TyAndLayout<'_>| {
                 if let Some(pack) = pack { f.align.abi.min(pack) } else { f.align.abi }
             };
             match kind {
-                StructKind::AlwaysSized => {
-                    inverse_memory_index.sort_by_key(|&x| {
+                StructKind::AlwaysSized | StructKind::MaybeUnsized => {
+                    optimizing.sort_by_key(|&x| {
                         // Place ZSTs first to avoid "interesting offsets",
                         // especially with only one or two non-ZST fields.
                         let f = &fields[x as usize];
                         (!f.is_zst(), cmp::Reverse(field_align(f)))
                     });
                 }
-                StructKind::MaybeUnsized => {
-                    // Sort in descending alignment, except for the last field,
-                    // which may be accessed through an unsized type.
-                    inverse_memory_index[..fields.len() - 1]
-                        .sort_by_key(|&x| cmp::Reverse(field_align(&fields[x as usize])));
-                    // Place ZSTs first to avoid "interesting offsets".
-                    // This will reorder the last field if it is a ZST, which is okay because
-                    // there's nothing in memory that could be accessed through an unsized type.
-                    inverse_memory_index.sort_by_key(|&x| !fields[x as usize].is_zst());
-                }
                 StructKind::Prefixed(..) => {
                     // Sort in ascending alignment so that the layout stay optimal
                     // regardless of the prefix
-                    inverse_memory_index.sort_by_key(|&x| field_align(&fields[x as usize]));
+                    optimizing.sort_by_key(|&x| field_align(&fields[x as usize]));
                 }
             }
         }
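For context: `StructKind::MaybeUnsized` marks a struct whose last field may be dynamically sized, so the new code sorts only the prefix `inverse_memory_index[..end]` and leaves that trailing field in place, using the same key as the `AlwaysSized` case (ZSTs first, then descending alignment). Below is a minimal standalone sketch of that rule, not rustc code; the `Field` type, the field names, and the example struct are hypothetical stand-ins for `TyAndLayout` and real layout data.

```rust
use std::cmp::Reverse;

/// Hypothetical stand-in for rustc's `TyAndLayout`: only the two properties
/// the sort key looks at (ZST-ness and alignment) are modelled here.
#[derive(Debug)]
struct Field {
    name: &'static str,
    size: u64,
    align: u64,
}

impl Field {
    fn is_zst(&self) -> bool {
        self.size == 0
    }
}

fn main() {
    // Hypothetical `struct S { a: u8, marker: (), b: u32, tail: [u8] }`:
    // the trailing `[u8]` field may be unsized, so it must keep its position.
    let fields = [
        Field { name: "a", size: 1, align: 1 },
        Field { name: "marker", size: 0, align: 1 },
        Field { name: "b", size: 4, align: 4 },
        Field { name: "tail", size: 0, align: 1 },
    ];

    // Start from source order; only the prefix (everything but the last
    // field) is reordered, mirroring `&mut inverse_memory_index[..end]`.
    let mut inverse_memory_index: Vec<usize> = (0..fields.len()).collect();
    let end = fields.len() - 1; // MaybeUnsized: exclude the trailing field
    inverse_memory_index[..end]
        .sort_by_key(|&x| (!fields[x].is_zst(), Reverse(fields[x].align)));

    // Memory order: marker (ZST first), b (align 4), a (align 1), tail (unmoved).
    for &x in &inverse_memory_index {
        println!("{}", fields[x].name);
    }
}
```

Running the sketch prints `marker`, `b`, `a`, `tail`: the ZST is placed first, higher-alignment fields next, and the possibly-unsized tail keeps its source position.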
@@ -397,78 +390,60 @@ impl<'tcx> LayoutCx<'tcx, TyCtxt<'tcx>> {
         // Unpack newtype ABIs and find scalar pairs.
         if sized && size.bytes() > 0 {
-            // All other fields must be ZSTs, and we need them to all start at 0.
-            let mut zst_offsets = offsets.iter().enumerate().filter(|&(i, _)| fields[i].is_zst());
-            if zst_offsets.all(|(_, o)| o.bytes() == 0) {
-                let mut non_zst_fields = fields.iter().enumerate().filter(|&(_, f)| !f.is_zst());
-
-                match (non_zst_fields.next(), non_zst_fields.next(), non_zst_fields.next()) {
-                    // We have exactly one non-ZST field.
-                    (Some((i, field)), None, None) => {
-                        // Field fills the struct and it has a scalar or scalar pair ABI.
-                        if offsets[i].bytes() == 0
-                            && align.abi == field.align.abi
-                            && size == field.size
-                        {
-                            match field.abi {
-                                // For plain scalars, or vectors of them, we can't unpack
-                                // newtypes for `#[repr(C)]`, as that affects C ABIs.
-                                Abi::Scalar(_) | Abi::Vector { .. } if optimize => {
-                                    abi = field.abi.clone();
-                                }
-                                // But scalar pairs are Rust-specific and get
-                                // treated as aggregates by C ABIs anyway.
-                                Abi::ScalarPair(..) => {
-                                    abi = field.abi.clone();
-                                }
-                                _ => {}
+            // All other fields must be ZSTs.
+            let mut non_zst_fields = fields.iter().enumerate().filter(|&(_, f)| !f.is_zst());
+
+            match (non_zst_fields.next(), non_zst_fields.next(), non_zst_fields.next()) {
+                // We have exactly one non-ZST field.
+                (Some((i, field)), None, None) => {
+                    // Field fills the struct and it has a scalar or scalar pair ABI.
+                    if offsets[i].bytes() == 0 && align.abi == field.align.abi && size == field.size
+                    {
+                        match field.abi {
+                            // For plain scalars, or vectors of them, we can't unpack
+                            // newtypes for `#[repr(C)]`, as that affects C ABIs.
+                            Abi::Scalar(_) | Abi::Vector { .. } if optimize => {
+                                abi = field.abi.clone();
+                            }
+                            // But scalar pairs are Rust-specific and get
+                            // treated as aggregates by C ABIs anyway.
+                            Abi::ScalarPair(..) => {
+                                abi = field.abi.clone();
                             }
+                            _ => {}
                         }
                     }
+                }
 
-                    // Two non-ZST fields, and they're both scalars.
-                    (
-                        Some((
-                            i,
-                            &TyAndLayout {
-                                layout: &Layout { abi: Abi::Scalar(ref a), .. }, ..
-                            },
-                        )),
-                        Some((
-                            j,
-                            &TyAndLayout {
-                                layout: &Layout { abi: Abi::Scalar(ref b), .. }, ..
-                            },
-                        )),
-                        None,
-                    ) => {
-                        // Order by the memory placement, not source order.
-                        let ((i, a), (j, b)) = if offsets[i] < offsets[j] {
-                            ((i, a), (j, b))
-                        } else {
-                            ((j, b), (i, a))
-                        };
-                        let pair = self.scalar_pair(a.clone(), b.clone());
-                        let pair_offsets = match pair.fields {
-                            FieldsShape::Arbitrary { ref offsets, ref memory_index } => {
-                                assert_eq!(memory_index, &[0, 1]);
-                                offsets
-                            }
-                            _ => bug!(),
-                        };
-                        if offsets[i] == pair_offsets[0]
-                            && offsets[j] == pair_offsets[1]
-                            && align == pair.align
-                            && size == pair.size
-                        {
-                            // We can use `ScalarPair` only when it matches our
-                            // already computed layout (including `#[repr(C)]`).
-                            abi = pair.abi;
+                // Two non-ZST fields, and they're both scalars.
+                (
+                    Some((i, &TyAndLayout { layout: &Layout { abi: Abi::Scalar(ref a), .. }, .. })),
+                    Some((j, &TyAndLayout { layout: &Layout { abi: Abi::Scalar(ref b), .. }, .. })),
+                    None,
+                ) => {
+                    // Order by the memory placement, not source order.
+                    let ((i, a), (j, b)) =
+                        if offsets[i] < offsets[j] { ((i, a), (j, b)) } else { ((j, b), (i, a)) };
+                    let pair = self.scalar_pair(a.clone(), b.clone());
+                    let pair_offsets = match pair.fields {
+                        FieldsShape::Arbitrary { ref offsets, ref memory_index } => {
+                            assert_eq!(memory_index, &[0, 1]);
+                            offsets
                         }
+                        _ => bug!(),
+                    };
+                    if offsets[i] == pair_offsets[0]
+                        && offsets[j] == pair_offsets[1]
+                        && align == pair.align
+                        && size == pair.size
+                    {
+                        // We can use `ScalarPair` only when it matches our
+                        // already computed layout (including `#[repr(C)]`).
+                        abi = pair.abi;
                     }
-
-                    _ => {}
                 }
+
+                _ => {}
             }
         }
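As annotation: this hunk keeps the shape of the case analysis and mainly drops the old requirement that every ZST field start at offset 0 (plus one level of nesting). Below is a rough standalone sketch of that shape only, not rustc code; `Field` and `FieldAbi` are hypothetical stand-ins for `TyAndLayout` and `Abi`, and the sketch omits the offset, size, and alignment checks the real code performs.

```rust
/// Hypothetical stand-in for rustc's `Abi`.
#[derive(Debug, Clone, PartialEq)]
enum FieldAbi {
    Scalar,
    ScalarPair,
    Aggregate,
}

/// Hypothetical stand-in for rustc's `TyAndLayout`.
struct Field {
    size: u64,
    abi: FieldAbi,
}

impl Field {
    fn is_zst(&self) -> bool {
        self.size == 0
    }
}

/// Mirror of the `match (next(), next(), next())` shape above: a single
/// non-ZST field may donate its ABI, two scalar fields may form a pair,
/// anything else stays an aggregate.
fn classify(fields: &[Field]) -> FieldAbi {
    let mut non_zst = fields.iter().filter(|f| !f.is_zst());
    match (non_zst.next(), non_zst.next(), non_zst.next()) {
        (Some(f), None, None) => f.abi.clone(),
        (Some(a), Some(b), None)
            if a.abi == FieldAbi::Scalar && b.abi == FieldAbi::Scalar =>
        {
            FieldAbi::ScalarPair
        }
        _ => FieldAbi::Aggregate,
    }
}

fn main() {
    // Newtype-like: a ZST marker plus one scalar field -> the field's ABI.
    let newtype = [
        Field { size: 0, abi: FieldAbi::Aggregate },
        Field { size: 8, abi: FieldAbi::Scalar },
    ];
    assert_eq!(classify(&newtype), FieldAbi::Scalar);

    // Two scalar fields -> scalar pair.
    let pair = [
        Field { size: 4, abi: FieldAbi::Scalar },
        Field { size: 4, abi: FieldAbi::Scalar },
    ];
    assert_eq!(classify(&pair), FieldAbi::ScalarPair);
    println!("ok");
}
```

In the real code the single-field case additionally requires the field to fill the struct, and the two-scalar case only adopts `ScalarPair` when the already-computed offsets, alignment, and size match those produced by `self.scalar_pair(a, b)`.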