mirror of https://github.com/rust-lang/rust.git
Auto merge of #125360 - RalfJung:packed-field-reorder, r=fmease
don't inhibit random field reordering on repr(packed(1))

`inhibit_struct_field_reordering_opt` being true means we exclude this type from random field shuffling. However, `packed(1)` types can still be shuffled! The logic was added in https://github.com/rust-lang/rust/pull/48528, since it is pointless to reorder fields in `packed(1)` types (there is no padding that could be saved) -- but that should not inhibit `-Zrandomize-layout` (which did not exist at the time). We could add an optimization elsewhere to not bother sorting the fields for `repr(packed)` types, but I don't think that's worth the effort.

This *does* change the behavior in that we may now reorder fields of `packed(1)` structs (e.g. if there are niches, we'll try to move them to the start/end, according to `NicheBias`). We were always allowed to do that, but so far we didn't. Quoting the [reference](https://doc.rust-lang.org/reference/type-layout.html):

> On their own, align and packed do not provide guarantees about the order of fields in the layout of a struct or the layout of an enum variant, although they may be combined with representations (such as C) which do provide such guarantees.
Commit: f2e1a3a80a
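To make the guarantee being discussed concrete, here is a small self-contained illustration (not part of the PR; the struct names are invented for this sketch): `packed(1)` only caps alignment and removes padding, it says nothing about field order, so after this change such fields may be reordered (e.g. under `-Zrandomize-layout`), whereas adding `C` is what pins the declaration order.

```rust
#![allow(dead_code)]

use std::mem::{offset_of, size_of};

// Field order here is NOT guaranteed: `packed(1)` only removes padding, so
// after this change the compiler may reorder `a`, `b`, `c` (e.g. under
// `-Zrandomize-layout`). The size is 4 bytes regardless of the order chosen.
#[repr(packed)]
struct PackedOnly {
    a: u8,
    b: u16,
    c: u8,
}

// Adding `C` is what actually fixes the field order.
#[repr(C, packed)]
struct PackedC {
    a: u8,
    b: u16,
    c: u8,
}

fn main() {
    assert_eq!(size_of::<PackedOnly>(), 4); // no padding in either layout
    assert_eq!(size_of::<PackedC>(), 4);
    // Guaranteed only for the `repr(C)` variant:
    assert_eq!(offset_of!(PackedC, a), 0);
    assert_eq!(offset_of!(PackedC, b), 1);
    assert_eq!(offset_of!(PackedC, c), 3);
    // For `PackedOnly` the observed offset is an implementation detail.
    println!("observed PackedOnly.b offset: {}", offset_of!(PackedOnly, b));
}
```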
```diff
@@ -970,7 +970,7 @@ fn univariant<
     let mut align = if pack.is_some() { dl.i8_align } else { dl.aggregate_align };
     let mut max_repr_align = repr.align;
     let mut inverse_memory_index: IndexVec<u32, FieldIdx> = fields.indices().collect();
-    let optimize = !repr.inhibit_struct_field_reordering_opt();
+    let optimize = !repr.inhibit_struct_field_reordering();
     if optimize && fields.len() > 1 {
         let end = if let StructKind::MaybeUnsized = kind { fields.len() - 1 } else { fields.len() };
         let optimizing = &mut inverse_memory_index.raw[..end];
```
```diff
@@ -1007,13 +1007,15 @@ fn univariant<
     // Calculates a sort key to group fields by their alignment or possibly some
     // size-derived pseudo-alignment.
     let alignment_group_key = |layout: &F| {
+        // The two branches here return values that cannot be meaningfully compared with
+        // each other. However, we know that consistently for all executions of
+        // `alignment_group_key`, one or the other branch will be taken, so this is okay.
         if let Some(pack) = pack {
+            // Return the packed alignment in bytes.
            layout.align.abi.min(pack).bytes()
         } else {
-            // Returns `log2(effective-align)`. This is ok since `pack` applies to all
-            // fields equally. The calculation assumes that size is an integer multiple of
-            // align, except for ZSTs.
+            // Returns `log2(effective-align)`. The calculation assumes that size is an
+            // integer multiple of align, except for ZSTs.
             let align = layout.align.abi.bytes();
             let size = layout.size.bytes();
             let niche_size = layout.largest_niche.map(|n| n.available(dl)).unwrap_or(0);
```
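For context on this hunk, here is a simplified standalone sketch of the idea behind `alignment_group_key` (the `FieldLayout` type and the sort below are illustrative assumptions, not the compiler's actual `rustc_abi` code): packed layouts group fields by their capped alignment in bytes, unpacked layouts by `log2(align)`, and for any given struct only one of the two key kinds is ever used, so the incomparable values never meet.

```rust
use std::cmp::Reverse;

/// Simplified stand-in for a field's layout; the real code works on
/// `rustc_abi` types.
#[derive(Clone, Copy)]
struct FieldLayout {
    align: u64, // bytes, a power of two
}

/// Mirrors the idea of `alignment_group_key`: the two branches return values
/// that must never be compared with each other, but for one struct only one
/// branch is ever taken.
fn alignment_group_key(field: FieldLayout, pack: Option<u64>) -> u64 {
    if let Some(pack) = pack {
        // Packed: group by the effective (capped) alignment in bytes.
        field.align.min(pack)
    } else {
        // Unpacked: group by log2(align).
        u64::from(field.align.trailing_zeros())
    }
}

fn main() {
    let fields = [
        FieldLayout { align: 1 },
        FieldLayout { align: 8 },
        FieldLayout { align: 2 },
    ];
    // Sorting by descending group key clusters larger-aligned fields together,
    // one of the heuristics univariant layout uses to keep padding small.
    let mut order: Vec<usize> = (0..fields.len()).collect();
    order.sort_by_key(|&i| Reverse(alignment_group_key(fields[i], None)));
    assert_eq!(order, [1, 2, 0]);
}
```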
```diff
@@ -137,23 +137,16 @@ impl ReprOptions {
         self.c() || self.int.is_some()
     }
 
-    /// Returns `true` if this `#[repr()]` should inhibit struct field reordering
-    /// optimizations, such as with `repr(C)`, `repr(packed(1))`, or `repr(<int>)`.
-    pub fn inhibit_struct_field_reordering_opt(&self) -> bool {
-        if let Some(pack) = self.pack {
-            if pack.bytes() == 1 {
-                return true;
-            }
-        }
-
+    /// Returns `true` if this `#[repr()]` guarantees a fixed field order,
+    /// e.g. `repr(C)` or `repr(<int>)`.
+    pub fn inhibit_struct_field_reordering(&self) -> bool {
         self.flags.intersects(ReprFlags::IS_UNOPTIMISABLE) || self.int.is_some()
     }
 
     /// Returns `true` if this type is valid for reordering and `-Z randomize-layout`
     /// was enabled for its declaration crate.
     pub fn can_randomize_type_layout(&self) -> bool {
-        !self.inhibit_struct_field_reordering_opt()
-            && self.flags.contains(ReprFlags::RANDOMIZE_LAYOUT)
+        !self.inhibit_struct_field_reordering() && self.flags.contains(ReprFlags::RANDOMIZE_LAYOUT)
     }
 
     /// Returns `true` if this `#[repr()]` should inhibit union ABI optimisations.
```
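As a toy model of the semantic change to `ReprOptions` (the `Repr` struct below is a simplification made up for this sketch, not the real `ReprFlags` encoding): only a `repr(C)`-style representation or an explicit integer repr now guarantees field order, whereas the removed `_opt` variant also treated `packed(1)` as order-inhibiting, which is what blocked `-Zrandomize-layout` for such types.

```rust
/// Toy stand-in for `ReprOptions`; the real type tracks more cases
/// (simd, transparent, linear, ...).
struct Repr {
    is_c: bool,        // repr(C)
    has_int: bool,     // repr(u8), repr(i32), ...
    pack: Option<u64>, // repr(packed(N)), in bytes
}

impl Repr {
    /// New behaviour: only `repr(C)`-like flags or an explicit integer repr
    /// guarantee a fixed field order; `packed(1)` alone no longer does.
    fn inhibit_struct_field_reordering(&self) -> bool {
        self.is_c || self.has_int
    }

    /// Old behaviour (removed by this PR): `packed(1)` also opted the type
    /// out of reordering, which in turn disabled `-Zrandomize-layout` for it.
    fn inhibit_struct_field_reordering_opt_old(&self) -> bool {
        matches!(self.pack, Some(1)) || self.is_c || self.has_int
    }
}

fn main() {
    let packed1 = Repr { is_c: false, has_int: false, pack: Some(1) };
    assert!(!packed1.inhibit_struct_field_reordering()); // may now be shuffled
    assert!(packed1.inhibit_struct_field_reordering_opt_old()); // used to be exempt
}
```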
```diff
@@ -278,7 +278,7 @@ fn reduce_ty<'tcx>(cx: &LateContext<'tcx>, mut ty: Ty<'tcx>) -> ReducedTy<'tcx>
             ty = sized_ty;
             continue;
         }
-        if def.repr().inhibit_struct_field_reordering_opt() {
+        if def.repr().inhibit_struct_field_reordering() {
             ReducedTy::OrderedFields(Some(sized_ty))
         } else {
             ReducedTy::UnorderedFields(ty)
```
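This call site is in Clippy's `transmute_undefined_repr` lint, where the predicate decides whether a user type's fields are in a guaranteed order. Roughly, the distinction it draws looks like this (hypothetical user code written for this sketch, not taken from the diff); with the rename, structs that are only `packed(1)` now fall on the "unordered" side of the split.

```rust
#![allow(dead_code)]

// Fixed layout: `repr(C)` guarantees the fields stay in this order.
#[repr(C)]
struct Ordered {
    a: u32,
    b: u32,
}

// Default repr: the compiler may reorder the fields.
struct Unordered {
    a: u32,
    b: u32,
}

fn main() {
    let x = Ordered { a: 1, b: 2 };
    // Transmuting between two `repr(C)` types with identical fields is
    // well-defined; transmuting into a default-repr type like this is
    // exactly what the lint warns about, because the field order of
    // `Unordered` is not guaranteed.
    let y: Unordered = unsafe { std::mem::transmute(x) };
    println!("a = {}, b = {}", y.a, y.b);
}
```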
```diff
@@ -1,7 +1,7 @@
 #![allow(dead_code)]
 
 // We use packed structs to get around alignment restrictions
-#[repr(packed)]
+#[repr(C, packed)]
 struct Data {
     pad: u8,
     ptr: &'static i32,
```
```diff
@@ -7,7 +7,7 @@ pub struct Aligned {
     _pad: [u8; 11],
     packed: Packed,
 }
-#[repr(packed)]
+#[repr(C, packed)]
 #[derive(Default, Copy, Clone)]
 pub struct Packed {
     _pad: [u8; 5],
```
```diff
@@ -7,7 +7,7 @@ fn main() {
     FOO;
 }
 
-#[repr(packed)]
+#[repr(C, packed)]
 struct Packed {
     a: [u8; 28],
     b: &'static i32,
```
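The tests updated in the three hunks above place a deliberately misaligned field behind a leading `pad`/`_pad` member, so they rely on the fields keeping their declaration order; switching to `repr(C, packed)` preserves that guarantee now that plain `packed` no longer opts a struct out of reordering. A minimal sketch of the pattern (simplified, not the actual test code):

```rust
use std::ptr;

// `C` pins the order, so `ptr` really does sit at the misaligned offset 1.
#[repr(C, packed)]
struct Data {
    pad: u8,
    ptr: &'static i32,
}

static VALUE: i32 = 42;

fn main() {
    let data = Data { pad: 0, ptr: &VALUE };
    assert_eq!(data.pad, 0); // reading a byte-aligned field is fine
    // Taking a plain reference to `ptr` would be rejected (it may be
    // misaligned); a raw pointer plus `read_unaligned` is the usual pattern.
    let p = ptr::addr_of!(data.ptr);
    let r: &'static i32 = unsafe { p.read_unaligned() };
    assert_eq!(*r, 42);
}
```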