mirror of https://github.com/rust-lang/rust.git
adjust ConstValue::Slice to work for arbitrary slice types
parent 0692db1a90
commit ea22adbabd
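Not part of the diff: a standalone sketch of the shape change this commit makes to the `Slice` variant. Placeholder types are used so the snippet compiles on its own; rustc's real `ConstAllocation<'tcx>` is an interned allocation, modeled here as a plain byte buffer.

```rust
// Standalone sketch with stand-in types, not rustc code.
type ConstAllocation = Vec<u8>;

// Before: a byte range into `data`, which only really fit `&str` / `&[u8]` literals.
#[allow(dead_code)]
enum ConstValueOld {
    Slice { data: ConstAllocation, start: usize, end: usize },
}

// After: `data` always starts at offset 0 and `meta` is the reference's metadata
// field (a target usize), so arbitrary slice-tailed pointees can be represented.
#[allow(dead_code)]
enum ConstValueNew {
    Slice { data: ConstAllocation, meta: u64 },
}

fn main() {
    let bytes = b"hello".to_vec();
    let _old = ConstValueOld::Slice { data: bytes.clone(), start: 0, end: 5 };
    let _new = ConstValueNew::Slice { data: bytes, meta: 5 };
}
```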
@@ -184,15 +184,11 @@ pub(crate) fn codegen_const_value<'tcx>(
                 .offset_i64(fx, i64::try_from(offset.bytes()).unwrap()),
             layout,
         ),
-        ConstValue::Slice { data, start, end } => {
+        ConstValue::Slice { data, meta } => {
             let alloc_id = fx.tcx.reserve_and_set_memory_alloc(data);
-            let ptr = pointer_for_allocation(fx, alloc_id)
-                .offset_i64(fx, i64::try_from(start).unwrap())
-                .get_addr(fx);
-            let len = fx
-                .bcx
-                .ins()
-                .iconst(fx.pointer_type, i64::try_from(end.checked_sub(start).unwrap()).unwrap());
+            let ptr = pointer_for_allocation(fx, alloc_id).get_addr(fx);
+            // FIXME: the `try_from` here can actually fail, e.g. for very long ZST slices.
+            let len = fx.bcx.ins().iconst(fx.pointer_type, i64::try_from(meta).unwrap());
             CValue::by_val_pair(ptr, len, layout)
         }
     }
@@ -100,15 +100,12 @@ impl<'a, 'tcx, V: CodegenObject> OperandRef<'tcx, V> {
                 OperandValue::Immediate(llval)
             }
             ConstValue::ZeroSized => return OperandRef::zero_sized(layout),
-            ConstValue::Slice { data, start, end } => {
+            ConstValue::Slice { data, meta } => {
                 let Abi::ScalarPair(a_scalar, _) = layout.abi else {
                     bug!("from_const: invalid ScalarPair layout: {:#?}", layout);
                 };
                 let a = Scalar::from_pointer(
-                    Pointer::new(
-                        bx.tcx().reserve_and_set_memory_alloc(data),
-                        Size::from_bytes(start),
-                    ),
+                    Pointer::new(bx.tcx().reserve_and_set_memory_alloc(data), Size::ZERO),
                     &bx.tcx(),
                 );
                 let a_llval = bx.scalar_to_backend(
@@ -116,7 +113,7 @@ impl<'a, 'tcx, V: CodegenObject> OperandRef<'tcx, V> {
                     a_scalar,
                     bx.scalar_pair_element_backend_type(layout, 0, true),
                 );
-                let b_llval = bx.const_usize((end - start) as u64);
+                let b_llval = bx.const_usize(meta);
                 OperandValue::Pair(a_llval, b_llval)
             }
             ConstValue::Indirect { alloc_id, offset } => {
@@ -151,19 +151,26 @@ pub(super) fn op_to_const<'tcx>(
             Immediate::Scalar(x) => ConstValue::Scalar(x),
             Immediate::ScalarPair(a, b) => {
                 debug!("ScalarPair(a: {:?}, b: {:?})", a, b);
-                // FIXME: assert that this has an appropriate type.
-                // Currently we actually get here for non-[u8] slices during valtree construction!
-                let msg = "`op_to_const` on an immediate scalar pair must only be used on slice references to actually allocated memory";
+                // This codepath solely exists for `valtree_to_const_value` to not need to generate
+                // a `ConstValue::Indirect` for wide references, so it is tightly restricted to just
+                // that case.
+                let pointee_ty = imm.layout.ty.builtin_deref(false).unwrap().ty; // `false` = no raw ptrs
+                debug_assert!(
+                    matches!(
+                        ecx.tcx.struct_tail_without_normalization(pointee_ty).kind(),
+                        ty::Str | ty::Slice(..),
+                    ),
+                    "`ConstValue::Slice` is for slice-tailed types only, but got {}",
+                    imm.layout.ty,
+                );
+                let msg = "`op_to_const` on an immediate scalar pair must only be used on slice references to the beginning of an actual allocation";
                 // We know `offset` is relative to the allocation, so we can use `into_parts`.
-                // We use `ConstValue::Slice` so that we don't have to generate an allocation for
-                // `ConstValue::Indirect` here.
                 let (alloc_id, offset) = a.to_pointer(ecx).expect(msg).into_parts();
                 let alloc_id = alloc_id.expect(msg);
                 let data = ecx.tcx.global_alloc(alloc_id).unwrap_memory();
-                let start = offset.bytes_usize();
-                let len = b.to_target_usize(ecx).expect(msg);
-                let len: usize = len.try_into().unwrap();
-                ConstValue::Slice { data, start, end: start + len }
+                assert!(offset == abi::Size::ZERO, "{}", msg);
+                let meta = b.to_target_usize(ecx).expect(msg);
+                ConstValue::Slice { data, meta }
             }
             Immediate::Uninit => bug!("`Uninit` is not a valid value for {}", op.layout.ty),
         },
@@ -351,7 +351,7 @@ impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {

         match (&src_pointee_ty.kind(), &dest_pointee_ty.kind()) {
             (&ty::Array(_, length), &ty::Slice(_)) => {
-                let ptr = self.read_scalar(src)?;
+                let ptr = self.read_pointer(src)?;
                 // u64 cast is from usize to u64, which is always good
                 let val = Immediate::new_slice(
                     ptr,
@@ -367,6 +367,7 @@ impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
                     return self.write_immediate(*val, dest);
                 }
                 let (old_data, old_vptr) = val.to_scalar_pair();
+                let old_data = old_data.to_pointer(self)?;
                 let old_vptr = old_vptr.to_pointer(self)?;
                 let (ty, old_trait) = self.get_ptr_vtable(old_vptr)?;
                 if old_trait != data_a.principal() {
@@ -378,7 +379,7 @@ impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
             (_, &ty::Dynamic(data, _, ty::Dyn)) => {
                 // Initial cast from sized to dyn trait
                 let vtable = self.get_vtable_ptr(src_pointee_ty, data.principal())?;
-                let ptr = self.read_scalar(src)?;
+                let ptr = self.read_pointer(src)?;
                 let val = Immediate::new_dyn_trait(ptr, vtable, &*self.tcx);
                 self.write_immediate(val, dest)
             }
@@ -62,7 +62,7 @@ pub(crate) fn eval_nullary_intrinsic<'tcx>(
         sym::type_name => {
             ensure_monomorphic_enough(tcx, tp_ty)?;
             let alloc = alloc_type_name(tcx, tp_ty);
-            ConstValue::Slice { data: alloc, start: 0, end: alloc.inner().len() }
+            ConstValue::Slice { data: alloc, meta: alloc.inner().size().bytes() }
         }
         sym::needs_drop => {
             ensure_monomorphic_enough(tcx, tp_ty)?;
@@ -43,24 +43,30 @@ impl<Prov: Provenance> From<Scalar<Prov>> for Immediate<Prov> {
 }

 impl<Prov: Provenance> Immediate<Prov> {
-    pub fn from_pointer(p: Pointer<Prov>, cx: &impl HasDataLayout) -> Self {
-        Immediate::Scalar(Scalar::from_pointer(p, cx))
+    pub fn from_pointer(ptr: Pointer<Prov>, cx: &impl HasDataLayout) -> Self {
+        Immediate::Scalar(Scalar::from_pointer(ptr, cx))
     }

-    pub fn from_maybe_pointer(p: Pointer<Option<Prov>>, cx: &impl HasDataLayout) -> Self {
-        Immediate::Scalar(Scalar::from_maybe_pointer(p, cx))
+    pub fn from_maybe_pointer(ptr: Pointer<Option<Prov>>, cx: &impl HasDataLayout) -> Self {
+        Immediate::Scalar(Scalar::from_maybe_pointer(ptr, cx))
     }

-    pub fn new_slice(val: Scalar<Prov>, len: u64, cx: &impl HasDataLayout) -> Self {
-        Immediate::ScalarPair(val, Scalar::from_target_usize(len, cx))
+    pub fn new_slice(ptr: Pointer<Option<Prov>>, len: u64, cx: &impl HasDataLayout) -> Self {
+        Immediate::ScalarPair(
+            Scalar::from_maybe_pointer(ptr, cx),
+            Scalar::from_target_usize(len, cx),
+        )
     }

     pub fn new_dyn_trait(
-        val: Scalar<Prov>,
+        val: Pointer<Option<Prov>>,
         vtable: Pointer<Option<Prov>>,
         cx: &impl HasDataLayout,
     ) -> Self {
-        Immediate::ScalarPair(val, Scalar::from_maybe_pointer(vtable, cx))
+        Immediate::ScalarPair(
+            Scalar::from_maybe_pointer(val, cx),
+            Scalar::from_maybe_pointer(vtable, cx),
+        )
     }

     #[inline]
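Aside, not part of the diff: the hunk above changes `Immediate::new_slice` and `new_dyn_trait` to take a `Pointer<Option<Prov>>` instead of a pre-wrapped `Scalar`, so the wrapping happens inside the constructor rather than at every call site. A deliberately simplified, self-contained model of that shape (these are stand-in types, not rustc's real `Scalar`/`Pointer`/`Immediate`):

```rust
// Stand-in types; only the shape of the signature change is modeled,
// not provenance or target layout handling.
#[derive(Debug, Clone, Copy)]
struct Pointer(usize);

#[derive(Debug, Clone, Copy)]
#[allow(dead_code)]
enum Scalar {
    Ptr(Pointer),
    Int(u64),
}

#[derive(Debug, Clone, Copy)]
#[allow(dead_code)]
enum Immediate {
    Scalar(Scalar),
    ScalarPair(Scalar, Scalar),
}

impl Immediate {
    // New shape: the caller passes the pointer itself; wrapping it into a
    // `Scalar` is the constructor's job.
    fn new_slice(ptr: Pointer, len: u64) -> Self {
        Immediate::ScalarPair(Scalar::Ptr(ptr), Scalar::Int(len))
    }
}

fn main() {
    let imm = Immediate::new_slice(Pointer(0x1000), 5);
    println!("{imm:?}");
}
```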
@@ -722,16 +728,13 @@ impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
             }
             mir::ConstValue::Scalar(x) => Operand::Immediate(adjust_scalar(x)?.into()),
             mir::ConstValue::ZeroSized => Operand::Immediate(Immediate::Uninit),
-            mir::ConstValue::Slice { data, start, end } => {
+            mir::ConstValue::Slice { data, meta } => {
                 // We rely on mutability being set correctly in `data` to prevent writes
                 // where none should happen.
-                let ptr = Pointer::new(
-                    self.tcx.reserve_and_set_memory_alloc(data),
-                    Size::from_bytes(start), // offset: `start`
-                );
+                let ptr = Pointer::new(self.tcx.reserve_and_set_memory_alloc(data), Size::ZERO);
                 Operand::Immediate(Immediate::new_slice(
-                    Scalar::from_pointer(self.global_base_pointer(ptr)?, &*self.tcx),
-                    u64::try_from(end.checked_sub(start).unwrap()).unwrap(), // len: `end - start`
+                    self.global_base_pointer(ptr)?.into(),
+                    meta,
                     self,
                 ))
             }
@@ -41,12 +41,20 @@ pub enum ConstValue<'tcx> {
     /// Only for ZSTs.
     ZeroSized,

-    /// Used for `&[u8]` and `&str`.
+    /// Used for references to unsized types with slice tail.
     ///
-    /// This is worth an optimized representation since Rust has literals of these types.
-    /// Not having to indirect those through an `AllocId` (or two, if we used `Indirect`) has shown
-    /// measurable performance improvements on stress tests.
-    Slice { data: ConstAllocation<'tcx>, start: usize, end: usize },
+    /// This is worth an optimized representation since Rust has literals of type `&str` and
+    /// `&[u8]`. Not having to indirect those through an `AllocId` (or two, if we used `Indirect`)
+    /// has shown measurable performance improvements on stress tests. We then reuse this
+    /// optimization for slice-tail types more generally during valtree-to-constval conversion.
+    Slice {
+        /// The allocation storing the slice contents.
+        /// This always points to the beginning of the allocation.
+        data: ConstAllocation<'tcx>,
+        /// The metadata field of the reference.
+        /// This is a "target usize", so we use `u64` as in the interpreter.
+        meta: u64,
+    },

     /// A value not representable by the other variants; needs to be stored in-memory.
     ///
@@ -65,7 +73,7 @@ pub enum ConstValue<'tcx> {
 }

 #[cfg(all(target_arch = "x86_64", target_pointer_width = "64"))]
-static_assert_size!(ConstValue<'_>, 32);
+static_assert_size!(ConstValue<'_>, 24);

 impl<'tcx> ConstValue<'tcx> {
     #[inline]
@@ -124,7 +132,7 @@ impl<'tcx> ConstValue<'tcx> {
             ConstValue::Scalar(_) | ConstValue::ZeroSized => {
                 bug!("`try_get_slice_bytes` on non-slice constant")
             }
-            &ConstValue::Slice { data, start, end } => (data, start, end),
+            &ConstValue::Slice { data, meta } => (data, 0, meta),
             &ConstValue::Indirect { alloc_id, offset } => {
                 // The reference itself is stored behind an indirection.
                 // Load the reference, and then load the actual slice contents.
@@ -151,18 +159,19 @@ impl<'tcx> ConstValue<'tcx> {
                     )
                     .ok()?;
                 let len = len.to_target_usize(&tcx).ok()?;
-                let len: usize = len.try_into().ok()?;
                 if len == 0 {
                     return Some(&[]);
                 }
                 // Non-empty slice, must have memory. We know this is a relative pointer.
                 let (inner_alloc_id, offset) = ptr.into_parts();
                 let data = tcx.global_alloc(inner_alloc_id?).unwrap_memory();
-                (data, offset.bytes_usize(), offset.bytes_usize() + len)
+                (data, offset.bytes(), offset.bytes() + len)
             }
         };

         // This is for diagnostics only, so we are okay to use `inspect_with_uninit_and_ptr_outside_interpreter`.
+        let start = start.try_into().unwrap();
+        let end = end.try_into().unwrap();
         Some(data.inner().inspect_with_uninit_and_ptr_outside_interpreter(start..end))
     }
 }
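Aside, not part of the diff: the new `meta` field stores the reference's metadata, which is an element count for slices and a byte length only in the `str`/`[u8]` case, which is why a plain `start..end` byte range no longer fits arbitrary slice-tailed types. A small standalone illustration in plain Rust, unrelated to rustc internals:

```rust
fn main() {
    let s: &str = "héllo"; // 5 chars, 6 bytes ('é' is 2 bytes in UTF-8)
    let bytes: &[u8] = s.as_bytes();
    let words: &[u32] = &[1, 2, 3];

    // For `str`, the wide-pointer metadata is the length in bytes.
    assert_eq!(s.len(), 6);
    // For `[u8]`, the element count happens to equal the byte size.
    assert_eq!(bytes.len(), 6);
    // For `[u32]`, the metadata is the element count, not the byte size.
    assert_eq!(words.len(), 3);
    assert_eq!(std::mem::size_of_val(words), 12);
}
```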
@@ -457,6 +457,7 @@ TrivialLiftImpls! {
     (),
     bool,
     usize,
+    u64,
 }

 // For some things about which the type library does not know, or does not
@@ -131,14 +131,14 @@ fn lit_to_mir_constant<'tcx>(
             let s = s.as_str();
             let allocation = Allocation::from_bytes_byte_aligned_immutable(s.as_bytes());
             let allocation = tcx.mk_const_alloc(allocation);
-            ConstValue::Slice { data: allocation, start: 0, end: s.len() }
+            ConstValue::Slice { data: allocation, meta: allocation.inner().size().bytes() }
         }
         (ast::LitKind::ByteStr(data, _), ty::Ref(_, inner_ty, _))
             if matches!(inner_ty.kind(), ty::Slice(_)) =>
         {
             let allocation = Allocation::from_bytes_byte_aligned_immutable(data as &[u8]);
             let allocation = tcx.mk_const_alloc(allocation);
-            ConstValue::Slice { data: allocation, start: 0, end: data.len() }
+            ConstValue::Slice { data: allocation, meta: allocation.inner().size().bytes() }
         }
         (ast::LitKind::ByteStr(data, _), ty::Ref(_, inner_ty, _)) if inner_ty.is_array() => {
             let id = tcx.allocate_bytes(data);
@@ -148,7 +148,7 @@ fn lit_to_mir_constant<'tcx>(
         {
             let allocation = Allocation::from_bytes_byte_aligned_immutable(data as &[u8]);
             let allocation = tcx.mk_const_alloc(allocation);
-            ConstValue::Slice { data: allocation, start: 0, end: data.len() }
+            ConstValue::Slice { data: allocation, meta: allocation.inner().size().bytes() }
         }
         (ast::LitKind::Byte(n), ty::Uint(ty::UintTy::U8)) => {
             ConstValue::Scalar(Scalar::from_uint(*n, Size::from_bytes(1)))
@@ -1449,7 +1449,7 @@ fn collect_const_value<'tcx>(
             collect_alloc(tcx, ptr.provenance, output)
         }
         mir::ConstValue::Indirect { alloc_id, .. } => collect_alloc(tcx, alloc_id, output),
-        mir::ConstValue::Slice { data, start: _, end: _ } => {
+        mir::ConstValue::Slice { data, meta: _ } => {
            for &id in data.inner().provenance().ptrs().values() {
                collect_alloc(tcx, id, output);
            }
@@ -47,14 +47,12 @@ pub fn new_allocation<'tcx>(
                 tables.tcx.layout_of(rustc_middle::ty::ParamEnv::empty().and(ty)).unwrap().align;
             new_empty_allocation(align.abi)
         }
-        ConstValue::Slice { data, start, end } => {
+        ConstValue::Slice { data, meta } => {
             let alloc_id = tables.tcx.reserve_and_set_memory_alloc(data);
-            let ptr = Pointer::new(alloc_id, rustc_target::abi::Size::from_bytes(start));
+            let ptr = Pointer::new(alloc_id, rustc_target::abi::Size::ZERO);
             let scalar_ptr = rustc_middle::mir::interpret::Scalar::from_pointer(ptr, &tables.tcx);
-            let scalar_len = rustc_middle::mir::interpret::Scalar::from_target_usize(
-                (end - start) as u64,
-                &tables.tcx,
-            );
+            let scalar_meta =
+                rustc_middle::mir::interpret::Scalar::from_target_usize(meta, &tables.tcx);
             let layout =
                 tables.tcx.layout_of(rustc_middle::ty::ParamEnv::reveal_all().and(ty)).unwrap();
             let mut allocation =
@@ -69,8 +67,8 @@ pub fn new_allocation<'tcx>(
             allocation
                 .write_scalar(
                     &tables.tcx,
-                    alloc_range(tables.tcx.data_layout.pointer_size, scalar_len.size()),
-                    scalar_len,
+                    alloc_range(tables.tcx.data_layout.pointer_size, scalar_meta.size()),
+                    scalar_meta,
                 )
                 .unwrap();
             allocation.stable(tables)
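Aside, not part of the diff: in the allocation built above, the pointer scalar occupies the first word and the `meta` scalar is written starting at `pointer_size`, matching the two-word layout of a wide reference. A quick standalone check of that size on mainstream targets (this snippet does not rely on the field order inside the fat pointer):

```rust
fn main() {
    // A reference to a slice-tailed type is a "wide" pointer: data pointer + metadata.
    // On current targets that is two pointer-sized words, which is why the second
    // scalar is written starting at `pointer_size`.
    assert_eq!(std::mem::size_of::<&[u32]>(), 2 * std::mem::size_of::<usize>());
    assert_eq!(std::mem::size_of::<&str>(), 2 * std::mem::size_of::<usize>());
    println!("wide pointers are two words here");
}
```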
@@ -89,7 +89,7 @@ pub trait EvalContextExt<'mir, 'tcx: 'mir>: crate::MiriInterpCxExt<'mir, 'tcx> {
         }

         this.write_immediate(
-            Immediate::new_slice(Scalar::from_maybe_pointer(alloc.ptr(), this), len, this),
+            Immediate::new_slice(alloc.ptr(), len, this),
             dest,
         )?;
     }