update for rustc memory changes; fix (un)init intrinsic for ZST

This commit is contained in:
Ralf Jung 2018-08-18 11:59:28 +02:00
parent ea27e46a38
commit 6203bf445f
2 changed files with 34 additions and 31 deletions

View file

@@ -498,7 +498,7 @@ impl<'a, 'mir, 'tcx: 'mir + 'a> EvalContextExt<'tcx> for EvalContext<'a, 'mir, '
// Hook pthread calls that go to the thread-local storage memory subsystem
"pthread_key_create" => {
let key_ptr = self.read_scalar(args[0])?.not_undef()?;
let key_ptr = self.read_scalar(args[0])?.to_ptr()?;
// Extract the function type out of the signature (that seems easier than constructing it ourselves...)
let dtor = match self.read_scalar(args[1])?.not_undef()? {
@@ -525,7 +525,6 @@ impl<'a, 'mir, 'tcx: 'mir + 'a> EvalContextExt<'tcx> for EvalContext<'a, 'mir, '
key_layout.align,
Scalar::from_uint(key, key_layout.size).into(),
key_layout.size,
key_layout.align,
)?;
// Return success (0)

View file

@@ -295,21 +295,23 @@ impl<'a, 'mir, 'tcx> EvalContextExt<'tcx> for EvalContext<'a, 'mir, 'tcx, super:
"init" => {
// Check fast path: we don't want to force an allocation in case the destination is a simple value,
// but we also do not want to create a new allocation with 0s and then copy that over.
match dest.layout.abi {
layout::Abi::Scalar(ref s) => {
let x = Scalar::null(s.value.size(&self));
self.write_value(Value::Scalar(x.into()), dest)?;
}
layout::Abi::ScalarPair(ref s1, ref s2) => {
let x = Scalar::null(s1.value.size(&self));
let y = Scalar::null(s2.value.size(&self));
self.write_value(Value::ScalarPair(x.into(), y.into()), dest)?;
}
_ => {
// Do it in memory
let mplace = self.force_allocation(dest)?;
assert_eq!(mplace.extra, PlaceExtra::None);
self.memory.write_repeat(mplace.ptr, 0, dest.layout.size)?;
if !dest.layout.is_zst() { // nothing to do for ZST
match dest.layout.abi {
layout::Abi::Scalar(ref s) => {
let x = Scalar::null(s.value.size(&self));
self.write_value(Value::Scalar(x.into()), dest)?;
}
layout::Abi::ScalarPair(ref s1, ref s2) => {
let x = Scalar::null(s1.value.size(&self));
let y = Scalar::null(s2.value.size(&self));
self.write_value(Value::ScalarPair(x.into(), y.into()), dest)?;
}
_ => {
// Do it in memory
let mplace = self.force_allocation(dest)?;
assert_eq!(mplace.extra, PlaceExtra::None);
self.memory.write_repeat(mplace.ptr, 0, dest.layout.size)?;
}
}
}
}
@@ -571,20 +573,22 @@ impl<'a, 'mir, 'tcx> EvalContextExt<'tcx> for EvalContext<'a, 'mir, 'tcx, super:
"uninit" => {
// Check fast path: we don't want to force an allocation in case the destination is a simple value,
// but we also do not want to create a new allocation with 0s and then copy that over.
match dest.layout.abi {
layout::Abi::Scalar(..) => {
let x = ScalarMaybeUndef::Undef;
self.write_value(Value::Scalar(x), dest)?;
}
layout::Abi::ScalarPair(..) => {
let x = ScalarMaybeUndef::Undef;
self.write_value(Value::ScalarPair(x, x), dest)?;
}
_ => {
// Do it in memory
let mplace = self.force_allocation(dest)?;
assert_eq!(mplace.extra, PlaceExtra::None);
self.memory.mark_definedness(mplace.ptr, dest.layout.size, false)?;
if !dest.layout.is_zst() { // nothing to do for ZST
match dest.layout.abi {
layout::Abi::Scalar(..) => {
let x = ScalarMaybeUndef::Undef;
self.write_value(Value::Scalar(x), dest)?;
}
layout::Abi::ScalarPair(..) => {
let x = ScalarMaybeUndef::Undef;
self.write_value(Value::ScalarPair(x, x), dest)?;
}
_ => {
// Do it in memory
let mplace = self.force_allocation(dest)?;
assert_eq!(mplace.extra, PlaceExtra::None);
self.memory.mark_definedness(mplace.ptr.to_ptr()?, dest.layout.size, false)?;
}
}
}
}