Auto merge of #148766 - cjgillot:mir-const-runtime-checks, r=RalfJung,saethlin

Replace Rvalue::NullaryOp by a variant in mir::Operand.

Based on https://github.com/rust-lang/rust/pull/148151

This PR fully removes the MIR `Rvalue::NullaryOp`. After rust-lang/rust#148151, it was only useful for runtime checks like `ub_checks`, `contract_checks` and `overflow_checks`.

These are "runtime" checks: boolean constants whose actual value is only determined at codegen time. Their value depends on a rustc flag passed to codegen, so we need to represent those flags cross-crate.

This PR replaces those runtime checks by special variants in MIR `ConstValue`. This allows code that expects constants to manipulate those as such, even if we may not always be able to evaluate them to actual scalars.
This commit is contained in:
bors 2025-12-22 06:58:28 +00:00
commit 000ccd651d
65 changed files with 1103 additions and 409 deletions

View file

@ -764,6 +764,7 @@ impl<'tcx> MirBorrowckCtxt<'_, '_, 'tcx> {
{
// Just point to the function, to reduce the chance of overlapping spans.
let function_span = match func {
Operand::RuntimeChecks(_) => span,
Operand::Constant(c) => c.span,
Operand::Copy(place) | Operand::Move(place) => {
if let Some(l) = place.as_local() {
@ -809,6 +810,7 @@ impl<'tcx> MirBorrowckCtxt<'_, '_, 'tcx> {
{
// Just point to the function, to reduce the chance of overlapping spans.
let function_span = match func {
Operand::RuntimeChecks(_) => span,
Operand::Constant(c) => c.span,
Operand::Copy(place) | Operand::Move(place) => {
if let Some(l) = place.as_local() {

View file

@ -1559,10 +1559,6 @@ impl<'a, 'tcx> MirBorrowckCtxt<'a, '_, 'tcx> {
self.consume_operand(location, (operand2, span), state);
}
Rvalue::NullaryOp(_op) => {
// nullary ops take no dynamic input; no borrowck effect.
}
Rvalue::Aggregate(aggregate_kind, operands) => {
// We need to report back the list of mutable upvars that were
// moved into the closure and subsequently used by the closure,
@ -1699,7 +1695,7 @@ impl<'a, 'tcx> MirBorrowckCtxt<'a, '_, 'tcx> {
_ => propagate_closure_used_mut_place(self, place),
}
}
Operand::Constant(..) => {}
Operand::Constant(..) | Operand::RuntimeChecks(_) => {}
}
}
@ -1750,7 +1746,7 @@ impl<'a, 'tcx> MirBorrowckCtxt<'a, '_, 'tcx> {
state,
);
}
Operand::Constant(_) => {}
Operand::Constant(_) | Operand::RuntimeChecks(_) => {}
}
}

View file

@ -247,7 +247,7 @@ impl<'a, 'tcx> LoanInvalidationsGenerator<'a, 'tcx> {
LocalMutationIsAllowed::Yes,
);
}
Operand::Constant(_) => {}
Operand::Constant(_) | Operand::RuntimeChecks(_) => {}
}
}
@ -314,8 +314,6 @@ impl<'a, 'tcx> LoanInvalidationsGenerator<'a, 'tcx> {
self.consume_operand(location, operand2);
}
Rvalue::NullaryOp(_op) => {}
Rvalue::Aggregate(_, operands) => {
for operand in operands {
self.consume_operand(location, operand);

View file

@ -1023,7 +1023,7 @@ impl<'a, 'tcx> Visitor<'tcx> for TypeChecker<'a, 'tcx> {
// element, so we require the `Copy` trait.
if len.try_to_target_usize(tcx).is_none_or(|len| len > 1) {
match operand {
Operand::Copy(..) | Operand::Constant(..) => {
Operand::Copy(..) | Operand::Constant(..) | Operand::RuntimeChecks(_) => {
// These are always okay: direct use of a const, or a value that can
// evidently be copied.
}
@ -1046,8 +1046,6 @@ impl<'a, 'tcx> Visitor<'tcx> for TypeChecker<'a, 'tcx> {
}
}
&Rvalue::NullaryOp(NullOp::RuntimeChecks(_)) => {}
Rvalue::ShallowInitBox(_operand, ty) => {
let trait_ref =
ty::TraitRef::new(tcx, tcx.require_lang_item(LangItem::Sized, span), [*ty]);
@ -2276,7 +2274,6 @@ impl<'a, 'tcx> TypeChecker<'a, 'tcx> {
| Rvalue::Cast(..)
| Rvalue::ShallowInitBox(..)
| Rvalue::BinaryOp(..)
| Rvalue::NullaryOp(..)
| Rvalue::CopyForDeref(..)
| Rvalue::UnaryOp(..)
| Rvalue::Discriminant(..)

View file

@ -10,7 +10,7 @@ use rustc_data_structures::profiling::SelfProfilerRef;
use rustc_index::IndexVec;
use rustc_middle::ty::TypeVisitableExt;
use rustc_middle::ty::adjustment::PointerCoercion;
use rustc_middle::ty::layout::{FnAbiOf, HasTypingEnv};
use rustc_middle::ty::layout::FnAbiOf;
use rustc_middle::ty::print::with_no_trimmed_paths;
use rustc_session::config::OutputFilenames;
use rustc_span::Symbol;
@ -853,17 +853,6 @@ fn codegen_stmt<'tcx>(fx: &mut FunctionCx<'_, '_, 'tcx>, cur_block: Block, stmt:
fx.bcx.ins().nop();
}
}
Rvalue::NullaryOp(ref null_op) => {
assert!(lval.layout().ty.is_sized(fx.tcx, fx.typing_env()));
let val = match null_op {
NullOp::RuntimeChecks(kind) => kind.value(fx.tcx.sess),
};
let val = CValue::by_val(
fx.bcx.ins().iconst(types::I8, i64::from(val)),
fx.layout_of(fx.tcx.types.bool),
);
lval.write_cvalue(fx, val);
}
Rvalue::Aggregate(ref kind, ref operands)
if matches!(**kind, AggregateKind::RawPtr(..)) =>
{
@ -1050,6 +1039,11 @@ pub(crate) fn codegen_operand<'tcx>(
cplace.to_cvalue(fx)
}
Operand::Constant(const_) => crate::constant::codegen_constant_operand(fx, const_),
Operand::RuntimeChecks(checks) => {
let val = checks.value(fx.tcx.sess);
let layout = fx.layout_of(fx.tcx.types.bool);
return CValue::const_val(fx, layout, val.into());
}
}
}

View file

@ -540,6 +540,7 @@ pub(crate) fn mir_operand_get_const_val<'tcx>(
operand: &Operand<'tcx>,
) -> Option<ScalarInt> {
match operand {
Operand::RuntimeChecks(checks) => Some(checks.value(fx.tcx.sess).into()),
Operand::Constant(const_) => eval_mir_constant(fx, const_).0.try_to_scalar_int(),
// FIXME(rust-lang/rust#85105): Casts like `IMM8 as u32` result in the const being stored
// inside a temporary before being passed to the intrinsic requiring the const argument.

View file

@ -1056,6 +1056,17 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
OperandRef { move_annotation, ..self.codegen_consume(bx, place.as_ref()) }
}
mir::Operand::RuntimeChecks(checks) => {
let layout = bx.layout_of(bx.tcx().types.bool);
let BackendRepr::Scalar(scalar) = layout.backend_repr else {
bug!("from_const: invalid ByVal layout: {:#?}", layout);
};
let x = Scalar::from_bool(checks.value(bx.tcx().sess));
let llval = bx.scalar_to_backend(x, scalar, bx.immediate_backend_type(layout));
let val = OperandValue::Immediate(llval);
OperandRef { val, layout, move_annotation: None }
}
mir::Operand::Constant(ref constant) => {
let constant_ty = self.monomorphize(constant.ty());
// Most SIMD vector constants should be passed as immediates.

View file

@ -619,21 +619,6 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
}
}
mir::Rvalue::NullaryOp(ref null_op) => {
let val = match null_op {
mir::NullOp::RuntimeChecks(kind) => {
let val = kind.value(bx.tcx().sess);
bx.cx().const_bool(val)
}
};
let tcx = self.cx.tcx();
OperandRef {
val: OperandValue::Immediate(val),
layout: self.cx.layout_of(null_op.ty(tcx)),
move_annotation: None,
}
}
mir::Rvalue::ThreadLocalRef(def_id) => {
assert!(bx.cx().tcx().is_static(def_id));
let layout = bx.layout_of(bx.cx().tcx().static_ptr_ty(def_id, bx.typing_env()));

View file

@ -645,7 +645,6 @@ impl<'tcx> Visitor<'tcx> for Checker<'_, 'tcx> {
Rvalue::Cast(_, _, _) => {}
Rvalue::NullaryOp(NullOp::RuntimeChecks(_)) => {}
Rvalue::ShallowInitBox(_, _) => {}
Rvalue::UnaryOp(op, operand) => {

View file

@ -230,9 +230,7 @@ where
F: FnMut(Local) -> bool,
{
match rvalue {
Rvalue::ThreadLocalRef(_) | Rvalue::NullaryOp(..) => {
Q::in_any_value_of_ty(cx, rvalue.ty(cx.body, cx.tcx))
}
Rvalue::ThreadLocalRef(_) => Q::in_any_value_of_ty(cx, rvalue.ty(cx.body, cx.tcx)),
Rvalue::Discriminant(place) => in_place::<Q, _>(cx, in_local, place.as_ref()),
@ -340,6 +338,7 @@ where
Operand::Copy(place) | Operand::Move(place) => {
return in_place::<Q, _>(cx, in_local, place.as_ref());
}
Operand::RuntimeChecks(_) => return Q::in_any_value_of_ty(cx, cx.tcx.types.bool),
Operand::Constant(c) => c,
};

View file

@ -198,7 +198,6 @@ where
| mir::Rvalue::ThreadLocalRef(..)
| mir::Rvalue::Repeat(..)
| mir::Rvalue::BinaryOp(..)
| mir::Rvalue::NullaryOp(..)
| mir::Rvalue::UnaryOp(..)
| mir::Rvalue::Discriminant(..)
| mir::Rvalue::Aggregate(..)

View file

@ -122,6 +122,13 @@ impl<'tcx> interpret::Machine<'tcx> for DummyMachine {
unimplemented!()
}
#[inline(always)]
fn runtime_checks(_ecx: &InterpCx<'tcx, Self>, r: RuntimeChecks) -> InterpResult<'tcx, bool> {
// Runtime checks have different value depending on the crate they are codegenned in.
// Verify we aren't trying to evaluate them in mir-optimizations.
panic!("compiletime machine evaluated {r:?}")
}
fn binary_ptr_op(
ecx: &InterpCx<'tcx, Self>,
bin_op: BinOp,

View file

@ -637,6 +637,16 @@ impl<'tcx> interpret::Machine<'tcx> for CompileTimeMachine<'tcx> {
Err(ConstEvalErrKind::AssertFailure(err)).into()
}
#[inline(always)]
fn runtime_checks(
_ecx: &InterpCx<'tcx, Self>,
_r: mir::RuntimeChecks,
) -> InterpResult<'tcx, bool> {
// We can't look at `tcx.sess` here as that can differ across crates, which can lead to
// unsound differences in evaluating the same constant at different instantiation sites.
interp_ok(true)
}
fn binary_ptr_op(
_ecx: &InterpCx<'tcx, Self>,
_bin_op: mir::BinOp,

View file

@ -298,7 +298,7 @@ pub trait Machine<'tcx>: Sized {
interp_ok(())
}
/// Determines the result of a `NullaryOp::RuntimeChecks` invocation.
/// Determines the result of a `Operand::RuntimeChecks` invocation.
fn runtime_checks(
_ecx: &InterpCx<'tcx, Self>,
r: mir::RuntimeChecks,
@ -680,16 +680,6 @@ pub macro compile_time_machine(<$tcx: lifetime>) {
true
}
#[inline(always)]
fn runtime_checks(
_ecx: &InterpCx<$tcx, Self>,
_r: mir::RuntimeChecks,
) -> InterpResult<$tcx, bool> {
// We can't look at `tcx.sess` here as that can differ across crates, which can lead to
// unsound differences in evaluating the same constant at different instantiation sites.
interp_ok(true)
}
#[inline(always)]
fn adjust_global_allocation<'b>(
_ecx: &InterpCx<$tcx, Self>,

View file

@ -845,6 +845,11 @@ impl<'tcx, M: Machine<'tcx>> InterpCx<'tcx, M> {
// FIXME: do some more logic on `move` to invalidate the old location
&Copy(place) | &Move(place) => self.eval_place_to_op(place, layout)?,
&RuntimeChecks(checks) => {
let val = M::runtime_checks(self, checks)?;
ImmTy::from_bool(val, self.tcx()).into()
}
Constant(constant) => {
let c = self.instantiate_from_current_frame_and_normalize_erasing_regions(
constant.const_,

View file

@ -1,7 +1,6 @@
use either::Either;
use rustc_abi::Size;
use rustc_apfloat::{Float, FloatConvert};
use rustc_middle::mir::NullOp;
use rustc_middle::mir::interpret::{InterpResult, PointerArithmetic, Scalar};
use rustc_middle::ty::layout::TyAndLayout;
use rustc_middle::ty::{self, FloatTy, ScalarInt};
@ -505,11 +504,4 @@ impl<'tcx, M: Machine<'tcx>> InterpCx<'tcx, M> {
}
}
}
pub fn nullary_op(&self, null_op: NullOp) -> InterpResult<'tcx, ImmTy<'tcx, M::Provenance>> {
use rustc_middle::mir::NullOp::*;
interp_ok(match null_op {
RuntimeChecks(r) => ImmTy::from_bool(M::runtime_checks(self, r)?, *self.tcx),
})
}
}

View file

@ -203,11 +203,6 @@ impl<'tcx, M: Machine<'tcx>> InterpCx<'tcx, M> {
self.write_immediate(*result, &dest)?;
}
NullaryOp(null_op) => {
let val = self.nullary_op(null_op)?;
self.write_immediate(*val, &dest)?;
}
Aggregate(box ref kind, ref operands) => {
self.write_aggregate(kind, operands, &dest)?;
}
@ -392,7 +387,7 @@ impl<'tcx, M: Machine<'tcx>> InterpCx<'tcx, M> {
move_definitely_disjoint: bool,
) -> InterpResult<'tcx, FnArg<'tcx, M::Provenance>> {
interp_ok(match op {
mir::Operand::Copy(_) | mir::Operand::Constant(_) => {
mir::Operand::Copy(_) | mir::Operand::Constant(_) | mir::Operand::RuntimeChecks(_) => {
// Make a regular copy.
let op = self.eval_operand(op, None)?;
FnArg::Copy(op)

View file

@ -620,6 +620,10 @@ impl<'tcx> Body<'tcx> {
let bits = eval_mono_const(constant)?;
return Some((bits, targets));
}
Operand::RuntimeChecks(check) => {
let bits = check.value(tcx.sess) as u128;
return Some((bits, targets));
}
Operand::Move(place) | Operand::Copy(place) => place,
};
@ -649,9 +653,6 @@ impl<'tcx> Body<'tcx> {
}
match rvalue {
Rvalue::NullaryOp(NullOp::RuntimeChecks(kind)) => {
Some((kind.value(tcx.sess) as u128, targets))
}
Rvalue::Use(Operand::Constant(constant)) => {
let bits = eval_mono_const(constant)?;
Some((bits, targets))

View file

@ -1097,15 +1097,6 @@ impl<'tcx> Debug for Rvalue<'tcx> {
BinaryOp(ref op, box (ref a, ref b)) => write!(fmt, "{op:?}({a:?}, {b:?})"),
UnaryOp(ref op, ref a) => write!(fmt, "{op:?}({a:?})"),
Discriminant(ref place) => write!(fmt, "discriminant({place:?})"),
NullaryOp(ref op) => match op {
NullOp::RuntimeChecks(RuntimeChecks::UbChecks) => write!(fmt, "UbChecks()"),
NullOp::RuntimeChecks(RuntimeChecks::ContractChecks) => {
write!(fmt, "ContractChecks()")
}
NullOp::RuntimeChecks(RuntimeChecks::OverflowChecks) => {
write!(fmt, "OverflowChecks()")
}
},
ThreadLocalRef(did) => ty::tls::with(|tcx| {
let muta = tcx.static_mutability(did).unwrap().prefix_str();
write!(fmt, "&/*tls*/ {}{}", muta, tcx.def_path_str(did))
@ -1264,6 +1255,7 @@ impl<'tcx> Debug for Operand<'tcx> {
Constant(ref a) => write!(fmt, "{a:?}"),
Copy(ref place) => write!(fmt, "copy {place:?}"),
Move(ref place) => write!(fmt, "move {place:?}"),
RuntimeChecks(checks) => write!(fmt, "{checks:?}"),
}
}
}

View file

@ -642,7 +642,7 @@ impl<'tcx> Operand<'tcx> {
pub fn to_copy(&self) -> Self {
match *self {
Operand::Copy(_) | Operand::Constant(_) => self.clone(),
Operand::Copy(_) | Operand::Constant(_) | Operand::RuntimeChecks(_) => self.clone(),
Operand::Move(place) => Operand::Copy(place),
}
}
@ -652,7 +652,7 @@ impl<'tcx> Operand<'tcx> {
pub fn place(&self) -> Option<Place<'tcx>> {
match self {
Operand::Copy(place) | Operand::Move(place) => Some(*place),
Operand::Constant(_) => None,
Operand::Constant(_) | Operand::RuntimeChecks(_) => None,
}
}
@ -661,7 +661,7 @@ impl<'tcx> Operand<'tcx> {
pub fn constant(&self) -> Option<&ConstOperand<'tcx>> {
match self {
Operand::Constant(x) => Some(&**x),
Operand::Copy(_) | Operand::Move(_) => None,
Operand::Copy(_) | Operand::Move(_) | Operand::RuntimeChecks(_) => None,
}
}
@ -681,6 +681,7 @@ impl<'tcx> Operand<'tcx> {
match self {
&Operand::Copy(ref l) | &Operand::Move(ref l) => l.ty(local_decls, tcx).ty,
Operand::Constant(c) => c.const_.ty(),
Operand::RuntimeChecks(_) => tcx.types.bool,
}
}
@ -693,6 +694,8 @@ impl<'tcx> Operand<'tcx> {
local_decls.local_decls()[l.local].source_info.span
}
Operand::Constant(c) => c.span,
// User code should not contain this operand, so we should not need this span.
Operand::RuntimeChecks(_) => DUMMY_SP,
}
}
}
@ -756,7 +759,6 @@ impl<'tcx> Rvalue<'tcx> {
_,
)
| Rvalue::BinaryOp(_, _)
| Rvalue::NullaryOp(_)
| Rvalue::UnaryOp(_, _)
| Rvalue::Discriminant(_)
| Rvalue::Aggregate(_, _)
@ -794,7 +796,6 @@ impl<'tcx> Rvalue<'tcx> {
op.ty(tcx, arg_ty)
}
Rvalue::Discriminant(ref place) => place.ty(local_decls, tcx).ty.discriminant_ty(tcx),
Rvalue::NullaryOp(NullOp::RuntimeChecks(_)) => tcx.types.bool,
Rvalue::Aggregate(ref ak, ref ops) => match **ak {
AggregateKind::Array(ty) => Ty::new_array(tcx, ty, ops.len() as u64),
AggregateKind::Tuple => {
@ -858,14 +859,6 @@ impl BorrowKind {
}
}
impl NullOp {
pub fn ty<'tcx>(&self, tcx: TyCtxt<'tcx>) -> Ty<'tcx> {
match self {
NullOp::RuntimeChecks(_) => tcx.types.bool,
}
}
}
impl<'tcx> UnOp {
pub fn ty(&self, tcx: TyCtxt<'tcx>, arg_ty: Ty<'tcx>) -> Ty<'tcx> {
match self {

View file

@ -1327,6 +1327,10 @@ pub enum Operand<'tcx> {
/// Constants are already semantically values, and remain unchanged.
Constant(Box<ConstOperand<'tcx>>),
/// Query the compilation session of the current crate for a particular flag. This is not quite
/// a const since its value can differ across crates within a single crate graph.
RuntimeChecks(RuntimeChecks),
}
#[derive(Clone, Copy, PartialEq, TyEncodable, TyDecodable, Hash, HashStable)]
@ -1418,9 +1422,6 @@ pub enum Rvalue<'tcx> {
/// matching types and return a value of that type.
BinaryOp(BinOp, Box<(Operand<'tcx>, Operand<'tcx>)>),
/// Computes a value as described by the operation.
NullaryOp(NullOp),
/// Exactly like `BinaryOp`, but less operands.
///
/// Also does two's-complement arithmetic. Negation requires a signed integer or a float;
@ -1561,12 +1562,6 @@ pub enum AggregateKind<'tcx> {
RawPtr(Ty<'tcx>, Mutability),
}
#[derive(Copy, Clone, Debug, PartialEq, Eq, TyEncodable, TyDecodable, Hash, HashStable)]
pub enum NullOp {
/// Returns whether we should perform some checking at runtime.
RuntimeChecks(RuntimeChecks),
}
#[derive(Copy, Clone, Debug, PartialEq, Eq, TyEncodable, TyDecodable, Hash, HashStable)]
pub enum RuntimeChecks {
/// Returns whether we should perform some UB-checking at runtime.

View file

@ -293,9 +293,9 @@ pub fn reverse_postorder<'a, 'tcx>(
/// reachable.
///
/// Such a traversal is mostly useful because it lets us skip lowering the `false` side
/// of `if <T as Trait>::CONST`, as well as [`NullOp::RuntimeChecks`].
/// of `if <T as Trait>::CONST`, as well as [`Operand::RuntimeChecks`].
///
/// [`NullOp::RuntimeChecks`]: rustc_middle::mir::NullOp::RuntimeChecks
/// [`Operand::RuntimeChecks`]: rustc_middle::mir::Operand::RuntimeChecks
pub fn mono_reachable<'a, 'tcx>(
body: &'a Body<'tcx>,
tcx: TyCtxt<'tcx>,

View file

@ -775,8 +775,6 @@ macro_rules! make_mir_visitor {
);
}
Rvalue::NullaryOp(_op) => {}
Rvalue::Aggregate(kind, operands) => {
let kind = &$($mutability)? **kind;
match kind {
@ -847,6 +845,7 @@ macro_rules! make_mir_visitor {
Operand::Constant(constant) => {
self.visit_const_operand(constant, location);
}
Operand::RuntimeChecks(_) => {}
}
}
@ -972,10 +971,7 @@ macro_rules! make_mir_visitor {
self.visit_span($(& $mutability)? *span);
match const_ {
Const::Ty(_, ct) => self.visit_ty_const($(&$mutability)? *ct, location),
Const::Val(_, ty) => {
self.visit_ty($(& $mutability)? *ty, TyContext::Location(location));
}
Const::Unevaluated(_, ty) => {
Const::Val(_, ty) | Const::Unevaluated(_, ty) => {
self.visit_ty($(& $mutability)? *ty, TyContext::Location(location));
}
}

View file

@ -243,7 +243,6 @@ TrivialTypeTraversalImpls! {
crate::mir::FakeReadCause,
crate::mir::Local,
crate::mir::MirPhase,
crate::mir::NullOp,
crate::mir::Promoted,
crate::mir::RawPtrKind,
crate::mir::RetagKind,
@ -284,6 +283,7 @@ TrivialTypeTraversalImpls! {
// interners).
TrivialTypeTraversalAndLiftImpls! {
// tidy-alphabetical-start
crate::mir::RuntimeChecks,
crate::ty::BoundTy,
crate::ty::ParamTy,
crate::ty::instance::ReifyReason,

View file

@ -261,6 +261,7 @@ impl<'a, 'tcx> ParseCtxt<'a, 'tcx> {
let value = match operand {
Operand::Constant(c) => VarDebugInfoContents::Const(*c),
Operand::Copy(p) | Operand::Move(p) => VarDebugInfoContents::Place(p),
Operand::RuntimeChecks(_) => unreachable!(),
};
let dbginfo = VarDebugInfo {
name,

View file

@ -1099,7 +1099,7 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
Some(DropData { source_info, local, kind: DropKind::Value })
}
Operand::Constant(_) => None,
Operand::Constant(_) | Operand::RuntimeChecks(_) => None,
})
.collect();
@ -1563,7 +1563,7 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
// look for moves of a local variable, like `MOVE(_X)`
let locals_moved = operands.iter().flat_map(|operand| match operand.node {
Operand::Copy(_) | Operand::Constant(_) => None,
Operand::Copy(_) | Operand::Constant(_) | Operand::RuntimeChecks(_) => None,
Operand::Move(place) => place.as_local(),
});

View file

@ -91,7 +91,6 @@ where
| Rvalue::ThreadLocalRef(..)
| Rvalue::Repeat(..)
| Rvalue::BinaryOp(..)
| Rvalue::NullaryOp(..)
| Rvalue::UnaryOp(..)
| Rvalue::Discriminant(..)
| Rvalue::Aggregate(..)

View file

@ -448,10 +448,7 @@ impl<'a, 'tcx, F: Fn(Ty<'tcx>) -> bool> MoveDataBuilder<'a, 'tcx, F> {
}
}
Rvalue::CopyForDeref(..) => unreachable!(),
Rvalue::Ref(..)
| Rvalue::RawPtr(..)
| Rvalue::Discriminant(..)
| Rvalue::NullaryOp(NullOp::RuntimeChecks(_)) => {}
Rvalue::Ref(..) | Rvalue::RawPtr(..) | Rvalue::Discriminant(..) => {}
}
}
@ -549,9 +546,10 @@ impl<'a, 'tcx, F: Fn(Ty<'tcx>) -> bool> MoveDataBuilder<'a, 'tcx, F> {
fn gather_operand(&mut self, operand: &Operand<'tcx>) {
match *operand {
Operand::Constant(..) | Operand::Copy(..) => {} // not-a-move
// not-a-move
Operand::Constant(..) | Operand::Copy(..) | Operand::RuntimeChecks(_) => {}
// a move
Operand::Move(place) => {
// a move
self.gather_move(place);
}
}

View file

@ -60,7 +60,27 @@ impl<'b, 'tcx> CostChecker<'b, 'tcx> {
}
impl<'tcx> Visitor<'tcx> for CostChecker<'_, 'tcx> {
fn visit_statement(&mut self, statement: &Statement<'tcx>, location: Location) {
fn visit_operand(&mut self, operand: &Operand<'tcx>, _: Location) {
match operand {
Operand::RuntimeChecks(RuntimeChecks::UbChecks) => {
if !self
.tcx
.sess
.opts
.unstable_opts
.inline_mir_preserve_debug
.unwrap_or(self.tcx.sess.ub_checks())
{
// If this is in optimized MIR it's because it's used later, so if we don't need UB
// checks this session, give a bonus here to offset the cost of the call later.
self.bonus += CALL_PENALTY;
}
}
_ => {}
}
}
fn visit_statement(&mut self, statement: &Statement<'tcx>, loc: Location) {
// Most costs are in rvalues and terminators, not in statements.
match statement.kind {
StatementKind::Intrinsic(ref ndi) => {
@ -69,35 +89,13 @@ impl<'tcx> Visitor<'tcx> for CostChecker<'_, 'tcx> {
NonDivergingIntrinsic::CopyNonOverlapping(..) => CALL_PENALTY,
};
}
_ => self.super_statement(statement, location),
StatementKind::Assign(..) => self.penalty += INSTR_COST,
_ => {}
}
self.super_statement(statement, loc)
}
fn visit_rvalue(&mut self, rvalue: &Rvalue<'tcx>, _location: Location) {
match rvalue {
// FIXME: Should we do the same for `OverflowChecks`?
Rvalue::NullaryOp(NullOp::RuntimeChecks(RuntimeChecks::UbChecks), ..)
if !self
.tcx
.sess
.opts
.unstable_opts
.inline_mir_preserve_debug
.unwrap_or(self.tcx.sess.ub_checks()) =>
{
// If this is in optimized MIR it's because it's used later,
// so if we don't need UB checks this session, give a bonus
// here to offset the cost of the call later.
self.bonus += CALL_PENALTY;
}
// These are essentially constants that didn't end up in an Operand,
// so treat them as also being free.
Rvalue::NullaryOp(..) => {}
_ => self.penalty += INSTR_COST,
}
}
fn visit_terminator(&mut self, terminator: &Terminator<'tcx>, _: Location) {
fn visit_terminator(&mut self, terminator: &Terminator<'tcx>, loc: Location) {
match &terminator.kind {
TerminatorKind::Drop { place, unwind, .. } => {
// If the place doesn't actually need dropping, treat it like a regular goto.
@ -126,7 +124,7 @@ impl<'tcx> Visitor<'tcx> for CostChecker<'_, 'tcx> {
self.penalty += CALL_PENALTY;
}
TerminatorKind::SwitchInt { discr, targets } => {
if discr.constant().is_some() {
if matches!(discr, Operand::Constant(_) | Operand::RuntimeChecks(_)) {
// Not only will this become a `Goto`, but likely other
// things will be removable as unreachable.
self.bonus += CONST_SWITCH_BONUS;
@ -174,6 +172,7 @@ impl<'tcx> Visitor<'tcx> for CostChecker<'_, 'tcx> {
bug!("{kind:?} should not be in runtime MIR");
}
}
self.super_terminator(terminator, loc)
}
}

View file

@ -211,6 +211,7 @@ impl<'a, 'tcx> ConstAnalysis<'a, 'tcx> {
state: &mut State<FlatSet<Scalar>>,
) -> ValueOrPlace<FlatSet<Scalar>> {
match operand {
Operand::RuntimeChecks(_) => ValueOrPlace::TOP,
Operand::Constant(box constant) => {
ValueOrPlace::Value(self.handle_constant(constant, state))
}
@ -463,9 +464,6 @@ impl<'a, 'tcx> ConstAnalysis<'a, 'tcx> {
FlatSet::Top => FlatSet::Top,
}
}
Rvalue::NullaryOp(NullOp::RuntimeChecks(_)) => {
return ValueOrPlace::TOP;
}
Rvalue::Discriminant(place) => state.get_discr(place.as_ref(), &self.map),
Rvalue::Use(operand) => return self.handle_operand(operand, state),
Rvalue::CopyForDeref(_) => bug!("`CopyForDeref` in runtime MIR"),
@ -533,6 +531,7 @@ impl<'a, 'tcx> ConstAnalysis<'a, 'tcx> {
operand: &Operand<'tcx>,
) {
match operand {
Operand::RuntimeChecks(_) => {}
Operand::Copy(rhs) | Operand::Move(rhs) => {
if let Some(rhs) = self.map.find(rhs.as_ref()) {
state.insert_place_idx(place, rhs, &self.map);
@ -1039,7 +1038,7 @@ impl<'tcx> MutVisitor<'tcx> for Patch<'tcx> {
self.super_operand(operand, location)
}
}
Operand::Constant(_) => {}
Operand::Constant(_) | Operand::RuntimeChecks(_) => {}
}
}

View file

@ -117,11 +117,7 @@ impl<'tcx> crate::MirPass<'tcx> for EarlyOtherwiseBranch {
unreachable!()
};
// Always correct since we can only switch on `Copy` types
let parent_op = match parent_op {
Operand::Move(x) => Operand::Copy(*x),
Operand::Copy(x) => Operand::Copy(*x),
Operand::Constant(x) => Operand::Constant(x.clone()),
};
let parent_op = parent_op.to_copy();
let parent_ty = parent_op.ty(body.local_decls(), tcx);
let statements_before = bbs[parent].statements.len();
let parent_end = Location { block: parent, statement_index: statements_before };

View file

@ -136,12 +136,7 @@ impl<'tcx> FunctionItemRefChecker<'_, 'tcx> {
}
fn nth_arg_span(&self, args: &[Spanned<Operand<'tcx>>], n: usize) -> Span {
match &args[n].node {
Operand::Copy(place) | Operand::Move(place) => {
self.body.local_decls[place.local].source_info.span
}
Operand::Constant(constant) => constant.span,
}
args[n].node.span(&self.body.local_decls)
}
fn emit_lint(

View file

@ -248,7 +248,7 @@ enum Value<'a, 'tcx> {
Discriminant(VnIndex),
// Operations.
NullaryOp(NullOp),
RuntimeChecks(RuntimeChecks),
UnaryOp(UnOp, VnIndex),
BinaryOp(BinOp, VnIndex, VnIndex),
Cast {
@ -567,6 +567,8 @@ impl<'body, 'a, 'tcx> VnState<'body, 'a, 'tcx> {
_ if ty.is_zst() => ImmTy::uninit(ty).into(),
Opaque(_) => return None,
// Keep runtime check constants as symbolic.
RuntimeChecks(..) => return None,
// In general, evaluating repeat expressions just consumes a lot of memory.
// But in the special case that the element is just Immediate::Uninit, we can evaluate
@ -678,7 +680,6 @@ impl<'body, 'a, 'tcx> VnState<'body, 'a, 'tcx> {
self.ecx.discriminant_for_variant(base.layout.ty, variant).discard_err()?;
discr_value.into()
}
NullaryOp(NullOp::RuntimeChecks(_)) => return None,
UnaryOp(un_op, operand) => {
let operand = self.eval_to_const(operand)?;
let operand = self.ecx.read_immediate(operand).discard_err()?;
@ -1004,11 +1005,16 @@ impl<'body, 'a, 'tcx> VnState<'body, 'a, 'tcx> {
location: Location,
) -> Option<VnIndex> {
match *operand {
Operand::RuntimeChecks(c) => {
Some(self.insert(self.tcx.types.bool, Value::RuntimeChecks(c)))
}
Operand::Constant(ref constant) => Some(self.insert_constant(constant.const_)),
Operand::Copy(ref mut place) | Operand::Move(ref mut place) => {
let value = self.simplify_place_value(place, location)?;
if let Some(const_) = self.try_as_constant(value) {
*operand = Operand::Constant(Box::new(const_));
} else if let Value::RuntimeChecks(c) = self.get(value) {
*operand = Operand::RuntimeChecks(c);
}
Some(value)
}
@ -1031,7 +1037,6 @@ impl<'body, 'a, 'tcx> VnState<'body, 'a, 'tcx> {
let op = self.simplify_operand(op, location)?;
Value::Repeat(op, amount)
}
Rvalue::NullaryOp(op) => Value::NullaryOp(op),
Rvalue::Aggregate(..) => return self.simplify_aggregate(lhs, rvalue, location),
Rvalue::Ref(_, borrow_kind, ref mut place) => {
self.simplify_place_projection(place, location);
@ -1779,6 +1784,8 @@ impl<'tcx> VnState<'_, '_, 'tcx> {
fn try_as_operand(&mut self, index: VnIndex, location: Location) -> Option<Operand<'tcx>> {
if let Some(const_) = self.try_as_constant(index) {
Some(Operand::Constant(Box::new(const_)))
} else if let Value::RuntimeChecks(c) = self.get(index) {
Some(Operand::RuntimeChecks(c))
} else if let Some(place) = self.try_as_place(index, location, false) {
self.reused_locals.insert(place.local);
Some(Operand::Copy(place))

View file

@ -4,10 +4,11 @@ use rustc_abi::ExternAbi;
use rustc_ast::attr;
use rustc_hir::LangItem;
use rustc_middle::bug;
use rustc_middle::mir::visit::MutVisitor;
use rustc_middle::mir::*;
use rustc_middle::ty::layout::ValidityRequirement;
use rustc_middle::ty::{self, GenericArgsRef, Ty, TyCtxt, layout};
use rustc_span::{DUMMY_SP, Symbol, sym};
use rustc_span::{Symbol, sym};
use crate::simplify::simplify_duplicate_switch_targets;
@ -29,22 +30,22 @@ impl<'tcx> crate::MirPass<'tcx> for InstSimplify {
}
fn run_pass(&self, tcx: TyCtxt<'tcx>, body: &mut Body<'tcx>) {
let preserve_ub_checks =
attr::contains_name(tcx.hir_krate_attrs(), sym::rustc_preserve_ub_checks);
if !preserve_ub_checks {
SimplifyUbCheck { tcx }.visit_body(body);
}
let ctx = InstSimplifyContext {
tcx,
local_decls: &body.local_decls,
typing_env: body.typing_env(tcx),
};
let preserve_ub_checks =
attr::contains_name(tcx.hir_krate_attrs(), sym::rustc_preserve_ub_checks);
for block in body.basic_blocks.as_mut() {
for statement in block.statements.iter_mut() {
let StatementKind::Assign(box (.., rvalue)) = &mut statement.kind else {
continue;
};
if !preserve_ub_checks {
ctx.simplify_ub_check(rvalue);
}
ctx.simplify_bool_cmp(rvalue);
ctx.simplify_ref_deref(rvalue);
ctx.simplify_ptr_aggregate(rvalue);
@ -168,17 +169,6 @@ impl<'tcx> InstSimplifyContext<'_, 'tcx> {
}
}
fn simplify_ub_check(&self, rvalue: &mut Rvalue<'tcx>) {
// FIXME: Should we do the same for overflow checks?
let Rvalue::NullaryOp(NullOp::RuntimeChecks(RuntimeChecks::UbChecks)) = *rvalue else {
return;
};
let const_ = Const::from_bool(self.tcx, self.tcx.sess.ub_checks());
let constant = ConstOperand { span: DUMMY_SP, const_, user_ty: None };
*rvalue = Rvalue::Use(Operand::Constant(Box::new(constant)));
}
fn simplify_cast(&self, rvalue: &mut Rvalue<'tcx>) {
let Rvalue::Cast(kind, operand, cast_ty) = rvalue else { return };
@ -362,3 +352,26 @@ fn resolve_rust_intrinsic<'tcx>(
let intrinsic = tcx.intrinsic(def_id)?;
Some((intrinsic.name, args))
}
struct SimplifyUbCheck<'tcx> {
tcx: TyCtxt<'tcx>,
}
impl<'tcx> MutVisitor<'tcx> for SimplifyUbCheck<'tcx> {
fn tcx(&self) -> TyCtxt<'tcx> {
self.tcx
}
fn visit_operand(&mut self, operand: &mut Operand<'tcx>, _: Location) {
if let Operand::RuntimeChecks(RuntimeChecks::UbChecks) = operand {
*operand = Operand::Constant(Box::new(ConstOperand {
span: rustc_span::DUMMY_SP,
user_ty: None,
const_: Const::Val(
ConstValue::from_bool(self.tcx.sess.ub_checks()),
self.tcx.types.bool,
),
}));
}
}
}

View file

@ -477,6 +477,7 @@ impl<'a, 'tcx> TOFinder<'a, 'tcx> {
let Some(rhs) = self.map.find(rhs.as_ref()) else { return };
self.process_copy(lhs, rhs, state)
}
Operand::RuntimeChecks(_) => {}
}
}

View file

@ -282,6 +282,7 @@ impl<'mir, 'tcx> ConstPropagator<'mir, 'tcx> {
/// or `eval_place`, depending on the variant of `Operand` used.
fn eval_operand(&mut self, op: &Operand<'tcx>) -> Option<ImmTy<'tcx>> {
match *op {
Operand::RuntimeChecks(_) => None,
Operand::Constant(ref c) => self.eval_constant(c),
Operand::Move(place) | Operand::Copy(place) => self.eval_place(place),
}
@ -444,7 +445,6 @@ impl<'mir, 'tcx> ConstPropagator<'mir, 'tcx> {
| Rvalue::Cast(..)
| Rvalue::ShallowInitBox(..)
| Rvalue::Discriminant(..)
| Rvalue::NullaryOp(..)
| Rvalue::WrapUnsafeBinder(..) => {}
}
@ -605,8 +605,6 @@ impl<'mir, 'tcx> ConstPropagator<'mir, 'tcx> {
Ref(..) | RawPtr(..) => return None,
NullaryOp(NullOp::RuntimeChecks(_)) => return None,
ShallowInitBox(..) => return None,
Cast(ref kind, ref value, to) => match kind {

View file

@ -261,7 +261,7 @@ fn remap_mir_for_const_eval_select<'tcx>(
if context == hir::Constness::Const { called_in_const } else { called_at_rt };
let (method, place): (fn(Place<'tcx>) -> Operand<'tcx>, Place<'tcx>) =
match tupled_args.node {
Operand::Constant(_) => {
Operand::Constant(_) | Operand::RuntimeChecks(_) => {
// There is no good way of extracting a tuple arg from a constant
// (const generic stuff) so we just create a temporary and deconstruct
// that.

View file

@ -88,7 +88,6 @@ impl<'a, 'tcx> Visitor<'tcx> for Lint<'a, 'tcx> {
| Rvalue::ShallowInitBox(..)
| Rvalue::WrapUnsafeBinder(..) => true,
Rvalue::ThreadLocalRef(..)
| Rvalue::NullaryOp(..)
| Rvalue::UnaryOp(..)
| Rvalue::BinaryOp(..)
| Rvalue::Ref(..)

View file

@ -35,7 +35,7 @@ impl<'tcx> crate::MirPass<'tcx> for LowerIntrinsics {
terminator.source_info,
StatementKind::Assign(Box::new((
*destination,
Rvalue::NullaryOp(NullOp::RuntimeChecks(op)),
Rvalue::Use(Operand::RuntimeChecks(op)),
))),
));
terminator.kind = TerminatorKind::Goto { target };

View file

@ -360,6 +360,10 @@ impl<'tcx> Validator<'_, 'tcx> {
match operand {
Operand::Copy(place) | Operand::Move(place) => self.validate_place(place.as_ref()),
// `RuntimeChecks` behaves different in const-eval and runtime MIR,
// so we do not promote it.
Operand::RuntimeChecks(_) => Err(Unpromotable),
// The qualifs for a constant (e.g. `HasMutInterior`) are checked in
// `validate_rvalue` upon access.
Operand::Constant(c) => {
@ -443,10 +447,6 @@ impl<'tcx> Validator<'_, 'tcx> {
self.validate_operand(operand)?;
}
Rvalue::NullaryOp(op) => match op {
NullOp::RuntimeChecks(_) => {}
},
Rvalue::ShallowInitBox(_, _) => return Err(Unpromotable),
Rvalue::UnaryOp(op, operand) => {

View file

@ -41,7 +41,7 @@ impl<'tcx> crate::MirPass<'tcx> for SimplifyConstCondition {
{
Some(const_operand)
}
Operand::Copy(_) | Operand::Move(_) => None,
Operand::Copy(_) | Operand::Move(_) | Operand::RuntimeChecks(_) => None,
}
}

View file

@ -392,11 +392,14 @@ impl<'tcx, 'll> MutVisitor<'tcx> for ReplacementVisitor<'tcx, 'll> {
// a_1 = move? place.1
// ...
// ```
StatementKind::Assign(box (lhs, Rvalue::Use(ref op))) => {
let (rplace, copy) = match *op {
Operand::Copy(rplace) => (rplace, true),
Operand::Move(rplace) => (rplace, false),
Operand::Constant(_) => bug!(),
StatementKind::Assign(box (
lhs,
Rvalue::Use(ref op @ (Operand::Copy(rplace) | Operand::Move(rplace))),
)) => {
let copy = match *op {
Operand::Copy(_) => true,
Operand::Move(_) => false,
Operand::Constant(_) | Operand::RuntimeChecks(_) => bug!(),
};
if let Some(final_locals) = self.replacements.place_fragments(lhs) {
for (field, ty, new_local) in final_locals {

View file

@ -1439,7 +1439,6 @@ impl<'a, 'tcx> Visitor<'tcx> for TypeChecker<'a, 'tcx> {
Rvalue::Repeat(_, _)
| Rvalue::ThreadLocalRef(_)
| Rvalue::RawPtr(_, _)
| Rvalue::NullaryOp(NullOp::RuntimeChecks(_))
| Rvalue::Discriminant(_) => {}
Rvalue::WrapUnsafeBinder(op, ty) => {

View file

@ -587,9 +587,6 @@ pub enum Rvalue {
/// nature of this operation?
ThreadLocalRef(crate::CrateItem),
/// Computes a value as described by the operation.
NullaryOp(NullOp),
/// Exactly like `BinaryOp`, but less operands.
///
/// Also does two's-complement arithmetic. Negation requires a signed integer or a float;
@ -641,7 +638,6 @@ impl Rvalue {
.discriminant_ty()
.ok_or_else(|| error!("Expected a `RigidTy` but found: {place_ty:?}"))
}
Rvalue::NullaryOp(NullOp::RuntimeChecks(_)) => Ok(Ty::bool_ty()),
Rvalue::Aggregate(ak, ops) => match *ak {
AggregateKind::Array(ty) => Ty::try_new_array(ty, ops.len() as u64),
AggregateKind::Tuple => Ok(Ty::new_tuple(
@ -677,6 +673,7 @@ pub enum Operand {
Copy(Place),
Move(Place),
Constant(ConstOperand),
RuntimeChecks(RuntimeChecks),
}
#[derive(Clone, Eq, PartialEq, Hash, Serialize)]
@ -699,6 +696,16 @@ pub struct ConstOperand {
pub const_: MirConst,
}
#[derive(Clone, Debug, Eq, PartialEq, Hash, Serialize)]
pub enum RuntimeChecks {
/// cfg!(ub_checks), but at codegen time
UbChecks,
/// cfg!(contract_checks), but at codegen time
ContractChecks,
/// cfg!(overflow_checks), but at codegen time
OverflowChecks,
}
/// Debug information pertaining to a user variable.
#[derive(Clone, Debug, Eq, PartialEq, Serialize)]
pub struct VarDebugInfo {
@ -1018,22 +1025,6 @@ pub enum CastKind {
Subtype,
}
#[derive(Clone, Debug, Eq, PartialEq, Hash, Serialize)]
pub enum NullOp {
/// Codegen conditions for runtime checks.
RuntimeChecks(RuntimeChecks),
}
#[derive(Clone, Debug, Eq, PartialEq, Hash, Serialize)]
pub enum RuntimeChecks {
/// cfg!(ub_checks), but at codegen time
UbChecks,
/// cfg!(contract_checks), but at codegen time
ContractChecks,
/// cfg!(overflow_checks), but at codegen time
OverflowChecks,
}
impl Operand {
/// Get the type of an operand relative to the local declaration.
///
@ -1045,6 +1036,7 @@ impl Operand {
match self {
Operand::Copy(place) | Operand::Move(place) => place.ty(locals),
Operand::Constant(c) => Ok(c.ty()),
Operand::RuntimeChecks(_) => Ok(Ty::bool_ty()),
}
}
}

View file

@ -332,6 +332,7 @@ fn pretty_operand(operand: &Operand) -> String {
format!("move {mv:?}")
}
Operand::Constant(cnst) => pretty_mir_const(&cnst.const_),
Operand::RuntimeChecks(checks) => format!("{checks:?}"),
}
}
@ -386,9 +387,6 @@ fn pretty_rvalue<W: Write>(writer: &mut W, rval: &Rvalue) -> io::Result<()> {
Rvalue::ThreadLocalRef(item) => {
write!(writer, "thread_local_ref{item:?}")
}
Rvalue::NullaryOp(nul) => {
write!(writer, "{nul:?}() \" \"")
}
Rvalue::UnaryOp(un, op) => {
write!(writer, "{:?}({})", un, pretty_operand(op))
}

View file

@ -282,7 +282,6 @@ macro_rules! make_mir_visitor {
self.visit_operand(op, location)
}
Rvalue::ThreadLocalRef(_) => {}
Rvalue::NullaryOp(_) => {}
Rvalue::UnaryOp(_, op) | Rvalue::Use(op) => {
self.visit_operand(op, location);
}
@ -297,6 +296,7 @@ macro_rules! make_mir_visitor {
Operand::Constant(constant) => {
self.visit_const_operand(constant, location);
}
Operand::RuntimeChecks(_) => {}
}
}

View file

@ -232,7 +232,6 @@ impl<'tcx> Stable<'tcx> for mir::Rvalue<'tcx> {
)
}
}
NullaryOp(null_op) => crate::mir::Rvalue::NullaryOp(null_op.stable(tables, cx)),
UnaryOp(un_op, op) => {
crate::mir::Rvalue::UnaryOp(un_op.stable(tables, cx), op.stable(tables, cx))
}
@ -312,21 +311,18 @@ impl<'tcx> Stable<'tcx> for mir::FakeBorrowKind {
}
}
impl<'tcx> Stable<'tcx> for mir::NullOp {
type T = crate::mir::NullOp;
impl<'tcx> Stable<'tcx> for mir::RuntimeChecks {
type T = crate::mir::RuntimeChecks;
fn stable<'cx>(
&self,
_: &mut Tables<'cx, BridgeTys>,
_: &CompilerCtxt<'cx, BridgeTys>,
) -> Self::T {
use rustc_middle::mir::NullOp::*;
use rustc_middle::mir::RuntimeChecks::*;
match self {
RuntimeChecks(op) => crate::mir::NullOp::RuntimeChecks(match op {
UbChecks => crate::mir::RuntimeChecks::UbChecks,
ContractChecks => crate::mir::RuntimeChecks::ContractChecks,
OverflowChecks => crate::mir::RuntimeChecks::OverflowChecks,
}),
UbChecks => crate::mir::RuntimeChecks::UbChecks,
ContractChecks => crate::mir::RuntimeChecks::ContractChecks,
OverflowChecks => crate::mir::RuntimeChecks::OverflowChecks,
}
}
}
@ -383,6 +379,7 @@ impl<'tcx> Stable<'tcx> for mir::Operand<'tcx> {
Copy(place) => crate::mir::Operand::Copy(place.stable(tables, cx)),
Move(place) => crate::mir::Operand::Move(place.stable(tables, cx)),
Constant(c) => crate::mir::Operand::Constant(c.stable(tables, cx)),
RuntimeChecks(c) => crate::mir::Operand::RuntimeChecks(c.stable(tables, cx)),
}
}
}

View file

@ -95,17 +95,16 @@ pub use intrinsics::ub_checks as check_library_ub;
#[rustc_allow_const_fn_unstable(const_eval_select)]
pub(crate) const fn check_language_ub() -> bool {
// Only used for UB checks so we may const_eval_select.
intrinsics::ub_checks()
&& const_eval_select!(
@capture { } -> bool:
if const {
// Always disable UB checks.
false
} else {
// Disable UB checks in Miri.
!cfg!(miri)
}
)
const_eval_select!(
@capture { } -> bool:
if const {
// Always disable UB checks.
false
} else {
// Disable UB checks in Miri.
!cfg!(miri)
}
) && intrinsics::ub_checks()
}
/// Checks whether `ptr` is properly aligned with respect to the given alignment, and

View file

@ -110,7 +110,7 @@ impl<'tcx> mir::visit::Visitor<'tcx> for PossibleBorrowerVisitor<'_, '_, 'tcx> {
immutable_borrowers.push(p.local);
}
},
mir::Operand::Constant(..) => (),
mir::Operand::Constant(..) | mir::Operand::RuntimeChecks(..) => (),
}
}
@ -151,7 +151,7 @@ fn rvalue_locals(rvalue: &mir::Rvalue<'_>, mut visit: impl FnMut(mir::Local)) {
let mut visit_op = |op: &mir::Operand<'_>| match op {
mir::Operand::Copy(p) | mir::Operand::Move(p) => visit(p.local),
mir::Operand::Constant(..) => (),
mir::Operand::Constant(..) | mir::Operand::RuntimeChecks(..) => (),
};
match rvalue {

View file

@ -13,7 +13,7 @@ use rustc_infer::infer::TyCtxtInferExt;
use rustc_infer::traits::Obligation;
use rustc_lint::LateContext;
use rustc_middle::mir::{
Body, CastKind, NonDivergingIntrinsic, NullOp, Operand, Place, ProjectionElem, Rvalue, Statement, StatementKind,
Body, CastKind, NonDivergingIntrinsic, Operand, Place, ProjectionElem, Rvalue, Statement, StatementKind,
Terminator, TerminatorKind,
};
use rustc_middle::traits::{BuiltinImplSource, ImplSource, ObligationCause};
@ -194,7 +194,7 @@ fn check_rvalue<'tcx>(
))
}
},
Rvalue::NullaryOp(NullOp::RuntimeChecks(_)) | Rvalue::ShallowInitBox(_, _) => Ok(()),
Rvalue::ShallowInitBox(_, _) => Ok(()),
Rvalue::UnaryOp(_, operand) => {
let ty = operand.ty(body, cx.tcx);
if ty.is_integral() || ty.is_bool() {
@ -277,6 +277,7 @@ fn check_operand<'tcx>(
Some(_) => Err((span, "cannot access `static` items in const fn".into())),
None => Ok(()),
},
Operand::RuntimeChecks(..) => Ok(()),
}
}

View file

@ -148,8 +148,7 @@ fn usize_ilog10_range(value: usize) {
// CHECK-LABEL: @usize_ilog10_range(
// CHECK-NOT: panic
// CHECK: ret void
// CHECK-NEXT: }
// CHECK: }
assert!(value == 0 || value.ilog10() <= MAX_RESULT);
assert!(value.checked_ilog10().is_none_or(|result| result <= MAX_RESULT));
}

View file

@ -16,7 +16,6 @@
scope 4 (inlined #[track_caller] unreachable_unchecked) {
let _5: ();
scope 5 (inlined core::ub_checks::check_language_ub) {
let mut _6: bool;
scope 6 (inlined core::ub_checks::check_language_ub::runtime) {
}
}
@ -38,9 +37,7 @@
}
bb2: {
- StorageLive(_6);
- _6 = const false;
- assume(copy _6);
- assume(const false);
- _5 = unreachable_unchecked::precondition_check() -> [return: bb1, unwind unreachable];
+ unreachable;
}

View file

@ -13,7 +13,7 @@
let mut _11: *const ();
let mut _16: usize;
let mut _17: usize;
let mut _27: usize;
let mut _26: usize;
scope 1 {
debug vp_ctx => _1;
let _5: *const ();
@ -27,7 +27,7 @@
debug _x => _8;
}
scope 18 (inlined foo) {
let mut _28: *const [()];
let mut _27: *const [()];
}
}
scope 16 (inlined slice_from_raw_parts::<()>) {
@ -52,7 +52,7 @@
scope 11 (inlined NonNull::<[u8]>::as_mut_ptr) {
scope 12 (inlined NonNull::<[u8]>::as_non_null_ptr) {
scope 13 (inlined NonNull::<[u8]>::cast::<u8>) {
let mut _26: *mut [u8];
let mut _25: *mut [u8];
scope 14 (inlined NonNull::<[u8]>::as_ptr) {
}
}
@ -65,9 +65,8 @@
}
}
scope 9 (inlined #[track_caller] Layout::from_size_align_unchecked) {
let mut _23: bool;
let _24: ();
let mut _25: std::ptr::Alignment;
let _23: ();
let mut _24: std::ptr::Alignment;
}
}
}
@ -94,10 +93,8 @@
StorageLive(_20);
StorageLive(_21);
StorageLive(_22);
StorageLive(_24);
StorageLive(_23);
_23 = UbChecks();
switchInt(move _23) -> [0: bb6, otherwise: bb5];
switchInt(UbChecks) -> [0: bb6, otherwise: bb5];
}
bb1: {
@ -117,14 +114,14 @@
bb4: {
_21 = copy ((_19 as Ok).0: std::ptr::NonNull<[u8]>);
- StorageLive(_26);
- StorageLive(_25);
+ nop;
_26 = copy _21 as *mut [u8] (Transmute);
_12 = copy _26 as *mut u8 (PtrToPtr);
- StorageDead(_26);
_25 = copy _21 as *mut [u8] (Transmute);
_12 = copy _25 as *mut u8 (PtrToPtr);
- StorageDead(_25);
+ nop;
StorageDead(_19);
StorageDead(_24);
StorageDead(_23);
StorageDead(_22);
StorageDead(_21);
StorageDead(_20);
@ -132,7 +129,7 @@
StorageDead(_17);
StorageDead(_16);
- _13 = copy _12 as *const () (PtrToPtr);
+ _13 = copy _26 as *const () (PtrToPtr);
+ _13 = copy _25 as *const () (PtrToPtr);
_14 = NonNull::<()> { pointer: copy _13 };
_15 = Unique::<()> { pointer: copy _14, _marker: const PhantomData::<()> };
_3 = Box::<()>(move _15, const std::alloc::Global);
@ -157,21 +154,21 @@
+ nop;
StorageLive(_7);
_7 = copy _5;
StorageLive(_27);
_27 = const 1_usize;
- _6 = *const [()] from (copy _7, copy _27);
StorageLive(_26);
_26 = const 1_usize;
- _6 = *const [()] from (copy _7, copy _26);
+ _6 = *const [()] from (copy _5, const 1_usize);
StorageDead(_27);
StorageDead(_26);
StorageDead(_7);
StorageLive(_8);
StorageLive(_9);
_9 = copy _6;
StorageLive(_28);
- _28 = copy _9;
StorageLive(_27);
- _27 = copy _9;
- _8 = copy _9 as *mut () (PtrToPtr);
+ _28 = copy _6;
+ _27 = copy _6;
+ _8 = copy _5 as *mut () (PtrToPtr);
StorageDead(_28);
StorageDead(_27);
StorageDead(_9);
_0 = const ();
StorageDead(_8);
@ -183,18 +180,17 @@
}
bb5: {
- _24 = Layout::from_size_align_unchecked::precondition_check(copy _16, copy _17) -> [return: bb6, unwind unreachable];
+ _24 = Layout::from_size_align_unchecked::precondition_check(const 0_usize, const 1_usize) -> [return: bb6, unwind unreachable];
- _23 = Layout::from_size_align_unchecked::precondition_check(copy _16, copy _17) -> [return: bb6, unwind unreachable];
+ _23 = Layout::from_size_align_unchecked::precondition_check(const 0_usize, const 1_usize) -> [return: bb6, unwind unreachable];
}
bb6: {
StorageDead(_23);
StorageLive(_25);
- _25 = copy _17 as std::ptr::Alignment (Transmute);
- _18 = Layout { size: copy _16, align: move _25 };
+ _25 = const std::ptr::Alignment(std::ptr::alignment::AlignmentEnum::_Align1Shl0);
StorageLive(_24);
- _24 = copy _17 as std::ptr::Alignment (Transmute);
- _18 = Layout { size: copy _16, align: move _24 };
+ _24 = const std::ptr::Alignment(std::ptr::alignment::AlignmentEnum::_Align1Shl0);
+ _18 = const Layout {{ size: 0_usize, align: std::ptr::Alignment(std::ptr::alignment::AlignmentEnum::_Align1Shl0) }};
StorageDead(_25);
StorageDead(_24);
StorageLive(_19);
- _19 = std::alloc::Global::alloc_impl(const alloc::alloc::exchange_malloc::promoted[0], copy _18, const false) -> [return: bb7, unwind unreachable];
+ _19 = std::alloc::Global::alloc_impl(const alloc::alloc::exchange_malloc::promoted[0], const Layout {{ size: 0_usize, align: std::ptr::Alignment(std::ptr::alignment::AlignmentEnum::_Align1Shl0) }}, const false) -> [return: bb7, unwind unreachable];

View file

@ -13,7 +13,7 @@
let mut _11: *const ();
let mut _16: usize;
let mut _17: usize;
let mut _27: usize;
let mut _26: usize;
scope 1 {
debug vp_ctx => _1;
let _5: *const ();
@ -27,7 +27,7 @@
debug _x => _8;
}
scope 18 (inlined foo) {
let mut _28: *const [()];
let mut _27: *const [()];
}
}
scope 16 (inlined slice_from_raw_parts::<()>) {
@ -52,7 +52,7 @@
scope 11 (inlined NonNull::<[u8]>::as_mut_ptr) {
scope 12 (inlined NonNull::<[u8]>::as_non_null_ptr) {
scope 13 (inlined NonNull::<[u8]>::cast::<u8>) {
let mut _26: *mut [u8];
let mut _25: *mut [u8];
scope 14 (inlined NonNull::<[u8]>::as_ptr) {
}
}
@ -65,9 +65,8 @@
}
}
scope 9 (inlined #[track_caller] Layout::from_size_align_unchecked) {
let mut _23: bool;
let _24: ();
let mut _25: std::ptr::Alignment;
let _23: ();
let mut _24: std::ptr::Alignment;
}
}
}
@ -94,10 +93,8 @@
StorageLive(_20);
StorageLive(_21);
StorageLive(_22);
StorageLive(_24);
StorageLive(_23);
_23 = UbChecks();
switchInt(move _23) -> [0: bb6, otherwise: bb5];
switchInt(UbChecks) -> [0: bb6, otherwise: bb5];
}
bb1: {
@ -117,14 +114,14 @@
bb4: {
_21 = copy ((_19 as Ok).0: std::ptr::NonNull<[u8]>);
- StorageLive(_26);
- StorageLive(_25);
+ nop;
_26 = copy _21 as *mut [u8] (Transmute);
_12 = copy _26 as *mut u8 (PtrToPtr);
- StorageDead(_26);
_25 = copy _21 as *mut [u8] (Transmute);
_12 = copy _25 as *mut u8 (PtrToPtr);
- StorageDead(_25);
+ nop;
StorageDead(_19);
StorageDead(_24);
StorageDead(_23);
StorageDead(_22);
StorageDead(_21);
StorageDead(_20);
@ -132,7 +129,7 @@
StorageDead(_17);
StorageDead(_16);
- _13 = copy _12 as *const () (PtrToPtr);
+ _13 = copy _26 as *const () (PtrToPtr);
+ _13 = copy _25 as *const () (PtrToPtr);
_14 = NonNull::<()> { pointer: copy _13 };
_15 = Unique::<()> { pointer: copy _14, _marker: const PhantomData::<()> };
_3 = Box::<()>(move _15, const std::alloc::Global);
@ -157,21 +154,21 @@
+ nop;
StorageLive(_7);
_7 = copy _5;
StorageLive(_27);
_27 = const 1_usize;
- _6 = *const [()] from (copy _7, copy _27);
StorageLive(_26);
_26 = const 1_usize;
- _6 = *const [()] from (copy _7, copy _26);
+ _6 = *const [()] from (copy _5, const 1_usize);
StorageDead(_27);
StorageDead(_26);
StorageDead(_7);
StorageLive(_8);
StorageLive(_9);
_9 = copy _6;
StorageLive(_28);
- _28 = copy _9;
StorageLive(_27);
- _27 = copy _9;
- _8 = copy _9 as *mut () (PtrToPtr);
+ _28 = copy _6;
+ _27 = copy _6;
+ _8 = copy _5 as *mut () (PtrToPtr);
StorageDead(_28);
StorageDead(_27);
StorageDead(_9);
_0 = const ();
StorageDead(_8);
@ -183,18 +180,17 @@
}
bb5: {
- _24 = Layout::from_size_align_unchecked::precondition_check(copy _16, copy _17) -> [return: bb6, unwind unreachable];
+ _24 = Layout::from_size_align_unchecked::precondition_check(const 0_usize, const 1_usize) -> [return: bb6, unwind unreachable];
- _23 = Layout::from_size_align_unchecked::precondition_check(copy _16, copy _17) -> [return: bb6, unwind unreachable];
+ _23 = Layout::from_size_align_unchecked::precondition_check(const 0_usize, const 1_usize) -> [return: bb6, unwind unreachable];
}
bb6: {
StorageDead(_23);
StorageLive(_25);
- _25 = copy _17 as std::ptr::Alignment (Transmute);
- _18 = Layout { size: copy _16, align: move _25 };
+ _25 = const std::ptr::Alignment(std::ptr::alignment::AlignmentEnum::_Align1Shl0);
StorageLive(_24);
- _24 = copy _17 as std::ptr::Alignment (Transmute);
- _18 = Layout { size: copy _16, align: move _24 };
+ _24 = const std::ptr::Alignment(std::ptr::alignment::AlignmentEnum::_Align1Shl0);
+ _18 = const Layout {{ size: 0_usize, align: std::ptr::Alignment(std::ptr::alignment::AlignmentEnum::_Align1Shl0) }};
StorageDead(_25);
StorageDead(_24);
StorageLive(_19);
- _19 = std::alloc::Global::alloc_impl(const alloc::alloc::exchange_malloc::promoted[0], copy _18, const false) -> [return: bb7, unwind unreachable];
+ _19 = std::alloc::Global::alloc_impl(const alloc::alloc::exchange_malloc::promoted[0], const Layout {{ size: 0_usize, align: std::ptr::Alignment(std::ptr::alignment::AlignmentEnum::_Align1Shl0) }}, const false) -> [return: bb7, unwind unreachable];

View file

@ -10,7 +10,6 @@
+ scope 1 (inlined #[track_caller] core::num::<impl u16>::unchecked_shl) {
+ let _5: ();
+ scope 2 (inlined core::ub_checks::check_language_ub) {
+ let mut _6: bool;
+ scope 3 (inlined core::ub_checks::check_language_ub::runtime) {
+ }
+ }
@ -23,9 +22,7 @@
_4 = copy _2;
- _0 = core::num::<impl u16>::unchecked_shl(move _3, move _4) -> [return: bb1, unwind unreachable];
+ StorageLive(_5);
+ StorageLive(_6);
+ _6 = UbChecks();
+ switchInt(copy _6) -> [0: bb2, otherwise: bb1];
+ switchInt(UbChecks) -> [0: bb2, otherwise: bb1];
}
bb1: {
@ -34,7 +31,6 @@
+
+ bb2: {
+ _0 = ShlUnchecked(copy _3, copy _4);
+ StorageDead(_6);
+ StorageDead(_5);
StorageDead(_4);
StorageDead(_3);

View file

@ -10,7 +10,6 @@
+ scope 1 (inlined #[track_caller] core::num::<impl u16>::unchecked_shl) {
+ let _5: ();
+ scope 2 (inlined core::ub_checks::check_language_ub) {
+ let mut _6: bool;
+ scope 3 (inlined core::ub_checks::check_language_ub::runtime) {
+ }
+ }
@ -23,9 +22,7 @@
_4 = copy _2;
- _0 = core::num::<impl u16>::unchecked_shl(move _3, move _4) -> [return: bb1, unwind continue];
+ StorageLive(_5);
+ StorageLive(_6);
+ _6 = UbChecks();
+ switchInt(copy _6) -> [0: bb2, otherwise: bb1];
+ switchInt(UbChecks) -> [0: bb2, otherwise: bb1];
}
bb1: {
@ -34,7 +31,6 @@
+
+ bb2: {
+ _0 = ShlUnchecked(copy _3, copy _4);
+ StorageDead(_6);
+ StorageDead(_5);
StorageDead(_4);
StorageDead(_3);

View file

@ -10,7 +10,6 @@
+ scope 1 (inlined #[track_caller] core::num::<impl i64>::unchecked_shr) {
+ let _5: ();
+ scope 2 (inlined core::ub_checks::check_language_ub) {
+ let mut _6: bool;
+ scope 3 (inlined core::ub_checks::check_language_ub::runtime) {
+ }
+ }
@ -23,9 +22,7 @@
_4 = copy _2;
- _0 = core::num::<impl i64>::unchecked_shr(move _3, move _4) -> [return: bb1, unwind unreachable];
+ StorageLive(_5);
+ StorageLive(_6);
+ _6 = UbChecks();
+ switchInt(copy _6) -> [0: bb2, otherwise: bb1];
+ switchInt(UbChecks) -> [0: bb2, otherwise: bb1];
}
bb1: {
@ -34,7 +31,6 @@
+
+ bb2: {
+ _0 = ShrUnchecked(copy _3, copy _4);
+ StorageDead(_6);
+ StorageDead(_5);
StorageDead(_4);
StorageDead(_3);

View file

@ -10,7 +10,6 @@
+ scope 1 (inlined #[track_caller] core::num::<impl i64>::unchecked_shr) {
+ let _5: ();
+ scope 2 (inlined core::ub_checks::check_language_ub) {
+ let mut _6: bool;
+ scope 3 (inlined core::ub_checks::check_language_ub::runtime) {
+ }
+ }
@ -23,9 +22,7 @@
_4 = copy _2;
- _0 = core::num::<impl i64>::unchecked_shr(move _3, move _4) -> [return: bb1, unwind continue];
+ StorageLive(_5);
+ StorageLive(_6);
+ _6 = UbChecks();
+ switchInt(copy _6) -> [0: bb2, otherwise: bb1];
+ switchInt(UbChecks) -> [0: bb2, otherwise: bb1];
}
bb1: {
@ -34,7 +31,6 @@
+
+ bb2: {
+ _0 = ShrUnchecked(copy _3, copy _4);
+ StorageDead(_6);
+ StorageDead(_5);
StorageDead(_4);
StorageDead(_3);

View file

@ -12,7 +12,6 @@
+ scope 3 (inlined #[track_caller] unreachable_unchecked) {
+ let _4: ();
+ scope 4 (inlined core::ub_checks::check_language_ub) {
+ let mut _5: bool;
+ scope 5 (inlined core::ub_checks::check_language_ub::runtime) {
+ }
+ }
@ -34,9 +33,7 @@
+ }
+
+ bb2: {
+ StorageLive(_5);
+ _5 = UbChecks();
+ assume(copy _5);
+ assume(UbChecks);
+ _4 = unreachable_unchecked::precondition_check() -> [return: bb1, unwind unreachable];
+ }
+

View file

@ -12,7 +12,6 @@
+ scope 3 (inlined #[track_caller] unreachable_unchecked) {
+ let _4: ();
+ scope 4 (inlined core::ub_checks::check_language_ub) {
+ let mut _5: bool;
+ scope 5 (inlined core::ub_checks::check_language_ub::runtime) {
+ }
+ }
@ -38,9 +37,7 @@
- bb2 (cleanup): {
- resume;
+ bb2: {
+ StorageLive(_5);
+ _5 = UbChecks();
+ assume(copy _5);
+ assume(UbChecks);
+ _4 = unreachable_unchecked::precondition_check() -> [return: bb1, unwind unreachable];
+ }
+

View file

@ -5,8 +5,7 @@
pub fn unwrap_unchecked(x: Option<i32>) -> i32 {
// CHECK-LABEL: fn unwrap_unchecked(
// CHECK-NOT: UbChecks()
// CHECK: [[assume:_.*]] = const false;
// CHECK-NEXT: assume(copy [[assume]]);
// CHECK: assume(const false);
// CHECK-NEXT: unreachable_unchecked::precondition_check
unsafe { x.unwrap_unchecked() }
}

View file

@ -12,7 +12,6 @@
scope 3 (inlined #[track_caller] unreachable_unchecked) {
let _4: ();
scope 4 (inlined core::ub_checks::check_language_ub) {
let mut _5: bool;
scope 5 (inlined core::ub_checks::check_language_ub::runtime) {
}
}
@ -33,10 +32,8 @@
}
bb2: {
StorageLive(_5);
- _5 = UbChecks();
+ _5 = const false;
assume(copy _5);
- assume(UbChecks);
+ assume(const false);
_4 = unreachable_unchecked::precondition_check() -> [return: bb1, unwind unreachable];
}

View file

@ -43,10 +43,192 @@
scope 4 (inlined std::cmp::impls::<impl PartialEq<&str> for &String>::eq) {
let mut _27: &std::string::String;
let mut _28: &str;
scope 5 (inlined <String as PartialEq<str>>::eq) {
scope 6 (inlined #[track_caller] <String as Index<RangeFull>>::index) {
let _29: &str;
scope 7 (inlined String::as_str) {
let _30: &[u8];
let mut _31: &std::vec::Vec<u8>;
scope 8 (inlined Vec::<u8>::as_slice) {
let mut _32: *const u8;
let mut _33: usize;
scope 9 (inlined Vec::<u8>::as_ptr) {
scope 10 (inlined alloc::raw_vec::RawVec::<u8>::ptr) {
scope 11 (inlined alloc::raw_vec::RawVecInner::ptr::<u8>) {
scope 12 (inlined alloc::raw_vec::RawVecInner::non_null::<u8>) {
let mut _34: std::ptr::NonNull<u8>;
scope 13 (inlined Unique::<u8>::cast::<u8>) {
scope 14 (inlined NonNull::<u8>::cast::<u8>) {
scope 15 (inlined NonNull::<u8>::as_ptr) {
}
}
}
scope 16 (inlined Unique::<u8>::as_non_null_ptr) {
}
}
scope 17 (inlined NonNull::<u8>::as_ptr) {
}
}
}
}
scope 18 (inlined #[track_caller] std::slice::from_raw_parts::<'_, u8>) {
let _35: ();
let mut _36: *mut ();
let _37: *const [u8];
scope 19 (inlined ub_checks::check_language_ub) {
scope 20 (inlined ub_checks::check_language_ub::runtime) {
}
}
scope 21 (inlined std::mem::size_of::<u8>) {
}
scope 22 (inlined std::mem::align_of::<u8>) {
}
scope 23 (inlined slice_from_raw_parts::<u8>) {
scope 24 (inlined std::ptr::from_raw_parts::<[u8], u8>) {
}
}
}
}
scope 25 (inlined from_utf8_unchecked) {
}
}
scope 26 (inlined #[track_caller] core::str::traits::<impl SliceIndex<str> for RangeFull>::index) {
}
}
scope 27 (inlined #[track_caller] core::str::traits::<impl Index<RangeFull> for str>::index) {
scope 28 (inlined #[track_caller] core::str::traits::<impl SliceIndex<str> for RangeFull>::index) {
}
}
scope 29 (inlined core::str::traits::<impl PartialEq for str>::eq) {
let mut _38: &&[u8];
let _39: &[u8];
let mut _40: &&[u8];
let _41: &[u8];
scope 30 (inlined core::str::<impl str>::as_bytes) {
}
scope 31 (inlined core::str::<impl str>::as_bytes) {
}
scope 32 (inlined std::cmp::impls::<impl PartialEq for &[u8]>::eq) {
scope 33 (inlined core::slice::cmp::<impl PartialEq for [u8]>::eq) {
scope 34 (inlined <[u8] as core::slice::cmp::SlicePartialEq<u8>>::equal) {
let mut _42: bool;
let mut _43: usize;
let mut _44: usize;
let _45: usize;
let mut _46: i32;
let mut _47: *const u8;
let mut _48: *const u8;
scope 35 {
scope 37 (inlined core::slice::<impl [u8]>::as_ptr) {
let mut _50: *const [u8];
}
scope 38 (inlined core::slice::<impl [u8]>::as_ptr) {
let mut _51: *const [u8];
}
}
scope 36 (inlined std::mem::size_of_val::<[u8]>) {
let mut _49: *const [u8];
}
}
}
}
}
}
}
scope 5 (inlined std::cmp::impls::<impl PartialEq<&str> for &String>::eq) {
let mut _29: &std::string::String;
let mut _30: &str;
scope 39 (inlined std::cmp::impls::<impl PartialEq<&str> for &String>::eq) {
let mut _52: &std::string::String;
let mut _53: &str;
scope 40 (inlined <String as PartialEq<str>>::eq) {
scope 41 (inlined #[track_caller] <String as Index<RangeFull>>::index) {
let _54: &str;
scope 42 (inlined String::as_str) {
let _55: &[u8];
let mut _56: &std::vec::Vec<u8>;
scope 43 (inlined Vec::<u8>::as_slice) {
let mut _57: *const u8;
let mut _58: usize;
scope 44 (inlined Vec::<u8>::as_ptr) {
scope 45 (inlined alloc::raw_vec::RawVec::<u8>::ptr) {
scope 46 (inlined alloc::raw_vec::RawVecInner::ptr::<u8>) {
scope 47 (inlined alloc::raw_vec::RawVecInner::non_null::<u8>) {
let mut _59: std::ptr::NonNull<u8>;
scope 48 (inlined Unique::<u8>::cast::<u8>) {
scope 49 (inlined NonNull::<u8>::cast::<u8>) {
scope 50 (inlined NonNull::<u8>::as_ptr) {
}
}
}
scope 51 (inlined Unique::<u8>::as_non_null_ptr) {
}
}
scope 52 (inlined NonNull::<u8>::as_ptr) {
}
}
}
}
scope 53 (inlined #[track_caller] std::slice::from_raw_parts::<'_, u8>) {
let _60: ();
let mut _61: *mut ();
let _62: *const [u8];
scope 54 (inlined ub_checks::check_language_ub) {
scope 55 (inlined ub_checks::check_language_ub::runtime) {
}
}
scope 56 (inlined std::mem::size_of::<u8>) {
}
scope 57 (inlined std::mem::align_of::<u8>) {
}
scope 58 (inlined slice_from_raw_parts::<u8>) {
scope 59 (inlined std::ptr::from_raw_parts::<[u8], u8>) {
}
}
}
}
scope 60 (inlined from_utf8_unchecked) {
}
}
scope 61 (inlined #[track_caller] core::str::traits::<impl SliceIndex<str> for RangeFull>::index) {
}
}
scope 62 (inlined #[track_caller] core::str::traits::<impl Index<RangeFull> for str>::index) {
scope 63 (inlined #[track_caller] core::str::traits::<impl SliceIndex<str> for RangeFull>::index) {
}
}
scope 64 (inlined core::str::traits::<impl PartialEq for str>::eq) {
let mut _63: &&[u8];
let _64: &[u8];
let mut _65: &&[u8];
let _66: &[u8];
scope 65 (inlined core::str::<impl str>::as_bytes) {
}
scope 66 (inlined core::str::<impl str>::as_bytes) {
}
scope 67 (inlined std::cmp::impls::<impl PartialEq for &[u8]>::eq) {
scope 68 (inlined core::slice::cmp::<impl PartialEq for [u8]>::eq) {
scope 69 (inlined <[u8] as core::slice::cmp::SlicePartialEq<u8>>::equal) {
let mut _67: bool;
let mut _68: usize;
let mut _69: usize;
let _70: usize;
let mut _71: i32;
let mut _72: *const u8;
let mut _73: *const u8;
scope 70 {
scope 72 (inlined core::slice::<impl [u8]>::as_ptr) {
let mut _75: *const [u8];
}
scope 73 (inlined core::slice::<impl [u8]>::as_ptr) {
let mut _76: *const [u8];
}
}
scope 71 (inlined std::mem::size_of_val::<[u8]>) {
let mut _74: *const [u8];
}
}
}
}
}
}
}
bb0: {
@ -71,7 +253,7 @@
bb3: {
_1 = chained_conditions::BacktraceStyle::Off;
- goto -> bb18;
+ goto -> bb23;
+ goto -> bb37;
}
bb4: {
@ -89,7 +271,16 @@
StorageLive(_28);
_27 = copy (*_8);
_28 = copy (*_10);
_7 = <String as PartialEq<str>>::eq(move _27, move _28) -> [return: bb19, unwind unreachable];
StorageLive(_29);
StorageLive(_35);
StorageLive(_30);
StorageLive(_34);
StorageLive(_32);
_34 = copy ((((((*_27).0: std::vec::Vec<u8>).0: alloc::raw_vec::RawVec<u8>).0: alloc::raw_vec::RawVecInner).0: std::ptr::Unique<u8>).0: std::ptr::NonNull<u8>);
_32 = copy _34 as *const u8 (Transmute);
StorageLive(_33);
_33 = copy (((*_27).0: std::vec::Vec<u8>).1: usize);
switchInt(UbChecks) -> [0: bb21, otherwise: bb19];
}
bb5: {
@ -120,18 +311,27 @@
StorageLive(_17);
_20 = const chained_conditions::promoted[0];
_17 = &(*_20);
StorageLive(_29);
StorageLive(_30);
_29 = copy (*_15);
_30 = copy (*_17);
_14 = <String as PartialEq<str>>::eq(move _29, move _30) -> [return: bb20, unwind unreachable];
StorageLive(_52);
StorageLive(_53);
_52 = copy (*_15);
_53 = copy (*_17);
StorageLive(_54);
StorageLive(_60);
StorageLive(_55);
StorageLive(_59);
StorageLive(_57);
_59 = copy ((((((*_52).0: std::vec::Vec<u8>).0: alloc::raw_vec::RawVec<u8>).0: alloc::raw_vec::RawVecInner).0: std::ptr::Unique<u8>).0: std::ptr::NonNull<u8>);
_57 = copy _59 as *const u8 (Transmute);
StorageLive(_58);
_58 = copy (((*_52).0: std::vec::Vec<u8>).1: usize);
switchInt(UbChecks) -> [0: bb29, otherwise: bb27];
}
bb7: {
StorageDead(_5);
StorageDead(_6);
- goto -> bb18;
+ goto -> bb21;
+ goto -> bb39;
}
bb8: {
@ -154,14 +354,14 @@
StorageDead(_13);
_1 = chained_conditions::BacktraceStyle::Short;
- goto -> bb18;
+ goto -> bb23;
+ goto -> bb37;
}
bb10: {
StorageDead(_12);
StorageDead(_13);
- goto -> bb18;
+ goto -> bb21;
+ goto -> bb39;
}
bb11: {
@ -206,33 +406,223 @@
}
bb19: {
StorageLive(_36);
_36 = copy _34 as *mut () (Transmute);
_35 = std::slice::from_raw_parts::precondition_check(move _36, const <u8 as std::mem::SizedTypeProperties>::SIZE, const <u8 as std::mem::SizedTypeProperties>::ALIGN, copy _33) -> [return: bb20, unwind unreachable];
}
bb20: {
StorageDead(_36);
goto -> bb21;
}
bb21: {
StorageLive(_37);
_37 = *const [u8] from (copy _32, copy _33);
_30 = &(*_37);
StorageDead(_37);
StorageDead(_33);
StorageDead(_32);
StorageDead(_34);
_29 = copy _30 as &str (Transmute);
StorageDead(_30);
StorageLive(_39);
StorageLive(_41);
_39 = copy _29 as &[u8] (Transmute);
_41 = copy _28 as &[u8] (Transmute);
StorageLive(_45);
StorageLive(_50);
StorageLive(_51);
StorageLive(_42);
StorageLive(_43);
_43 = PtrMetadata(copy _39);
StorageLive(_44);
_44 = PtrMetadata(copy _41);
_42 = Ne(move _43, move _44);
switchInt(move _42) -> [0: bb24, otherwise: bb23];
}
bb22: {
StorageDead(_51);
StorageDead(_50);
StorageDead(_45);
StorageDead(_41);
StorageDead(_39);
StorageDead(_35);
StorageDead(_29);
StorageDead(_28);
StorageDead(_27);
switchInt(move _7) -> [0: bb6, otherwise: bb5];
}
bb20: {
StorageDead(_30);
StorageDead(_29);
bb23: {
StorageDead(_44);
StorageDead(_43);
_7 = const false;
StorageDead(_42);
- goto -> bb22;
+ goto -> bb35;
}
bb24: {
StorageDead(_44);
StorageDead(_43);
StorageDead(_42);
StorageLive(_49);
_49 = &raw const (*_39);
_45 = std::intrinsics::size_of_val::<[u8]>(move _49) -> [return: bb26, unwind unreachable];
}
bb25: {
StorageDead(_48);
StorageDead(_47);
_7 = Eq(move _46, const 0_i32);
StorageDead(_46);
goto -> bb22;
}
bb26: {
StorageDead(_49);
StorageLive(_46);
StorageLive(_47);
_50 = &raw const (*_39);
_47 = copy _50 as *const u8 (PtrToPtr);
StorageLive(_48);
_51 = &raw const (*_41);
_48 = copy _51 as *const u8 (PtrToPtr);
_46 = compare_bytes(move _47, move _48, move _45) -> [return: bb25, unwind unreachable];
}
bb27: {
StorageLive(_61);
_61 = copy _59 as *mut () (Transmute);
_60 = std::slice::from_raw_parts::precondition_check(move _61, const <u8 as std::mem::SizedTypeProperties>::SIZE, const <u8 as std::mem::SizedTypeProperties>::ALIGN, copy _58) -> [return: bb28, unwind unreachable];
}
bb28: {
StorageDead(_61);
goto -> bb29;
}
bb29: {
StorageLive(_62);
_62 = *const [u8] from (copy _57, copy _58);
_55 = &(*_62);
StorageDead(_62);
StorageDead(_58);
StorageDead(_57);
StorageDead(_59);
_54 = copy _55 as &str (Transmute);
StorageDead(_55);
StorageLive(_64);
StorageLive(_66);
_64 = copy _54 as &[u8] (Transmute);
_66 = copy _53 as &[u8] (Transmute);
StorageLive(_70);
StorageLive(_75);
StorageLive(_76);
StorageLive(_67);
StorageLive(_68);
_68 = PtrMetadata(copy _64);
StorageLive(_69);
_69 = PtrMetadata(copy _66);
_67 = Ne(move _68, move _69);
switchInt(move _67) -> [0: bb32, otherwise: bb31];
}
bb30: {
StorageDead(_76);
StorageDead(_75);
StorageDead(_70);
StorageDead(_66);
StorageDead(_64);
StorageDead(_60);
StorageDead(_54);
StorageDead(_53);
StorageDead(_52);
switchInt(move _14) -> [0: bb9, otherwise: bb8];
}
bb31: {
StorageDead(_69);
StorageDead(_68);
_14 = const false;
StorageDead(_67);
- goto -> bb30;
+ goto -> bb36;
}
bb32: {
StorageDead(_69);
StorageDead(_68);
StorageDead(_67);
StorageLive(_74);
_74 = &raw const (*_64);
_70 = std::intrinsics::size_of_val::<[u8]>(move _74) -> [return: bb34, unwind unreachable];
}
bb33: {
StorageDead(_73);
StorageDead(_72);
_14 = Eq(move _71, const 0_i32);
StorageDead(_71);
goto -> bb30;
}
bb34: {
StorageDead(_74);
StorageLive(_71);
StorageLive(_72);
_75 = &raw const (*_64);
_72 = copy _75 as *const u8 (PtrToPtr);
StorageLive(_73);
_76 = &raw const (*_66);
_73 = copy _76 as *const u8 (PtrToPtr);
_71 = compare_bytes(move _72, move _73, move _70) -> [return: bb33, unwind unreachable];
+ }
+
+ bb21: {
+ bb35: {
+ StorageDead(_51);
+ StorageDead(_50);
+ StorageDead(_45);
+ StorageDead(_41);
+ StorageDead(_39);
+ StorageDead(_35);
+ StorageDead(_29);
+ StorageDead(_28);
+ StorageDead(_27);
+ goto -> bb6;
+ }
+
+ bb36: {
+ StorageDead(_76);
+ StorageDead(_75);
+ StorageDead(_70);
+ StorageDead(_66);
+ StorageDead(_64);
+ StorageDead(_60);
+ StorageDead(_54);
+ StorageDead(_53);
+ StorageDead(_52);
+ goto -> bb9;
+ }
+
+ bb37: {
+ _24 = discriminant(_2);
+ switchInt(move _24) -> [1: bb22, otherwise: bb15];
+ switchInt(move _24) -> [1: bb38, otherwise: bb15];
+ }
+
+ bb22: {
+ goto -> bb15;
+ }
+
+ bb23: {
+ _24 = discriminant(_2);
+ switchInt(move _24) -> [1: bb24, otherwise: bb15];
+ }
+
+ bb24: {
+ bb38: {
+ goto -> bb17;
+ }
+
+ bb39: {
+ _24 = discriminant(_2);
+ switchInt(move _24) -> [1: bb40, otherwise: bb15];
+ }
+
+ bb40: {
+ goto -> bb15;
}
}

View file

@ -43,10 +43,192 @@
scope 4 (inlined std::cmp::impls::<impl PartialEq<&str> for &String>::eq) {
let mut _27: &std::string::String;
let mut _28: &str;
scope 5 (inlined <String as PartialEq<str>>::eq) {
scope 6 (inlined #[track_caller] <String as Index<RangeFull>>::index) {
let _29: &str;
scope 7 (inlined String::as_str) {
let _30: &[u8];
let mut _31: &std::vec::Vec<u8>;
scope 8 (inlined Vec::<u8>::as_slice) {
let mut _32: *const u8;
let mut _33: usize;
scope 9 (inlined Vec::<u8>::as_ptr) {
scope 10 (inlined alloc::raw_vec::RawVec::<u8>::ptr) {
scope 11 (inlined alloc::raw_vec::RawVecInner::ptr::<u8>) {
scope 12 (inlined alloc::raw_vec::RawVecInner::non_null::<u8>) {
let mut _34: std::ptr::NonNull<u8>;
scope 13 (inlined Unique::<u8>::cast::<u8>) {
scope 14 (inlined NonNull::<u8>::cast::<u8>) {
scope 15 (inlined NonNull::<u8>::as_ptr) {
}
}
}
scope 16 (inlined Unique::<u8>::as_non_null_ptr) {
}
}
scope 17 (inlined NonNull::<u8>::as_ptr) {
}
}
}
}
scope 18 (inlined #[track_caller] std::slice::from_raw_parts::<'_, u8>) {
let _35: ();
let mut _36: *mut ();
let _37: *const [u8];
scope 19 (inlined ub_checks::check_language_ub) {
scope 20 (inlined ub_checks::check_language_ub::runtime) {
}
}
scope 21 (inlined std::mem::size_of::<u8>) {
}
scope 22 (inlined std::mem::align_of::<u8>) {
}
scope 23 (inlined slice_from_raw_parts::<u8>) {
scope 24 (inlined std::ptr::from_raw_parts::<[u8], u8>) {
}
}
}
}
scope 25 (inlined from_utf8_unchecked) {
}
}
scope 26 (inlined #[track_caller] core::str::traits::<impl SliceIndex<str> for RangeFull>::index) {
}
}
scope 27 (inlined #[track_caller] core::str::traits::<impl Index<RangeFull> for str>::index) {
scope 28 (inlined #[track_caller] core::str::traits::<impl SliceIndex<str> for RangeFull>::index) {
}
}
scope 29 (inlined core::str::traits::<impl PartialEq for str>::eq) {
let mut _38: &&[u8];
let _39: &[u8];
let mut _40: &&[u8];
let _41: &[u8];
scope 30 (inlined core::str::<impl str>::as_bytes) {
}
scope 31 (inlined core::str::<impl str>::as_bytes) {
}
scope 32 (inlined std::cmp::impls::<impl PartialEq for &[u8]>::eq) {
scope 33 (inlined core::slice::cmp::<impl PartialEq for [u8]>::eq) {
scope 34 (inlined <[u8] as core::slice::cmp::SlicePartialEq<u8>>::equal) {
let mut _42: bool;
let mut _43: usize;
let mut _44: usize;
let _45: usize;
let mut _46: i32;
let mut _47: *const u8;
let mut _48: *const u8;
scope 35 {
scope 37 (inlined core::slice::<impl [u8]>::as_ptr) {
let mut _50: *const [u8];
}
scope 38 (inlined core::slice::<impl [u8]>::as_ptr) {
let mut _51: *const [u8];
}
}
scope 36 (inlined std::mem::size_of_val::<[u8]>) {
let mut _49: *const [u8];
}
}
}
}
}
}
}
scope 5 (inlined std::cmp::impls::<impl PartialEq<&str> for &String>::eq) {
let mut _29: &std::string::String;
let mut _30: &str;
scope 39 (inlined std::cmp::impls::<impl PartialEq<&str> for &String>::eq) {
let mut _52: &std::string::String;
let mut _53: &str;
scope 40 (inlined <String as PartialEq<str>>::eq) {
scope 41 (inlined #[track_caller] <String as Index<RangeFull>>::index) {
let _54: &str;
scope 42 (inlined String::as_str) {
let _55: &[u8];
let mut _56: &std::vec::Vec<u8>;
scope 43 (inlined Vec::<u8>::as_slice) {
let mut _57: *const u8;
let mut _58: usize;
scope 44 (inlined Vec::<u8>::as_ptr) {
scope 45 (inlined alloc::raw_vec::RawVec::<u8>::ptr) {
scope 46 (inlined alloc::raw_vec::RawVecInner::ptr::<u8>) {
scope 47 (inlined alloc::raw_vec::RawVecInner::non_null::<u8>) {
let mut _59: std::ptr::NonNull<u8>;
scope 48 (inlined Unique::<u8>::cast::<u8>) {
scope 49 (inlined NonNull::<u8>::cast::<u8>) {
scope 50 (inlined NonNull::<u8>::as_ptr) {
}
}
}
scope 51 (inlined Unique::<u8>::as_non_null_ptr) {
}
}
scope 52 (inlined NonNull::<u8>::as_ptr) {
}
}
}
}
scope 53 (inlined #[track_caller] std::slice::from_raw_parts::<'_, u8>) {
let _60: ();
let mut _61: *mut ();
let _62: *const [u8];
scope 54 (inlined ub_checks::check_language_ub) {
scope 55 (inlined ub_checks::check_language_ub::runtime) {
}
}
scope 56 (inlined std::mem::size_of::<u8>) {
}
scope 57 (inlined std::mem::align_of::<u8>) {
}
scope 58 (inlined slice_from_raw_parts::<u8>) {
scope 59 (inlined std::ptr::from_raw_parts::<[u8], u8>) {
}
}
}
}
scope 60 (inlined from_utf8_unchecked) {
}
}
scope 61 (inlined #[track_caller] core::str::traits::<impl SliceIndex<str> for RangeFull>::index) {
}
}
scope 62 (inlined #[track_caller] core::str::traits::<impl Index<RangeFull> for str>::index) {
scope 63 (inlined #[track_caller] core::str::traits::<impl SliceIndex<str> for RangeFull>::index) {
}
}
scope 64 (inlined core::str::traits::<impl PartialEq for str>::eq) {
let mut _63: &&[u8];
let _64: &[u8];
let mut _65: &&[u8];
let _66: &[u8];
scope 65 (inlined core::str::<impl str>::as_bytes) {
}
scope 66 (inlined core::str::<impl str>::as_bytes) {
}
scope 67 (inlined std::cmp::impls::<impl PartialEq for &[u8]>::eq) {
scope 68 (inlined core::slice::cmp::<impl PartialEq for [u8]>::eq) {
scope 69 (inlined <[u8] as core::slice::cmp::SlicePartialEq<u8>>::equal) {
let mut _67: bool;
let mut _68: usize;
let mut _69: usize;
let _70: usize;
let mut _71: i32;
let mut _72: *const u8;
let mut _73: *const u8;
scope 70 {
scope 72 (inlined core::slice::<impl [u8]>::as_ptr) {
let mut _75: *const [u8];
}
scope 73 (inlined core::slice::<impl [u8]>::as_ptr) {
let mut _76: *const [u8];
}
}
scope 71 (inlined std::mem::size_of_val::<[u8]>) {
let mut _74: *const [u8];
}
}
}
}
}
}
}
bb0: {
@ -71,7 +253,7 @@
bb3: {
_1 = chained_conditions::BacktraceStyle::Off;
- goto -> bb19;
+ goto -> bb27;
+ goto -> bb41;
}
bb4: {
@ -89,7 +271,16 @@
StorageLive(_28);
_27 = copy (*_8);
_28 = copy (*_10);
_7 = <String as PartialEq<str>>::eq(move _27, move _28) -> [return: bb23, unwind: bb22];
StorageLive(_29);
StorageLive(_35);
StorageLive(_30);
StorageLive(_34);
StorageLive(_32);
_34 = copy ((((((*_27).0: std::vec::Vec<u8>).0: alloc::raw_vec::RawVec<u8>).0: alloc::raw_vec::RawVecInner).0: std::ptr::Unique<u8>).0: std::ptr::NonNull<u8>);
_32 = copy _34 as *const u8 (Transmute);
StorageLive(_33);
_33 = copy (((*_27).0: std::vec::Vec<u8>).1: usize);
switchInt(UbChecks) -> [0: bb25, otherwise: bb23];
}
bb5: {
@ -120,18 +311,27 @@
StorageLive(_17);
_20 = const chained_conditions::promoted[0];
_17 = &(*_20);
StorageLive(_29);
StorageLive(_30);
_29 = copy (*_15);
_30 = copy (*_17);
_14 = <String as PartialEq<str>>::eq(move _29, move _30) -> [return: bb24, unwind: bb22];
StorageLive(_52);
StorageLive(_53);
_52 = copy (*_15);
_53 = copy (*_17);
StorageLive(_54);
StorageLive(_60);
StorageLive(_55);
StorageLive(_59);
StorageLive(_57);
_59 = copy ((((((*_52).0: std::vec::Vec<u8>).0: alloc::raw_vec::RawVec<u8>).0: alloc::raw_vec::RawVecInner).0: std::ptr::Unique<u8>).0: std::ptr::NonNull<u8>);
_57 = copy _59 as *const u8 (Transmute);
StorageLive(_58);
_58 = copy (((*_52).0: std::vec::Vec<u8>).1: usize);
switchInt(UbChecks) -> [0: bb33, otherwise: bb31];
}
bb7: {
StorageDead(_5);
StorageDead(_6);
- goto -> bb19;
+ goto -> bb25;
+ goto -> bb43;
}
bb8: {
@ -154,14 +354,14 @@
StorageDead(_13);
_1 = chained_conditions::BacktraceStyle::Short;
- goto -> bb19;
+ goto -> bb27;
+ goto -> bb41;
}
bb10: {
StorageDead(_12);
StorageDead(_13);
- goto -> bb19;
+ goto -> bb25;
+ goto -> bb43;
}
bb11: {
@ -223,33 +423,223 @@
}
bb23: {
StorageLive(_36);
_36 = copy _34 as *mut () (Transmute);
_35 = std::slice::from_raw_parts::precondition_check(move _36, const <u8 as std::mem::SizedTypeProperties>::SIZE, const <u8 as std::mem::SizedTypeProperties>::ALIGN, copy _33) -> [return: bb24, unwind unreachable];
}
bb24: {
StorageDead(_36);
goto -> bb25;
}
bb25: {
StorageLive(_37);
_37 = *const [u8] from (copy _32, copy _33);
_30 = &(*_37);
StorageDead(_37);
StorageDead(_33);
StorageDead(_32);
StorageDead(_34);
_29 = copy _30 as &str (Transmute);
StorageDead(_30);
StorageLive(_39);
StorageLive(_41);
_39 = copy _29 as &[u8] (Transmute);
_41 = copy _28 as &[u8] (Transmute);
StorageLive(_45);
StorageLive(_50);
StorageLive(_51);
StorageLive(_42);
StorageLive(_43);
_43 = PtrMetadata(copy _39);
StorageLive(_44);
_44 = PtrMetadata(copy _41);
_42 = Ne(move _43, move _44);
switchInt(move _42) -> [0: bb28, otherwise: bb27];
}
bb26: {
StorageDead(_51);
StorageDead(_50);
StorageDead(_45);
StorageDead(_41);
StorageDead(_39);
StorageDead(_35);
StorageDead(_29);
StorageDead(_28);
StorageDead(_27);
switchInt(move _7) -> [0: bb6, otherwise: bb5];
}
bb24: {
StorageDead(_30);
StorageDead(_29);
bb27: {
StorageDead(_44);
StorageDead(_43);
_7 = const false;
StorageDead(_42);
- goto -> bb26;
+ goto -> bb39;
}
bb28: {
StorageDead(_44);
StorageDead(_43);
StorageDead(_42);
StorageLive(_49);
_49 = &raw const (*_39);
_45 = std::intrinsics::size_of_val::<[u8]>(move _49) -> [return: bb30, unwind unreachable];
}
bb29: {
StorageDead(_48);
StorageDead(_47);
_7 = Eq(move _46, const 0_i32);
StorageDead(_46);
goto -> bb26;
}
bb30: {
StorageDead(_49);
StorageLive(_46);
StorageLive(_47);
_50 = &raw const (*_39);
_47 = copy _50 as *const u8 (PtrToPtr);
StorageLive(_48);
_51 = &raw const (*_41);
_48 = copy _51 as *const u8 (PtrToPtr);
_46 = compare_bytes(move _47, move _48, move _45) -> [return: bb29, unwind unreachable];
}
bb31: {
StorageLive(_61);
_61 = copy _59 as *mut () (Transmute);
_60 = std::slice::from_raw_parts::precondition_check(move _61, const <u8 as std::mem::SizedTypeProperties>::SIZE, const <u8 as std::mem::SizedTypeProperties>::ALIGN, copy _58) -> [return: bb32, unwind unreachable];
}
bb32: {
StorageDead(_61);
goto -> bb33;
}
bb33: {
StorageLive(_62);
_62 = *const [u8] from (copy _57, copy _58);
_55 = &(*_62);
StorageDead(_62);
StorageDead(_58);
StorageDead(_57);
StorageDead(_59);
_54 = copy _55 as &str (Transmute);
StorageDead(_55);
StorageLive(_64);
StorageLive(_66);
_64 = copy _54 as &[u8] (Transmute);
_66 = copy _53 as &[u8] (Transmute);
StorageLive(_70);
StorageLive(_75);
StorageLive(_76);
StorageLive(_67);
StorageLive(_68);
_68 = PtrMetadata(copy _64);
StorageLive(_69);
_69 = PtrMetadata(copy _66);
_67 = Ne(move _68, move _69);
switchInt(move _67) -> [0: bb36, otherwise: bb35];
}
bb34: {
StorageDead(_76);
StorageDead(_75);
StorageDead(_70);
StorageDead(_66);
StorageDead(_64);
StorageDead(_60);
StorageDead(_54);
StorageDead(_53);
StorageDead(_52);
switchInt(move _14) -> [0: bb9, otherwise: bb8];
}
bb35: {
StorageDead(_69);
StorageDead(_68);
_14 = const false;
StorageDead(_67);
- goto -> bb34;
+ goto -> bb40;
}
bb36: {
StorageDead(_69);
StorageDead(_68);
StorageDead(_67);
StorageLive(_74);
_74 = &raw const (*_64);
_70 = std::intrinsics::size_of_val::<[u8]>(move _74) -> [return: bb38, unwind unreachable];
}
bb37: {
StorageDead(_73);
StorageDead(_72);
_14 = Eq(move _71, const 0_i32);
StorageDead(_71);
goto -> bb34;
}
bb38: {
StorageDead(_74);
StorageLive(_71);
StorageLive(_72);
_75 = &raw const (*_64);
_72 = copy _75 as *const u8 (PtrToPtr);
StorageLive(_73);
_76 = &raw const (*_66);
_73 = copy _76 as *const u8 (PtrToPtr);
_71 = compare_bytes(move _72, move _73, move _70) -> [return: bb37, unwind unreachable];
+ }
+
+ bb25: {
+ bb39: {
+ StorageDead(_51);
+ StorageDead(_50);
+ StorageDead(_45);
+ StorageDead(_41);
+ StorageDead(_39);
+ StorageDead(_35);
+ StorageDead(_29);
+ StorageDead(_28);
+ StorageDead(_27);
+ goto -> bb6;
+ }
+
+ bb40: {
+ StorageDead(_76);
+ StorageDead(_75);
+ StorageDead(_70);
+ StorageDead(_66);
+ StorageDead(_64);
+ StorageDead(_60);
+ StorageDead(_54);
+ StorageDead(_53);
+ StorageDead(_52);
+ goto -> bb9;
+ }
+
+ bb41: {
+ _24 = discriminant(_2);
+ switchInt(move _24) -> [1: bb26, otherwise: bb16];
+ switchInt(move _24) -> [1: bb42, otherwise: bb16];
+ }
+
+ bb26: {
+ goto -> bb16;
+ }
+
+ bb27: {
+ _24 = discriminant(_2);
+ switchInt(move _24) -> [1: bb28, otherwise: bb16];
+ }
+
+ bb28: {
+ bb42: {
+ goto -> bb18;
+ }
+
+ bb43: {
+ _24 = discriminant(_2);
+ switchInt(move _24) -> [1: bb44, otherwise: bb16];
+ }
+
+ bb44: {
+ goto -> bb16;
}
}

View file

@ -69,6 +69,7 @@ fn check_msg(body: &Body, expected: &str) {
})
.unwrap()
}
Operand::RuntimeChecks(_) => panic!("unexpected runtime checks"),
};
let ConstantKind::Allocated(alloc) = msg_const.const_.kind() else {
unreachable!()