Replace Rvalue::NullaryOp with a variant in mir::ConstValue.

This commit is contained in:
Camille Gillot 2025-11-09 02:57:31 +00:00
parent 0208ee09be
commit 1a227bd47f
53 changed files with 210 additions and 335 deletions

View file

@ -1560,10 +1560,6 @@ impl<'a, 'tcx> MirBorrowckCtxt<'a, '_, 'tcx> {
self.consume_operand(location, (operand2, span), state);
}
Rvalue::NullaryOp(_op) => {
// nullary ops take no dynamic input; no borrowck effect.
}
Rvalue::Aggregate(aggregate_kind, operands) => {
// We need to report back the list of mutable upvars that were
// moved into the closure and subsequently used by the closure,

View file

@ -314,8 +314,6 @@ impl<'a, 'tcx> LoanInvalidationsGenerator<'a, 'tcx> {
self.consume_operand(location, operand2);
}
Rvalue::NullaryOp(_op) => {}
Rvalue::Aggregate(_, operands) => {
for operand in operands {
self.consume_operand(location, operand);

View file

@ -1046,8 +1046,6 @@ impl<'a, 'tcx> Visitor<'tcx> for TypeChecker<'a, 'tcx> {
}
}
&Rvalue::NullaryOp(NullOp::RuntimeChecks(_)) => {}
Rvalue::ShallowInitBox(_operand, ty) => {
let trait_ref =
ty::TraitRef::new(tcx, tcx.require_lang_item(LangItem::Sized, span), [*ty]);
@ -2276,7 +2274,6 @@ impl<'a, 'tcx> TypeChecker<'a, 'tcx> {
| Rvalue::Cast(..)
| Rvalue::ShallowInitBox(..)
| Rvalue::BinaryOp(..)
| Rvalue::NullaryOp(..)
| Rvalue::CopyForDeref(..)
| Rvalue::UnaryOp(..)
| Rvalue::Discriminant(..)

View file

@ -10,7 +10,7 @@ use rustc_data_structures::profiling::SelfProfilerRef;
use rustc_index::IndexVec;
use rustc_middle::ty::TypeVisitableExt;
use rustc_middle::ty::adjustment::PointerCoercion;
use rustc_middle::ty::layout::{FnAbiOf, HasTypingEnv};
use rustc_middle::ty::layout::FnAbiOf;
use rustc_middle::ty::print::with_no_trimmed_paths;
use rustc_session::config::OutputFilenames;
use rustc_span::Symbol;
@ -853,17 +853,6 @@ fn codegen_stmt<'tcx>(fx: &mut FunctionCx<'_, '_, 'tcx>, cur_block: Block, stmt:
fx.bcx.ins().nop();
}
}
Rvalue::NullaryOp(ref null_op) => {
assert!(lval.layout().ty.is_sized(fx.tcx, fx.typing_env()));
let val = match null_op {
NullOp::RuntimeChecks(kind) => kind.value(fx.tcx.sess),
};
let val = CValue::by_val(
fx.bcx.ins().iconst(types::I8, i64::from(val)),
fx.layout_of(fx.tcx.types.bool),
);
lval.write_cvalue(fx, val);
}
Rvalue::Aggregate(ref kind, ref operands)
if matches!(**kind, AggregateKind::RawPtr(..)) =>
{

View file

@ -215,6 +215,11 @@ pub(crate) fn codegen_const_value<'tcx>(
CValue::by_val(val, layout)
}
},
ConstValue::RuntimeChecks(checks) => {
let int = checks.value(fx.tcx.sess);
let int = ScalarInt::try_from_uint(int, Size::from_bits(1)).unwrap();
return CValue::const_val(fx, layout, int);
}
ConstValue::Indirect { alloc_id, offset } => CValue::by_ref(
Pointer::new(pointer_for_allocation(fx, alloc_id))
.offset_i64(fx, i64::try_from(offset.bytes()).unwrap()),

View file

@ -165,6 +165,14 @@ impl<'a, 'tcx, V: CodegenObject> OperandRef<'tcx, V> {
let llval = bx.scalar_to_backend(x, scalar, bx.immediate_backend_type(layout));
OperandValue::Immediate(llval)
}
mir::ConstValue::RuntimeChecks(checks) => {
let BackendRepr::Scalar(scalar) = layout.backend_repr else {
bug!("from_const: invalid ByVal layout: {:#?}", layout);
};
let x = Scalar::from_bool(checks.value(bx.tcx().sess));
let llval = bx.scalar_to_backend(x, scalar, bx.immediate_backend_type(layout));
OperandValue::Immediate(llval)
}
ConstValue::ZeroSized => return OperandRef::zero_sized(layout),
ConstValue::Slice { alloc_id, meta } => {
let BackendRepr::ScalarPair(a_scalar, _) = layout.backend_repr else {

View file

@ -619,21 +619,6 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
}
}
mir::Rvalue::NullaryOp(ref null_op) => {
let val = match null_op {
mir::NullOp::RuntimeChecks(kind) => {
let val = kind.value(bx.tcx().sess);
bx.cx().const_bool(val)
}
};
let tcx = self.cx.tcx();
OperandRef {
val: OperandValue::Immediate(val),
layout: self.cx.layout_of(null_op.ty(tcx)),
move_annotation: None,
}
}
mir::Rvalue::ThreadLocalRef(def_id) => {
assert!(bx.cx().tcx().is_static(def_id));
let layout = bx.layout_of(bx.cx().tcx().static_ptr_ty(def_id, bx.typing_env()));

View file

@ -645,7 +645,6 @@ impl<'tcx> Visitor<'tcx> for Checker<'_, 'tcx> {
Rvalue::Cast(_, _, _) => {}
Rvalue::NullaryOp(NullOp::RuntimeChecks(_)) => {}
Rvalue::ShallowInitBox(_, _) => {}
Rvalue::UnaryOp(op, operand) => {

View file

@ -230,9 +230,7 @@ where
F: FnMut(Local) -> bool,
{
match rvalue {
Rvalue::ThreadLocalRef(_) | Rvalue::NullaryOp(..) => {
Q::in_any_value_of_ty(cx, rvalue.ty(cx.body, cx.tcx))
}
Rvalue::ThreadLocalRef(_) => Q::in_any_value_of_ty(cx, rvalue.ty(cx.body, cx.tcx)),
Rvalue::Discriminant(place) => in_place::<Q, _>(cx, in_local, place.as_ref()),

View file

@ -198,7 +198,6 @@ where
| mir::Rvalue::ThreadLocalRef(..)
| mir::Rvalue::Repeat(..)
| mir::Rvalue::BinaryOp(..)
| mir::Rvalue::NullaryOp(..)
| mir::Rvalue::UnaryOp(..)
| mir::Rvalue::Discriminant(..)
| mir::Rvalue::Aggregate(..)

View file

@ -892,6 +892,10 @@ impl<'tcx, M: Machine<'tcx>> InterpCx<'tcx, M> {
let ptr = Pointer::new(CtfeProvenance::from(alloc_id).as_immutable(), Size::ZERO);
Immediate::new_slice(self.global_root_pointer(ptr)?.into(), meta, self)
}
mir::ConstValue::RuntimeChecks(checks) => {
let val = M::runtime_checks(self, checks)?;
Scalar::from_bool(val).into()
}
};
interp_ok(OpTy { op: Operand::Immediate(imm), layout })
}

View file

@ -1,7 +1,6 @@
use either::Either;
use rustc_abi::Size;
use rustc_apfloat::{Float, FloatConvert};
use rustc_middle::mir::NullOp;
use rustc_middle::mir::interpret::{InterpResult, PointerArithmetic, Scalar};
use rustc_middle::ty::layout::TyAndLayout;
use rustc_middle::ty::{self, FloatTy, ScalarInt};
@ -505,11 +504,4 @@ impl<'tcx, M: Machine<'tcx>> InterpCx<'tcx, M> {
}
}
}
pub fn nullary_op(&self, null_op: NullOp) -> InterpResult<'tcx, ImmTy<'tcx, M::Provenance>> {
use rustc_middle::mir::NullOp::*;
interp_ok(match null_op {
RuntimeChecks(r) => ImmTy::from_bool(M::runtime_checks(self, r)?, *self.tcx),
})
}
}

View file

@ -203,11 +203,6 @@ impl<'tcx, M: Machine<'tcx>> InterpCx<'tcx, M> {
self.write_immediate(*result, &dest)?;
}
NullaryOp(null_op) => {
let val = self.nullary_op(null_op)?;
self.write_immediate(*val, &dest)?;
}
Aggregate(box ref kind, ref operands) => {
self.write_aggregate(kind, operands, &dest)?;
}

View file

@ -68,6 +68,10 @@ pub enum ConstValue {
/// Offset into `alloc`
offset: Size,
},
/// Special constants whose value depends on the evaluation context. Their value depends on a
/// flag on the crate being codegenned.
RuntimeChecks(RuntimeChecks),
}
#[cfg(target_pointer_width = "64")]
@ -77,7 +81,10 @@ impl ConstValue {
#[inline]
pub fn try_to_scalar(&self) -> Option<Scalar> {
match *self {
ConstValue::Indirect { .. } | ConstValue::Slice { .. } | ConstValue::ZeroSized => None,
ConstValue::Indirect { .. }
| ConstValue::Slice { .. }
| ConstValue::ZeroSized
| ConstValue::RuntimeChecks(_) => None,
ConstValue::Scalar(val) => Some(val),
}
}
@ -133,7 +140,7 @@ impl ConstValue {
tcx: TyCtxt<'tcx>,
) -> Option<&'tcx [u8]> {
let (alloc_id, start, len) = match self {
ConstValue::Scalar(_) | ConstValue::ZeroSized => {
ConstValue::Scalar(_) | ConstValue::ZeroSized | ConstValue::RuntimeChecks(_) => {
bug!("`try_get_slice_bytes` on non-slice constant")
}
&ConstValue::Slice { alloc_id, meta } => (alloc_id, 0, meta),
@ -185,7 +192,9 @@ impl ConstValue {
/// Can return `true` even if there is no provenance.
pub fn may_have_provenance(&self, tcx: TyCtxt<'_>, size: Size) -> bool {
match *self {
ConstValue::ZeroSized | ConstValue::Scalar(Scalar::Int(_)) => return false,
ConstValue::ZeroSized
| ConstValue::Scalar(Scalar::Int(_))
| ConstValue::RuntimeChecks(_) => return false,
ConstValue::Scalar(Scalar::Ptr(..)) => return true,
// It's hard to find out the part of the allocation we point to;
// just conservatively check everything.
@ -224,6 +233,29 @@ impl ConstValue {
}
}
#[derive(Copy, Clone, Debug, PartialEq, Eq, TyEncodable, TyDecodable, Hash, HashStable)]
pub enum RuntimeChecks {
/// Returns whether we should perform some UB-checking at runtime.
/// See the `ub_checks` intrinsic docs for details.
UbChecks,
/// Returns whether we should perform contract-checking at runtime.
/// See the `contract_checks` intrinsic docs for details.
ContractChecks,
/// Returns whether we should perform some overflow-checking at runtime.
/// See the `overflow_checks` intrinsic docs for details.
OverflowChecks,
}
impl RuntimeChecks {
pub fn value(self, sess: &rustc_session::Session) -> bool {
match self {
Self::UbChecks => sess.ub_checks(),
Self::ContractChecks => sess.contract_checks(),
Self::OverflowChecks => sess.overflow_checks(),
}
}
}
///////////////////////////////////////////////////////////////////////////
/// Constants
@ -517,6 +549,7 @@ impl<'tcx> Const<'tcx> {
ConstValue::Slice { .. }
| ConstValue::ZeroSized
| ConstValue::Scalar(_)
| ConstValue::RuntimeChecks(_)
| ConstValue::Indirect { .. },
_,
) => true,

View file

@ -607,6 +607,9 @@ impl<'tcx> Body<'tcx> {
typing_env,
crate::ty::EarlyBinder::bind(constant.const_),
);
if let Const::Val(ConstValue::RuntimeChecks(check), _) = mono_literal {
return Some(check.value(tcx.sess) as u128);
}
mono_literal.try_eval_bits(tcx, typing_env)
};
@ -649,9 +652,6 @@ impl<'tcx> Body<'tcx> {
}
match rvalue {
Rvalue::NullaryOp(NullOp::RuntimeChecks(kind)) => {
Some((kind.value(tcx.sess) as u128, targets))
}
Rvalue::Use(Operand::Constant(constant)) => {
let bits = eval_mono_const(constant)?;
Some((bits, targets))

View file

@ -1097,15 +1097,6 @@ impl<'tcx> Debug for Rvalue<'tcx> {
BinaryOp(ref op, box (ref a, ref b)) => write!(fmt, "{op:?}({a:?}, {b:?})"),
UnaryOp(ref op, ref a) => write!(fmt, "{op:?}({a:?})"),
Discriminant(ref place) => write!(fmt, "discriminant({place:?})"),
NullaryOp(ref op) => match op {
NullOp::RuntimeChecks(RuntimeChecks::UbChecks) => write!(fmt, "UbChecks()"),
NullOp::RuntimeChecks(RuntimeChecks::ContractChecks) => {
write!(fmt, "ContractChecks()")
}
NullOp::RuntimeChecks(RuntimeChecks::OverflowChecks) => {
write!(fmt, "OverflowChecks()")
}
},
ThreadLocalRef(did) => ty::tls::with(|tcx| {
let muta = tcx.static_mutability(did).unwrap().prefix_str();
write!(fmt, "&/*tls*/ {}{}", muta, tcx.def_path_str(did))
@ -1527,6 +1518,7 @@ pub fn write_allocations<'tcx>(
match val {
ConstValue::Scalar(interpret::Scalar::Ptr(ptr, _)) => Some(ptr.provenance.alloc_id()),
ConstValue::Scalar(interpret::Scalar::Int { .. }) => None,
ConstValue::RuntimeChecks(_) => None,
ConstValue::ZeroSized => None,
ConstValue::Slice { alloc_id, .. } | ConstValue::Indirect { alloc_id, .. } => {
// FIXME: we don't actually want to print all of these, since some are printed nicely directly as values inline in MIR.
@ -1977,6 +1969,7 @@ fn pretty_print_const_value_tcx<'tcx>(
fmt.write_str(&p.into_buffer())?;
return Ok(());
}
(ConstValue::RuntimeChecks(checks), _) => return write!(fmt, "{checks:?}"),
(ConstValue::ZeroSized, ty::FnDef(d, s)) => {
let mut p = FmtPrinter::new(tcx, Namespace::ValueNS);
p.print_alloc_ids = true;

View file

@ -756,7 +756,6 @@ impl<'tcx> Rvalue<'tcx> {
_,
)
| Rvalue::BinaryOp(_, _)
| Rvalue::NullaryOp(_)
| Rvalue::UnaryOp(_, _)
| Rvalue::Discriminant(_)
| Rvalue::Aggregate(_, _)
@ -794,7 +793,6 @@ impl<'tcx> Rvalue<'tcx> {
op.ty(tcx, arg_ty)
}
Rvalue::Discriminant(ref place) => place.ty(local_decls, tcx).ty.discriminant_ty(tcx),
Rvalue::NullaryOp(NullOp::RuntimeChecks(_)) => tcx.types.bool,
Rvalue::Aggregate(ref ak, ref ops) => match **ak {
AggregateKind::Array(ty) => Ty::new_array(tcx, ty, ops.len() as u64),
AggregateKind::Tuple => {
@ -858,14 +856,6 @@ impl BorrowKind {
}
}
impl NullOp {
pub fn ty<'tcx>(&self, tcx: TyCtxt<'tcx>) -> Ty<'tcx> {
match self {
NullOp::RuntimeChecks(_) => tcx.types.bool,
}
}
}
impl<'tcx> UnOp {
pub fn ty(&self, tcx: TyCtxt<'tcx>, arg_ty: Ty<'tcx>) -> Ty<'tcx> {
match self {

View file

@ -1418,9 +1418,6 @@ pub enum Rvalue<'tcx> {
/// matching types and return a value of that type.
BinaryOp(BinOp, Box<(Operand<'tcx>, Operand<'tcx>)>),
/// Computes a value as described by the operation.
NullaryOp(NullOp),
/// Exactly like `BinaryOp`, but less operands.
///
/// Also does two's-complement arithmetic. Negation requires a signed integer or a float;
@ -1561,35 +1558,6 @@ pub enum AggregateKind<'tcx> {
RawPtr(Ty<'tcx>, Mutability),
}
#[derive(Copy, Clone, Debug, PartialEq, Eq, TyEncodable, TyDecodable, Hash, HashStable)]
pub enum NullOp {
/// Returns whether we should perform some checking at runtime.
RuntimeChecks(RuntimeChecks),
}
#[derive(Copy, Clone, Debug, PartialEq, Eq, TyEncodable, TyDecodable, Hash, HashStable)]
pub enum RuntimeChecks {
/// Returns whether we should perform some UB-checking at runtime.
/// See the `ub_checks` intrinsic docs for details.
UbChecks,
/// Returns whether we should perform contract-checking at runtime.
/// See the `contract_checks` intrinsic docs for details.
ContractChecks,
/// Returns whether we should perform some overflow-checking at runtime.
/// See the `overflow_checks` intrinsic docs for details.
OverflowChecks,
}
impl RuntimeChecks {
pub fn value(self, sess: &rustc_session::Session) -> bool {
match self {
Self::UbChecks => sess.ub_checks(),
Self::ContractChecks => sess.contract_checks(),
Self::OverflowChecks => sess.overflow_checks(),
}
}
}
#[derive(Copy, Clone, Debug, PartialEq, Eq, Hash)]
#[derive(HashStable, TyEncodable, TyDecodable, TypeFoldable, TypeVisitable)]
pub enum UnOp {

View file

@ -293,9 +293,9 @@ pub fn reverse_postorder<'a, 'tcx>(
/// reachable.
///
/// Such a traversal is mostly useful because it lets us skip lowering the `false` side
/// of `if <T as Trait>::CONST`, as well as [`NullOp::RuntimeChecks`].
/// of `if <T as Trait>::CONST`, as well as [`ConstValue::RuntimeChecks`].
///
/// [`NullOp::RuntimeChecks`]: rustc_middle::mir::NullOp::RuntimeChecks
/// [`ConstValue::RuntimeChecks`]: rustc_middle::mir::ConstValue::RuntimeChecks
pub fn mono_reachable<'a, 'tcx>(
body: &'a Body<'tcx>,
tcx: TyCtxt<'tcx>,

View file

@ -775,8 +775,6 @@ macro_rules! make_mir_visitor {
);
}
Rvalue::NullaryOp(_op) => {}
Rvalue::Aggregate(kind, operands) => {
let kind = &$($mutability)? **kind;
match kind {
@ -972,10 +970,7 @@ macro_rules! make_mir_visitor {
self.visit_span($(& $mutability)? *span);
match const_ {
Const::Ty(_, ct) => self.visit_ty_const($(&$mutability)? *ct, location),
Const::Val(_, ty) => {
self.visit_ty($(& $mutability)? *ty, TyContext::Location(location));
}
Const::Unevaluated(_, ty) => {
Const::Val(_, ty) | Const::Unevaluated(_, ty) => {
self.visit_ty($(& $mutability)? *ty, TyContext::Location(location));
}
}

View file

@ -243,7 +243,6 @@ TrivialTypeTraversalImpls! {
crate::mir::FakeReadCause,
crate::mir::Local,
crate::mir::MirPhase,
crate::mir::NullOp,
crate::mir::Promoted,
crate::mir::RawPtrKind,
crate::mir::RetagKind,
@ -284,6 +283,7 @@ TrivialTypeTraversalImpls! {
// interners).
TrivialTypeTraversalAndLiftImpls! {
// tidy-alphabetical-start
crate::mir::RuntimeChecks,
crate::ty::BoundTy,
crate::ty::ParamTy,
crate::ty::instance::ReifyReason,

View file

@ -91,7 +91,6 @@ where
| Rvalue::ThreadLocalRef(..)
| Rvalue::Repeat(..)
| Rvalue::BinaryOp(..)
| Rvalue::NullaryOp(..)
| Rvalue::UnaryOp(..)
| Rvalue::Discriminant(..)
| Rvalue::Aggregate(..)

View file

@ -448,10 +448,7 @@ impl<'a, 'tcx, F: Fn(Ty<'tcx>) -> bool> MoveDataBuilder<'a, 'tcx, F> {
}
}
Rvalue::CopyForDeref(..) => unreachable!(),
Rvalue::Ref(..)
| Rvalue::RawPtr(..)
| Rvalue::Discriminant(..)
| Rvalue::NullaryOp(NullOp::RuntimeChecks(_)) => {}
Rvalue::Ref(..) | Rvalue::RawPtr(..) | Rvalue::Discriminant(..) => {}
}
}

View file

@ -60,7 +60,7 @@ impl<'b, 'tcx> CostChecker<'b, 'tcx> {
}
impl<'tcx> Visitor<'tcx> for CostChecker<'_, 'tcx> {
fn visit_statement(&mut self, statement: &Statement<'tcx>, location: Location) {
fn visit_statement(&mut self, statement: &Statement<'tcx>, _: Location) {
// Most costs are in rvalues and terminators, not in statements.
match statement.kind {
StatementKind::Intrinsic(ref ndi) => {
@ -69,31 +69,8 @@ impl<'tcx> Visitor<'tcx> for CostChecker<'_, 'tcx> {
NonDivergingIntrinsic::CopyNonOverlapping(..) => CALL_PENALTY,
};
}
_ => self.super_statement(statement, location),
}
}
fn visit_rvalue(&mut self, rvalue: &Rvalue<'tcx>, _location: Location) {
match rvalue {
// FIXME: Should we do the same for `OverflowChecks`?
Rvalue::NullaryOp(NullOp::RuntimeChecks(RuntimeChecks::UbChecks), ..)
if !self
.tcx
.sess
.opts
.unstable_opts
.inline_mir_preserve_debug
.unwrap_or(self.tcx.sess.ub_checks()) =>
{
// If this is in optimized MIR it's because it's used later,
// so if we don't need UB checks this session, give a bonus
// here to offset the cost of the call later.
self.bonus += CALL_PENALTY;
}
// These are essentially constants that didn't end up in an Operand,
// so treat them as also being free.
Rvalue::NullaryOp(..) => {}
_ => self.penalty += INSTR_COST,
StatementKind::Assign(..) => self.penalty += INSTR_COST,
_ => {}
}
}

View file

@ -463,9 +463,6 @@ impl<'a, 'tcx> ConstAnalysis<'a, 'tcx> {
FlatSet::Top => FlatSet::Top,
}
}
Rvalue::NullaryOp(NullOp::RuntimeChecks(_)) => {
return ValueOrPlace::TOP;
}
Rvalue::Discriminant(place) => state.get_discr(place.as_ref(), &self.map),
Rvalue::Use(operand) => return self.handle_operand(operand, state),
Rvalue::CopyForDeref(_) => bug!("`CopyForDeref` in runtime MIR"),

View file

@ -248,7 +248,6 @@ enum Value<'a, 'tcx> {
Discriminant(VnIndex),
// Operations.
NullaryOp(NullOp),
UnaryOp(UnOp, VnIndex),
BinaryOp(BinOp, VnIndex, VnIndex),
Cast {
@ -681,7 +680,6 @@ impl<'body, 'a, 'tcx> VnState<'body, 'a, 'tcx> {
self.ecx.discriminant_for_variant(base.layout.ty, variant).discard_err()?;
discr_value.into()
}
NullaryOp(NullOp::RuntimeChecks(_)) => return None,
UnaryOp(un_op, operand) => {
let operand = self.eval_to_const(operand)?;
let operand = self.ecx.read_immediate(operand).discard_err()?;
@ -1034,7 +1032,6 @@ impl<'body, 'a, 'tcx> VnState<'body, 'a, 'tcx> {
let op = self.simplify_operand(op, location)?;
Value::Repeat(op, amount)
}
Rvalue::NullaryOp(op) => Value::NullaryOp(op),
Rvalue::Aggregate(..) => return self.simplify_aggregate(lhs, rvalue, location),
Rvalue::Ref(_, borrow_kind, ref mut place) => {
self.simplify_place_projection(place, location);

View file

@ -4,10 +4,11 @@ use rustc_abi::ExternAbi;
use rustc_ast::attr;
use rustc_hir::LangItem;
use rustc_middle::bug;
use rustc_middle::mir::visit::MutVisitor;
use rustc_middle::mir::*;
use rustc_middle::ty::layout::ValidityRequirement;
use rustc_middle::ty::{self, GenericArgsRef, Ty, TyCtxt, layout};
use rustc_span::{DUMMY_SP, Symbol, sym};
use rustc_span::{Symbol, sym};
use crate::simplify::simplify_duplicate_switch_targets;
@ -29,22 +30,22 @@ impl<'tcx> crate::MirPass<'tcx> for InstSimplify {
}
fn run_pass(&self, tcx: TyCtxt<'tcx>, body: &mut Body<'tcx>) {
let preserve_ub_checks =
attr::contains_name(tcx.hir_krate_attrs(), sym::rustc_preserve_ub_checks);
if !preserve_ub_checks {
SimplifyUbCheck { tcx }.visit_body(body);
}
let ctx = InstSimplifyContext {
tcx,
local_decls: &body.local_decls,
typing_env: body.typing_env(tcx),
};
let preserve_ub_checks =
attr::contains_name(tcx.hir_krate_attrs(), sym::rustc_preserve_ub_checks);
for block in body.basic_blocks.as_mut() {
for statement in block.statements.iter_mut() {
let StatementKind::Assign(box (.., rvalue)) = &mut statement.kind else {
continue;
};
if !preserve_ub_checks {
ctx.simplify_ub_check(rvalue);
}
ctx.simplify_bool_cmp(rvalue);
ctx.simplify_ref_deref(rvalue);
ctx.simplify_ptr_aggregate(rvalue);
@ -168,17 +169,6 @@ impl<'tcx> InstSimplifyContext<'_, 'tcx> {
}
}
fn simplify_ub_check(&self, rvalue: &mut Rvalue<'tcx>) {
// FIXME: Should we do the same for overflow checks?
let Rvalue::NullaryOp(NullOp::RuntimeChecks(RuntimeChecks::UbChecks)) = *rvalue else {
return;
};
let const_ = Const::from_bool(self.tcx, self.tcx.sess.ub_checks());
let constant = ConstOperand { span: DUMMY_SP, const_, user_ty: None };
*rvalue = Rvalue::Use(Operand::Constant(Box::new(constant)));
}
fn simplify_cast(&self, rvalue: &mut Rvalue<'tcx>) {
let Rvalue::Cast(kind, operand, cast_ty) = rvalue else { return };
@ -362,3 +352,22 @@ fn resolve_rust_intrinsic<'tcx>(
let intrinsic = tcx.intrinsic(def_id)?;
Some((intrinsic.name, args))
}
struct SimplifyUbCheck<'tcx> {
tcx: TyCtxt<'tcx>,
}
impl<'tcx> MutVisitor<'tcx> for SimplifyUbCheck<'tcx> {
fn tcx(&self) -> TyCtxt<'tcx> {
self.tcx
}
fn visit_operand(&mut self, operand: &mut Operand<'tcx>, _: Location) {
if let Operand::Constant(c) = operand
&& let Const::Val(c, _) = &mut c.const_
&& let ConstValue::RuntimeChecks(RuntimeChecks::UbChecks) = c
{
*c = ConstValue::from_bool(self.tcx.sess.ub_checks());
}
}
}

View file

@ -444,7 +444,6 @@ impl<'mir, 'tcx> ConstPropagator<'mir, 'tcx> {
| Rvalue::Cast(..)
| Rvalue::ShallowInitBox(..)
| Rvalue::Discriminant(..)
| Rvalue::NullaryOp(..)
| Rvalue::WrapUnsafeBinder(..) => {}
}
@ -605,8 +604,6 @@ impl<'mir, 'tcx> ConstPropagator<'mir, 'tcx> {
Ref(..) | RawPtr(..) => return None,
NullaryOp(NullOp::RuntimeChecks(_)) => return None,
ShallowInitBox(..) => return None,
Cast(ref kind, ref value, to) => match kind {

View file

@ -88,7 +88,6 @@ impl<'a, 'tcx> Visitor<'tcx> for Lint<'a, 'tcx> {
| Rvalue::ShallowInitBox(..)
| Rvalue::WrapUnsafeBinder(..) => true,
Rvalue::ThreadLocalRef(..)
| Rvalue::NullaryOp(..)
| Rvalue::UnaryOp(..)
| Rvalue::BinaryOp(..)
| Rvalue::Ref(..)

View file

@ -35,7 +35,14 @@ impl<'tcx> crate::MirPass<'tcx> for LowerIntrinsics {
terminator.source_info,
StatementKind::Assign(Box::new((
*destination,
Rvalue::NullaryOp(NullOp::RuntimeChecks(op)),
Rvalue::Use(Operand::Constant(Box::new(ConstOperand {
span: terminator.source_info.span,
user_ty: None,
const_: Const::Val(
ConstValue::RuntimeChecks(op),
tcx.types.bool,
),
}))),
))),
));
terminator.kind = TerminatorKind::Goto { target };

View file

@ -443,10 +443,6 @@ impl<'tcx> Validator<'_, 'tcx> {
self.validate_operand(operand)?;
}
Rvalue::NullaryOp(op) => match op {
NullOp::RuntimeChecks(_) => {}
},
Rvalue::ShallowInitBox(_, _) => return Err(Unpromotable),
Rvalue::UnaryOp(op, operand) => {

View file

@ -1439,7 +1439,6 @@ impl<'a, 'tcx> Visitor<'tcx> for TypeChecker<'a, 'tcx> {
Rvalue::Repeat(_, _)
| Rvalue::ThreadLocalRef(_)
| Rvalue::RawPtr(_, _)
| Rvalue::NullaryOp(NullOp::RuntimeChecks(_))
| Rvalue::Discriminant(_) => {}
Rvalue::WrapUnsafeBinder(op, ty) => {

View file

@ -53,6 +53,7 @@ pub(crate) fn try_new_allocation<'tcx>(
ConstValue::Scalar(scalar) => {
alloc::try_new_scalar(layout, scalar, cx).map(|alloc| alloc.stable(tables, cx))
}
ConstValue::RuntimeChecks(_) => todo!(),
ConstValue::ZeroSized => Ok(new_empty_allocation(layout.align.abi)),
ConstValue::Slice { alloc_id, meta } => {
alloc::try_new_slice(layout, alloc_id, meta, cx).map(|alloc| alloc.stable(tables, cx))

View file

@ -587,9 +587,6 @@ pub enum Rvalue {
/// nature of this operation?
ThreadLocalRef(crate::CrateItem),
/// Computes a value as described by the operation.
NullaryOp(NullOp),
/// Exactly like `BinaryOp`, but less operands.
///
/// Also does two's-complement arithmetic. Negation requires a signed integer or a float;
@ -641,7 +638,6 @@ impl Rvalue {
.discriminant_ty()
.ok_or_else(|| error!("Expected a `RigidTy` but found: {place_ty:?}"))
}
Rvalue::NullaryOp(NullOp::RuntimeChecks(_)) => Ok(Ty::bool_ty()),
Rvalue::Aggregate(ak, ops) => match *ak {
AggregateKind::Array(ty) => Ty::try_new_array(ty, ops.len() as u64),
AggregateKind::Tuple => Ok(Ty::new_tuple(
@ -1018,22 +1014,6 @@ pub enum CastKind {
Subtype,
}
#[derive(Clone, Debug, Eq, PartialEq, Hash, Serialize)]
pub enum NullOp {
/// Codegen conditions for runtime checks.
RuntimeChecks(RuntimeChecks),
}
#[derive(Clone, Debug, Eq, PartialEq, Hash, Serialize)]
pub enum RuntimeChecks {
/// cfg!(ub_checks), but at codegen time
UbChecks,
/// cfg!(contract_checks), but at codegen time
ContractChecks,
/// cfg!(overflow_checks), but at codegen time
OverflowChecks,
}
impl Operand {
/// Get the type of an operand relative to the local declaration.
///

View file

@ -386,9 +386,6 @@ fn pretty_rvalue<W: Write>(writer: &mut W, rval: &Rvalue) -> io::Result<()> {
Rvalue::ThreadLocalRef(item) => {
write!(writer, "thread_local_ref{item:?}")
}
Rvalue::NullaryOp(nul) => {
write!(writer, "{nul:?}() \" \"")
}
Rvalue::UnaryOp(un, op) => {
write!(writer, "{:?}({})", un, pretty_operand(op))
}

View file

@ -282,7 +282,6 @@ macro_rules! make_mir_visitor {
self.visit_operand(op, location)
}
Rvalue::ThreadLocalRef(_) => {}
Rvalue::NullaryOp(_) => {}
Rvalue::UnaryOp(_, op) | Rvalue::Use(op) => {
self.visit_operand(op, location);
}

View file

@ -1357,6 +1357,7 @@ pub enum ConstantKind {
Ty(TyConst),
Allocated(Allocation),
Unevaluated(UnevaluatedConst),
RuntimeChecks(RuntimeChecks),
Param(ParamConst),
/// Store ZST constants.
/// We have to special handle these constants since its type might be generic.
@ -1376,6 +1377,16 @@ pub struct UnevaluatedConst {
pub promoted: Option<Promoted>,
}
#[derive(Clone, Debug, Eq, PartialEq, Hash, Serialize)]
pub enum RuntimeChecks {
/// cfg!(ub_checks), but at codegen time
UbChecks,
/// cfg!(contract_checks), but at codegen time
ContractChecks,
/// cfg!(overflow_checks), but at codegen time
OverflowChecks,
}
#[derive(Clone, Copy, Debug, PartialEq, Eq, Serialize)]
pub enum TraitSpecializationKind {
None,

View file

@ -232,7 +232,6 @@ impl<'tcx> Stable<'tcx> for mir::Rvalue<'tcx> {
)
}
}
NullaryOp(null_op) => crate::mir::Rvalue::NullaryOp(null_op.stable(tables, cx)),
UnaryOp(un_op, op) => {
crate::mir::Rvalue::UnaryOp(un_op.stable(tables, cx), op.stable(tables, cx))
}
@ -312,21 +311,18 @@ impl<'tcx> Stable<'tcx> for mir::FakeBorrowKind {
}
}
impl<'tcx> Stable<'tcx> for mir::NullOp {
type T = crate::mir::NullOp;
impl<'tcx> Stable<'tcx> for mir::RuntimeChecks {
type T = crate::ty::RuntimeChecks;
fn stable<'cx>(
&self,
_: &mut Tables<'cx, BridgeTys>,
_: &CompilerCtxt<'cx, BridgeTys>,
) -> Self::T {
use rustc_middle::mir::NullOp::*;
use rustc_middle::mir::RuntimeChecks::*;
match self {
RuntimeChecks(op) => crate::mir::NullOp::RuntimeChecks(match op {
UbChecks => crate::mir::RuntimeChecks::UbChecks,
ContractChecks => crate::mir::RuntimeChecks::ContractChecks,
OverflowChecks => crate::mir::RuntimeChecks::OverflowChecks,
}),
UbChecks => crate::ty::RuntimeChecks::UbChecks,
ContractChecks => crate::ty::RuntimeChecks::ContractChecks,
OverflowChecks => crate::ty::RuntimeChecks::OverflowChecks,
}
}
}
@ -890,6 +886,13 @@ impl<'tcx> Stable<'tcx> for rustc_middle::mir::Const<'tcx> {
let ty = ty.stable(tables, cx);
MirConst::new(ConstantKind::ZeroSized, ty, id)
}
mir::Const::Val(mir::ConstValue::RuntimeChecks(checks), ty) => {
let ty = cx.lift(ty).unwrap();
let checks = cx.lift(checks).unwrap();
let ty = ty.stable(tables, cx);
let kind = ConstantKind::RuntimeChecks(checks.stable(tables, cx));
MirConst::new(kind, ty, id)
}
mir::Const::Val(val, ty) => {
let ty = cx.lift(ty).unwrap();
let val = cx.lift(val).unwrap();

View file

@ -68,7 +68,9 @@ impl Visitable for MirConst {
super::ty::ConstantKind::Ty(ct) => ct.visit(visitor)?,
super::ty::ConstantKind::Allocated(alloc) => alloc.visit(visitor)?,
super::ty::ConstantKind::Unevaluated(uv) => uv.visit(visitor)?,
super::ty::ConstantKind::Param(_) | super::ty::ConstantKind::ZeroSized => {}
super::ty::ConstantKind::RuntimeChecks(_)
| super::ty::ConstantKind::Param(_)
| super::ty::ConstantKind::ZeroSized => {}
}
self.ty().visit(visitor)
}

View file

@ -95,17 +95,16 @@ pub use intrinsics::ub_checks as check_library_ub;
#[rustc_allow_const_fn_unstable(const_eval_select)]
pub(crate) const fn check_language_ub() -> bool {
// Only used for UB checks so we may const_eval_select.
intrinsics::ub_checks()
&& const_eval_select!(
@capture { } -> bool:
if const {
// Always disable UB checks.
false
} else {
// Disable UB checks in Miri.
!cfg!(miri)
}
)
const_eval_select!(
@capture { } -> bool:
if const {
// Always disable UB checks.
false
} else {
// Disable UB checks in Miri.
!cfg!(miri)
}
) && intrinsics::ub_checks()
}
/// Checks whether `ptr` is properly aligned with respect to the given alignment, and

View file

@ -13,7 +13,7 @@ use rustc_infer::infer::TyCtxtInferExt;
use rustc_infer::traits::Obligation;
use rustc_lint::LateContext;
use rustc_middle::mir::{
Body, CastKind, NonDivergingIntrinsic, NullOp, Operand, Place, ProjectionElem, Rvalue, Statement, StatementKind,
Body, CastKind, NonDivergingIntrinsic, Operand, Place, ProjectionElem, Rvalue, Statement, StatementKind,
Terminator, TerminatorKind,
};
use rustc_middle::traits::{BuiltinImplSource, ImplSource, ObligationCause};
@ -194,7 +194,7 @@ fn check_rvalue<'tcx>(
))
}
},
Rvalue::NullaryOp(NullOp::RuntimeChecks(_)) | Rvalue::ShallowInitBox(_, _) => Ok(()),
Rvalue::ShallowInitBox(_, _) => Ok(()),
Rvalue::UnaryOp(_, operand) => {
let ty = operand.ty(body, cx.tcx);
if ty.is_integral() || ty.is_bool() {

View file

@ -8,8 +8,10 @@ type Demo = [u8; 3];
#[no_mangle]
pub fn slice_iter_len_eq_zero(y: std::slice::Iter<'_, Demo>) -> bool {
// CHECK-NOT: sub
// CHECK: %[[RET:.+]] = icmp eq ptr {{%y.0, %y.1|%y.1, %y.0}}
// CHECK: ret i1 %[[RET]]
// CHECK: %2 = icmp ne ptr %1, null
// CHECK-NEXT: tail call void @llvm.assume(i1 %2)
// CHECK-NEXT: %[[RET:.+]] = icmp eq ptr {{%0, %1|%1, %0}}
// CHECK-NEXT: ret i1 %[[RET]]
y.len() == 0
}
@ -31,7 +33,7 @@ struct MyZST;
// CHECK-LABEL: @slice_zst_iter_len_eq_zero
#[no_mangle]
pub fn slice_zst_iter_len_eq_zero(y: std::slice::Iter<'_, MyZST>) -> bool {
// CHECK: %[[RET:.+]] = icmp eq ptr %y.1, null
// CHECK: %[[RET:.+]] = icmp eq ptr %1, null
// CHECK: ret i1 %[[RET]]
y.len() == 0
}

View file

@ -16,7 +16,6 @@
scope 4 (inlined #[track_caller] unreachable_unchecked) {
let _5: ();
scope 5 (inlined core::ub_checks::check_language_ub) {
let mut _6: bool;
scope 6 (inlined core::ub_checks::check_language_ub::runtime) {
}
}
@ -38,9 +37,7 @@
}
bb2: {
- StorageLive(_6);
- _6 = const false;
- assume(copy _6);
- assume(const false);
- _5 = unreachable_unchecked::precondition_check() -> [return: bb1, unwind unreachable];
+ unreachable;
}

View file

@ -13,7 +13,7 @@
let mut _11: *const ();
let mut _16: usize;
let mut _17: usize;
let mut _27: usize;
let mut _26: usize;
scope 1 {
debug vp_ctx => _1;
let _5: *const ();
@ -27,7 +27,7 @@
debug _x => _8;
}
scope 18 (inlined foo) {
let mut _28: *const [()];
let mut _27: *const [()];
}
}
scope 16 (inlined slice_from_raw_parts::<()>) {
@ -52,7 +52,7 @@
scope 11 (inlined NonNull::<[u8]>::as_mut_ptr) {
scope 12 (inlined NonNull::<[u8]>::as_non_null_ptr) {
scope 13 (inlined NonNull::<[u8]>::cast::<u8>) {
let mut _26: *mut [u8];
let mut _25: *mut [u8];
scope 14 (inlined NonNull::<[u8]>::as_ptr) {
}
}
@ -65,9 +65,8 @@
}
}
scope 9 (inlined #[track_caller] Layout::from_size_align_unchecked) {
let mut _23: bool;
let _24: ();
let mut _25: std::ptr::Alignment;
let _23: ();
let mut _24: std::ptr::Alignment;
}
}
}
@ -94,10 +93,8 @@
StorageLive(_20);
StorageLive(_21);
StorageLive(_22);
StorageLive(_24);
StorageLive(_23);
_23 = UbChecks();
switchInt(move _23) -> [0: bb6, otherwise: bb5];
switchInt(const UbChecks) -> [0: bb6, otherwise: bb5];
}
bb1: {
@ -117,14 +114,14 @@
bb4: {
_21 = copy ((_19 as Ok).0: std::ptr::NonNull<[u8]>);
- StorageLive(_26);
- StorageLive(_25);
+ nop;
_26 = copy _21 as *mut [u8] (Transmute);
_12 = copy _26 as *mut u8 (PtrToPtr);
- StorageDead(_26);
_25 = copy _21 as *mut [u8] (Transmute);
_12 = copy _25 as *mut u8 (PtrToPtr);
- StorageDead(_25);
+ nop;
StorageDead(_19);
StorageDead(_24);
StorageDead(_23);
StorageDead(_22);
StorageDead(_21);
StorageDead(_20);
@ -132,7 +129,7 @@
StorageDead(_17);
StorageDead(_16);
- _13 = copy _12 as *const () (PtrToPtr);
+ _13 = copy _26 as *const () (PtrToPtr);
+ _13 = copy _25 as *const () (PtrToPtr);
_14 = NonNull::<()> { pointer: copy _13 };
_15 = Unique::<()> { pointer: copy _14, _marker: const PhantomData::<()> };
_3 = Box::<()>(move _15, const std::alloc::Global);
@ -157,21 +154,21 @@
+ nop;
StorageLive(_7);
_7 = copy _5;
StorageLive(_27);
_27 = const 1_usize;
- _6 = *const [()] from (copy _7, copy _27);
StorageLive(_26);
_26 = const 1_usize;
- _6 = *const [()] from (copy _7, copy _26);
+ _6 = *const [()] from (copy _5, const 1_usize);
StorageDead(_27);
StorageDead(_26);
StorageDead(_7);
StorageLive(_8);
StorageLive(_9);
_9 = copy _6;
StorageLive(_28);
- _28 = copy _9;
StorageLive(_27);
- _27 = copy _9;
- _8 = copy _9 as *mut () (PtrToPtr);
+ _28 = copy _6;
+ _27 = copy _6;
+ _8 = copy _5 as *mut () (PtrToPtr);
StorageDead(_28);
StorageDead(_27);
StorageDead(_9);
_0 = const ();
StorageDead(_8);
@ -183,18 +180,17 @@
}
bb5: {
- _24 = Layout::from_size_align_unchecked::precondition_check(copy _16, copy _17) -> [return: bb6, unwind unreachable];
+ _24 = Layout::from_size_align_unchecked::precondition_check(const 0_usize, const 1_usize) -> [return: bb6, unwind unreachable];
- _23 = Layout::from_size_align_unchecked::precondition_check(copy _16, copy _17) -> [return: bb6, unwind unreachable];
+ _23 = Layout::from_size_align_unchecked::precondition_check(const 0_usize, const 1_usize) -> [return: bb6, unwind unreachable];
}
bb6: {
StorageDead(_23);
StorageLive(_25);
- _25 = copy _17 as std::ptr::Alignment (Transmute);
- _18 = Layout { size: copy _16, align: move _25 };
+ _25 = const std::ptr::Alignment(std::ptr::alignment::AlignmentEnum::_Align1Shl0);
StorageLive(_24);
- _24 = copy _17 as std::ptr::Alignment (Transmute);
- _18 = Layout { size: copy _16, align: move _24 };
+ _24 = const std::ptr::Alignment(std::ptr::alignment::AlignmentEnum::_Align1Shl0);
+ _18 = const Layout {{ size: 0_usize, align: std::ptr::Alignment(std::ptr::alignment::AlignmentEnum::_Align1Shl0) }};
StorageDead(_25);
StorageDead(_24);
StorageLive(_19);
- _19 = std::alloc::Global::alloc_impl(const alloc::alloc::exchange_malloc::promoted[0], copy _18, const false) -> [return: bb7, unwind unreachable];
+ _19 = std::alloc::Global::alloc_impl(const alloc::alloc::exchange_malloc::promoted[0], const Layout {{ size: 0_usize, align: std::ptr::Alignment(std::ptr::alignment::AlignmentEnum::_Align1Shl0) }}, const false) -> [return: bb7, unwind unreachable];

View file

@ -13,7 +13,7 @@
let mut _11: *const ();
let mut _16: usize;
let mut _17: usize;
let mut _27: usize;
let mut _26: usize;
scope 1 {
debug vp_ctx => _1;
let _5: *const ();
@ -27,7 +27,7 @@
debug _x => _8;
}
scope 18 (inlined foo) {
let mut _28: *const [()];
let mut _27: *const [()];
}
}
scope 16 (inlined slice_from_raw_parts::<()>) {
@ -52,7 +52,7 @@
scope 11 (inlined NonNull::<[u8]>::as_mut_ptr) {
scope 12 (inlined NonNull::<[u8]>::as_non_null_ptr) {
scope 13 (inlined NonNull::<[u8]>::cast::<u8>) {
let mut _26: *mut [u8];
let mut _25: *mut [u8];
scope 14 (inlined NonNull::<[u8]>::as_ptr) {
}
}
@ -65,9 +65,8 @@
}
}
scope 9 (inlined #[track_caller] Layout::from_size_align_unchecked) {
let mut _23: bool;
let _24: ();
let mut _25: std::ptr::Alignment;
let _23: ();
let mut _24: std::ptr::Alignment;
}
}
}
@ -94,10 +93,8 @@
StorageLive(_20);
StorageLive(_21);
StorageLive(_22);
StorageLive(_24);
StorageLive(_23);
_23 = UbChecks();
switchInt(move _23) -> [0: bb6, otherwise: bb5];
switchInt(const UbChecks) -> [0: bb6, otherwise: bb5];
}
bb1: {
@ -117,14 +114,14 @@
bb4: {
_21 = copy ((_19 as Ok).0: std::ptr::NonNull<[u8]>);
- StorageLive(_26);
- StorageLive(_25);
+ nop;
_26 = copy _21 as *mut [u8] (Transmute);
_12 = copy _26 as *mut u8 (PtrToPtr);
- StorageDead(_26);
_25 = copy _21 as *mut [u8] (Transmute);
_12 = copy _25 as *mut u8 (PtrToPtr);
- StorageDead(_25);
+ nop;
StorageDead(_19);
StorageDead(_24);
StorageDead(_23);
StorageDead(_22);
StorageDead(_21);
StorageDead(_20);
@ -132,7 +129,7 @@
StorageDead(_17);
StorageDead(_16);
- _13 = copy _12 as *const () (PtrToPtr);
+ _13 = copy _26 as *const () (PtrToPtr);
+ _13 = copy _25 as *const () (PtrToPtr);
_14 = NonNull::<()> { pointer: copy _13 };
_15 = Unique::<()> { pointer: copy _14, _marker: const PhantomData::<()> };
_3 = Box::<()>(move _15, const std::alloc::Global);
@ -157,21 +154,21 @@
+ nop;
StorageLive(_7);
_7 = copy _5;
StorageLive(_27);
_27 = const 1_usize;
- _6 = *const [()] from (copy _7, copy _27);
StorageLive(_26);
_26 = const 1_usize;
- _6 = *const [()] from (copy _7, copy _26);
+ _6 = *const [()] from (copy _5, const 1_usize);
StorageDead(_27);
StorageDead(_26);
StorageDead(_7);
StorageLive(_8);
StorageLive(_9);
_9 = copy _6;
StorageLive(_28);
- _28 = copy _9;
StorageLive(_27);
- _27 = copy _9;
- _8 = copy _9 as *mut () (PtrToPtr);
+ _28 = copy _6;
+ _27 = copy _6;
+ _8 = copy _5 as *mut () (PtrToPtr);
StorageDead(_28);
StorageDead(_27);
StorageDead(_9);
_0 = const ();
StorageDead(_8);
@ -183,18 +180,17 @@
}
bb5: {
- _24 = Layout::from_size_align_unchecked::precondition_check(copy _16, copy _17) -> [return: bb6, unwind unreachable];
+ _24 = Layout::from_size_align_unchecked::precondition_check(const 0_usize, const 1_usize) -> [return: bb6, unwind unreachable];
- _23 = Layout::from_size_align_unchecked::precondition_check(copy _16, copy _17) -> [return: bb6, unwind unreachable];
+ _23 = Layout::from_size_align_unchecked::precondition_check(const 0_usize, const 1_usize) -> [return: bb6, unwind unreachable];
}
bb6: {
StorageDead(_23);
StorageLive(_25);
- _25 = copy _17 as std::ptr::Alignment (Transmute);
- _18 = Layout { size: copy _16, align: move _25 };
+ _25 = const std::ptr::Alignment(std::ptr::alignment::AlignmentEnum::_Align1Shl0);
StorageLive(_24);
- _24 = copy _17 as std::ptr::Alignment (Transmute);
- _18 = Layout { size: copy _16, align: move _24 };
+ _24 = const std::ptr::Alignment(std::ptr::alignment::AlignmentEnum::_Align1Shl0);
+ _18 = const Layout {{ size: 0_usize, align: std::ptr::Alignment(std::ptr::alignment::AlignmentEnum::_Align1Shl0) }};
StorageDead(_25);
StorageDead(_24);
StorageLive(_19);
- _19 = std::alloc::Global::alloc_impl(const alloc::alloc::exchange_malloc::promoted[0], copy _18, const false) -> [return: bb7, unwind unreachable];
+ _19 = std::alloc::Global::alloc_impl(const alloc::alloc::exchange_malloc::promoted[0], const Layout {{ size: 0_usize, align: std::ptr::Alignment(std::ptr::alignment::AlignmentEnum::_Align1Shl0) }}, const false) -> [return: bb7, unwind unreachable];

View file

@ -10,7 +10,6 @@
+ scope 1 (inlined #[track_caller] core::num::<impl u16>::unchecked_shl) {
+ let _5: ();
+ scope 2 (inlined core::ub_checks::check_language_ub) {
+ let mut _6: bool;
+ scope 3 (inlined core::ub_checks::check_language_ub::runtime) {
+ }
+ }
@ -23,9 +22,7 @@
_4 = copy _2;
- _0 = core::num::<impl u16>::unchecked_shl(move _3, move _4) -> [return: bb1, unwind unreachable];
+ StorageLive(_5);
+ StorageLive(_6);
+ _6 = UbChecks();
+ switchInt(copy _6) -> [0: bb2, otherwise: bb1];
+ switchInt(const UbChecks) -> [0: bb2, otherwise: bb1];
}
bb1: {
@ -34,7 +31,6 @@
+
+ bb2: {
+ _0 = ShlUnchecked(copy _3, copy _4);
+ StorageDead(_6);
+ StorageDead(_5);
StorageDead(_4);
StorageDead(_3);

View file

@ -10,7 +10,6 @@
+ scope 1 (inlined #[track_caller] core::num::<impl u16>::unchecked_shl) {
+ let _5: ();
+ scope 2 (inlined core::ub_checks::check_language_ub) {
+ let mut _6: bool;
+ scope 3 (inlined core::ub_checks::check_language_ub::runtime) {
+ }
+ }
@ -23,9 +22,7 @@
_4 = copy _2;
- _0 = core::num::<impl u16>::unchecked_shl(move _3, move _4) -> [return: bb1, unwind continue];
+ StorageLive(_5);
+ StorageLive(_6);
+ _6 = UbChecks();
+ switchInt(copy _6) -> [0: bb2, otherwise: bb1];
+ switchInt(const UbChecks) -> [0: bb2, otherwise: bb1];
}
bb1: {
@ -34,7 +31,6 @@
+
+ bb2: {
+ _0 = ShlUnchecked(copy _3, copy _4);
+ StorageDead(_6);
+ StorageDead(_5);
StorageDead(_4);
StorageDead(_3);

View file

@ -10,7 +10,6 @@
+ scope 1 (inlined #[track_caller] core::num::<impl i64>::unchecked_shr) {
+ let _5: ();
+ scope 2 (inlined core::ub_checks::check_language_ub) {
+ let mut _6: bool;
+ scope 3 (inlined core::ub_checks::check_language_ub::runtime) {
+ }
+ }
@ -23,9 +22,7 @@
_4 = copy _2;
- _0 = core::num::<impl i64>::unchecked_shr(move _3, move _4) -> [return: bb1, unwind unreachable];
+ StorageLive(_5);
+ StorageLive(_6);
+ _6 = UbChecks();
+ switchInt(copy _6) -> [0: bb2, otherwise: bb1];
+ switchInt(const UbChecks) -> [0: bb2, otherwise: bb1];
}
bb1: {
@ -34,7 +31,6 @@
+
+ bb2: {
+ _0 = ShrUnchecked(copy _3, copy _4);
+ StorageDead(_6);
+ StorageDead(_5);
StorageDead(_4);
StorageDead(_3);

View file

@ -10,7 +10,6 @@
+ scope 1 (inlined #[track_caller] core::num::<impl i64>::unchecked_shr) {
+ let _5: ();
+ scope 2 (inlined core::ub_checks::check_language_ub) {
+ let mut _6: bool;
+ scope 3 (inlined core::ub_checks::check_language_ub::runtime) {
+ }
+ }
@ -23,9 +22,7 @@
_4 = copy _2;
- _0 = core::num::<impl i64>::unchecked_shr(move _3, move _4) -> [return: bb1, unwind continue];
+ StorageLive(_5);
+ StorageLive(_6);
+ _6 = UbChecks();
+ switchInt(copy _6) -> [0: bb2, otherwise: bb1];
+ switchInt(const UbChecks) -> [0: bb2, otherwise: bb1];
}
bb1: {
@ -34,7 +31,6 @@
+
+ bb2: {
+ _0 = ShrUnchecked(copy _3, copy _4);
+ StorageDead(_6);
+ StorageDead(_5);
StorageDead(_4);
StorageDead(_3);

View file

@ -12,7 +12,6 @@
+ scope 3 (inlined #[track_caller] unreachable_unchecked) {
+ let _4: ();
+ scope 4 (inlined core::ub_checks::check_language_ub) {
+ let mut _5: bool;
+ scope 5 (inlined core::ub_checks::check_language_ub::runtime) {
+ }
+ }
@ -34,9 +33,7 @@
+ }
+
+ bb2: {
+ StorageLive(_5);
+ _5 = UbChecks();
+ assume(copy _5);
+ assume(const UbChecks);
+ _4 = unreachable_unchecked::precondition_check() -> [return: bb1, unwind unreachable];
+ }
+

View file

@ -12,7 +12,6 @@
+ scope 3 (inlined #[track_caller] unreachable_unchecked) {
+ let _4: ();
+ scope 4 (inlined core::ub_checks::check_language_ub) {
+ let mut _5: bool;
+ scope 5 (inlined core::ub_checks::check_language_ub::runtime) {
+ }
+ }
@ -38,9 +37,7 @@
- bb2 (cleanup): {
- resume;
+ bb2: {
+ StorageLive(_5);
+ _5 = UbChecks();
+ assume(copy _5);
+ assume(const UbChecks);
+ _4 = unreachable_unchecked::precondition_check() -> [return: bb1, unwind unreachable];
+ }
+

View file

@ -5,8 +5,7 @@
pub fn unwrap_unchecked(x: Option<i32>) -> i32 {
// CHECK-LABEL: fn unwrap_unchecked(
// CHECK-NOT: UbChecks()
// CHECK: [[assume:_.*]] = const false;
// CHECK-NEXT: assume(copy [[assume]]);
// CHECK: assume(const false);
// CHECK-NEXT: unreachable_unchecked::precondition_check
unsafe { x.unwrap_unchecked() }
}

View file

@ -12,7 +12,6 @@
scope 3 (inlined #[track_caller] unreachable_unchecked) {
let _4: ();
scope 4 (inlined core::ub_checks::check_language_ub) {
let mut _5: bool;
scope 5 (inlined core::ub_checks::check_language_ub::runtime) {
}
}
@ -33,10 +32,8 @@
}
bb2: {
StorageLive(_5);
- _5 = UbChecks();
+ _5 = const false;
assume(copy _5);
- assume(const UbChecks);
+ assume(const false);
_4 = unreachable_unchecked::precondition_check() -> [return: bb1, unwind unreachable];
}