Merge pull request #4839 from RalfJung/rustup

Rustup
This commit is contained in:
Ralf Jung 2026-02-01 21:28:28 +00:00 committed by GitHub
commit 62d518190e
No known key found for this signature in database
GPG key ID: B5690EEEBB952194
259 changed files with 3968 additions and 1970 deletions

View file

@ -580,9 +580,9 @@ dependencies = [
[[package]]
name = "clap"
version = "4.5.51"
version = "4.5.54"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "4c26d721170e0295f191a69bd9a1f93efcdb0aff38684b61ab5750468972e5f5"
checksum = "c6e6ff9dcd79cff5cd969a17a545d79e84ab086e444102a591e288a8aa3ce394"
dependencies = [
"clap_builder",
"clap_derive",
@ -600,9 +600,9 @@ dependencies = [
[[package]]
name = "clap_builder"
version = "4.5.51"
version = "4.5.54"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "75835f0c7bf681bfd05abe44e965760fea999a5286c6eb2d59883634fd02011a"
checksum = "fa42cf4d2b7a41bc8f663a7cab4031ebafa1bf3875705bfaf8466dc60ab52c00"
dependencies = [
"anstream",
"anstyle",
@ -4354,7 +4354,6 @@ name = "rustc_mir_transform"
version = "0.0.0"
dependencies = [
"either",
"hashbrown 0.16.1",
"itertools",
"rustc_abi",
"rustc_arena",
@ -4565,7 +4564,6 @@ dependencies = [
name = "rustc_query_system"
version = "0.0.0"
dependencies = [
"hashbrown 0.16.1",
"parking_lot",
"rustc_abi",
"rustc_ast",
@ -5623,6 +5621,7 @@ version = "0.1.0"
dependencies = [
"build_helper",
"cargo_metadata 0.21.0",
"clap",
"fluent-syntax",
"globset",
"ignore",

View file

@ -172,8 +172,22 @@ impl<T> TypedArena<T> {
available_bytes >= additional_bytes
}
/// Allocates storage for `len >= 1` values in this arena, and returns a
/// raw pointer to the first value's storage.
///
/// # Safety
///
/// Caller must initialize each of the `len` slots to a droppable value
/// before the arena is dropped.
///
/// In practice, this typically means that the caller must be able to
/// raw-copy `len` already-initialized values into the slice without any
/// possibility of panicking.
///
/// FIXME(Zalathar): This is *very* fragile; perhaps we need a different
/// approach to arena-allocating slices of droppable values.
#[inline]
fn alloc_raw_slice(&self, len: usize) -> *mut T {
unsafe fn alloc_raw_slice(&self, len: usize) -> *mut T {
assert!(size_of::<T>() != 0);
assert!(len != 0);
@ -208,7 +222,7 @@ impl<T> TypedArena<T> {
&self,
iter: impl IntoIterator<Item = Result<T, E>>,
) -> Result<&mut [T], E> {
// Despite the similarlty with `DroplessArena`, we cannot reuse their fast case. The reason
// Despite the similarity with `DroplessArena`, we cannot reuse their fast case. The reason
// is subtle: these arenas are reentrant. In other words, `iter` may very well be holding a
// reference to `self` and adding elements to the arena during iteration.
//
@ -229,9 +243,15 @@ impl<T> TypedArena<T> {
}
// Move the content to the arena by copying and then forgetting it.
let len = vec.len();
let start_ptr = self.alloc_raw_slice(len);
// SAFETY: After allocating raw storage for exactly `len` values, we
// must fully initialize the storage without panicking, and we must
// also prevent the stale values in the vec from being dropped.
Ok(unsafe {
let start_ptr = self.alloc_raw_slice(len);
// Initialize the newly-allocated storage without panicking.
vec.as_ptr().copy_to_nonoverlapping(start_ptr, len);
// Prevent the stale values in the vec from being dropped.
vec.set_len(0);
slice::from_raw_parts_mut(start_ptr, len)
})
@ -490,19 +510,6 @@ impl DroplessArena {
}
}
/// Used by `Lift` to check whether this slice is allocated
/// in this arena.
#[inline]
pub fn contains_slice<T>(&self, slice: &[T]) -> bool {
for chunk in self.chunks.borrow_mut().iter_mut() {
let ptr = slice.as_ptr().cast::<u8>().cast_mut();
if chunk.start() <= ptr && chunk.end() >= ptr {
return true;
}
}
false
}
/// Allocates a string slice that is copied into the `DroplessArena`, returning a
/// reference to it. Will panic if passed an empty string.
///
@ -584,7 +591,7 @@ impl DroplessArena {
&self,
iter: impl IntoIterator<Item = Result<T, E>>,
) -> Result<&mut [T], E> {
// Despite the similarlty with `alloc_from_iter`, we cannot reuse their fast case, as we
// Despite the similarity with `alloc_from_iter`, we cannot reuse their fast case, as we
// cannot know the minimum length of the iterator in this case.
assert!(size_of::<T>() != 0);

View file

@ -1961,7 +1961,8 @@ impl<'a> State<'a> {
}
fn print_lifetime(&mut self, lifetime: ast::Lifetime) {
self.print_name(lifetime.ident.name)
self.word(lifetime.ident.name.to_string());
self.ann_post(lifetime.ident)
}
fn print_lifetime_bounds(&mut self, bounds: &ast::GenericBounds) {

View file

@ -1,4 +1,5 @@
use rustc_ast::{LitIntType, LitKind, MetaItemLit};
use rustc_hir::attrs::RustcLayoutType;
use rustc_session::errors;
use super::prelude::*;
@ -329,3 +330,73 @@ impl<S: Stage> NoArgsAttributeParser<S> for RustcOffloadKernelParser {
const ALLOWED_TARGETS: AllowedTargets = AllowedTargets::AllowList(&[Allow(Target::Fn)]);
const CREATE: fn(Span) -> AttributeKind = |_| AttributeKind::RustcOffloadKernel;
}
pub(crate) struct RustcLayoutParser;
impl<S: Stage> CombineAttributeParser<S> for RustcLayoutParser {
const PATH: &[rustc_span::Symbol] = &[sym::rustc_layout];
type Item = RustcLayoutType;
const CONVERT: ConvertFn<Self::Item> = |items, _| AttributeKind::RustcLayout(items);
const ALLOWED_TARGETS: AllowedTargets = AllowedTargets::AllowList(&[
Allow(Target::Struct),
Allow(Target::Enum),
Allow(Target::Union),
Allow(Target::TyAlias),
]);
const TEMPLATE: AttributeTemplate =
template!(List: &["abi", "align", "size", "homogenous_aggregate", "debug"]);
fn extend(
cx: &mut AcceptContext<'_, '_, S>,
args: &ArgParser,
) -> impl IntoIterator<Item = Self::Item> {
let ArgParser::List(items) = args else {
cx.expected_list(cx.attr_span, args);
return vec![];
};
let mut result = Vec::new();
for item in items.mixed() {
let Some(arg) = item.meta_item() else {
cx.unexpected_literal(item.span());
continue;
};
let Some(ident) = arg.ident() else {
cx.expected_identifier(arg.span());
return vec![];
};
let ty = match ident.name {
sym::abi => RustcLayoutType::Abi,
sym::align => RustcLayoutType::Align,
sym::size => RustcLayoutType::Size,
sym::homogeneous_aggregate => RustcLayoutType::HomogenousAggregate,
sym::debug => RustcLayoutType::Debug,
_ => {
cx.expected_specific_argument(
ident.span,
&[sym::abi, sym::align, sym::size, sym::homogeneous_aggregate, sym::debug],
);
continue;
}
};
result.push(ty);
}
result
}
}
/// Parses `#[rustc_non_const_trait_method]`, which marks a single trait method
/// as non-const even when its enclosing trait is const. The `constness` query
/// elsewhere in this commit checks for the resulting `AttributeKind`.
pub(crate) struct RustcNonConstTraitMethodParser;

impl<S: Stage> NoArgsAttributeParser<S> for RustcNonConstTraitMethodParser {
    const PATH: &'static [Symbol] = &[sym::rustc_non_const_trait_method];
    // Applying the attribute more than once is a hard error, not silently ignored.
    const ON_DUPLICATE: OnDuplicate<S> = OnDuplicate::Error;
    // Only trait methods may carry it — both with and without a default body.
    const ALLOWED_TARGETS: AllowedTargets = AllowedTargets::AllowList(&[
        Allow(Target::Method(MethodKind::Trait { body: true })),
        Allow(Target::Method(MethodKind::Trait { body: false })),
    ]);
    const CREATE: fn(Span) -> AttributeKind = |_| AttributeKind::RustcNonConstTraitMethod;
}

View file

@ -75,13 +75,13 @@ use crate::attributes::rustc_dump::{
RustcDumpVtable,
};
use crate::attributes::rustc_internal::{
RustcHasIncoherentInherentImplsParser, RustcLayoutScalarValidRangeEndParser,
RustcHasIncoherentInherentImplsParser, RustcLayoutParser, RustcLayoutScalarValidRangeEndParser,
RustcLayoutScalarValidRangeStartParser, RustcLegacyConstGenericsParser,
RustcLintOptDenyFieldAccessParser, RustcLintOptTyParser, RustcLintQueryInstabilityParser,
RustcLintUntrackedQueryInformationParser, RustcMainParser, RustcMustImplementOneOfParser,
RustcNeverReturnsNullPointerParser, RustcNoImplicitAutorefsParser, RustcNounwindParser,
RustcObjectLifetimeDefaultParser, RustcOffloadKernelParser, RustcScalableVectorParser,
RustcSimdMonomorphizeLaneLimitParser,
RustcNeverReturnsNullPointerParser, RustcNoImplicitAutorefsParser,
RustcNonConstTraitMethodParser, RustcNounwindParser, RustcObjectLifetimeDefaultParser,
RustcOffloadKernelParser, RustcScalableVectorParser, RustcSimdMonomorphizeLaneLimitParser,
};
use crate::attributes::semantics::MayDangleParser;
use crate::attributes::stability::{
@ -198,6 +198,7 @@ attribute_parsers!(
Combine<ForceTargetFeatureParser>,
Combine<LinkParser>,
Combine<ReprParser>,
Combine<RustcLayoutParser>,
Combine<TargetFeatureParser>,
Combine<UnstableFeatureBoundParser>,
// tidy-alphabetical-end
@ -304,6 +305,7 @@ attribute_parsers!(
Single<WithoutArgs<RustcMainParser>>,
Single<WithoutArgs<RustcNeverReturnsNullPointerParser>>,
Single<WithoutArgs<RustcNoImplicitAutorefsParser>>,
Single<WithoutArgs<RustcNonConstTraitMethodParser>>,
Single<WithoutArgs<RustcNounwindParser>>,
Single<WithoutArgs<RustcOffloadKernelParser>>,
Single<WithoutArgs<RustcPassIndirectlyInNonRusticAbisParser>>,

View file

@ -774,16 +774,16 @@ impl<'tcx> Visitor<'tcx> for Checker<'_, 'tcx> {
// Attempting to call a trait method?
if let Some(trait_did) = tcx.trait_of_assoc(callee) {
// We can't determine the actual callee here, so we have to do different checks
// than usual.
// We can't determine the actual callee (the underlying impl of the trait) here, so we have
// to do different checks than usual.
trace!("attempting to call a trait method");
let trait_is_const = tcx.is_const_trait(trait_did);
let is_const = tcx.constness(callee) == hir::Constness::Const;
// Only consider a trait to be const if the const conditions hold.
// Otherwise, it's really misleading to call something "conditionally"
// const when it's very obviously not conditionally const.
if trait_is_const && has_const_conditions == Some(ConstConditionsHold::Yes) {
if is_const && has_const_conditions == Some(ConstConditionsHold::Yes) {
// Trait calls are always conditionally-const.
self.check_op(ops::ConditionallyConstCall {
callee,

View file

@ -1,7 +1,8 @@
use rustc_hir::attrs::AttributeKind;
use rustc_hir::def_id::{DefId, LocalDefId};
use rustc_hir::{
Constness, ExprKind, ForeignItemKind, ImplItem, ImplItemImplKind, ImplItemKind, Item, ItemKind,
Node, TraitItem, TraitItemKind, VariantData,
Node, TraitItem, TraitItemKind, VariantData, find_attr,
};
use rustc_middle::query::Providers;
use rustc_middle::ty::TyCtxt;
@ -36,7 +37,13 @@ fn constness(tcx: TyCtxt<'_>, def_id: LocalDefId) -> Constness {
Constness::NotConst => tcx.constness(tcx.local_parent(def_id)),
}
}
Node::TraitItem(TraitItem { kind: TraitItemKind::Fn(..), .. }) => tcx.trait_def(tcx.local_parent(def_id)).constness,
Node::TraitItem(ti @ TraitItem { kind: TraitItemKind::Fn(..), .. }) => {
if find_attr!(tcx.hir_attrs(ti.hir_id()), AttributeKind::RustcNonConstTraitMethod) {
Constness::NotConst
} else {
tcx.trait_def(tcx.local_parent(def_id)).constness
}
}
_ => {
tcx.dcx().span_bug(
tcx.def_span(def_id),

View file

@ -49,6 +49,10 @@ pub use std::{assert_matches, debug_assert_matches};
pub use atomic_ref::AtomicRef;
pub use ena::{snapshot_vec, undo_log, unify};
// Re-export `hashbrown::hash_table`, because it's part of our API
// (via `ShardedHashMap`), and because it lets other compiler crates use the
// lower-level `HashTable` API without a tricky `hashbrown` dependency.
pub use hashbrown::hash_table;
pub use rustc_index::static_assert_size;
// Re-export some data-structure crates which are part of our public API.
pub use {either, indexmap, smallvec, thin_vec};

View file

@ -3,7 +3,7 @@ use std::hash::{Hash, Hasher};
use std::{iter, mem};
use either::Either;
use hashbrown::hash_table::{Entry, HashTable};
use hashbrown::hash_table::{self, Entry, HashTable};
use crate::fx::FxHasher;
use crate::sync::{CacheAligned, Lock, LockGuard, Mode, is_dyn_thread_safe};
@ -140,7 +140,7 @@ pub fn shards() -> usize {
1
}
pub type ShardedHashMap<K, V> = Sharded<HashTable<(K, V)>>;
pub type ShardedHashMap<K, V> = Sharded<hash_table::HashTable<(K, V)>>;
impl<K: Eq, V> ShardedHashMap<K, V> {
pub fn with_capacity(cap: usize) -> Self {

View file

@ -44,3 +44,14 @@ fn h1() -> i32 {
// did you mean `a::I`?
}
```
### Enum types used as values
Enums are types and cannot be used directly as values.
```compile_fail,E0423
fn main(){
let x = Option::<i32>;
//~^ ERROR expected value, found enum `Option`
}
```

View file

@ -1329,6 +1329,12 @@ pub static BUILTIN_ATTRIBUTES: &[BuiltinAttribute] = &[
"`#[rustc_has_incoherent_inherent_impls]` allows the addition of incoherent inherent impls for \
the given type by annotating all impl items with `#[rustc_allow_incoherent_impl]`"
),
rustc_attr!(
rustc_non_const_trait_method, AttributeType::Normal, template!(Word),
ErrorFollowing, EncodeCrossCrate::No,
"`#[rustc_non_const_trait_method]` should only used by the standard library to mark trait methods \
as non-const to allow large traits an easier transition to const"
),
BuiltinAttribute {
name: sym::rustc_diagnostic_item,

View file

@ -690,6 +690,15 @@ impl IntoDiagArg for CrateType {
}
}
/// A layout property that `#[rustc_layout(...)]` can ask the compiler to report.
#[derive(Clone, Debug, HashStable_Generic, Encodable, Decodable, PrintAttribute)]
pub enum RustcLayoutType {
    Abi,
    Align,
    Size,
    // NOTE(review): variant keeps the historical "Homogenous" spelling, while
    // the attribute argument accepted by the parser is `homogeneous_aggregate`.
    HomogenousAggregate,
    Debug,
}
/// Represents parsed *built-in* inert attributes.
///
/// ## Overview
@ -1048,6 +1057,9 @@ pub enum AttributeKind {
/// Represents `#[rustc_has_incoherent_inherent_impls]`
RustcHasIncoherentInherentImpls,
/// Represents `#[rustc_layout]`
RustcLayout(ThinVec<RustcLayoutType>),
/// Represents `#[rustc_layout_scalar_valid_range_end]`.
RustcLayoutScalarValidRangeEnd(Box<u128>, Span),
@ -1084,6 +1096,9 @@ pub enum AttributeKind {
/// Represents `#[rustc_no_implicit_autorefs]`
RustcNoImplicitAutorefs,
/// Represents `#[rustc_non_const_trait_method]`.
RustcNonConstTraitMethod,
/// Represents `#[rustc_nounwind]`
RustcNounwind,

View file

@ -111,6 +111,7 @@ impl AttributeKind {
RustcDumpVtable(..) => No,
RustcDynIncompatibleTrait(..) => No,
RustcHasIncoherentInherentImpls => Yes,
RustcLayout(..) => No,
RustcLayoutScalarValidRangeEnd(..) => Yes,
RustcLayoutScalarValidRangeStart(..) => Yes,
RustcLegacyConstGenerics { .. } => Yes,
@ -123,6 +124,7 @@ impl AttributeKind {
RustcMustImplementOneOf { .. } => No,
RustcNeverReturnsNullPointer => Yes,
RustcNoImplicitAutorefs => Yes,
RustcNonConstTraitMethod => No, // should be reported via other queries like `constness`
RustcNounwind => No,
RustcObjcClass { .. } => No,
RustcObjcSelector { .. } => No,

View file

@ -218,7 +218,7 @@ fn compare_method_predicate_entailment<'tcx>(
trait_m_predicates.instantiate_own(tcx, trait_to_impl_args).map(|(predicate, _)| predicate),
);
let is_conditionally_const = tcx.is_conditionally_const(impl_def_id);
let is_conditionally_const = tcx.is_conditionally_const(impl_m.def_id);
if is_conditionally_const {
// Augment the hybrid param-env with the const conditions
// of the impl header and the trait method.

View file

@ -6,7 +6,7 @@ use itertools::Itertools;
use rustc_hir_analysis::autoderef::{Autoderef, AutoderefKind};
use rustc_infer::infer::InferOk;
use rustc_infer::traits::PredicateObligations;
use rustc_middle::ty::adjustment::{Adjust, Adjustment, OverloadedDeref};
use rustc_middle::ty::adjustment::{Adjust, Adjustment, DerefAdjustKind, OverloadedDeref};
use rustc_middle::ty::{self, Ty};
use rustc_span::Span;
@ -45,22 +45,24 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
steps.iter().skip(1).map(|&(ty, _)| ty).chain(iter::once(autoderef.final_ty()));
let steps: Vec<_> = steps
.iter()
.map(|&(source, kind)| {
if let AutoderefKind::Overloaded = kind {
self.try_overloaded_deref(autoderef.span(), source).and_then(
|InferOk { value: method, obligations: o }| {
.map(|&(source, kind)| match kind {
AutoderefKind::Overloaded => {
self.try_overloaded_deref(autoderef.span(), source)
.and_then(|InferOk { value: method, obligations: o }| {
obligations.extend(o);
// FIXME: we should assert the sig is &T here... there's no reason for this to be fallible.
if let ty::Ref(_, _, mutbl) = *method.sig.output().kind() {
Some(OverloadedDeref { mutbl, span: autoderef.span() })
Some(DerefAdjustKind::Overloaded(OverloadedDeref {
mutbl,
span: autoderef.span(),
}))
} else {
None
}
},
)
} else {
None
})
.unwrap_or(DerefAdjustKind::Builtin)
}
AutoderefKind::Builtin => DerefAdjustKind::Builtin,
})
.zip_eq(targets)
.map(|(autoderef, target)| Adjustment { kind: Adjust::Deref(autoderef), target })

View file

@ -50,7 +50,8 @@ use rustc_infer::traits::{
};
use rustc_middle::span_bug;
use rustc_middle::ty::adjustment::{
Adjust, Adjustment, AllowTwoPhase, AutoBorrow, AutoBorrowMutability, PointerCoercion,
Adjust, Adjustment, AllowTwoPhase, AutoBorrow, AutoBorrowMutability, DerefAdjustKind,
PointerCoercion,
};
use rustc_middle::ty::error::TypeError;
use rustc_middle::ty::{self, Ty, TyCtxt, TypeVisitableExt};
@ -595,7 +596,7 @@ impl<'f, 'tcx> Coerce<'f, 'tcx> {
let mutbl = AutoBorrowMutability::new(mutbl_b, AllowTwoPhase::No);
Some((
Adjustment { kind: Adjust::Deref(None), target: ty_a },
Adjustment { kind: Adjust::Deref(DerefAdjustKind::Builtin), target: ty_a },
Adjustment {
kind: Adjust::Borrow(AutoBorrow::Ref(mutbl)),
target: Ty::new_ref(self.tcx, r_borrow, ty_a, mutbl_b),
@ -606,7 +607,7 @@ impl<'f, 'tcx> Coerce<'f, 'tcx> {
coerce_mutbls(mt_a, mt_b)?;
Some((
Adjustment { kind: Adjust::Deref(None), target: ty_a },
Adjustment { kind: Adjust::Deref(DerefAdjustKind::Builtin), target: ty_a },
Adjustment {
kind: Adjust::Borrow(AutoBorrow::RawPtr(mt_b)),
target: Ty::new_ptr(self.tcx, ty_a, mt_b),
@ -936,7 +937,7 @@ impl<'f, 'tcx> Coerce<'f, 'tcx> {
self.unify_and(
a_raw,
b,
[Adjustment { kind: Adjust::Deref(None), target: mt_a.ty }],
[Adjustment { kind: Adjust::Deref(DerefAdjustKind::Builtin), target: mt_a.ty }],
Adjust::Borrow(AutoBorrow::RawPtr(mutbl_b)),
ForceLeakCheck::No,
)

View file

@ -605,6 +605,14 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
parent_id = self.tcx.parent_hir_id(*hir_id);
parent
}
hir::Node::Stmt(hir::Stmt { hir_id, kind: hir::StmtKind::Let(_), .. }) => {
parent_id = self.tcx.parent_hir_id(*hir_id);
parent
}
hir::Node::LetStmt(hir::LetStmt { hir_id, .. }) => {
parent_id = self.tcx.parent_hir_id(*hir_id);
parent
}
hir::Node::Block(_) => {
parent_id = self.tcx.parent_hir_id(parent_id);
parent

View file

@ -22,6 +22,7 @@ use rustc_middle::hir::place::ProjectionKind;
pub use rustc_middle::hir::place::{Place, PlaceBase, PlaceWithHirId, Projection};
use rustc_middle::mir::FakeReadCause;
use rustc_middle::thir::DerefPatBorrowMode;
use rustc_middle::ty::adjustment::DerefAdjustKind;
use rustc_middle::ty::{
self, BorrowKind, Ty, TyCtxt, TypeFoldable, TypeVisitableExt as _, adjustment,
};
@ -733,14 +734,14 @@ impl<'tcx, Cx: TypeInformationCtxt<'tcx>, D: Delegate<'tcx>> ExprUseVisitor<'tcx
self.consume_or_copy(&place_with_id, place_with_id.hir_id);
}
adjustment::Adjust::Deref(None) => {}
adjustment::Adjust::Deref(DerefAdjustKind::Builtin) => {}
// Autoderefs for overloaded Deref calls in fact reference
// their receiver. That is, if we have `(*x)` where `x`
// is of type `Rc<T>`, then this in fact is equivalent to
// `x.deref()`. Since `deref()` is declared with `&self`,
// this is an autoref of `x`.
adjustment::Adjust::Deref(Some(ref deref)) => {
adjustment::Adjust::Deref(DerefAdjustKind::Overloaded(deref)) => {
let bk = ty::BorrowKind::from_mutbl(deref.mutbl);
self.delegate.borrow_mut().borrow(&place_with_id, place_with_id.hir_id, bk);
}
@ -1272,9 +1273,9 @@ impl<'tcx, Cx: TypeInformationCtxt<'tcx>, D: Delegate<'tcx>> ExprUseVisitor<'tcx
{
let target = self.cx.resolve_vars_if_possible(adjustment.target);
match adjustment.kind {
adjustment::Adjust::Deref(overloaded) => {
adjustment::Adjust::Deref(deref_kind) => {
// Equivalent to *expr or something similar.
let base = if let Some(deref) = overloaded {
let base = if let DerefAdjustKind::Overloaded(deref) = deref_kind {
let ref_ty = Ty::new_ref(
self.cx.tcx(),
self.cx.tcx().lifetimes.re_erased,

View file

@ -20,7 +20,9 @@ use rustc_hir_analysis::hir_ty_lowering::{
use rustc_infer::infer::canonical::{Canonical, OriginalQueryValues, QueryResponse};
use rustc_infer::infer::{DefineOpaqueTypes, InferResult};
use rustc_lint::builtin::SELF_CONSTRUCTOR_FROM_OUTER_ITEM;
use rustc_middle::ty::adjustment::{Adjust, Adjustment, AutoBorrow, AutoBorrowMutability};
use rustc_middle::ty::adjustment::{
Adjust, Adjustment, AutoBorrow, AutoBorrowMutability, DerefAdjustKind,
};
use rustc_middle::ty::{
self, AdtKind, CanonicalUserType, GenericArgsRef, GenericParamDefKind, IsIdentity,
SizedTraitKind, Ty, TyCtxt, TypeFoldable, TypeVisitable, TypeVisitableExt, UserArgs,
@ -266,7 +268,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
debug!("apply_adjustments: adding `{:?}` as diverging type var", a.target);
}
}
Adjust::Deref(Some(overloaded_deref)) => {
Adjust::Deref(DerefAdjustKind::Overloaded(overloaded_deref)) => {
self.enforce_context_effects(
None,
expr.span,
@ -274,7 +276,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
self.tcx.mk_args(&[expr_ty.into()]),
);
}
Adjust::Deref(None) => {
Adjust::Deref(DerefAdjustKind::Builtin) => {
// FIXME(const_trait_impl): We *could* enforce `&T: [const] Deref` here.
}
Adjust::Pointer(_pointer_coercion) => {

View file

@ -2841,7 +2841,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
// the bad interactions of the given hack detailed in (note_1).
debug!("check_pat_ref: expected={:?}", expected);
match expected.maybe_pinned_ref() {
Some((r_ty, r_pinned, r_mutbl))
Some((r_ty, r_pinned, r_mutbl, _))
if ((ref_pat_matches_mut_ref && r_mutbl >= pat_mutbl)
|| r_mutbl == pat_mutbl)
&& pat_pinned == r_pinned =>

View file

@ -4,8 +4,8 @@ use rustc_infer::infer::InferOk;
use rustc_infer::traits::{Obligation, ObligationCauseCode};
use rustc_middle::span_bug;
use rustc_middle::ty::adjustment::{
Adjust, Adjustment, AllowTwoPhase, AutoBorrow, AutoBorrowMutability, OverloadedDeref,
PointerCoercion,
Adjust, Adjustment, AllowTwoPhase, AutoBorrow, AutoBorrowMutability, DerefAdjustKind,
OverloadedDeref, PointerCoercion,
};
use rustc_middle::ty::{self, Ty};
use rustc_span::{Span, sym};
@ -298,7 +298,8 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
self.typeck_results.borrow_mut().adjustments_mut().remove(expr.hir_id);
if let Some(mut adjustments) = previous_adjustments {
for adjustment in &mut adjustments {
if let Adjust::Deref(Some(ref mut deref)) = adjustment.kind
if let Adjust::Deref(DerefAdjustKind::Overloaded(ref mut deref)) =
adjustment.kind
&& let Some(ok) = self.try_mutable_overloaded_place_op(
expr.span,
source,

View file

@ -1,7 +1,9 @@
use rustc_ast::{BorrowKind, UnOp};
use rustc_hir::attrs::AttributeKind;
use rustc_hir::{Expr, ExprKind, Mutability, find_attr};
use rustc_middle::ty::adjustment::{Adjust, Adjustment, AutoBorrow, OverloadedDeref};
use rustc_middle::ty::adjustment::{
Adjust, Adjustment, AutoBorrow, DerefAdjustKind, OverloadedDeref,
};
use rustc_session::{declare_lint, declare_lint_pass};
use crate::lints::{
@ -165,12 +167,14 @@ fn peel_derefs_adjustments<'a>(mut adjs: &'a [Adjustment<'a>]) -> &'a [Adjustmen
/// an implicit borrow (or has an implicit borrow via an overloaded deref).
fn has_implicit_borrow(Adjustment { kind, .. }: &Adjustment<'_>) -> Option<(Mutability, bool)> {
match kind {
&Adjust::Deref(Some(OverloadedDeref { mutbl, .. })) => Some((mutbl, true)),
&Adjust::Deref(DerefAdjustKind::Overloaded(OverloadedDeref { mutbl, .. })) => {
Some((mutbl, true))
}
&Adjust::Borrow(AutoBorrow::Ref(mutbl)) => Some((mutbl.into(), false)),
Adjust::NeverToAny
| Adjust::Pointer(..)
| Adjust::ReborrowPin(..)
| Adjust::Deref(None)
| Adjust::Deref(DerefAdjustKind::Builtin)
| Adjust::Borrow(AutoBorrow::RawPtr(..)) => None,
}
}

View file

@ -1,7 +1,7 @@
use rustc_hir::def::DefKind;
use rustc_hir::{Expr, ExprKind};
use rustc_middle::ty;
use rustc_middle::ty::adjustment::Adjust;
use rustc_middle::ty::adjustment::{Adjust, DerefAdjustKind};
use rustc_session::{declare_lint, declare_lint_pass};
use rustc_span::sym;
@ -114,7 +114,10 @@ impl<'tcx> LateLintPass<'tcx> for NoopMethodCall {
// If there is any user defined auto-deref step, then we don't want to warn.
// https://github.com/rust-lang/rust-clippy/issues/9272
if arg_adjustments.iter().any(|adj| matches!(adj.kind, Adjust::Deref(Some(_)))) {
if arg_adjustments
.iter()
.any(|adj| matches!(adj.kind, Adjust::Deref(DerefAdjustKind::Overloaded(_))))
{
return;
}

View file

@ -805,7 +805,10 @@ trait UnusedDelimLint {
ExprKind::Break(_label, None) => return false,
ExprKind::Break(_label, Some(break_expr)) => {
return matches!(break_expr.kind, ExprKind::Block(..));
// `if (break 'label i) { ... }` removing parens would make `i { ... }`
// be parsed as a struct literal, so keep parentheses if the break value
// ends with a path (which could be mistaken for a struct name).
return matches!(break_expr.kind, ExprKind::Block(..) | ExprKind::Path(..));
}
ExprKind::Range(_lhs, Some(rhs), _limits) => {
@ -1782,7 +1785,7 @@ declare_lint! {
declare_lint_pass!(UnusedAllocation => [UNUSED_ALLOCATION]);
impl<'tcx> LateLintPass<'tcx> for UnusedAllocation {
fn check_expr(&mut self, cx: &LateContext<'_>, e: &hir::Expr<'_>) {
fn check_expr(&mut self, cx: &LateContext<'tcx>, e: &hir::Expr<'_>) {
match e.kind {
hir::ExprKind::Call(path_expr, [_])
if let hir::ExprKind::Path(qpath) = &path_expr.kind
@ -1793,6 +1796,12 @@ impl<'tcx> LateLintPass<'tcx> for UnusedAllocation {
for adj in cx.typeck_results().expr_adjustments(e) {
if let adjustment::Adjust::Borrow(adjustment::AutoBorrow::Ref(m)) = adj.kind {
if let ty::Ref(_, inner_ty, _) = adj.target.kind()
&& inner_ty.is_box()
{
// If the target type is `&Box<T>` or `&mut Box<T>`, the allocation is necessary
continue;
}
match m {
adjustment::AutoBorrowMutability::Not => {
cx.emit_span_lint(UNUSED_ALLOCATION, e.span, UnusedAllocationDiag);

View file

@ -280,31 +280,21 @@ fn add_query_desc_cached_impl(
let crate::query::Providers { #name: _, .. };
};
// Find out if we should cache the query on disk
let cache = if let Some((args, expr)) = modifiers.cache.as_ref() {
// Generate a function to check whether we should cache the query to disk, for some key.
if let Some((args, expr)) = modifiers.cache.as_ref() {
let tcx = args.as_ref().map(|t| quote! { #t }).unwrap_or_else(|| quote! { _ });
// expr is a `Block`, meaning that `{ #expr }` gets expanded
// to `{ { stmts... } }`, which triggers the `unused_braces` lint.
// we're taking `key` by reference, but some rustc types usually prefer being passed by value
quote! {
cached.extend(quote! {
#[allow(unused_variables, unused_braces, rustc::pass_by_value)]
#[inline]
pub fn #name<'tcx>(#tcx: TyCtxt<'tcx>, #key: &crate::query::queries::#name::Key<'tcx>) -> bool {
#ra_hint
#expr
}
}
} else {
quote! {
// we're taking `key` by reference, but some rustc types usually prefer being passed by value
#[allow(rustc::pass_by_value)]
#[inline]
pub fn #name<'tcx>(_: TyCtxt<'tcx>, _: &crate::query::queries::#name::Key<'tcx>) -> bool {
#ra_hint
false
}
}
};
});
}
let (tcx, desc) = &modifiers.desc;
let tcx = tcx.as_ref().map_or_else(|| quote! { _ }, |t| quote! { #t });
@ -322,10 +312,6 @@ fn add_query_desc_cached_impl(
descs.extend(quote! {
#desc
});
cached.extend(quote! {
#cache
});
}
pub(super) fn rustc_queries(input: TokenStream) -> TokenStream {

View file

@ -10,7 +10,6 @@ use rustc_query_system::query::{QueryCache, QueryMode, try_get_cached};
use rustc_span::{DUMMY_SP, ErrorGuaranteed, Span};
use crate::dep_graph;
use crate::query::IntoQueryParam;
use crate::query::erase::{self, Erasable, Erased};
use crate::ty::TyCtxt;
@ -27,7 +26,6 @@ pub(crate) fn query_get_at<'tcx, Cache>(
where
Cache: QueryCache,
{
let key = key.into_query_param();
match try_get_cached(tcx, query_cache, &key) {
Some(value) => value,
None => execute_query(tcx, span, key, QueryMode::Get).unwrap(),
@ -46,7 +44,6 @@ pub(crate) fn query_ensure<'tcx, Cache>(
) where
Cache: QueryCache,
{
let key = key.into_query_param();
if try_get_cached(tcx, query_cache, &key).is_none() {
execute_query(tcx, DUMMY_SP, key, QueryMode::Ensure { check_cache });
}
@ -66,7 +63,6 @@ where
Cache: QueryCache<Value = Erased<Result<T, ErrorGuaranteed>>>,
Result<T, ErrorGuaranteed>: Erasable,
{
let key = key.into_query_param();
if let Some(res) = try_get_cached(tcx, query_cache, &key) {
erase::restore_val(res).map(drop)
} else {

View file

@ -88,7 +88,7 @@ use rustc_index::IndexVec;
use rustc_lint_defs::LintId;
use rustc_macros::rustc_queries;
use rustc_query_system::ich::StableHashingContext;
use rustc_query_system::query::{QueryMode, QueryStackDeferred, QueryState};
use rustc_query_system::query::{QueryMode, QueryState};
use rustc_session::Limits;
use rustc_session::config::{EntryFnType, OptLevel, OutputFilenames, SymbolManglingVersion};
use rustc_session::cstore::{

View file

@ -18,6 +18,18 @@ use crate::query::{
};
use crate::ty::TyCtxt;
/// Reports whether the query result for `key` will be written to the on-disk
/// cache. Stored as `Option<_>` in `QueryVTable`; `None` presumably means the
/// query is never disk-cached — TODO confirm against the vtable construction.
pub type WillCacheOnDiskForKeyFn<'tcx, Key> = fn(tcx: TyCtxt<'tcx>, key: &Key) -> bool;

/// Attempts to load a previously cached result for `key` from disk, given the
/// serialized dep-node index it was recorded under and the index it will have
/// in the current session. Returns `None` when the value cannot be loaded.
pub type TryLoadFromDiskFn<'tcx, Key, Value> = fn(
    tcx: TyCtxt<'tcx>,
    key: &Key,
    prev_index: SerializedDepNodeIndex,
    index: DepNodeIndex,
) -> Option<Value>;

/// Checks whether a cached result for `key` at the given serialized dep-node
/// index is loadable from disk, without actually loading it.
pub type IsLoadableFromDiskFn<'tcx, Key> =
    fn(tcx: TyCtxt<'tcx>, key: &Key, index: SerializedDepNodeIndex) -> bool;
/// Stores function pointers and other metadata for a particular query.
///
/// Used indirectly by query plumbing in `rustc_query_system`, via a trait.
@ -31,18 +43,11 @@ pub struct QueryVTable<'tcx, C: QueryCache> {
pub query_state: usize,
// Offset of this query's cache field in the QueryCaches struct
pub query_cache: usize,
pub cache_on_disk: fn(tcx: TyCtxt<'tcx>, key: &C::Key) -> bool,
pub will_cache_on_disk_for_key_fn: Option<WillCacheOnDiskForKeyFn<'tcx, C::Key>>,
pub execute_query: fn(tcx: TyCtxt<'tcx>, k: C::Key) -> C::Value,
pub compute: fn(tcx: TyCtxt<'tcx>, key: C::Key) -> C::Value,
pub can_load_from_disk: bool,
pub try_load_from_disk: fn(
tcx: TyCtxt<'tcx>,
key: &C::Key,
prev_index: SerializedDepNodeIndex,
index: DepNodeIndex,
) -> Option<C::Value>,
pub loadable_from_disk:
fn(tcx: TyCtxt<'tcx>, key: &C::Key, index: SerializedDepNodeIndex) -> bool,
pub try_load_from_disk_fn: Option<TryLoadFromDiskFn<'tcx, C::Key, C::Value>>,
pub is_loadable_from_disk_fn: Option<IsLoadableFromDiskFn<'tcx, C::Key>>,
pub hash_result: HashResult<C::Value>,
pub value_from_cycle_error:
fn(tcx: TyCtxt<'tcx>, cycle_error: &CycleError, guar: ErrorGuaranteed) -> C::Value,
@ -440,7 +445,7 @@ macro_rules! define_callbacks {
#[derive(Default)]
pub struct QueryStates<'tcx> {
$(
pub $name: QueryState<$($K)*, QueryStackDeferred<'tcx>>,
pub $name: QueryState<'tcx, $($K)*>,
)*
}

View file

@ -97,7 +97,7 @@ pub enum Adjust {
NeverToAny,
/// Dereference once, producing a place.
Deref(Option<OverloadedDeref>),
Deref(DerefAdjustKind),
/// Take the address and produce either a `&` or `*` pointer.
Borrow(AutoBorrow),
@ -108,6 +108,12 @@ pub enum Adjust {
ReborrowPin(hir::Mutability),
}
/// Distinguishes how an `Adjust::Deref` adjustment performs its dereference.
/// Replaces the previous `Option<OverloadedDeref>` encoding (`None` = builtin,
/// `Some` = overloaded) with explicitly named variants.
#[derive(Copy, Clone, Debug, TyEncodable, TyDecodable, HashStable, TypeFoldable, TypeVisitable)]
pub enum DerefAdjustKind {
    /// A built-in dereference of a reference or raw pointer; no method call.
    Builtin,
    /// An overloaded `Deref`/`DerefMut` call, carrying the recorded
    /// mutability and span of the autoderef step.
    Overloaded(OverloadedDeref),
}
/// An overloaded autoderef step, representing a `Deref(Mut)::deref(_mut)`
/// call, with the signature `&'a T -> &'a U` or `&'a mut T -> &'a mut U`.
/// The target type is `U` in both cases, with the region and mutability

View file

@ -3470,10 +3470,9 @@ impl<'tcx> TyCtxt<'tcx> {
pub fn intrinsic(self, def_id: impl IntoQueryParam<DefId> + Copy) -> Option<ty::IntrinsicDef> {
match self.def_kind(def_id) {
DefKind::Fn | DefKind::AssocFn => {}
_ => return None,
DefKind::Fn | DefKind::AssocFn => self.intrinsic_raw(def_id),
_ => None,
}
self.intrinsic_raw(def_id)
}
pub fn next_trait_solver_globally(self) -> bool {

View file

@ -2086,8 +2086,16 @@ impl<'tcx> TyCtxt<'tcx> {
DefKind::Impl { of_trait: false } => {
self.constness(def_id) == hir::Constness::Const
}
DefKind::Impl { of_trait: true } | DefKind::Trait => {
self.is_conditionally_const(parent_def_id)
DefKind::Impl { of_trait: true } => {
let Some(trait_method_did) = self.trait_item_of(def_id) else {
return false;
};
self.constness(trait_method_did) == hir::Constness::Const
&& self.is_conditionally_const(parent_def_id)
}
DefKind::Trait => {
self.constness(def_id) == hir::Constness::Const
&& self.is_conditionally_const(parent_def_id)
}
_ => bug!("unexpected parent item of associated fn: {parent_def_id:?}"),
}

View file

@ -1336,25 +1336,17 @@ impl<'tcx> Ty<'tcx> {
}
}
pub fn pinned_ref(self) -> Option<(Ty<'tcx>, ty::Mutability)> {
if let Adt(def, args) = self.kind()
&& def.is_pin()
&& let &ty::Ref(_, ty, mutbl) = args.type_at(0).kind()
{
return Some((ty, mutbl));
}
None
}
pub fn maybe_pinned_ref(self) -> Option<(Ty<'tcx>, ty::Pinnedness, ty::Mutability)> {
match *self.kind() {
pub fn maybe_pinned_ref(
self,
) -> Option<(Ty<'tcx>, ty::Pinnedness, ty::Mutability, Region<'tcx>)> {
match self.kind() {
Adt(def, args)
if def.is_pin()
&& let ty::Ref(_, ty, mutbl) = *args.type_at(0).kind() =>
&& let &ty::Ref(region, ty, mutbl) = args.type_at(0).kind() =>
{
Some((ty, ty::Pinnedness::Pinned, mutbl))
Some((ty, ty::Pinnedness::Pinned, mutbl, region))
}
ty::Ref(_, ty, mutbl) => Some((ty, ty::Pinnedness::Not, mutbl)),
&Ref(region, ty, mutbl) => Some((ty, ty::Pinnedness::Not, mutbl, region)),
_ => None,
}
}

View file

@ -642,12 +642,8 @@ impl<'tcx> TyCtxt<'tcx> {
/// has its own type-checking context or "inference environment".
///
/// For example, a closure has its own `DefId`, but it is type-checked
/// with the containing item. Similarly, an inline const block has its
/// own `DefId` but it is type-checked together with the containing item.
///
/// Therefore, when we fetch the
/// `typeck` the closure, for example, we really wind up
/// fetching the `typeck` the enclosing fn item.
/// with the containing item. Therefore, when we fetch the `typeck` of the closure,
/// for example, we really wind up fetching the `typeck` of the enclosing fn item.
pub fn typeck_root_def_id(self, def_id: DefId) -> DefId {
let mut def_id = def_id;
while self.is_typeck_child(def_id) {

View file

@ -14,7 +14,7 @@ use rustc_middle::middle::region;
use rustc_middle::mir::{self, AssignOp, BinOp, BorrowKind, UnOp};
use rustc_middle::thir::*;
use rustc_middle::ty::adjustment::{
Adjust, Adjustment, AutoBorrow, AutoBorrowMutability, PointerCoercion,
Adjust, Adjustment, AutoBorrow, AutoBorrowMutability, DerefAdjustKind, PointerCoercion,
};
use rustc_middle::ty::{
self, AdtKind, GenericArgs, InlineConstArgs, InlineConstArgsParts, ScalarInt, Ty, UpvarArgs,
@ -140,11 +140,11 @@ impl<'tcx> ThirBuildCx<'tcx> {
}
Adjust::NeverToAny if adjustment.target.is_never() => return expr,
Adjust::NeverToAny => ExprKind::NeverToAny { source: self.thir.exprs.push(expr) },
Adjust::Deref(None) => {
Adjust::Deref(DerefAdjustKind::Builtin) => {
adjust_span(&mut expr);
ExprKind::Deref { arg: self.thir.exprs.push(expr) }
}
Adjust::Deref(Some(deref)) => {
Adjust::Deref(DerefAdjustKind::Overloaded(deref)) => {
// We don't need to do call adjust_span here since
// deref coercions always start with a built-in deref.
let call_def_id = deref.method_call(self.tcx);

View file

@ -6,7 +6,6 @@ edition = "2024"
[dependencies]
# tidy-alphabetical-start
either = "1"
hashbrown = { version = "0.16.1", default-features = false }
itertools = "0.12"
rustc_abi = { path = "../rustc_abi" }
rustc_arena = { path = "../rustc_arena" }

View file

@ -88,7 +88,6 @@ use std::borrow::Cow;
use std::hash::{Hash, Hasher};
use either::Either;
use hashbrown::hash_table::{Entry, HashTable};
use itertools::Itertools as _;
use rustc_abi::{self as abi, BackendRepr, FIRST_VARIANT, FieldIdx, Primitive, Size, VariantIdx};
use rustc_arena::DroplessArena;
@ -99,6 +98,7 @@ use rustc_const_eval::interpret::{
};
use rustc_data_structures::fx::FxHasher;
use rustc_data_structures::graph::dominators::Dominators;
use rustc_data_structures::hash_table::{Entry, HashTable};
use rustc_hir::def::DefKind;
use rustc_index::bit_set::DenseBitSet;
use rustc_index::{IndexVec, newtype_index};
@ -1591,10 +1591,12 @@ impl<'body, 'a, 'tcx> VnState<'body, 'a, 'tcx> {
(Transmute, PtrToPtr) if self.pointers_have_same_metadata(from, to) => {
Some(Transmute)
}
// If would be legal to always do this, but we don't want to hide information
// It would be legal to always do this, but we don't want to hide information
// from the backend that it'd otherwise be able to use for optimizations.
(Transmute, Transmute)
if !self.type_may_have_niche_of_interest_to_backend(from) =>
if !self.transmute_may_have_niche_of_interest_to_backend(
inner_from, from, to,
) =>
{
Some(Transmute)
}
@ -1642,24 +1644,65 @@ impl<'body, 'a, 'tcx> VnState<'body, 'a, 'tcx> {
}
}
/// Returns `false` if we know for sure that this type has no interesting niche,
/// and thus we can skip transmuting through it without worrying.
/// Returns `false` if we're confident that the middle type doesn't have an
/// interesting niche so we can skip that step when transmuting.
///
/// The backend will emit `assume`s when transmuting between types with niches,
/// so we want to preserve `i32 -> char -> u32` so that that data is around,
/// but it's fine to skip whole-range-is-value steps like `A -> u32 -> B`.
fn type_may_have_niche_of_interest_to_backend(&self, ty: Ty<'tcx>) -> bool {
let Ok(layout) = self.ecx.layout_of(ty) else {
fn transmute_may_have_niche_of_interest_to_backend(
&self,
from_ty: Ty<'tcx>,
middle_ty: Ty<'tcx>,
to_ty: Ty<'tcx>,
) -> bool {
let Ok(middle_layout) = self.ecx.layout_of(middle_ty) else {
// If it's too generic or something, then assume it might be interesting later.
return true;
};
if layout.uninhabited {
if middle_layout.uninhabited {
return true;
}
match layout.backend_repr {
BackendRepr::Scalar(a) => !a.is_always_valid(&self.ecx),
match middle_layout.backend_repr {
BackendRepr::Scalar(mid) => {
if mid.is_always_valid(&self.ecx) {
// With no niche it's never interesting, so don't bother
// looking at the layout of the other two types.
false
} else if let Ok(from_layout) = self.ecx.layout_of(from_ty)
&& !from_layout.uninhabited
&& from_layout.size == middle_layout.size
&& let BackendRepr::Scalar(from_a) = from_layout.backend_repr
&& let mid_range = mid.valid_range(&self.ecx)
&& let from_range = from_a.valid_range(&self.ecx)
&& mid_range.contains_range(from_range, middle_layout.size)
{
// The `from_range` is a (non-strict) subset of `mid_range`
// such as if we're doing `bool` -> `ascii::Char` -> `_`,
// where `from_range: 0..=1` and `mid_range: 0..=127`,
// and thus the middle doesn't tell us anything we don't
// already know from the initial type.
false
} else if let Ok(to_layout) = self.ecx.layout_of(to_ty)
&& !to_layout.uninhabited
&& to_layout.size == middle_layout.size
&& let BackendRepr::Scalar(to_a) = to_layout.backend_repr
&& let mid_range = mid.valid_range(&self.ecx)
&& let to_range = to_a.valid_range(&self.ecx)
&& mid_range.contains_range(to_range, middle_layout.size)
{
// The `to_range` is a (non-strict) subset of `mid_range`
// such as if we're doing `_` -> `ascii::Char` -> `bool`,
// where `mid_range: 0..=127` and `to_range: 0..=1`,
// and thus the middle doesn't tell us anything we don't
// already know from the final type.
false
} else {
true
}
}
BackendRepr::ScalarPair(a, b) => {
!a.is_always_valid(&self.ecx) || !b.is_always_valid(&self.ecx)
}

View file

@ -1086,11 +1086,6 @@ impl<'a, 'tcx> AssignmentResult<'a, 'tcx> {
let Some((name, decl_span)) = self.checked_places.names[index] else { continue };
// By convention, underscore-prefixed bindings are explicitly allowed to be unused.
if name.as_str().starts_with('_') {
continue;
}
let is_maybe_drop_guard = maybe_drop_guard(
tcx,
self.typing_env,
@ -1118,6 +1113,11 @@ impl<'a, 'tcx> AssignmentResult<'a, 'tcx> {
continue;
};
// By convention, underscore-prefixed bindings are allowed to be unused explicitly
if name.as_str().starts_with('_') {
break;
}
match kind {
AccessKind::Assign => {
let suggestion = annotate_mut_binding_to_immutable_binding(

View file

@ -623,7 +623,7 @@ impl<'psess, 'src> Lexer<'psess, 'src> {
self.dcx().emit_err(errors::FrontmatterInvalidInfostring { span });
}
let last_line_start = real_s.rfind('\n').map_or(0, |i| i + 1);
let last_line_start = real_s.rfind('\n').map_or(line_end, |i| i + 1);
let content = &real_s[line_end..last_line_start];
if let Some(cr_offset) = content.find('\r') {

View file

@ -1749,6 +1749,12 @@ impl<'a> Parser<'a> {
hi = self.prev_token.span;
let ann = BindingMode(by_ref, mutability);
let fieldpat = self.mk_pat_ident(boxed_span.to(hi), ann, fieldname);
if matches!(
fieldpat.kind,
PatKind::Ident(BindingMode(ByRef::Yes(..), Mutability::Mut), ..)
) {
self.psess.gated_spans.gate(sym::mut_ref, fieldpat.span);
}
let subpat = if is_box {
self.mk_pat(lo.to(hi), PatKind::Box(Box::new(fieldpat)))
} else {

View file

@ -302,8 +302,6 @@ passes_layout_align =
align: {$align}
passes_layout_homogeneous_aggregate =
homogeneous_aggregate: {$homogeneous_aggregate}
passes_layout_invalid_attribute =
`#[rustc_layout]` can only be applied to `struct`/`enum`/`union` declarations and type aliases
passes_layout_of =
layout_of({$normalized_ty}) = {$ty_layout}
passes_layout_size =

View file

@ -299,6 +299,7 @@ impl<'tcx> CheckAttrVisitor<'tcx> {
| AttributeKind::RustcDumpVtable(..)
| AttributeKind::RustcDynIncompatibleTrait(..)
| AttributeKind::RustcHasIncoherentInherentImpls
| AttributeKind::RustcLayout(..)
| AttributeKind::RustcLayoutScalarValidRangeEnd(..)
| AttributeKind::RustcLayoutScalarValidRangeStart(..)
| AttributeKind::RustcLintOptDenyFieldAccess { .. }
@ -309,6 +310,7 @@ impl<'tcx> CheckAttrVisitor<'tcx> {
| AttributeKind::RustcMain
| AttributeKind::RustcNeverReturnsNullPointer
| AttributeKind::RustcNoImplicitAutorefs
| AttributeKind::RustcNonConstTraitMethod
| AttributeKind::RustcNounwind
| AttributeKind::RustcObjcClass { .. }
| AttributeKind::RustcObjcSelector { .. }
@ -334,7 +336,6 @@ impl<'tcx> CheckAttrVisitor<'tcx> {
| AttributeKind::Used { .. }
| AttributeKind::WindowsSubsystem(..)
// tidy-alphabetical-end
) => { /* do nothing */ }
Attribute::Unparsed(attr_item) => {
style = Some(attr_item.style);

View file

@ -519,13 +519,6 @@ pub(crate) struct LayoutOf<'tcx> {
pub ty_layout: String,
}
#[derive(Diagnostic)]
#[diag(passes_layout_invalid_attribute)]
pub(crate) struct LayoutInvalidAttribute {
#[primary_span]
pub span: Span,
}
#[derive(Diagnostic)]
#[diag(passes_abi_of)]
pub(crate) struct AbiOf {

View file

@ -1,20 +1,18 @@
use rustc_abi::{HasDataLayout, TargetDataLayout};
use rustc_hir::Attribute;
use rustc_hir::attrs::{AttributeKind, RustcLayoutType};
use rustc_hir::def::DefKind;
use rustc_hir::def_id::LocalDefId;
use rustc_hir::find_attr;
use rustc_middle::span_bug;
use rustc_middle::ty::layout::{HasTyCtxt, HasTypingEnv, LayoutError, LayoutOfHelpers};
use rustc_middle::ty::{self, Ty, TyCtxt};
use rustc_span::Span;
use rustc_span::source_map::Spanned;
use rustc_span::{Span, sym};
use rustc_trait_selection::error_reporting::InferCtxtErrorExt;
use rustc_trait_selection::infer::TyCtxtInferExt;
use rustc_trait_selection::traits;
use crate::errors::{
LayoutAbi, LayoutAlign, LayoutHomogeneousAggregate, LayoutInvalidAttribute, LayoutOf,
LayoutSize, UnrecognizedArgument,
};
use crate::errors::{LayoutAbi, LayoutAlign, LayoutHomogeneousAggregate, LayoutOf, LayoutSize};
pub fn test_layout(tcx: TyCtxt<'_>) {
if !tcx.features().rustc_attrs() {
@ -22,14 +20,14 @@ pub fn test_layout(tcx: TyCtxt<'_>) {
return;
}
for id in tcx.hir_crate_items(()).definitions() {
for attr in tcx.get_attrs(id, sym::rustc_layout) {
match tcx.def_kind(id) {
DefKind::TyAlias | DefKind::Enum | DefKind::Struct | DefKind::Union => {
dump_layout_of(tcx, id, attr);
}
_ => {
tcx.dcx().emit_err(LayoutInvalidAttribute { span: tcx.def_span(id) });
}
let attrs = tcx.get_all_attrs(id);
if let Some(attrs) = find_attr!(attrs, AttributeKind::RustcLayout(attrs) => attrs) {
// Attribute parsing handles error reporting
if matches!(
tcx.def_kind(id),
DefKind::TyAlias | DefKind::Enum | DefKind::Struct | DefKind::Union
) {
dump_layout_of(tcx, id, attrs);
}
}
}
@ -66,7 +64,7 @@ pub fn ensure_wf<'tcx>(
}
}
fn dump_layout_of(tcx: TyCtxt<'_>, item_def_id: LocalDefId, attr: &Attribute) {
fn dump_layout_of(tcx: TyCtxt<'_>, item_def_id: LocalDefId, attrs: &[RustcLayoutType]) {
let typing_env = ty::TypingEnv::post_analysis(tcx, item_def_id);
let ty = tcx.type_of(item_def_id).instantiate_identity();
let span = tcx.def_span(item_def_id.to_def_id());
@ -75,32 +73,29 @@ fn dump_layout_of(tcx: TyCtxt<'_>, item_def_id: LocalDefId, attr: &Attribute) {
}
match tcx.layout_of(typing_env.as_query_input(ty)) {
Ok(ty_layout) => {
// Check out the `#[rustc_layout(..)]` attribute to tell what to dump.
// The `..` are the names of fields to dump.
let meta_items = attr.meta_item_list().unwrap_or_default();
for meta_item in meta_items {
match meta_item.name() {
for attr in attrs {
match attr {
// FIXME: this never was about ABI and now this dump arg is confusing
Some(sym::abi) => {
RustcLayoutType::Abi => {
tcx.dcx().emit_err(LayoutAbi {
span,
abi: format!("{:?}", ty_layout.backend_repr),
});
}
Some(sym::align) => {
RustcLayoutType::Align => {
tcx.dcx().emit_err(LayoutAlign {
span,
align: format!("{:?}", ty_layout.align),
});
}
Some(sym::size) => {
RustcLayoutType::Size => {
tcx.dcx()
.emit_err(LayoutSize { span, size: format!("{:?}", ty_layout.size) });
}
Some(sym::homogeneous_aggregate) => {
RustcLayoutType::HomogenousAggregate => {
tcx.dcx().emit_err(LayoutHomogeneousAggregate {
span,
homogeneous_aggregate: format!(
@ -111,16 +106,12 @@ fn dump_layout_of(tcx: TyCtxt<'_>, item_def_id: LocalDefId, attr: &Attribute) {
});
}
Some(sym::debug) => {
RustcLayoutType::Debug => {
let normalized_ty = tcx.normalize_erasing_regions(typing_env, ty);
// FIXME: using the `Debug` impl here isn't ideal.
let ty_layout = format!("{:#?}", *ty_layout);
tcx.dcx().emit_err(LayoutOf { span, normalized_ty, ty_layout });
}
_ => {
tcx.dcx().emit_err(UnrecognizedArgument { span: meta_item.span() });
}
}
}
}

View file

@ -4,8 +4,8 @@ use std::iter::once;
use rustc_abi::{FIRST_VARIANT, FieldIdx, Integer, VariantIdx};
use rustc_arena::DroplessArena;
use rustc_hir::HirId;
use rustc_hir::def_id::DefId;
use rustc_hir::{self as hir, HirId};
use rustc_index::{Idx, IndexVec};
use rustc_middle::middle::stability::EvalResult;
use rustc_middle::thir::{self, Pat, PatKind, PatRange, PatRangeBoundary};
@ -471,9 +471,9 @@ impl<'p, 'tcx: 'p> RustcPatCtxt<'p, 'tcx> {
PatKind::Deref { pin, subpattern } => {
fields = vec![self.lower_pat(subpattern).at_index(0)];
arity = 1;
ctor = match pin {
hir::Pinnedness::Not if ty.is_ref() => Ref,
hir::Pinnedness::Pinned if let Some((inner_ty, _)) = ty.pinned_ref() => {
ctor = match (pin, ty.maybe_pinned_ref()) {
(ty::Pinnedness::Not, Some((_, ty::Pinnedness::Not, _, _))) => Ref,
(ty::Pinnedness::Pinned, Some((inner_ty, ty::Pinnedness::Pinned, _, _))) => {
self.internal_state.has_lowered_deref_pat.set(true);
DerefPattern(RevealedTy(inner_ty))
}

View file

@ -1,5 +1,6 @@
// tidy-alphabetical-start
#![feature(associated_type_defaults)]
#![feature(default_field_values)]
#![feature(try_blocks)]
// tidy-alphabetical-end
@ -29,8 +30,8 @@ use rustc_middle::middle::privacy::{EffectiveVisibilities, EffectiveVisibility,
use rustc_middle::query::Providers;
use rustc_middle::ty::print::PrintTraitRefExt as _;
use rustc_middle::ty::{
self, Const, GenericParamDefKind, TraitRef, Ty, TyCtxt, TypeSuperVisitable, TypeVisitable,
TypeVisitor,
self, AssocContainer, Const, GenericParamDefKind, TraitRef, Ty, TyCtxt, TypeSuperVisitable,
TypeVisitable, TypeVisitor,
};
use rustc_middle::{bug, span_bug};
use rustc_session::lint;
@ -311,6 +312,18 @@ where
}
}
fn assoc_has_type_of(tcx: TyCtxt<'_>, item: &ty::AssocItem) -> bool {
if let ty::AssocKind::Type { data: ty::AssocTypeData::Normal(..) } = item.kind
&& let hir::Node::TraitItem(item) =
tcx.hir_node(tcx.local_def_id_to_hir_id(item.def_id.expect_local()))
&& let hir::TraitItemKind::Type(_, None) = item.kind
{
false
} else {
true
}
}
fn min(vis1: ty::Visibility, vis2: ty::Visibility, tcx: TyCtxt<'_>) -> ty::Visibility {
if vis1.is_at_least(vis2, tcx) { vis2 } else { vis1 }
}
@ -639,6 +652,19 @@ impl<'tcx> EmbargoVisitor<'tcx> {
}
impl<'tcx> EmbargoVisitor<'tcx> {
fn check_assoc_item(&mut self, item: &ty::AssocItem, item_ev: EffectiveVisibility) {
let def_id = item.def_id.expect_local();
let tcx = self.tcx;
let mut reach = self.reach(def_id, item_ev);
reach.generics().predicates();
if assoc_has_type_of(tcx, item) {
reach.ty();
}
if item.is_type() && item.container == AssocContainer::Trait {
reach.bounds();
}
}
fn check_def_id(&mut self, owner_id: OwnerId) {
// Update levels of nested things and mark all items
// in interfaces of reachable items as reachable.
@ -669,22 +695,10 @@ impl<'tcx> EmbargoVisitor<'tcx> {
self.reach(owner_id.def_id, item_ev).generics().predicates();
for assoc_item in self.tcx.associated_items(owner_id).in_definition_order() {
if assoc_item.is_impl_trait_in_trait() {
continue;
}
let def_id = assoc_item.def_id.expect_local();
self.update(def_id, item_ev, Level::Reachable);
let tcx = self.tcx;
let mut reach = self.reach(def_id, item_ev);
reach.generics().predicates();
if assoc_item.is_type() && !assoc_item.defaultness(tcx).has_value() {
// No type to visit.
} else {
reach.ty();
}
self.check_assoc_item(assoc_item, item_ev);
}
}
}
@ -722,17 +736,13 @@ impl<'tcx> EmbargoVisitor<'tcx> {
}
for assoc_item in self.tcx.associated_items(owner_id).in_definition_order() {
if assoc_item.is_impl_trait_in_trait() {
continue;
}
let def_id = assoc_item.def_id.expect_local();
let max_vis =
if of_trait { None } else { Some(self.tcx.local_visibility(def_id)) };
self.update_eff_vis(def_id, item_ev, max_vis, Level::Direct);
if let Some(impl_item_ev) = self.get(def_id) {
self.reach(def_id, impl_item_ev).generics().predicates().ty();
self.check_assoc_item(assoc_item, impl_item_ev);
}
}
}
@ -824,7 +834,12 @@ impl ReachEverythingInTheInterfaceVisitor<'_, '_> {
}
fn predicates(&mut self) -> &mut Self {
self.visit_predicates(self.ev.tcx.predicates_of(self.item_def_id));
self.visit_predicates(self.ev.tcx.explicit_predicates_of(self.item_def_id));
self
}
fn bounds(&mut self) -> &mut Self {
self.visit_clauses(self.ev.tcx.explicit_item_bounds(self.item_def_id).skip_binder());
self
}
@ -1353,26 +1368,20 @@ struct SearchInterfaceForPrivateItemsVisitor<'tcx> {
/// The visitor checks that each component type is at least this visible.
required_visibility: ty::Visibility,
required_effective_vis: Option<EffectiveVisibility>,
in_assoc_ty: bool,
in_primary_interface: bool,
skip_assoc_tys: bool,
hard_error: bool = false,
in_primary_interface: bool = true,
skip_assoc_tys: bool = false,
}
impl SearchInterfaceForPrivateItemsVisitor<'_> {
fn generics(&mut self) -> &mut Self {
self.in_primary_interface = true;
for param in &self.tcx.generics_of(self.item_def_id).own_params {
match param.kind {
GenericParamDefKind::Lifetime => {}
GenericParamDefKind::Type { has_default, .. } => {
if has_default {
let _ = self.visit(self.tcx.type_of(param.def_id).instantiate_identity());
}
}
// FIXME(generic_const_exprs): May want to look inside const here
GenericParamDefKind::Const { .. } => {
let _ = self.visit(self.tcx.type_of(param.def_id).instantiate_identity());
}
if let GenericParamDefKind::Const { .. } = param.kind {
let _ = self.visit(self.tcx.type_of(param.def_id).instantiate_identity());
}
if let Some(default) = param.default_value(self.tcx) {
let _ = self.visit(default.instantiate_identity());
}
}
self
@ -1427,7 +1436,7 @@ impl SearchInterfaceForPrivateItemsVisitor<'_> {
};
let vis = self.tcx.local_visibility(local_def_id);
if self.in_assoc_ty && !vis.is_at_least(self.required_visibility, self.tcx) {
if self.hard_error && !vis.is_at_least(self.required_visibility, self.tcx) {
let vis_descr = match vis {
ty::Visibility::Public => "public",
ty::Visibility::Restricted(vis_def_id) => {
@ -1544,9 +1553,7 @@ impl<'tcx> PrivateItemsInPublicInterfacesChecker<'_, 'tcx> {
item_def_id: def_id,
required_visibility,
required_effective_vis,
in_assoc_ty: false,
in_primary_interface: true,
skip_assoc_tys: false,
..
}
}
@ -1584,16 +1591,17 @@ impl<'tcx> PrivateItemsInPublicInterfacesChecker<'_, 'tcx> {
) {
let mut check = self.check(item.def_id.expect_local(), vis, effective_vis);
let (check_ty, is_assoc_ty) = match item.kind {
ty::AssocKind::Const { .. } | ty::AssocKind::Fn { .. } => (true, false),
ty::AssocKind::Type { .. } => (item.defaultness(self.tcx).has_value(), true),
};
check.in_assoc_ty = is_assoc_ty;
let is_assoc_ty = item.is_type();
check.hard_error = is_assoc_ty && !item.is_impl_trait_in_trait();
check.generics().predicates();
if check_ty {
if assoc_has_type_of(self.tcx, item) {
check.hard_error = check.hard_error && item.defaultness(self.tcx).has_value();
check.ty();
}
if is_assoc_ty && item.container == AssocContainer::Trait {
check.hard_error = false;
check.bounds();
}
}
fn get(&self, def_id: LocalDefId) -> Option<EffectiveVisibility> {
@ -1625,20 +1633,7 @@ impl<'tcx> PrivateItemsInPublicInterfacesChecker<'_, 'tcx> {
self.check(def_id, item_visibility, effective_vis).generics().predicates();
for assoc_item in tcx.associated_items(id.owner_id).in_definition_order() {
if assoc_item.is_impl_trait_in_trait() {
continue;
}
self.check_assoc_item(assoc_item, item_visibility, effective_vis);
if assoc_item.is_type() {
self.check(
assoc_item.def_id.expect_local(),
item_visibility,
effective_vis,
)
.bounds();
}
}
}
DefKind::TraitAlias => {
@ -1712,10 +1707,6 @@ impl<'tcx> PrivateItemsInPublicInterfacesChecker<'_, 'tcx> {
}
for assoc_item in tcx.associated_items(id.owner_id).in_definition_order() {
if assoc_item.is_impl_trait_in_trait() {
continue;
}
let impl_item_vis = if !of_trait {
min(tcx.local_visibility(assoc_item.def_id.expect_local()), impl_vis, tcx)
} else {

View file

@ -21,7 +21,7 @@ use rustc_query_system::dep_graph::SerializedDepNodeIndex;
use rustc_query_system::ich::StableHashingContext;
use rustc_query_system::query::{
CycleError, CycleErrorHandling, HashResult, QueryCache, QueryDispatcher, QueryMap, QueryMode,
QueryStackDeferred, QueryState, get_query_incr, get_query_non_incr,
QueryState, get_query_incr, get_query_non_incr,
};
use rustc_span::{ErrorGuaranteed, Span};
@ -66,7 +66,7 @@ impl<'tcx, C: QueryCache, const ANON: bool, const DEPTH_LIMIT: bool, const FEEDA
// This is `impl QueryDispatcher for SemiDynamicQueryDispatcher`.
impl<'tcx, C: QueryCache, const ANON: bool, const DEPTH_LIMIT: bool, const FEEDABLE: bool>
QueryDispatcher for SemiDynamicQueryDispatcher<'tcx, C, ANON, DEPTH_LIMIT, FEEDABLE>
QueryDispatcher<'tcx> for SemiDynamicQueryDispatcher<'tcx, C, ANON, DEPTH_LIMIT, FEEDABLE>
where
for<'a> C::Key: HashStable<StableHashingContext<'a>>,
{
@ -81,15 +81,12 @@ where
}
#[inline(always)]
fn cache_on_disk(self, tcx: TyCtxt<'tcx>, key: &Self::Key) -> bool {
(self.vtable.cache_on_disk)(tcx, key)
fn will_cache_on_disk_for_key(self, tcx: TyCtxt<'tcx>, key: &Self::Key) -> bool {
self.vtable.will_cache_on_disk_for_key_fn.map_or(false, |f| f(tcx, key))
}
#[inline(always)]
fn query_state<'a>(
self,
qcx: QueryCtxt<'tcx>,
) -> &'a QueryState<Self::Key, QueryStackDeferred<'tcx>>
fn query_state<'a>(self, qcx: QueryCtxt<'tcx>) -> &'a QueryState<'tcx, Self::Key>
where
QueryCtxt<'tcx>: 'a,
{
@ -98,7 +95,7 @@ where
unsafe {
&*(&qcx.tcx.query_system.states as *const QueryStates<'tcx>)
.byte_add(self.vtable.query_state)
.cast::<QueryState<Self::Key, QueryStackDeferred<'tcx>>>()
.cast::<QueryState<'tcx, Self::Key>>()
}
}
@ -131,21 +128,18 @@ where
prev_index: SerializedDepNodeIndex,
index: DepNodeIndex,
) -> Option<Self::Value> {
if self.vtable.can_load_from_disk {
(self.vtable.try_load_from_disk)(qcx.tcx, key, prev_index, index)
} else {
None
}
// `?` will return None immediately for queries that never cache to disk.
self.vtable.try_load_from_disk_fn?(qcx.tcx, key, prev_index, index)
}
#[inline]
fn loadable_from_disk(
fn is_loadable_from_disk(
self,
qcx: QueryCtxt<'tcx>,
key: &Self::Key,
index: SerializedDepNodeIndex,
) -> bool {
(self.vtable.loadable_from_disk)(qcx.tcx, key, index)
self.vtable.is_loadable_from_disk_fn.map_or(false, |f| f(qcx.tcx, key, index))
}
fn value_from_cycle_error(
@ -211,13 +205,15 @@ where
/// on the type `rustc_query_impl::query_impl::$name::QueryType`.
trait QueryDispatcherUnerased<'tcx> {
type UnerasedValue;
type Dispatcher: QueryDispatcher<Qcx = QueryCtxt<'tcx>>;
type Dispatcher: QueryDispatcher<'tcx, Qcx = QueryCtxt<'tcx>>;
const NAME: &'static &'static str;
fn query_dispatcher(tcx: TyCtxt<'tcx>) -> Self::Dispatcher;
fn restore_val(value: <Self::Dispatcher as QueryDispatcher>::Value) -> Self::UnerasedValue;
fn restore_val(
value: <Self::Dispatcher as QueryDispatcher<'tcx>>::Value,
) -> Self::UnerasedValue;
}
pub fn query_system<'a>(

View file

@ -60,9 +60,7 @@ impl<'tcx> HasDepContext for QueryCtxt<'tcx> {
}
}
impl<'tcx> QueryContext for QueryCtxt<'tcx> {
type QueryInfo = QueryStackDeferred<'tcx>;
impl<'tcx> QueryContext<'tcx> for QueryCtxt<'tcx> {
#[inline]
fn jobserver_proxy(&self) -> &Proxy {
&self.tcx.jobserver_proxy
@ -93,10 +91,7 @@ impl<'tcx> QueryContext for QueryCtxt<'tcx> {
/// Prefer passing `false` to `require_complete` to avoid potential deadlocks,
/// especially when called from within a deadlock handler, unless a
/// complete map is needed and no deadlock is possible at this call site.
fn collect_active_jobs(
self,
require_complete: bool,
) -> Result<QueryMap<QueryStackDeferred<'tcx>>, QueryMap<QueryStackDeferred<'tcx>>> {
fn collect_active_jobs(self, require_complete: bool) -> Result<QueryMap<'tcx>, QueryMap<'tcx>> {
let mut jobs = QueryMap::default();
let mut complete = true;
@ -322,7 +317,7 @@ macro_rules! should_ever_cache_on_disk {
};
}
fn create_query_frame_extra<'tcx, K: Key + Copy + 'tcx>(
fn mk_query_stack_frame_extra<'tcx, K: Key + Copy + 'tcx>(
(tcx, key, kind, name, do_describe): (
TyCtxt<'tcx>,
K,
@ -373,18 +368,16 @@ pub(crate) fn create_query_frame<
) -> QueryStackFrame<QueryStackDeferred<'tcx>> {
let def_id = key.key_as_def_id();
let hash = || {
tcx.with_stable_hashing_context(|mut hcx| {
let mut hasher = StableHasher::new();
kind.as_usize().hash_stable(&mut hcx, &mut hasher);
key.hash_stable(&mut hcx, &mut hasher);
hasher.finish::<Hash64>()
})
};
let hash = tcx.with_stable_hashing_context(|mut hcx| {
let mut hasher = StableHasher::new();
kind.as_usize().hash_stable(&mut hcx, &mut hasher);
key.hash_stable(&mut hcx, &mut hasher);
hasher.finish::<Hash64>()
});
let def_id_for_ty_in_cycle = key.def_id_for_ty_in_cycle();
let info =
QueryStackDeferred::new((tcx, key, kind, name, do_describe), create_query_frame_extra);
QueryStackDeferred::new((tcx, key, kind, name, do_describe), mk_query_stack_frame_extra);
QueryStackFrame::new(info, kind, hash, def_id, def_id_for_ty_in_cycle)
}
@ -403,7 +396,7 @@ pub(crate) fn encode_query_results<'a, 'tcx, Q>(
assert!(query.query_state(qcx).all_inactive());
let cache = query.query_cache(qcx);
cache.iter(&mut |key, value, dep_node| {
if query.cache_on_disk(qcx.tcx, key) {
if query.will_cache_on_disk_for_key(qcx.tcx, key) {
let dep_node = SerializedDepNodeIndex::new(dep_node.index());
// Record position of the cache entry.
@ -417,7 +410,7 @@ pub(crate) fn encode_query_results<'a, 'tcx, Q>(
}
pub(crate) fn query_key_hash_verify<'tcx>(
query: impl QueryDispatcher<Qcx = QueryCtxt<'tcx>>,
query: impl QueryDispatcher<'tcx, Qcx = QueryCtxt<'tcx>>,
qcx: QueryCtxt<'tcx>,
) {
let _timer = qcx.tcx.prof.generic_activity_with_arg("query_key_hash_verify_for", query.name());
@ -445,14 +438,14 @@ pub(crate) fn query_key_hash_verify<'tcx>(
fn try_load_from_on_disk_cache<'tcx, Q>(query: Q, tcx: TyCtxt<'tcx>, dep_node: DepNode)
where
Q: QueryDispatcher<Qcx = QueryCtxt<'tcx>>,
Q: QueryDispatcher<'tcx, Qcx = QueryCtxt<'tcx>>,
{
debug_assert!(tcx.dep_graph.is_green(&dep_node));
let key = Q::Key::recover(tcx, &dep_node).unwrap_or_else(|| {
panic!("Failed to recover key for {:?} with hash {}", dep_node, dep_node.hash)
});
if query.cache_on_disk(tcx, &key) {
if query.will_cache_on_disk_for_key(tcx, &key) {
let _ = query.execute_query(tcx, key);
}
}
@ -491,7 +484,7 @@ where
fn force_from_dep_node<'tcx, Q>(query: Q, tcx: TyCtxt<'tcx>, dep_node: DepNode) -> bool
where
Q: QueryDispatcher<Qcx = QueryCtxt<'tcx>>,
Q: QueryDispatcher<'tcx, Qcx = QueryCtxt<'tcx>>,
{
// We must avoid ever having to call `force_from_dep_node()` for a
// `DepNode::codegen_unit`:
@ -655,7 +648,11 @@ macro_rules! define_queries {
cycle_error_handling: cycle_error_handling!([$($modifiers)*]),
query_state: std::mem::offset_of!(QueryStates<'tcx>, $name),
query_cache: std::mem::offset_of!(QueryCaches<'tcx>, $name),
cache_on_disk: |tcx, key| ::rustc_middle::query::cached::$name(tcx, key),
will_cache_on_disk_for_key_fn: should_ever_cache_on_disk!([$($modifiers)*] {
Some(::rustc_middle::query::cached::$name)
} {
None
}),
execute_query: |tcx, key| erase::erase_val(tcx.$name(key)),
compute: |tcx, key| {
#[cfg(debug_assertions)]
@ -673,37 +670,34 @@ macro_rules! define_queries {
)
)
},
can_load_from_disk: should_ever_cache_on_disk!([$($modifiers)*] true false),
try_load_from_disk: should_ever_cache_on_disk!([$($modifiers)*] {
|tcx, key, prev_index, index| {
if ::rustc_middle::query::cached::$name(tcx, key) {
let value = $crate::plumbing::try_load_from_disk::<
queries::$name::ProvidedValue<'tcx>
>(
tcx,
prev_index,
index,
);
value.map(|value| queries::$name::provided_to_erased(tcx, value))
} else {
None
try_load_from_disk_fn: should_ever_cache_on_disk!([$($modifiers)*] {
Some(|tcx, key, prev_index, index| {
// Check the `cache_on_disk_if` condition for this key.
if !::rustc_middle::query::cached::$name(tcx, key) {
return None;
}
}
let value: queries::$name::ProvidedValue<'tcx> =
$crate::plumbing::try_load_from_disk(tcx, prev_index, index)?;
// Arena-alloc the value if appropriate, and erase it.
Some(queries::$name::provided_to_erased(tcx, value))
})
} {
|_tcx, _key, _prev_index, _index| None
None
}),
is_loadable_from_disk_fn: should_ever_cache_on_disk!([$($modifiers)*] {
Some(|tcx, key, index| -> bool {
::rustc_middle::query::cached::$name(tcx, key) &&
$crate::plumbing::loadable_from_disk(tcx, index)
})
} {
None
}),
value_from_cycle_error: |tcx, cycle, guar| {
let result: queries::$name::Value<'tcx> = Value::from_cycle_error(tcx, cycle, guar);
erase::erase_val(result)
},
loadable_from_disk: |_tcx, _key, _index| {
should_ever_cache_on_disk!([$($modifiers)*] {
::rustc_middle::query::cached::$name(_tcx, _key) &&
$crate::plumbing::loadable_from_disk(_tcx, _index)
} {
false
})
},
hash_result: hash_result!([$($modifiers)*][queries::$name::Value<'tcx>]),
format_value: |value| format!("{:?}", erase::restore_val::<queries::$name::Value<'tcx>>(*value)),
}
@ -734,14 +728,14 @@ macro_rules! define_queries {
}
#[inline(always)]
fn restore_val(value: <Self::Dispatcher as QueryDispatcher>::Value) -> Self::UnerasedValue {
fn restore_val(value: <Self::Dispatcher as QueryDispatcher<'tcx>>::Value) -> Self::UnerasedValue {
erase::restore_val::<queries::$name::Value<'tcx>>(value)
}
}
pub(crate) fn collect_active_jobs<'tcx>(
tcx: TyCtxt<'tcx>,
qmap: &mut QueryMap<QueryStackDeferred<'tcx>>,
qmap: &mut QueryMap<'tcx>,
require_complete: bool,
) -> Option<()> {
let make_query = |tcx, key| {
@ -825,7 +819,7 @@ macro_rules! define_queries {
// These arrays are used for iteration and can't be indexed by `DepKind`.
const COLLECT_ACTIVE_JOBS: &[
for<'tcx> fn(TyCtxt<'tcx>, &mut QueryMap<QueryStackDeferred<'tcx>>, bool) -> Option<()>
for<'tcx> fn(TyCtxt<'tcx>, &mut QueryMap<'tcx>, bool) -> Option<()>
] =
&[$(query_impl::$name::collect_active_jobs),*];

View file

@ -23,8 +23,3 @@ rustc_thread_pool = { path = "../rustc_thread_pool" }
smallvec = { version = "1.8.1", features = ["union", "may_dangle"] }
tracing = "0.1"
# tidy-alphabetical-end
[dependencies.hashbrown]
version = "0.16.1"
default-features = false
features = ["nightly"] # for may_dangle

View file

@ -519,7 +519,11 @@ impl<D: Deps> DepGraph<D> {
/// This encodes a diagnostic by creating a node with an unique index and associating
/// `diagnostic` with it, for use in the next session.
#[inline]
pub fn record_diagnostic<Qcx: QueryContext>(&self, qcx: Qcx, diagnostic: &DiagInner) {
pub fn record_diagnostic<'tcx, Qcx: QueryContext<'tcx>>(
&self,
qcx: Qcx,
diagnostic: &DiagInner,
) {
if let Some(ref data) = self.data {
D::read_deps(|task_deps| match task_deps {
TaskDepsRef::EvalAlways | TaskDepsRef::Ignore => return,
@ -532,7 +536,7 @@ impl<D: Deps> DepGraph<D> {
/// This forces a diagnostic node green by running its side effect. `prev_index` would
/// refer to a node created used `encode_diagnostic` in the previous session.
#[inline]
pub fn force_diagnostic_node<Qcx: QueryContext>(
pub fn force_diagnostic_node<'tcx, Qcx: QueryContext<'tcx>>(
&self,
qcx: Qcx,
prev_index: SerializedDepNodeIndex,
@ -658,7 +662,7 @@ impl<D: Deps> DepGraphData<D> {
}
#[inline]
pub(crate) fn prev_node_of(&self, prev_index: SerializedDepNodeIndex) -> DepNode {
pub(crate) fn prev_node_of(&self, prev_index: SerializedDepNodeIndex) -> &DepNode {
self.previous.index_to_node(prev_index)
}
@ -669,7 +673,7 @@ impl<D: Deps> DepGraphData<D> {
/// This encodes a diagnostic by creating a node with an unique index and associating
/// `diagnostic` with it, for use in the next session.
#[inline]
fn encode_diagnostic<Qcx: QueryContext>(
fn encode_diagnostic<'tcx, Qcx: QueryContext<'tcx>>(
&self,
qcx: Qcx,
diagnostic: &DiagInner,
@ -693,7 +697,7 @@ impl<D: Deps> DepGraphData<D> {
/// This forces a diagnostic node green by running its side effect. `prev_index` would
/// refer to a node created used `encode_diagnostic` in the previous session.
#[inline]
fn force_diagnostic_node<Qcx: QueryContext>(
fn force_diagnostic_node<'tcx, Qcx: QueryContext<'tcx>>(
&self,
qcx: Qcx,
prev_index: SerializedDepNodeIndex,
@ -779,7 +783,7 @@ impl<D: Deps> DepGraphData<D> {
#[cfg(debug_assertions)]
self.current.record_edge(
dep_node_index,
self.previous.index_to_node(prev_index),
*self.previous.index_to_node(prev_index),
self.previous.fingerprint_by_index(prev_index),
);
@ -843,7 +847,7 @@ impl<D: Deps> DepGraph<D> {
DepNodeColor::Unknown
}
pub fn try_mark_green<Qcx: QueryContext<Deps = D>>(
pub fn try_mark_green<'tcx, Qcx: QueryContext<'tcx, Deps = D>>(
&self,
qcx: Qcx,
dep_node: &DepNode,
@ -858,7 +862,7 @@ impl<D: Deps> DepGraphData<D> {
/// A node will have an index, when it's already been marked green, or when we can mark it
/// green. This function will mark the current task as a reader of the specified node, when
/// a node index can be found for that node.
pub(crate) fn try_mark_green<Qcx: QueryContext<Deps = D>>(
pub(crate) fn try_mark_green<'tcx, Qcx: QueryContext<'tcx, Deps = D>>(
&self,
qcx: Qcx,
dep_node: &DepNode,
@ -883,7 +887,7 @@ impl<D: Deps> DepGraphData<D> {
}
#[instrument(skip(self, qcx, parent_dep_node_index, frame), level = "debug")]
fn try_mark_parent_green<Qcx: QueryContext<Deps = D>>(
fn try_mark_parent_green<'tcx, Qcx: QueryContext<'tcx, Deps = D>>(
&self,
qcx: Qcx,
parent_dep_node_index: SerializedDepNodeIndex,
@ -914,7 +918,7 @@ impl<D: Deps> DepGraphData<D> {
DepNodeColor::Unknown => {}
}
let dep_dep_node = &get_dep_dep_node();
let dep_dep_node = get_dep_dep_node();
// We don't know the state of this dependency. If it isn't
// an eval_always node, let's try to mark it green recursively.
@ -973,7 +977,7 @@ impl<D: Deps> DepGraphData<D> {
/// Try to mark a dep-node which existed in the previous compilation session as green.
#[instrument(skip(self, qcx, prev_dep_node_index, frame), level = "debug")]
fn try_mark_previous_green<Qcx: QueryContext<Deps = D>>(
fn try_mark_previous_green<'tcx, Qcx: QueryContext<'tcx, Deps = D>>(
&self,
qcx: Qcx,
prev_dep_node_index: SerializedDepNodeIndex,
@ -985,7 +989,7 @@ impl<D: Deps> DepGraphData<D> {
// We never try to mark eval_always nodes as green
debug_assert!(!qcx.dep_context().is_eval_always(dep_node.kind));
debug_assert_eq!(self.previous.index_to_node(prev_dep_node_index), *dep_node);
debug_assert_eq!(self.previous.index_to_node(prev_dep_node_index), dep_node);
let prev_deps = self.previous.edge_targets_from(prev_dep_node_index);
@ -1446,7 +1450,7 @@ fn panic_on_forbidden_read<D: Deps>(data: &DepGraphData<D>, dep_node_index: DepN
// previous session and has been marked green
for prev_index in data.colors.values.indices() {
if data.colors.current(prev_index) == Some(dep_node_index) {
dep_node = Some(data.previous.index_to_node(prev_index));
dep_node = Some(*data.previous.index_to_node(prev_index));
break;
}
}

View file

@ -83,9 +83,9 @@ pub trait DepContext: Copy {
}
/// Load data from the on-disk cache.
fn try_load_from_on_disk_cache(self, dep_node: DepNode) {
fn try_load_from_on_disk_cache(self, dep_node: &DepNode) {
if let Some(try_load_fn) = self.dep_kind_vtable(dep_node.kind).try_load_from_on_disk_cache {
try_load_fn(self, dep_node)
try_load_fn(self, *dep_node)
}
}

View file

@ -133,8 +133,8 @@ impl SerializedDepGraph {
}
#[inline]
pub fn index_to_node(&self, dep_node_index: SerializedDepNodeIndex) -> DepNode {
self.nodes[dep_node_index]
pub fn index_to_node(&self, dep_node_index: SerializedDepNodeIndex) -> &DepNode {
&self.nodes[dep_node_index]
}
#[inline]
@ -346,7 +346,7 @@ impl<D: Deps> SerializedNodeHeader<D> {
#[inline]
fn new(
node: DepNode,
node: &DepNode,
index: DepNodeIndex,
fingerprint: Fingerprint,
edge_max_index: u32,
@ -379,7 +379,7 @@ impl<D: Deps> SerializedNodeHeader<D> {
{
let res = Self { bytes, _marker: PhantomData };
assert_eq!(fingerprint, res.fingerprint());
assert_eq!(node, res.node());
assert_eq!(*node, res.node());
if let Some(len) = res.len() {
assert_eq!(edge_count, len as usize);
}
@ -452,7 +452,7 @@ struct NodeInfo {
impl NodeInfo {
fn encode<D: Deps>(&self, e: &mut MemEncoder, index: DepNodeIndex) {
let NodeInfo { node, fingerprint, ref edges } = *self;
let NodeInfo { ref node, fingerprint, ref edges } = *self;
let header = SerializedNodeHeader::<D>::new(
node,
index,
@ -482,7 +482,7 @@ impl NodeInfo {
#[inline]
fn encode_promoted<D: Deps>(
e: &mut MemEncoder,
node: DepNode,
node: &DepNode,
index: DepNodeIndex,
fingerprint: Fingerprint,
prev_index: SerializedDepNodeIndex,
@ -604,7 +604,7 @@ impl<D: Deps> EncoderState<D> {
#[inline]
fn record(
&self,
node: DepNode,
node: &DepNode,
index: DepNodeIndex,
edge_count: usize,
edges: impl FnOnce(&Self) -> Vec<DepNodeIndex>,
@ -622,7 +622,7 @@ impl<D: Deps> EncoderState<D> {
outline(move || {
// Do not ICE when a query is called from within `with_query`.
if let Some(record_graph) = &mut record_graph.try_lock() {
record_graph.push(index, node, &edges);
record_graph.push(index, *node, &edges);
}
});
}
@ -661,7 +661,7 @@ impl<D: Deps> EncoderState<D> {
node.encode::<D>(&mut local.encoder, index);
self.flush_mem_encoder(&mut *local);
self.record(
node.node,
&node.node,
index,
node.edges.len(),
|_| node.edges[..].to_vec(),

View file

@ -14,8 +14,8 @@ pub type HashResult<V> = Option<fn(&mut StableHashingContext<'_>, &V) -> Fingerp
/// Unambiguous shorthand for `<This::Qcx as HasDepContext>::DepContext`.
#[expect(type_alias_bounds)]
type DepContextOf<This: QueryDispatcher> =
<<This as QueryDispatcher>::Qcx as HasDepContext>::DepContext;
type DepContextOf<'tcx, This: QueryDispatcher<'tcx>> =
<<This as QueryDispatcher<'tcx>>::Qcx as HasDepContext>::DepContext;
/// Trait that can be used as a vtable for a single query, providing operations
/// and metadata for that query.
@ -25,15 +25,15 @@ type DepContextOf<This: QueryDispatcher> =
/// Those types are not visible from this `rustc_query_system` crate.
///
/// "Dispatcher" should be understood as a near-synonym of "vtable".
pub trait QueryDispatcher: Copy {
pub trait QueryDispatcher<'tcx>: Copy {
fn name(self) -> &'static str;
/// Query context used by this dispatcher, i.e. `rustc_query_impl::QueryCtxt`.
type Qcx: QueryContext;
type Qcx: QueryContext<'tcx>;
// `Key` and `Value` are `Copy` instead of `Clone` to ensure copying them stays cheap,
// but it isn't necessary.
type Key: DepNodeParams<DepContextOf<Self>> + Eq + Hash + Copy + Debug;
type Key: DepNodeParams<DepContextOf<'tcx, Self>> + Eq + Hash + Copy + Debug;
type Value: Copy;
type Cache: QueryCache<Key = Self::Key, Value = Self::Value>;
@ -41,18 +41,15 @@ pub trait QueryDispatcher: Copy {
fn format_value(self) -> fn(&Self::Value) -> String;
// Don't use this method to access query results, instead use the methods on TyCtxt
fn query_state<'a>(
self,
tcx: Self::Qcx,
) -> &'a QueryState<Self::Key, <Self::Qcx as QueryContext>::QueryInfo>;
fn query_state<'a>(self, tcx: Self::Qcx) -> &'a QueryState<'tcx, Self::Key>;
// Don't use this method to access query results, instead use the methods on TyCtxt
fn query_cache<'a>(self, tcx: Self::Qcx) -> &'a Self::Cache;
fn cache_on_disk(self, tcx: DepContextOf<Self>, key: &Self::Key) -> bool;
fn will_cache_on_disk_for_key(self, tcx: DepContextOf<'tcx, Self>, key: &Self::Key) -> bool;
// Don't use this method to compute query results, instead use the methods on TyCtxt
fn execute_query(self, tcx: DepContextOf<Self>, k: Self::Key) -> Self::Value;
fn execute_query(self, tcx: DepContextOf<'tcx, Self>, k: Self::Key) -> Self::Value;
fn compute(self, tcx: Self::Qcx, key: Self::Key) -> Self::Value;
@ -64,7 +61,7 @@ pub trait QueryDispatcher: Copy {
index: DepNodeIndex,
) -> Option<Self::Value>;
fn loadable_from_disk(
fn is_loadable_from_disk(
self,
qcx: Self::Qcx,
key: &Self::Key,
@ -74,7 +71,7 @@ pub trait QueryDispatcher: Copy {
/// Synthesize an error value to let compilation continue after a cycle.
fn value_from_cycle_error(
self,
tcx: DepContextOf<Self>,
tcx: DepContextOf<'tcx, Self>,
cycle_error: &CycleError<QueryStackFrameExtra>,
guar: ErrorGuaranteed,
) -> Self::Value;
@ -89,7 +86,7 @@ pub trait QueryDispatcher: Copy {
fn hash_result(self) -> HashResult<Self::Value>;
// Just here for convenience and checking that the key matches the kind, don't override this.
fn construct_dep_node(self, tcx: DepContextOf<Self>, key: &Self::Key) -> DepNode {
fn construct_dep_node(self, tcx: DepContextOf<'tcx, Self>, key: &Self::Key) -> DepNode {
DepNode::construct(tcx, self.dep_kind(), key)
}
}

View file

@ -12,7 +12,7 @@ use rustc_hir::def::DefKind;
use rustc_session::Session;
use rustc_span::{DUMMY_SP, Span};
use super::QueryStackFrameExtra;
use super::{QueryStackDeferred, QueryStackFrameExtra};
use crate::dep_graph::DepContext;
use crate::error::CycleStack;
use crate::query::plumbing::CycleError;
@ -26,8 +26,8 @@ pub struct QueryInfo<I> {
pub query: QueryStackFrame<I>,
}
impl<I> QueryInfo<I> {
pub(crate) fn lift<Qcx: QueryContext<QueryInfo = I>>(
impl<'tcx> QueryInfo<QueryStackDeferred<'tcx>> {
pub(crate) fn lift<Qcx: QueryContext<'tcx>>(
&self,
qcx: Qcx,
) -> QueryInfo<QueryStackFrameExtra> {
@ -35,39 +35,39 @@ impl<I> QueryInfo<I> {
}
}
pub type QueryMap<I> = FxHashMap<QueryJobId, QueryJobInfo<I>>;
pub type QueryMap<'tcx> = FxHashMap<QueryJobId, QueryJobInfo<'tcx>>;
/// A value uniquely identifying an active query job.
#[derive(Copy, Clone, Eq, PartialEq, Hash, Debug)]
pub struct QueryJobId(pub NonZero<u64>);
impl QueryJobId {
fn query<I: Clone>(self, map: &QueryMap<I>) -> QueryStackFrame<I> {
fn query<'a, 'tcx>(self, map: &'a QueryMap<'tcx>) -> QueryStackFrame<QueryStackDeferred<'tcx>> {
map.get(&self).unwrap().query.clone()
}
fn span<I>(self, map: &QueryMap<I>) -> Span {
fn span<'a, 'tcx>(self, map: &'a QueryMap<'tcx>) -> Span {
map.get(&self).unwrap().job.span
}
fn parent<I>(self, map: &QueryMap<I>) -> Option<QueryJobId> {
fn parent<'a, 'tcx>(self, map: &'a QueryMap<'tcx>) -> Option<QueryJobId> {
map.get(&self).unwrap().job.parent
}
fn latch<I>(self, map: &QueryMap<I>) -> Option<&QueryLatch<I>> {
fn latch<'a, 'tcx>(self, map: &'a QueryMap<'tcx>) -> Option<&'a QueryLatch<'tcx>> {
map.get(&self).unwrap().job.latch.as_ref()
}
}
#[derive(Clone, Debug)]
pub struct QueryJobInfo<I> {
pub query: QueryStackFrame<I>,
pub job: QueryJob<I>,
pub struct QueryJobInfo<'tcx> {
pub query: QueryStackFrame<QueryStackDeferred<'tcx>>,
pub job: QueryJob<'tcx>,
}
/// Represents an active query job.
#[derive(Debug)]
pub struct QueryJob<I> {
pub struct QueryJob<'tcx> {
pub id: QueryJobId,
/// The span corresponding to the reason for which this query was required.
@ -77,23 +77,23 @@ pub struct QueryJob<I> {
pub parent: Option<QueryJobId>,
/// The latch that is used to wait on this job.
latch: Option<QueryLatch<I>>,
latch: Option<QueryLatch<'tcx>>,
}
impl<I> Clone for QueryJob<I> {
impl<'tcx> Clone for QueryJob<'tcx> {
fn clone(&self) -> Self {
Self { id: self.id, span: self.span, parent: self.parent, latch: self.latch.clone() }
}
}
impl<I> QueryJob<I> {
impl<'tcx> QueryJob<'tcx> {
/// Creates a new query job.
#[inline]
pub fn new(id: QueryJobId, span: Span, parent: Option<QueryJobId>) -> Self {
QueryJob { id, span, parent, latch: None }
}
pub(super) fn latch(&mut self) -> QueryLatch<I> {
pub(super) fn latch(&mut self) -> QueryLatch<'tcx> {
if self.latch.is_none() {
self.latch = Some(QueryLatch::new());
}
@ -113,12 +113,12 @@ impl<I> QueryJob<I> {
}
impl QueryJobId {
pub(super) fn find_cycle_in_stack<I: Clone>(
pub(super) fn find_cycle_in_stack<'tcx>(
&self,
query_map: QueryMap<I>,
query_map: QueryMap<'tcx>,
current_job: &Option<QueryJobId>,
span: Span,
) -> CycleError<I> {
) -> CycleError<QueryStackDeferred<'tcx>> {
// Find the waitee amongst `current_job` parents
let mut cycle = Vec::new();
let mut current_job = Option::clone(current_job);
@ -152,7 +152,10 @@ impl QueryJobId {
#[cold]
#[inline(never)]
pub fn find_dep_kind_root<I: Clone>(&self, query_map: QueryMap<I>) -> (QueryJobInfo<I>, usize) {
pub fn find_dep_kind_root<'tcx>(
&self,
query_map: QueryMap<'tcx>,
) -> (QueryJobInfo<'tcx>, usize) {
let mut depth = 1;
let info = query_map.get(&self).unwrap();
let dep_kind = info.query.dep_kind;
@ -172,31 +175,31 @@ impl QueryJobId {
}
#[derive(Debug)]
struct QueryWaiter<I> {
struct QueryWaiter<'tcx> {
query: Option<QueryJobId>,
condvar: Condvar,
span: Span,
cycle: Mutex<Option<CycleError<I>>>,
cycle: Mutex<Option<CycleError<QueryStackDeferred<'tcx>>>>,
}
#[derive(Debug)]
struct QueryLatchInfo<I> {
struct QueryLatchInfo<'tcx> {
complete: bool,
waiters: Vec<Arc<QueryWaiter<I>>>,
waiters: Vec<Arc<QueryWaiter<'tcx>>>,
}
#[derive(Debug)]
pub(super) struct QueryLatch<I> {
info: Arc<Mutex<QueryLatchInfo<I>>>,
pub(super) struct QueryLatch<'tcx> {
info: Arc<Mutex<QueryLatchInfo<'tcx>>>,
}
impl<I> Clone for QueryLatch<I> {
impl<'tcx> Clone for QueryLatch<'tcx> {
fn clone(&self) -> Self {
Self { info: Arc::clone(&self.info) }
}
}
impl<I> QueryLatch<I> {
impl<'tcx> QueryLatch<'tcx> {
fn new() -> Self {
QueryLatch {
info: Arc::new(Mutex::new(QueryLatchInfo { complete: false, waiters: Vec::new() })),
@ -206,10 +209,10 @@ impl<I> QueryLatch<I> {
/// Awaits for the query job to complete.
pub(super) fn wait_on(
&self,
qcx: impl QueryContext,
qcx: impl QueryContext<'tcx>,
query: Option<QueryJobId>,
span: Span,
) -> Result<(), CycleError<I>> {
) -> Result<(), CycleError<QueryStackDeferred<'tcx>>> {
let waiter =
Arc::new(QueryWaiter { query, span, cycle: Mutex::new(None), condvar: Condvar::new() });
self.wait_on_inner(qcx, &waiter);
@ -224,7 +227,7 @@ impl<I> QueryLatch<I> {
}
/// Awaits the caller on this latch by blocking the current thread.
fn wait_on_inner(&self, qcx: impl QueryContext, waiter: &Arc<QueryWaiter<I>>) {
fn wait_on_inner(&self, qcx: impl QueryContext<'tcx>, waiter: &Arc<QueryWaiter<'tcx>>) {
let mut info = self.info.lock();
if !info.complete {
// We push the waiter on to the `waiters` list. It can be accessed inside
@ -260,7 +263,7 @@ impl<I> QueryLatch<I> {
/// Removes a single waiter from the list of waiters.
/// This is used to break query cycles.
fn extract_waiter(&self, waiter: usize) -> Arc<QueryWaiter<I>> {
fn extract_waiter(&self, waiter: usize) -> Arc<QueryWaiter<'tcx>> {
let mut info = self.info.lock();
debug_assert!(!info.complete);
// Remove the waiter from the list of waiters
@ -280,8 +283,8 @@ type Waiter = (QueryJobId, usize);
/// For visits of resumable waiters it returns Some(Some(Waiter)) which has the
/// required information to resume the waiter.
/// If all `visit` calls returns None, this function also returns None.
fn visit_waiters<I, F>(
query_map: &QueryMap<I>,
fn visit_waiters<'tcx, F>(
query_map: &QueryMap<'tcx>,
query: QueryJobId,
mut visit: F,
) -> Option<Option<Waiter>>
@ -314,8 +317,8 @@ where
/// `span` is the reason for the `query` to execute. This is initially DUMMY_SP.
/// If a cycle is detected, this initial value is replaced with the span causing
/// the cycle.
fn cycle_check<I>(
query_map: &QueryMap<I>,
fn cycle_check<'tcx>(
query_map: &QueryMap<'tcx>,
query: QueryJobId,
span: Span,
stack: &mut Vec<(Span, QueryJobId)>,
@ -354,8 +357,8 @@ fn cycle_check<I>(
/// Finds out if there's a path to the compiler root (aka. code which isn't in a query)
/// from `query` without going through any of the queries in `visited`.
/// This is achieved with a depth first search.
fn connected_to_root<I>(
query_map: &QueryMap<I>,
fn connected_to_root<'tcx>(
query_map: &QueryMap<'tcx>,
query: QueryJobId,
visited: &mut FxHashSet<QueryJobId>,
) -> bool {
@ -376,7 +379,7 @@ fn connected_to_root<I>(
}
// Deterministically pick an query from a list
fn pick_query<'a, I: Clone, T, F>(query_map: &QueryMap<I>, queries: &'a [T], f: F) -> &'a T
fn pick_query<'a, 'tcx, T, F>(query_map: &QueryMap<'tcx>, queries: &'a [T], f: F) -> &'a T
where
F: Fn(&T) -> (Span, QueryJobId),
{
@ -401,10 +404,10 @@ where
/// the function return true.
/// If a cycle was not found, the starting query is removed from `jobs` and
/// the function returns false.
fn remove_cycle<I: Clone>(
query_map: &QueryMap<I>,
fn remove_cycle<'tcx>(
query_map: &QueryMap<'tcx>,
jobs: &mut Vec<QueryJobId>,
wakelist: &mut Vec<Arc<QueryWaiter<I>>>,
wakelist: &mut Vec<Arc<QueryWaiter<'tcx>>>,
) -> bool {
let mut visited = FxHashSet::default();
let mut stack = Vec::new();
@ -505,10 +508,7 @@ fn remove_cycle<I: Clone>(
/// uses a query latch and then resuming that waiter.
/// There may be multiple cycles involved in a deadlock, so this searches
/// all active queries for cycles before finally resuming all the waiters at once.
pub fn break_query_cycles<I: Clone + Debug>(
query_map: QueryMap<I>,
registry: &rustc_thread_pool::Registry,
) {
pub fn break_query_cycles<'tcx>(query_map: QueryMap<'tcx>, registry: &rustc_thread_pool::Registry) {
let mut wakelist = Vec::new();
// It is OK per the comments:
// - https://github.com/rust-lang/rust/pull/131200#issuecomment-2798854932
@ -602,7 +602,7 @@ pub fn report_cycle<'a>(
sess.dcx().create_err(cycle_diag)
}
pub fn print_query_stack<Qcx: QueryContext>(
pub fn print_query_stack<'tcx, Qcx: QueryContext<'tcx>>(
qcx: Qcx,
mut current_query: Option<QueryJobId>,
dcx: DiagCtxtHandle<'_>,

View file

@ -58,22 +58,19 @@ pub struct QueryStackFrame<I> {
pub def_id_for_ty_in_cycle: Option<DefId>,
}
impl<I> QueryStackFrame<I> {
impl<'tcx> QueryStackFrame<QueryStackDeferred<'tcx>> {
#[inline]
pub fn new(
info: I,
info: QueryStackDeferred<'tcx>,
dep_kind: DepKind,
hash: impl FnOnce() -> Hash64,
hash: Hash64,
def_id: Option<DefId>,
def_id_for_ty_in_cycle: Option<DefId>,
) -> Self {
Self { info, def_id, dep_kind, hash: hash(), def_id_for_ty_in_cycle }
Self { info, def_id, dep_kind, hash, def_id_for_ty_in_cycle }
}
fn lift<Qcx: QueryContext<QueryInfo = I>>(
&self,
qcx: Qcx,
) -> QueryStackFrame<QueryStackFrameExtra> {
fn lift<Qcx: QueryContext<'tcx>>(&self, qcx: Qcx) -> QueryStackFrame<QueryStackFrameExtra> {
QueryStackFrame {
info: qcx.lift_query_info(&self.info),
dep_kind: self.dep_kind,
@ -159,9 +156,7 @@ pub enum QuerySideEffect {
Diagnostic(DiagInner),
}
pub trait QueryContext: HasDepContext {
type QueryInfo: Clone;
pub trait QueryContext<'tcx>: HasDepContext {
/// Gets a jobserver reference which is used to release then acquire
/// a token while waiting on a query.
fn jobserver_proxy(&self) -> &Proxy;
@ -171,12 +166,9 @@ pub trait QueryContext: HasDepContext {
/// Get the query information from the TLS context.
fn current_query_job(self) -> Option<QueryJobId>;
fn collect_active_jobs(
self,
require_complete: bool,
) -> Result<QueryMap<Self::QueryInfo>, QueryMap<Self::QueryInfo>>;
fn collect_active_jobs(self, require_complete: bool) -> Result<QueryMap<'tcx>, QueryMap<'tcx>>;
fn lift_query_info(self, info: &Self::QueryInfo) -> QueryStackFrameExtra;
fn lift_query_info(self, info: &QueryStackDeferred<'tcx>) -> QueryStackFrameExtra;
/// Load a side effect associated to the node in the previous session.
fn load_side_effect(

View file

@ -7,9 +7,8 @@ use std::fmt::Debug;
use std::hash::Hash;
use std::mem;
use hashbrown::HashTable;
use hashbrown::hash_table::Entry;
use rustc_data_structures::fingerprint::Fingerprint;
use rustc_data_structures::hash_table::{self, Entry, HashTable};
use rustc_data_structures::sharded::{self, Sharded};
use rustc_data_structures::stack::ensure_sufficient_stack;
use rustc_data_structures::sync::LockGuard;
@ -18,7 +17,7 @@ use rustc_errors::{Diag, FatalError, StashKey};
use rustc_span::{DUMMY_SP, Span};
use tracing::instrument;
use super::{QueryDispatcher, QueryStackFrameExtra};
use super::{QueryDispatcher, QueryStackDeferred, QueryStackFrameExtra};
use crate::dep_graph::{
DepContext, DepGraphData, DepNode, DepNodeIndex, DepNodeParams, HasDepContext,
};
@ -34,23 +33,35 @@ fn equivalent_key<K: Eq, V>(k: &K) -> impl Fn(&(K, V)) -> bool + '_ {
move |x| x.0 == *k
}
pub struct QueryState<K, I> {
active: Sharded<hashbrown::HashTable<(K, QueryResult<I>)>>,
/// For a particular query, keeps track of "active" keys, i.e. keys whose
/// evaluation has started but has not yet finished successfully.
///
/// (Successful query evaluation for a key is represented by an entry in the
/// query's in-memory cache.)
pub struct QueryState<'tcx, K> {
active: Sharded<hash_table::HashTable<(K, ActiveKeyStatus<'tcx>)>>,
}
/// Indicates the state of a query for a given key in a query map.
enum QueryResult<I> {
/// An already executing query. The query job can be used to await for its completion.
Started(QueryJob<I>),
/// For a particular query and key, tracks the status of a query evaluation
/// that has started, but has not yet finished successfully.
///
/// (Successful query evaluation for a key is represented by an entry in the
/// query's in-memory cache.)
enum ActiveKeyStatus<'tcx> {
/// Some thread is already evaluating the query for this key.
///
/// The enclosed [`QueryJob`] can be used to wait for it to finish.
Started(QueryJob<'tcx>),
/// The query panicked. Queries trying to wait on this will raise a fatal error which will
/// silently panic.
Poisoned,
}
impl<I> QueryResult<I> {
/// Unwraps the query job expecting that it has started.
fn expect_job(self) -> QueryJob<I> {
impl<'tcx> ActiveKeyStatus<'tcx> {
/// Obtains the enclosed [`QueryJob`], or panics if this query evaluation
/// was poisoned by a panic.
fn expect_job(self) -> QueryJob<'tcx> {
match self {
Self::Started(job) => job,
Self::Poisoned => {
@ -60,7 +71,7 @@ impl<I> QueryResult<I> {
}
}
impl<K, I> QueryState<K, I>
impl<'tcx, K> QueryState<'tcx, K>
where
K: Eq + Hash + Copy + Debug,
{
@ -71,15 +82,15 @@ where
pub fn collect_active_jobs<Qcx: Copy>(
&self,
qcx: Qcx,
make_query: fn(Qcx, K) -> QueryStackFrame<I>,
jobs: &mut QueryMap<I>,
make_query: fn(Qcx, K) -> QueryStackFrame<QueryStackDeferred<'tcx>>,
jobs: &mut QueryMap<'tcx>,
require_complete: bool,
) -> Option<()> {
let mut active = Vec::new();
let mut collect = |iter: LockGuard<'_, HashTable<(K, QueryResult<I>)>>| {
let mut collect = |iter: LockGuard<'_, HashTable<(K, ActiveKeyStatus<'tcx>)>>| {
for (k, v) in iter.iter() {
if let QueryResult::Started(ref job) = *v {
if let ActiveKeyStatus::Started(ref job) = *v {
active.push((*k, job.clone()));
}
}
@ -108,40 +119,40 @@ where
}
}
impl<K, I> Default for QueryState<K, I> {
fn default() -> QueryState<K, I> {
impl<'tcx, K> Default for QueryState<'tcx, K> {
fn default() -> QueryState<'tcx, K> {
QueryState { active: Default::default() }
}
}
/// A type representing the responsibility to execute the job in the `job` field.
/// This will poison the relevant query if dropped.
struct JobOwner<'tcx, K, I>
struct JobOwner<'a, 'tcx, K>
where
K: Eq + Hash + Copy,
{
state: &'tcx QueryState<K, I>,
state: &'a QueryState<'tcx, K>,
key: K,
}
#[cold]
#[inline(never)]
fn mk_cycle<Q>(query: Q, qcx: Q::Qcx, cycle_error: CycleError) -> Q::Value
fn mk_cycle<'tcx, Q>(query: Q, qcx: Q::Qcx, cycle_error: CycleError) -> Q::Value
where
Q: QueryDispatcher,
Q: QueryDispatcher<'tcx>,
{
let error = report_cycle(qcx.dep_context().sess(), &cycle_error);
handle_cycle_error(query, qcx, &cycle_error, error)
}
fn handle_cycle_error<Q>(
fn handle_cycle_error<'tcx, Q>(
query: Q,
qcx: Q::Qcx,
cycle_error: &CycleError,
error: Diag<'_>,
) -> Q::Value
where
Q: QueryDispatcher,
Q: QueryDispatcher<'tcx>,
{
match query.cycle_error_handling() {
CycleErrorHandling::Error => {
@ -170,7 +181,7 @@ where
}
}
impl<'tcx, K, I> JobOwner<'tcx, K, I>
impl<'a, 'tcx, K> JobOwner<'a, 'tcx, K>
where
K: Eq + Hash + Copy,
{
@ -207,7 +218,7 @@ where
}
}
impl<'tcx, K, I> Drop for JobOwner<'tcx, K, I>
impl<'a, 'tcx, K> Drop for JobOwner<'a, 'tcx, K>
where
K: Eq + Hash + Copy,
{
@ -223,7 +234,7 @@ where
Err(_) => panic!(),
Ok(occupied) => {
let ((key, value), vacant) = occupied.remove();
vacant.insert((key, QueryResult::Poisoned));
vacant.insert((key, ActiveKeyStatus::Poisoned));
value.expect_job()
}
}
@ -241,8 +252,8 @@ pub struct CycleError<I = QueryStackFrameExtra> {
pub cycle: Vec<QueryInfo<I>>,
}
impl<I> CycleError<I> {
fn lift<Qcx: QueryContext<QueryInfo = I>>(&self, qcx: Qcx) -> CycleError<QueryStackFrameExtra> {
impl<'tcx> CycleError<QueryStackDeferred<'tcx>> {
fn lift<Qcx: QueryContext<'tcx>>(&self, qcx: Qcx) -> CycleError<QueryStackFrameExtra> {
CycleError {
usage: self.usage.as_ref().map(|(span, frame)| (*span, frame.lift(qcx))),
cycle: self.cycle.iter().map(|info| info.lift(qcx)).collect(),
@ -272,14 +283,14 @@ where
#[cold]
#[inline(never)]
fn cycle_error<Q>(
fn cycle_error<'tcx, Q>(
query: Q,
qcx: Q::Qcx,
try_execute: QueryJobId,
span: Span,
) -> (Q::Value, Option<DepNodeIndex>)
where
Q: QueryDispatcher,
Q: QueryDispatcher<'tcx>,
{
// Ensure there was no errors collecting all active jobs.
// We need the complete map to ensure we find a cycle to break.
@ -290,16 +301,16 @@ where
}
#[inline(always)]
fn wait_for_query<Q>(
fn wait_for_query<'tcx, Q>(
query: Q,
qcx: Q::Qcx,
span: Span,
key: Q::Key,
latch: QueryLatch<<Q::Qcx as QueryContext>::QueryInfo>,
latch: QueryLatch<'tcx>,
current: Option<QueryJobId>,
) -> (Q::Value, Option<DepNodeIndex>)
where
Q: QueryDispatcher,
Q: QueryDispatcher<'tcx>,
{
// For parallel queries, we'll block and wait until the query running
// in another thread has completed. Record how long we wait in the
@ -320,7 +331,7 @@ where
let shard = query.query_state(qcx).active.lock_shard_by_hash(key_hash);
match shard.find(key_hash, equivalent_key(&key)) {
// The query we waited on panicked. Continue unwinding here.
Some((_, QueryResult::Poisoned)) => FatalError.raise(),
Some((_, ActiveKeyStatus::Poisoned)) => FatalError.raise(),
_ => panic!(
"query '{}' result must be in the cache or the query must be poisoned after a wait",
query.name()
@ -339,7 +350,7 @@ where
}
#[inline(never)]
fn try_execute_query<Q, const INCR: bool>(
fn try_execute_query<'tcx, Q, const INCR: bool>(
query: Q,
qcx: Q::Qcx,
span: Span,
@ -347,7 +358,7 @@ fn try_execute_query<Q, const INCR: bool>(
dep_node: Option<DepNode>,
) -> (Q::Value, Option<DepNodeIndex>)
where
Q: QueryDispatcher,
Q: QueryDispatcher<'tcx>,
{
let state = query.query_state(qcx);
let key_hash = sharded::make_hash(&key);
@ -374,7 +385,7 @@ where
// state map.
let id = qcx.next_job_id();
let job = QueryJob::new(id, span, current_job_id);
entry.insert((key, QueryResult::Started(job)));
entry.insert((key, ActiveKeyStatus::Started(job)));
// Drop the lock before we start executing the query
drop(state_lock);
@ -383,7 +394,7 @@ where
}
Entry::Occupied(mut entry) => {
match &mut entry.get_mut().1 {
QueryResult::Started(job) => {
ActiveKeyStatus::Started(job) => {
if sync::is_dyn_thread_safe() {
// Get the latch out
let latch = job.latch();
@ -401,24 +412,24 @@ where
// so we just return the error.
cycle_error(query, qcx, id, span)
}
QueryResult::Poisoned => FatalError.raise(),
ActiveKeyStatus::Poisoned => FatalError.raise(),
}
}
}
}
#[inline(always)]
fn execute_job<Q, const INCR: bool>(
fn execute_job<'tcx, Q, const INCR: bool>(
query: Q,
qcx: Q::Qcx,
state: &QueryState<Q::Key, <Q::Qcx as QueryContext>::QueryInfo>,
state: &QueryState<'tcx, Q::Key>,
key: Q::Key,
key_hash: u64,
id: QueryJobId,
dep_node: Option<DepNode>,
) -> (Q::Value, Option<DepNodeIndex>)
where
Q: QueryDispatcher,
Q: QueryDispatcher<'tcx>,
{
// Use `JobOwner` so the query will be poisoned if executing it panics.
let job_owner = JobOwner { state, key };
@ -480,14 +491,14 @@ where
// Fast path for when incr. comp. is off.
#[inline(always)]
fn execute_job_non_incr<Q>(
fn execute_job_non_incr<'tcx, Q>(
query: Q,
qcx: Q::Qcx,
key: Q::Key,
job_id: QueryJobId,
) -> (Q::Value, DepNodeIndex)
where
Q: QueryDispatcher,
Q: QueryDispatcher<'tcx>,
{
debug_assert!(!qcx.dep_context().dep_graph().is_fully_enabled());
@ -516,7 +527,7 @@ where
}
#[inline(always)]
fn execute_job_incr<Q>(
fn execute_job_incr<'tcx, Q>(
query: Q,
qcx: Q::Qcx,
dep_graph_data: &DepGraphData<<Q::Qcx as HasDepContext>::Deps>,
@ -525,7 +536,7 @@ fn execute_job_incr<Q>(
job_id: QueryJobId,
) -> (Q::Value, DepNodeIndex)
where
Q: QueryDispatcher,
Q: QueryDispatcher<'tcx>,
{
if !query.anon() && !query.eval_always() {
// `to_dep_node` is expensive for some `DepKind`s.
@ -571,7 +582,7 @@ where
}
#[inline(always)]
fn try_load_from_disk_and_cache_in_memory<Q>(
fn try_load_from_disk_and_cache_in_memory<'tcx, Q>(
query: Q,
dep_graph_data: &DepGraphData<<Q::Qcx as HasDepContext>::Deps>,
qcx: Q::Qcx,
@ -579,7 +590,7 @@ fn try_load_from_disk_and_cache_in_memory<Q>(
dep_node: &DepNode,
) -> Option<(Q::Value, DepNodeIndex)>
where
Q: QueryDispatcher,
Q: QueryDispatcher<'tcx>,
{
// Note this function can be called concurrently from the same query
// We must ensure that this is handled correctly.
@ -623,7 +634,7 @@ where
// We always expect to find a cached result for things that
// can be forced from `DepNode`.
debug_assert!(
!query.cache_on_disk(*qcx.dep_context(), key)
!query.will_cache_on_disk_for_key(*qcx.dep_context(), key)
|| !qcx.dep_context().fingerprint_style(dep_node.kind).reconstructible(),
"missing on-disk cache entry for {dep_node:?}"
);
@ -631,7 +642,7 @@ where
// Sanity check for the logic in `ensure`: if the node is green and the result loadable,
// we should actually be able to load it.
debug_assert!(
!query.loadable_from_disk(qcx, key, prev_dep_node_index),
!query.is_loadable_from_disk(qcx, key, prev_dep_node_index),
"missing on-disk cache entry for loadable {dep_node:?}"
);
@ -757,14 +768,14 @@ fn incremental_verify_ich_failed<Tcx>(
///
/// Note: The optimization is only available during incr. comp.
#[inline(never)]
fn ensure_must_run<Q>(
fn ensure_must_run<'tcx, Q>(
query: Q,
qcx: Q::Qcx,
key: &Q::Key,
check_cache: bool,
) -> (bool, Option<DepNode>)
where
Q: QueryDispatcher,
Q: QueryDispatcher<'tcx>,
{
if query.eval_always() {
return (true, None);
@ -798,7 +809,7 @@ where
return (false, None);
}
let loadable = query.loadable_from_disk(qcx, key, serialized_dep_node_index);
let loadable = query.is_loadable_from_disk(qcx, key, serialized_dep_node_index);
(!loadable, Some(dep_node))
}
@ -809,9 +820,9 @@ pub enum QueryMode {
}
#[inline(always)]
pub fn get_query_non_incr<Q>(query: Q, qcx: Q::Qcx, span: Span, key: Q::Key) -> Q::Value
pub fn get_query_non_incr<'tcx, Q>(query: Q, qcx: Q::Qcx, span: Span, key: Q::Key) -> Q::Value
where
Q: QueryDispatcher,
Q: QueryDispatcher<'tcx>,
{
debug_assert!(!qcx.dep_context().dep_graph().is_fully_enabled());
@ -819,7 +830,7 @@ where
}
#[inline(always)]
pub fn get_query_incr<Q>(
pub fn get_query_incr<'tcx, Q>(
query: Q,
qcx: Q::Qcx,
span: Span,
@ -827,7 +838,7 @@ pub fn get_query_incr<Q>(
mode: QueryMode,
) -> Option<Q::Value>
where
Q: QueryDispatcher,
Q: QueryDispatcher<'tcx>,
{
debug_assert!(qcx.dep_context().dep_graph().is_fully_enabled());
@ -849,9 +860,9 @@ where
Some(result)
}
pub fn force_query<Q>(query: Q, qcx: Q::Qcx, key: Q::Key, dep_node: DepNode)
pub fn force_query<'tcx, Q>(query: Q, qcx: Q::Qcx, key: Q::Key, dep_node: DepNode)
where
Q: QueryDispatcher,
Q: QueryDispatcher<'tcx>,
{
// We may be concurrently trying both execute and force a query.
// Ensure that only one of them runs the query.

View file

@ -1709,7 +1709,6 @@ impl<'ra, 'tcx> Resolver<'ra, 'tcx> {
ScopeSet::All(ns),
parent_scope,
None,
false,
None,
None,
) else {
@ -2546,7 +2545,6 @@ impl<'ra, 'tcx> Resolver<'ra, 'tcx> {
ScopeSet::All(ns_to_try),
parent_scope,
None,
false,
ignore_decl,
ignore_import,
)
@ -2650,7 +2648,6 @@ impl<'ra, 'tcx> Resolver<'ra, 'tcx> {
ScopeSet::All(ValueNS),
parent_scope,
None,
false,
ignore_decl,
ignore_import,
) {

View file

@ -350,7 +350,6 @@ impl<'ra, 'tcx> Resolver<'ra, 'tcx> {
ScopeSet::Module(ns, module),
parent_scope,
finalize.map(|finalize| Finalize { used: Used::Scope, ..finalize }),
finalize.is_some(),
ignore_decl,
None,
)
@ -368,7 +367,6 @@ impl<'ra, 'tcx> Resolver<'ra, 'tcx> {
ScopeSet::All(ns),
parent_scope,
finalize,
finalize.is_some(),
ignore_decl,
None,
)
@ -396,12 +394,9 @@ impl<'ra, 'tcx> Resolver<'ra, 'tcx> {
scope_set: ScopeSet<'ra>,
parent_scope: &ParentScope<'ra>,
finalize: Option<Finalize>,
force: bool,
ignore_decl: Option<Decl<'ra>>,
ignore_import: Option<Import<'ra>>,
) -> Result<Decl<'ra>, Determinacy> {
assert!(force || finalize.is_none()); // `finalize` implies `force`
// Make sure `self`, `super` etc produce an error when passed to here.
if !matches!(scope_set, ScopeSet::Module(..)) && orig_ident.is_path_segment_keyword() {
return Err(Determinacy::Determined);
@ -451,7 +446,6 @@ impl<'ra, 'tcx> Resolver<'ra, 'tcx> {
parent_scope,
// Shadowed decls don't need to be marked as used or non-speculatively loaded.
if innermost_results.is_empty() { finalize } else { None },
force,
ignore_decl,
ignore_import,
) {
@ -509,7 +503,7 @@ impl<'ra, 'tcx> Resolver<'ra, 'tcx> {
// Scope visiting walked all the scopes and maybe found something in one of them.
match innermost_results.first() {
Some(&(decl, ..)) => Ok(decl),
None => Err(Determinacy::determined(determinacy == Determinacy::Determined || force)),
None => Err(determinacy),
}
}
@ -523,7 +517,6 @@ impl<'ra, 'tcx> Resolver<'ra, 'tcx> {
scope_set: ScopeSet<'ra>,
parent_scope: &ParentScope<'ra>,
finalize: Option<Finalize>,
force: bool,
ignore_decl: Option<Decl<'ra>>,
ignore_import: Option<Import<'ra>>,
) -> Result<Decl<'ra>, ControlFlow<Determinacy, Determinacy>> {
@ -546,7 +539,7 @@ impl<'ra, 'tcx> Resolver<'ra, 'tcx> {
match self.reborrow().resolve_derive_macro_path(
derive,
parent_scope,
force,
false,
ignore_import,
) {
Ok((Some(ext), _)) => {
@ -617,11 +610,7 @@ impl<'ra, 'tcx> Resolver<'ra, 'tcx> {
Ok(decl)
}
Err(ControlFlow::Continue(determinacy)) => Err(determinacy),
Err(ControlFlow::Break(determinacy)) => {
return Err(ControlFlow::Break(Determinacy::determined(
determinacy == Determinacy::Determined || force,
)));
}
Err(ControlFlow::Break(..)) => return decl,
}
}
Scope::ModuleGlobs(module, derive_fallback_lint_id) => {
@ -668,11 +657,7 @@ impl<'ra, 'tcx> Resolver<'ra, 'tcx> {
Ok(binding)
}
Err(ControlFlow::Continue(determinacy)) => Err(determinacy),
Err(ControlFlow::Break(determinacy)) => {
return Err(ControlFlow::Break(Determinacy::determined(
determinacy == Determinacy::Determined || force,
)));
}
Err(ControlFlow::Break(..)) => return binding,
}
}
Scope::MacroUsePrelude => match self.macro_use_prelude.get(&ident.name).cloned() {
@ -715,7 +700,6 @@ impl<'ra, 'tcx> Resolver<'ra, 'tcx> {
ScopeSet::Module(ns, prelude),
parent_scope,
None,
false,
ignore_decl,
ignore_import,
)
@ -951,7 +935,6 @@ impl<'ra, 'tcx> Resolver<'ra, 'tcx> {
ScopeSet::Module(ns, module),
parent_scope,
finalize,
finalize.is_some(),
ignore_decl,
ignore_import,
),
@ -960,7 +943,6 @@ impl<'ra, 'tcx> Resolver<'ra, 'tcx> {
ScopeSet::ModuleAndExternPrelude(ns, module),
parent_scope,
finalize,
finalize.is_some(),
ignore_decl,
ignore_import,
),
@ -973,7 +955,6 @@ impl<'ra, 'tcx> Resolver<'ra, 'tcx> {
ScopeSet::ExternPrelude,
parent_scope,
finalize,
finalize.is_some(),
ignore_decl,
ignore_import,
)
@ -996,7 +977,6 @@ impl<'ra, 'tcx> Resolver<'ra, 'tcx> {
ScopeSet::All(ns),
parent_scope,
finalize,
finalize.is_some(),
ignore_decl,
ignore_import,
)
@ -1180,7 +1160,6 @@ impl<'ra, 'tcx> Resolver<'ra, 'tcx> {
ScopeSet::Module(ns, module),
adjusted_parent_scope,
None,
false,
ignore_decl,
ignore_import,
);
@ -1881,7 +1860,6 @@ impl<'ra, 'tcx> Resolver<'ra, 'tcx> {
ScopeSet::All(ns),
parent_scope,
finalize,
finalize.is_some(),
ignore_decl,
ignore_import,
)
@ -1957,8 +1935,8 @@ impl<'ra, 'tcx> Resolver<'ra, 'tcx> {
);
}
}
Err(Undetermined) => return PathResult::Indeterminate,
Err(Determined) => {
Err(Undetermined) if finalize.is_none() => return PathResult::Indeterminate,
Err(Determined | Undetermined) => {
if let Some(ModuleOrUniformRoot::Module(module)) = module
&& opt_ns.is_some()
&& !module.is_normal()

View file

@ -1498,7 +1498,6 @@ impl<'ra, 'tcx> Resolver<'ra, 'tcx> {
ScopeSet::All(ns),
&import.parent_scope,
None,
false,
decls[ns].get().decl(),
None,
) {

View file

@ -799,10 +799,12 @@ impl<'ra, 'tcx> Resolver<'ra, 'tcx> {
ScopeSet::Macro(kind),
parent_scope,
None,
force,
None,
None,
);
let binding = binding.map_err(|determinacy| {
Determinacy::determined(determinacy == Determinacy::Determined || force)
});
if let Err(Determinacy::Undetermined) = binding {
return Err(Determinacy::Undetermined);
}
@ -958,7 +960,6 @@ impl<'ra, 'tcx> Resolver<'ra, 'tcx> {
ScopeSet::Macro(kind),
&parent_scope,
Some(Finalize::new(ast::CRATE_NODE_ID, ident.span)),
true,
None,
None,
) {
@ -1013,7 +1014,6 @@ impl<'ra, 'tcx> Resolver<'ra, 'tcx> {
ScopeSet::Macro(MacroKind::Attr),
&parent_scope,
Some(Finalize::new(ast::CRATE_NODE_ID, ident.span)),
true,
None,
None,
);
@ -1117,7 +1117,6 @@ impl<'ra, 'tcx> Resolver<'ra, 'tcx> {
ScopeSet::Macro(MacroKind::Bang),
&ParentScope { macro_rules: no_macro_rules, ..*parent_scope },
None,
false,
None,
None,
);

View file

@ -1995,6 +1995,7 @@ symbols! {
rustc_no_implicit_autorefs,
rustc_no_implicit_bounds,
rustc_no_mir_inline,
rustc_non_const_trait_method,
rustc_nonnull_optimization_guaranteed,
rustc_nounwind,
rustc_objc_class,

View file

@ -21,7 +21,7 @@ pub(crate) fn target() -> Target {
max_atomic_width: Some(128),
// As documented in https://developer.android.com/ndk/guides/cpu-features.html
// the neon (ASIMD) and FP must exist on all android aarch64 targets.
features: "+v8a,+neon,+outline-atomics".into(),
features: "+v8a,+neon".into(),
// the AAPCS64 expects use of non-leaf frame pointers per
// https://github.com/ARM-software/abi-aa/blob/4492d1570eb70c8fd146623e0db65b2d241f12e7/aapcs64/aapcs64.rst#the-frame-pointer
// and we tend to encounter interesting bugs in AArch64 unwinding code if we do not

View file

@ -3,7 +3,7 @@ use crate::spec::{Arch, Cc, FramePointer, LinkerFlavor, Lld, Target, TargetMetad
pub(crate) fn target() -> Target {
let mut base = base::windows_gnullvm::opts();
base.max_atomic_width = Some(128);
base.features = "+v8a,+neon,+outline-atomics".into();
base.features = "+v8a,+neon".into();
base.linker = Some("aarch64-w64-mingw32-clang".into());
base.add_pre_link_args(LinkerFlavor::Gnu(Cc::No, Lld::No), &["-m", "arm64pe"]);

View file

@ -3,7 +3,7 @@ use crate::spec::{Arch, FramePointer, Target, TargetMetadata, base};
pub(crate) fn target() -> Target {
let mut base = base::windows_msvc::opts();
base.max_atomic_width = Some(128);
base.features = "+v8a,+neon,+outline-atomics".into();
base.features = "+v8a,+neon".into();
// Microsoft recommends enabling frame pointers on Arm64 Windows.
// From https://learn.microsoft.com/en-us/cpp/build/arm64-windows-abi-conventions?view=msvc-170#integer-registers

View file

@ -15,7 +15,7 @@ pub(crate) fn target() -> Target {
data_layout: "e-m:e-p270:32:32-p271:32:32-p272:64:64-i8:8:32-i16:16:32-i64:64-i128:128-n32:64-S128-Fn32".into(),
arch: Arch::AArch64,
options: TargetOptions {
features: "+v8a,+outline-atomics".into(),
features: "+v8a".into(),
max_atomic_width: Some(128),
stack_probes: StackProbeType::Inline,
supported_sanitizers: SanitizerSet::ADDRESS

View file

@ -5,7 +5,7 @@ use crate::spec::{
pub(crate) fn target() -> Target {
let mut base = base::fuchsia::opts();
base.cpu = "generic".into();
base.features = "+v8a,+crc,+aes,+sha2,+neon,+outline-atomics".into();
base.features = "+v8a,+crc,+aes,+sha2,+neon".into();
base.max_atomic_width = Some(128);
base.stack_probes = StackProbeType::Inline;
base.supported_sanitizers = SanitizerSet::ADDRESS

View file

@ -13,7 +13,7 @@ pub(crate) fn target() -> Target {
data_layout: "e-m:e-p270:32:32-p271:32:32-p272:64:64-i8:8:32-i16:16:32-i64:64-i128:128-n32:64-S128-Fn32".into(),
arch: Arch::AArch64,
options: TargetOptions {
features: "+v8a,+outline-atomics".into(),
features: "+v8a".into(),
max_atomic_width: Some(128),
stack_probes: StackProbeType::Inline,
..base::openbsd::opts()

View file

@ -12,7 +12,7 @@ use rustc_hir::{
};
use rustc_middle::bug;
use rustc_middle::hir::nested_filter;
use rustc_middle::ty::adjustment::{Adjust, Adjustment, AutoBorrow};
use rustc_middle::ty::adjustment::{Adjust, Adjustment, AutoBorrow, DerefAdjustKind};
use rustc_middle::ty::print::{FmtPrinter, PrettyPrinter, Print, Printer};
use rustc_middle::ty::{
self, GenericArg, GenericArgKind, GenericArgsRef, InferConst, IsSuggestable, Term, TermKind,
@ -615,7 +615,7 @@ impl<'a, 'tcx> TypeErrCtxt<'a, 'tcx> {
// first adjustment was not a builtin deref.
let adjustment = match typeck_results.expr_adjustments(receiver) {
[
Adjustment { kind: Adjust::Deref(None), target: _ },
Adjustment { kind: Adjust::Deref(DerefAdjustKind::Builtin), target: _ },
..,
Adjustment { kind: Adjust::Borrow(AutoBorrow::Ref(..)), target: _ },
] => "",

View file

@ -277,6 +277,28 @@ impl<'a, 'tcx> TypeErrCtxt<'a, 'tcx> {
};
let mut err = struct_span_code_err!(self.dcx(), span, E0277, "{}", err_msg);
let trait_def_id = main_trait_predicate.def_id();
if self.tcx.is_diagnostic_item(sym::From, trait_def_id)
|| self.tcx.is_diagnostic_item(sym::TryFrom, trait_def_id)
{
let found_ty = leaf_trait_predicate.skip_binder().trait_ref.args.type_at(1);
let ty = main_trait_predicate.skip_binder().self_ty();
if let Some(cast_ty) = self.find_explicit_cast_type(
obligation.param_env,
found_ty,
ty,
) {
let found_ty_str = self.tcx.short_string(found_ty, &mut long_ty_file);
let cast_ty_str = self.tcx.short_string(cast_ty, &mut long_ty_file);
err.help(
format!(
"consider casting the `{found_ty_str}` value to `{cast_ty_str}`",
),
);
}
}
*err.long_ty_path() = long_ty_file;
let mut suggested = false;
@ -2930,6 +2952,69 @@ impl<'a, 'tcx> TypeErrCtxt<'a, 'tcx> {
})
}
/// If `found_ty` is a reference that can be explicitly cast to another reference type for which
/// a `From` / `TryFrom` impl exists for `self_ty`, return that type.
///
/// Returns `None` when `found_ty` is not a reference, when it has no deref target beyond
/// itself, or when neither conversion trait is implemented for the candidate cast type.
fn find_explicit_cast_type(
    &self,
    param_env: ty::ParamEnv<'tcx>,
    found_ty: Ty<'tcx>,
    self_ty: Ty<'tcx>,
) -> Option<Ty<'tcx>> {
    // Only reference types are candidates for this suggestion.
    let ty::Ref(region, inner_ty, mutbl) = *found_ty.kind() else {
        return None;
    };
    // The first autoderef step is `inner_ty` itself; the candidate is its first
    // real deref target (`nth(1)` replaces the manual `next()`-then-`next()` dance).
    let deref_target = (self.autoderef_steps)(inner_ty).into_iter().nth(1)?.0;
    let cast_ty = Ty::new_ref(self.tcx, region, deref_target, mutbl);
    let from_def_id = self.tcx.get_diagnostic_item(sym::From)?;
    let try_from_def_id = self.tcx.get_diagnostic_item(sym::TryFrom)?;
    // Suggest the cast if either `self_ty: From<cast_ty>` or `self_ty: TryFrom<cast_ty>`
    // holds — both branches build the same trait ref modulo the trait `DefId`.
    let has_conversion_impl = [from_def_id, try_from_def_id].into_iter().any(|trait_def_id| {
        self.has_impl_for_type(
            param_env,
            ty::TraitRef::new(
                self.tcx,
                trait_def_id,
                self.tcx.mk_args(&[self_ty.into(), cast_ty.into()]),
            ),
        )
    });
    has_conversion_impl.then_some(cast_ty)
}
/// Whether `trait_ref` must hold (modulo regions) in `param_env`.
fn has_impl_for_type(
    &self,
    param_env: ty::ParamEnv<'tcx>,
    trait_ref: ty::TraitRef<'tcx>,
) -> bool {
    // Wrap the trait ref in a positive predicate and ask the trait solver via a
    // throwaway obligation with a dummy cause — we only care about the yes/no answer.
    let predicate = ty::TraitPredicate { trait_ref, polarity: ty::PredicatePolarity::Positive };
    let obligation = Obligation::new(self.tcx, ObligationCause::dummy(), param_env, predicate);
    self.predicate_must_hold_modulo_regions(&obligation)
}
fn add_tuple_trait_message(
&self,
obligation_cause_code: &ObligationCauseCode<'tcx>,

View file

@ -649,6 +649,33 @@ impl<T: Ord, A: Allocator> BinaryHeap<T, A> {
})
}
/// Removes and returns the greatest item from the binary heap, but only if the
/// predicate accepts it. Returns [`None`] when the heap is empty (the predicate
/// is not invoked in that case) or when the predicate rejects the greatest item.
///
/// # Examples
///
/// ```
/// #![feature(binary_heap_pop_if)]
/// use std::collections::BinaryHeap;
/// let mut heap = BinaryHeap::from([1, 2]);
/// let pred = |x: &i32| *x % 2 == 0;
///
/// assert_eq!(heap.pop_if(pred), Some(2));
/// assert_eq!(heap.as_slice(), [1]);
/// assert_eq!(heap.pop_if(pred), None);
/// assert_eq!(heap.as_slice(), [1]);
/// ```
///
/// # Time complexity
///
/// The worst case cost of `pop_if` on a heap containing *n* elements is *O*(log(*n*)).
#[unstable(feature = "binary_heap_pop_if", issue = "151828")]
pub fn pop_if(&mut self, predicate: impl FnOnce(&T) -> bool) -> Option<T> {
    match self.peek() {
        Some(greatest) if predicate(greatest) => self.pop(),
        _ => None,
    }
}
/// Pushes an item onto the binary heap.
///
/// # Examples

View file

@ -136,6 +136,18 @@ fn test_peek_and_pop() {
}
}
#[test]
fn test_pop_if() {
    // Ascending values double as the "expected pops" stack: each `pop_if` hit
    // must yield the current maximum, i.e. the last element of `expected`.
    let mut expected: Vec<i32> = (0..10).collect();
    let mut heap: BinaryHeap<i32> = expected.iter().copied().collect();
    while let Some(popped) = heap.pop_if(|&x| x > 2) {
        assert_eq!(Some(popped), expected.pop());
    }
    // Once the predicate first rejects the maximum, the remaining items stay put.
    assert_eq!(heap.into_sorted_vec(), vec![0, 1, 2]);
}
#[test]
fn test_peek_mut() {
let data = vec![2, 4, 6, 2, 1, 8, 10, 3, 5, 7, 0, 9, 1];

View file

@ -1,4 +1,5 @@
#![feature(allocator_api)]
#![feature(binary_heap_pop_if)]
#![feature(const_heap)]
#![feature(deque_extend_front)]
#![feature(iter_array_chunks)]

View file

@ -1227,8 +1227,9 @@ impl<'a, 'b: 'a> DebugMap<'a, 'b> {
/// assert_eq!(format!("{:?}", wrapped), "'a'");
/// ```
#[stable(feature = "fmt_from_fn", since = "1.93.0")]
#[rustc_const_stable(feature = "const_fmt_from_fn", since = "CURRENT_RUSTC_VERSION")]
#[must_use = "returns a type implementing Debug and Display, which do not have any effects unless they are used"]
pub fn from_fn<F: Fn(&mut fmt::Formatter<'_>) -> fmt::Result>(f: F) -> FromFn<F> {
pub const fn from_fn<F: Fn(&mut fmt::Formatter<'_>) -> fmt::Result>(f: F) -> FromFn<F> {
FromFn(f)
}

View file

@ -279,7 +279,8 @@ pub trait FromIterator<A>: Sized {
)]
#[rustc_skip_during_method_dispatch(array, boxed_slice)]
#[stable(feature = "rust1", since = "1.0.0")]
pub trait IntoIterator {
#[rustc_const_unstable(feature = "const_iter", issue = "92476")]
pub const trait IntoIterator {
/// The type of the elements being iterated over.
#[rustc_diagnostic_item = "IntoIteratorItem"]
#[stable(feature = "rust1", since = "1.0.0")]
@ -312,7 +313,8 @@ pub trait IntoIterator {
}
#[stable(feature = "rust1", since = "1.0.0")]
impl<I: Iterator> IntoIterator for I {
#[rustc_const_unstable(feature = "const_iter", issue = "92476")]
impl<I: [const] Iterator> const IntoIterator for I {
type Item = I::Item;
type IntoIter = I;

View file

@ -37,7 +37,8 @@ fn _assert_is_dyn_compatible(_: &dyn Iterator<Item = ()>) {}
#[lang = "iterator"]
#[rustc_diagnostic_item = "Iterator"]
#[must_use = "iterators are lazy and do nothing unless consumed"]
pub trait Iterator {
#[rustc_const_unstable(feature = "const_iter", issue = "92476")]
pub const trait Iterator {
/// The type of the elements being iterated over.
#[rustc_diagnostic_item = "IteratorItem"]
#[stable(feature = "rust1", since = "1.0.0")]
@ -107,6 +108,7 @@ pub trait Iterator {
/// ```
#[inline]
#[unstable(feature = "iter_next_chunk", issue = "98326")]
#[rustc_non_const_trait_method]
fn next_chunk<const N: usize>(
&mut self,
) -> Result<[Self::Item; N], array::IntoIter<Self::Item, N>>
@ -219,6 +221,7 @@ pub trait Iterator {
/// ```
#[inline]
#[stable(feature = "rust1", since = "1.0.0")]
#[rustc_non_const_trait_method]
fn count(self) -> usize
where
Self: Sized,
@ -251,6 +254,7 @@ pub trait Iterator {
/// ```
#[inline]
#[stable(feature = "rust1", since = "1.0.0")]
#[rustc_non_const_trait_method]
fn last(self) -> Option<Self::Item>
where
Self: Sized,
@ -298,6 +302,7 @@ pub trait Iterator {
/// ```
#[inline]
#[unstable(feature = "iter_advance_by", issue = "77404")]
#[rustc_non_const_trait_method]
fn advance_by(&mut self, n: usize) -> Result<(), NonZero<usize>> {
/// Helper trait to specialize `advance_by` via `try_fold` for `Sized` iterators.
trait SpecAdvanceBy {
@ -375,6 +380,7 @@ pub trait Iterator {
/// ```
#[inline]
#[stable(feature = "rust1", since = "1.0.0")]
#[rustc_non_const_trait_method]
fn nth(&mut self, n: usize) -> Option<Self::Item> {
self.advance_by(n).ok()?;
self.next()
@ -425,6 +431,7 @@ pub trait Iterator {
/// ```
#[inline]
#[stable(feature = "iterator_step_by", since = "1.28.0")]
#[rustc_non_const_trait_method]
fn step_by(self, step: usize) -> StepBy<Self>
where
Self: Sized,
@ -496,6 +503,7 @@ pub trait Iterator {
/// [`OsStr`]: ../../std/ffi/struct.OsStr.html
#[inline]
#[stable(feature = "rust1", since = "1.0.0")]
#[rustc_non_const_trait_method]
fn chain<U>(self, other: U) -> Chain<Self, U::IntoIter>
where
Self: Sized,
@ -614,6 +622,7 @@ pub trait Iterator {
/// [`zip`]: crate::iter::zip
#[inline]
#[stable(feature = "rust1", since = "1.0.0")]
#[rustc_non_const_trait_method]
fn zip<U>(self, other: U) -> Zip<Self, U::IntoIter>
where
Self: Sized,
@ -657,6 +666,7 @@ pub trait Iterator {
/// [`intersperse_with`]: Iterator::intersperse_with
#[inline]
#[unstable(feature = "iter_intersperse", issue = "79524")]
#[rustc_non_const_trait_method]
fn intersperse(self, separator: Self::Item) -> Intersperse<Self>
where
Self: Sized,
@ -715,6 +725,7 @@ pub trait Iterator {
/// [`intersperse`]: Iterator::intersperse
#[inline]
#[unstable(feature = "iter_intersperse", issue = "79524")]
#[rustc_non_const_trait_method]
fn intersperse_with<G>(self, separator: G) -> IntersperseWith<Self, G>
where
Self: Sized,
@ -774,6 +785,7 @@ pub trait Iterator {
#[rustc_diagnostic_item = "IteratorMap"]
#[inline]
#[stable(feature = "rust1", since = "1.0.0")]
#[rustc_non_const_trait_method]
fn map<B, F>(self, f: F) -> Map<Self, F>
where
Self: Sized,
@ -819,6 +831,7 @@ pub trait Iterator {
/// ```
#[inline]
#[stable(feature = "iterator_for_each", since = "1.21.0")]
#[rustc_non_const_trait_method]
fn for_each<F>(self, f: F)
where
Self: Sized,
@ -894,6 +907,7 @@ pub trait Iterator {
#[inline]
#[stable(feature = "rust1", since = "1.0.0")]
#[rustc_diagnostic_item = "iter_filter"]
#[rustc_non_const_trait_method]
fn filter<P>(self, predicate: P) -> Filter<Self, P>
where
Self: Sized,
@ -939,6 +953,7 @@ pub trait Iterator {
/// ```
#[inline]
#[stable(feature = "rust1", since = "1.0.0")]
#[rustc_non_const_trait_method]
fn filter_map<B, F>(self, f: F) -> FilterMap<Self, F>
where
Self: Sized,
@ -986,6 +1001,7 @@ pub trait Iterator {
#[inline]
#[stable(feature = "rust1", since = "1.0.0")]
#[rustc_diagnostic_item = "enumerate_method"]
#[rustc_non_const_trait_method]
fn enumerate(self) -> Enumerate<Self>
where
Self: Sized,
@ -1057,6 +1073,7 @@ pub trait Iterator {
/// [`next`]: Iterator::next
#[inline]
#[stable(feature = "rust1", since = "1.0.0")]
#[rustc_non_const_trait_method]
fn peekable(self) -> Peekable<Self>
where
Self: Sized,
@ -1122,6 +1139,7 @@ pub trait Iterator {
#[inline]
#[doc(alias = "drop_while")]
#[stable(feature = "rust1", since = "1.0.0")]
#[rustc_non_const_trait_method]
fn skip_while<P>(self, predicate: P) -> SkipWhile<Self, P>
where
Self: Sized,
@ -1200,6 +1218,7 @@ pub trait Iterator {
/// the iteration should stop, but wasn't placed back into the iterator.
#[inline]
#[stable(feature = "rust1", since = "1.0.0")]
#[rustc_non_const_trait_method]
fn take_while<P>(self, predicate: P) -> TakeWhile<Self, P>
where
Self: Sized,
@ -1288,6 +1307,7 @@ pub trait Iterator {
/// [`fuse`]: Iterator::fuse
#[inline]
#[stable(feature = "iter_map_while", since = "1.57.0")]
#[rustc_non_const_trait_method]
fn map_while<B, P>(self, predicate: P) -> MapWhile<Self, P>
where
Self: Sized,
@ -1317,6 +1337,7 @@ pub trait Iterator {
/// ```
#[inline]
#[stable(feature = "rust1", since = "1.0.0")]
#[rustc_non_const_trait_method]
fn skip(self, n: usize) -> Skip<Self>
where
Self: Sized,
@ -1389,6 +1410,7 @@ pub trait Iterator {
#[doc(alias = "limit")]
#[inline]
#[stable(feature = "rust1", since = "1.0.0")]
#[rustc_non_const_trait_method]
fn take(self, n: usize) -> Take<Self>
where
Self: Sized,
@ -1436,6 +1458,7 @@ pub trait Iterator {
/// ```
#[inline]
#[stable(feature = "rust1", since = "1.0.0")]
#[rustc_non_const_trait_method]
fn scan<St, B, F>(self, initial_state: St, f: F) -> Scan<Self, St, F>
where
Self: Sized,
@ -1474,6 +1497,7 @@ pub trait Iterator {
/// ```
#[inline]
#[stable(feature = "rust1", since = "1.0.0")]
#[rustc_non_const_trait_method]
fn flat_map<U, F>(self, f: F) -> FlatMap<Self, U, F>
where
Self: Sized,
@ -1558,6 +1582,7 @@ pub trait Iterator {
/// [`flat_map()`]: Iterator::flat_map
#[inline]
#[stable(feature = "iterator_flatten", since = "1.29.0")]
#[rustc_non_const_trait_method]
fn flatten(self) -> Flatten<Self>
where
Self: Sized,
@ -1714,6 +1739,7 @@ pub trait Iterator {
/// ```
#[inline]
#[unstable(feature = "iter_map_windows", issue = "87155")]
#[rustc_non_const_trait_method]
fn map_windows<F, R, const N: usize>(self, f: F) -> MapWindows<Self, F, N>
where
Self: Sized,
@ -1776,6 +1802,7 @@ pub trait Iterator {
/// ```
#[inline]
#[stable(feature = "rust1", since = "1.0.0")]
#[rustc_non_const_trait_method]
fn fuse(self) -> Fuse<Self>
where
Self: Sized,
@ -1860,6 +1887,7 @@ pub trait Iterator {
/// ```
#[inline]
#[stable(feature = "rust1", since = "1.0.0")]
#[rustc_non_const_trait_method]
fn inspect<F>(self, f: F) -> Inspect<Self, F>
where
Self: Sized,
@ -2019,6 +2047,7 @@ pub trait Iterator {
#[stable(feature = "rust1", since = "1.0.0")]
#[must_use = "if you really need to exhaust the iterator, consider `.for_each(drop)` instead"]
#[rustc_diagnostic_item = "iterator_collect_fn"]
#[rustc_non_const_trait_method]
fn collect<B: FromIterator<Self::Item>>(self) -> B
where
Self: Sized,
@ -2106,6 +2135,7 @@ pub trait Iterator {
/// [`collect`]: Iterator::collect
#[inline]
#[unstable(feature = "iterator_try_collect", issue = "94047")]
#[rustc_non_const_trait_method]
fn try_collect<B>(&mut self) -> ChangeOutputType<Self::Item, B>
where
Self: Sized,
@ -2178,6 +2208,7 @@ pub trait Iterator {
/// ```
#[inline]
#[unstable(feature = "iter_collect_into", issue = "94780")]
#[rustc_non_const_trait_method]
fn collect_into<E: Extend<Self::Item>>(self, collection: &mut E) -> &mut E
where
Self: Sized,
@ -2210,6 +2241,7 @@ pub trait Iterator {
/// assert_eq!(odd, [1, 3]);
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
#[rustc_non_const_trait_method]
fn partition<B, F>(self, f: F) -> (B, B)
where
Self: Sized,
@ -2272,6 +2304,7 @@ pub trait Iterator {
/// assert!(a[i..].iter().all(|n| n % 2 == 1)); // odds
/// ```
#[unstable(feature = "iter_partition_in_place", issue = "62543")]
#[rustc_non_const_trait_method]
fn partition_in_place<'a, T: 'a, P>(mut self, ref mut predicate: P) -> usize
where
Self: Sized + DoubleEndedIterator<Item = &'a mut T>,
@ -2329,6 +2362,7 @@ pub trait Iterator {
/// assert!(!"IntoIterator".chars().is_partitioned(char::is_uppercase));
/// ```
#[unstable(feature = "iter_is_partitioned", issue = "62544")]
#[rustc_non_const_trait_method]
fn is_partitioned<P>(mut self, mut predicate: P) -> bool
where
Self: Sized,
@ -2423,6 +2457,7 @@ pub trait Iterator {
/// ```
#[inline]
#[stable(feature = "iterator_try_fold", since = "1.27.0")]
#[rustc_non_const_trait_method]
fn try_fold<B, F, R>(&mut self, init: B, mut f: F) -> R
where
Self: Sized,
@ -2481,6 +2516,7 @@ pub trait Iterator {
/// ```
#[inline]
#[stable(feature = "iterator_try_fold", since = "1.27.0")]
#[rustc_non_const_trait_method]
fn try_for_each<F, R>(&mut self, f: F) -> R
where
Self: Sized,
@ -2600,6 +2636,7 @@ pub trait Iterator {
#[doc(alias = "inject", alias = "foldl")]
#[inline]
#[stable(feature = "rust1", since = "1.0.0")]
#[rustc_non_const_trait_method]
fn fold<B, F>(mut self, init: B, mut f: F) -> B
where
Self: Sized,
@ -2637,6 +2674,7 @@ pub trait Iterator {
/// ```
#[inline]
#[stable(feature = "iterator_fold_self", since = "1.51.0")]
#[rustc_non_const_trait_method]
fn reduce<F>(mut self, f: F) -> Option<Self::Item>
where
Self: Sized,
@ -2708,6 +2746,7 @@ pub trait Iterator {
/// ```
#[inline]
#[unstable(feature = "iterator_try_reduce", issue = "87053")]
#[rustc_non_const_trait_method]
fn try_reduce<R>(
&mut self,
f: impl FnMut(Self::Item, Self::Item) -> R,
@ -2766,6 +2805,7 @@ pub trait Iterator {
/// ```
#[inline]
#[stable(feature = "rust1", since = "1.0.0")]
#[rustc_non_const_trait_method]
fn all<F>(&mut self, f: F) -> bool
where
Self: Sized,
@ -2819,6 +2859,7 @@ pub trait Iterator {
/// ```
#[inline]
#[stable(feature = "rust1", since = "1.0.0")]
#[rustc_non_const_trait_method]
fn any<F>(&mut self, f: F) -> bool
where
Self: Sized,
@ -2892,6 +2933,7 @@ pub trait Iterator {
/// Note that `iter.find(f)` is equivalent to `iter.filter(f).next()`.
#[inline]
#[stable(feature = "rust1", since = "1.0.0")]
#[rustc_non_const_trait_method]
fn find<P>(&mut self, predicate: P) -> Option<Self::Item>
where
Self: Sized,
@ -2923,6 +2965,7 @@ pub trait Iterator {
/// ```
#[inline]
#[stable(feature = "iterator_find_map", since = "1.30.0")]
#[rustc_non_const_trait_method]
fn find_map<B, F>(&mut self, f: F) -> Option<B>
where
Self: Sized,
@ -2981,6 +3024,7 @@ pub trait Iterator {
/// ```
#[inline]
#[unstable(feature = "try_find", issue = "63178")]
#[rustc_non_const_trait_method]
fn try_find<R>(
&mut self,
f: impl FnMut(&Self::Item) -> R,
@ -3064,6 +3108,7 @@ pub trait Iterator {
/// ```
#[inline]
#[stable(feature = "rust1", since = "1.0.0")]
#[rustc_non_const_trait_method]
fn position<P>(&mut self, predicate: P) -> Option<usize>
where
Self: Sized,
@ -3129,6 +3174,7 @@ pub trait Iterator {
/// ```
#[inline]
#[stable(feature = "rust1", since = "1.0.0")]
#[rustc_non_const_trait_method]
fn rposition<P>(&mut self, predicate: P) -> Option<usize>
where
P: FnMut(Self::Item) -> bool,
@ -3178,6 +3224,7 @@ pub trait Iterator {
/// ```
#[inline]
#[stable(feature = "rust1", since = "1.0.0")]
#[rustc_non_const_trait_method]
fn max(self) -> Option<Self::Item>
where
Self: Sized,
@ -3214,6 +3261,7 @@ pub trait Iterator {
/// ```
#[inline]
#[stable(feature = "rust1", since = "1.0.0")]
#[rustc_non_const_trait_method]
fn min(self) -> Option<Self::Item>
where
Self: Sized,
@ -3236,6 +3284,7 @@ pub trait Iterator {
/// ```
#[inline]
#[stable(feature = "iter_cmp_by_key", since = "1.6.0")]
#[rustc_non_const_trait_method]
fn max_by_key<B: Ord, F>(self, f: F) -> Option<Self::Item>
where
Self: Sized,
@ -3269,6 +3318,7 @@ pub trait Iterator {
/// ```
#[inline]
#[stable(feature = "iter_max_by", since = "1.15.0")]
#[rustc_non_const_trait_method]
fn max_by<F>(self, compare: F) -> Option<Self::Item>
where
Self: Sized,
@ -3296,6 +3346,7 @@ pub trait Iterator {
/// ```
#[inline]
#[stable(feature = "iter_cmp_by_key", since = "1.6.0")]
#[rustc_non_const_trait_method]
fn min_by_key<B: Ord, F>(self, f: F) -> Option<Self::Item>
where
Self: Sized,
@ -3329,6 +3380,7 @@ pub trait Iterator {
/// ```
#[inline]
#[stable(feature = "iter_min_by", since = "1.15.0")]
#[rustc_non_const_trait_method]
fn min_by<F>(self, compare: F) -> Option<Self::Item>
where
Self: Sized,
@ -3366,6 +3418,7 @@ pub trait Iterator {
#[inline]
#[doc(alias = "reverse")]
#[stable(feature = "rust1", since = "1.0.0")]
#[rustc_non_const_trait_method]
fn rev(self) -> Rev<Self>
where
Self: Sized + DoubleEndedIterator,
@ -3402,6 +3455,7 @@ pub trait Iterator {
/// assert_eq!(z, [3, 6]);
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
#[rustc_non_const_trait_method]
fn unzip<A, B, FromA, FromB>(self) -> (FromA, FromB)
where
FromA: Default + Extend<A>,
@ -3433,6 +3487,7 @@ pub trait Iterator {
/// ```
#[stable(feature = "iter_copied", since = "1.36.0")]
#[rustc_diagnostic_item = "iter_copied"]
#[rustc_non_const_trait_method]
fn copied<'a, T>(self) -> Copied<Self>
where
T: Copy + 'a,
@ -3481,6 +3536,7 @@ pub trait Iterator {
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
#[rustc_diagnostic_item = "iter_cloned"]
#[rustc_non_const_trait_method]
fn cloned<'a, T>(self) -> Cloned<Self>
where
T: Clone + 'a,
@ -3512,6 +3568,7 @@ pub trait Iterator {
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
#[inline]
#[rustc_non_const_trait_method]
fn cycle(self) -> Cycle<Self>
where
Self: Sized + Clone,
@ -3555,6 +3612,7 @@ pub trait Iterator {
/// ```
#[track_caller]
#[unstable(feature = "iter_array_chunks", issue = "100450")]
#[rustc_non_const_trait_method]
fn array_chunks<const N: usize>(self) -> ArrayChunks<Self, N>
where
Self: Sized,
@ -3591,6 +3649,7 @@ pub trait Iterator {
/// assert_eq!(sum, -0.0_f32);
/// ```
#[stable(feature = "iter_arith", since = "1.11.0")]
#[rustc_non_const_trait_method]
fn sum<S>(self) -> S
where
Self: Sized,
@ -3623,6 +3682,7 @@ pub trait Iterator {
/// assert_eq!(factorial(5), 120);
/// ```
#[stable(feature = "iter_arith", since = "1.11.0")]
#[rustc_non_const_trait_method]
fn product<P>(self) -> P
where
Self: Sized,
@ -3644,6 +3704,7 @@ pub trait Iterator {
/// assert_eq!([1, 2].iter().cmp([1].iter()), Ordering::Greater);
/// ```
#[stable(feature = "iter_order", since = "1.5.0")]
#[rustc_non_const_trait_method]
fn cmp<I>(self, other: I) -> Ordering
where
I: IntoIterator<Item = Self::Item>,
@ -3671,6 +3732,7 @@ pub trait Iterator {
/// assert_eq!(xs.into_iter().cmp_by(ys, |x, y| (2 * x).cmp(&y)), Ordering::Greater);
/// ```
#[unstable(feature = "iter_order_by", issue = "64295")]
#[rustc_non_const_trait_method]
fn cmp_by<I, F>(self, other: I, cmp: F) -> Ordering
where
Self: Sized,
@ -3727,6 +3789,7 @@ pub trait Iterator {
/// ```
///
#[stable(feature = "iter_order", since = "1.5.0")]
#[rustc_non_const_trait_method]
fn partial_cmp<I>(self, other: I) -> Option<Ordering>
where
I: IntoIterator,
@ -3763,6 +3826,7 @@ pub trait Iterator {
/// );
/// ```
#[unstable(feature = "iter_order_by", issue = "64295")]
#[rustc_non_const_trait_method]
fn partial_cmp_by<I, F>(self, other: I, partial_cmp: F) -> Option<Ordering>
where
Self: Sized,
@ -3796,6 +3860,7 @@ pub trait Iterator {
/// assert_eq!([1].iter().eq([1, 2].iter()), false);
/// ```
#[stable(feature = "iter_order", since = "1.5.0")]
#[rustc_non_const_trait_method]
fn eq<I>(self, other: I) -> bool
where
I: IntoIterator,
@ -3819,6 +3884,7 @@ pub trait Iterator {
/// assert!(xs.iter().eq_by(ys, |x, y| x * x == y));
/// ```
#[unstable(feature = "iter_order_by", issue = "64295")]
#[rustc_non_const_trait_method]
fn eq_by<I, F>(self, other: I, eq: F) -> bool
where
Self: Sized,
@ -3848,6 +3914,7 @@ pub trait Iterator {
/// assert_eq!([1].iter().ne([1, 2].iter()), true);
/// ```
#[stable(feature = "iter_order", since = "1.5.0")]
#[rustc_non_const_trait_method]
fn ne<I>(self, other: I) -> bool
where
I: IntoIterator,
@ -3869,6 +3936,7 @@ pub trait Iterator {
/// assert_eq!([1, 2].iter().lt([1, 2].iter()), false);
/// ```
#[stable(feature = "iter_order", since = "1.5.0")]
#[rustc_non_const_trait_method]
fn lt<I>(self, other: I) -> bool
where
I: IntoIterator,
@ -3890,6 +3958,7 @@ pub trait Iterator {
/// assert_eq!([1, 2].iter().le([1, 2].iter()), true);
/// ```
#[stable(feature = "iter_order", since = "1.5.0")]
#[rustc_non_const_trait_method]
fn le<I>(self, other: I) -> bool
where
I: IntoIterator,
@ -3911,6 +3980,7 @@ pub trait Iterator {
/// assert_eq!([1, 2].iter().gt([1, 2].iter()), false);
/// ```
#[stable(feature = "iter_order", since = "1.5.0")]
#[rustc_non_const_trait_method]
fn gt<I>(self, other: I) -> bool
where
I: IntoIterator,
@ -3932,6 +4002,7 @@ pub trait Iterator {
/// assert_eq!([1, 2].iter().ge([1, 2].iter()), true);
/// ```
#[stable(feature = "iter_order", since = "1.5.0")]
#[rustc_non_const_trait_method]
fn ge<I>(self, other: I) -> bool
where
I: IntoIterator,
@ -3961,6 +4032,7 @@ pub trait Iterator {
/// ```
#[inline]
#[stable(feature = "is_sorted", since = "1.82.0")]
#[rustc_non_const_trait_method]
fn is_sorted(self) -> bool
where
Self: Sized,
@ -3987,6 +4059,7 @@ pub trait Iterator {
/// assert!(std::iter::empty::<i32>().is_sorted_by(|a, b| true));
/// ```
#[stable(feature = "is_sorted", since = "1.82.0")]
#[rustc_non_const_trait_method]
fn is_sorted_by<F>(mut self, compare: F) -> bool
where
Self: Sized,
@ -4031,6 +4104,7 @@ pub trait Iterator {
/// ```
#[inline]
#[stable(feature = "is_sorted", since = "1.82.0")]
#[rustc_non_const_trait_method]
fn is_sorted_by_key<F, K>(self, f: F) -> bool
where
Self: Sized,
@ -4046,6 +4120,7 @@ pub trait Iterator {
#[inline]
#[doc(hidden)]
#[unstable(feature = "trusted_random_access", issue = "none")]
#[rustc_non_const_trait_method]
unsafe fn __iterator_get_unchecked(&mut self, _idx: usize) -> Self::Item
where
Self: TrustedRandomAccessNoCoerce,

View file

@ -29,7 +29,7 @@ use crate::{mem, ptr};
/// mem::forget(boxed); // <-- this is UB!
/// ```
///
/// Even though the `Box`e's destructor is not run (and thus we don't have a double free bug), this
/// Even though the `Box`'s destructor is not run (and thus we don't have a double free bug), this
/// code is still UB. This is because when moving `boxed` into `forget`, its validity invariants
/// are asserted, causing UB since the `Box` is dangling. The safety comment is as such wrong, as
/// moving the `boxed` variable as part of the `forget` call *is* a use.

View file

@ -773,7 +773,7 @@ impl<T: Copy> Bound<&T> {
/// ```
#[unstable(feature = "bound_copied", issue = "145966")]
#[must_use]
pub fn copied(self) -> Bound<T> {
pub const fn copied(self) -> Bound<T> {
match self {
Bound::Unbounded => Bound::Unbounded,
Bound::Included(x) => Bound::Included(*x),

View file

@ -584,7 +584,7 @@
use crate::clone::TrivialClone;
use crate::iter::{self, FusedIterator, TrustedLen};
use crate::marker::Destruct;
use crate::ops::{self, ControlFlow, Deref, DerefMut};
use crate::ops::{self, ControlFlow, Deref, DerefMut, Residual, Try};
use crate::panicking::{panic, panic_display};
use crate::pin::Pin;
use crate::{cmp, convert, hint, mem, slice};
@ -1816,6 +1816,49 @@ impl<T> Option<T> {
unsafe { self.as_mut().unwrap_unchecked() }
}
/// Inserts a value computed by the closure if the option is `None`, then
/// returns a mutable reference to the contained value.
///
/// If the closure short-circuits with a residual value such as `Err` or
/// `None`, that residual is propagated and nothing is inserted.
///
/// If the option is already `Some`, the closure is never called and the
/// existing value is left untouched.
///
/// # Examples
///
/// ```
/// #![feature(option_get_or_try_insert_with)]
/// let mut o1: Option<u32> = None;
/// let mut o2: Option<u8> = None;
///
/// let number = "12345";
///
/// assert_eq!(o1.get_or_try_insert_with(|| number.parse()).copied(), Ok(12345));
/// assert!(o2.get_or_try_insert_with(|| number.parse()).is_err());
/// assert_eq!(o1, Some(12345));
/// assert_eq!(o2, None);
/// ```
#[inline]
#[unstable(feature = "option_get_or_try_insert_with", issue = "143648")]
pub fn get_or_try_insert_with<'a, R, F>(
    &'a mut self,
    f: F,
) -> <R::Residual as Residual<&'a mut T>>::TryType
where
    F: FnOnce() -> R,
    R: Try<Output = T, Residual: Residual<&'a mut T>>,
{
    if self.is_none() {
        // `?` propagates the closure's residual without touching `self`.
        *self = Some(f()?);
    }
    // SAFETY: `self` is necessarily `Some` at this point: either it already
    // was, or the branch above just stored the closure's successful output.
    let value = unsafe { self.as_mut().unwrap_unchecked() };
    Try::from_output(value)
}
/////////////////////////////////////////////////////////////////////////
// Misc
/////////////////////////////////////////////////////////////////////////
@ -2257,7 +2300,8 @@ impl<T> const Default for Option<T> {
}
#[stable(feature = "rust1", since = "1.0.0")]
impl<T> IntoIterator for Option<T> {
#[rustc_const_unstable(feature = "const_iter", issue = "92476")]
impl<T> const IntoIterator for Option<T> {
type Item = T;
type IntoIter = IntoIter<T>;
@ -2429,7 +2473,8 @@ struct Item<A> {
opt: Option<A>,
}
impl<A> Iterator for Item<A> {
#[rustc_const_unstable(feature = "const_iter", issue = "92476")]
impl<A> const Iterator for Item<A> {
type Item = A;
#[inline]
@ -2439,7 +2484,7 @@ impl<A> Iterator for Item<A> {
#[inline]
fn size_hint(&self) -> (usize, Option<usize>) {
let len = self.len();
let len = self.opt.len();
(len, Some(len))
}
}
@ -2563,7 +2608,8 @@ pub struct IntoIter<A> {
}
#[stable(feature = "rust1", since = "1.0.0")]
impl<A> Iterator for IntoIter<A> {
#[rustc_const_unstable(feature = "const_iter", issue = "92476")]
impl<A> const Iterator for IntoIter<A> {
type Item = A;
#[inline]

View file

@ -13,7 +13,12 @@ use crate::{cmp, fmt, hash, mem, num};
#[unstable(feature = "ptr_alignment_type", issue = "102070")]
#[derive(Copy, Clone, PartialEq, Eq)]
#[repr(transparent)]
pub struct Alignment(AlignmentEnum);
pub struct Alignment {
// This field is never used directly (nor is the enum),
// as it's just there to convey the validity invariant.
// (Hopefully it'll eventually be a pattern type instead.)
_inner_repr_trick: AlignmentEnum,
}
// Alignment is `repr(usize)`, but via extra steps.
const _: () = assert!(size_of::<Alignment>() == size_of::<usize>());
@ -37,7 +42,7 @@ impl Alignment {
/// assert_eq!(Alignment::MIN.as_usize(), 1);
/// ```
#[unstable(feature = "ptr_alignment_type", issue = "102070")]
pub const MIN: Self = Self(AlignmentEnum::_Align1Shl0);
pub const MIN: Self = Self::new(1).unwrap();
/// Returns the alignment for a type.
///
@ -166,7 +171,10 @@ impl Alignment {
#[unstable(feature = "ptr_alignment_type", issue = "102070")]
#[inline]
pub const fn as_usize(self) -> usize {
self.0 as usize
// Going through `as_nonzero` helps this be more clearly the inverse of
// `new_unchecked`, letting MIR optimizations fold it away.
self.as_nonzero().get()
}
/// Returns the alignment as a <code>[NonZero]<[usize]></code>.

View file

@ -910,29 +910,7 @@ where
R: [const] ops::RangeBounds<usize> + [const] Destruct,
{
let len = bounds.end;
let end = match range.end_bound() {
ops::Bound::Included(&end) if end >= len => slice_index_fail(0, end, len),
// Cannot overflow because `end < len` implies `end < usize::MAX`.
ops::Bound::Included(&end) => end + 1,
ops::Bound::Excluded(&end) if end > len => slice_index_fail(0, end, len),
ops::Bound::Excluded(&end) => end,
ops::Bound::Unbounded => len,
};
let start = match range.start_bound() {
ops::Bound::Excluded(&start) if start >= end => slice_index_fail(start, end, len),
// Cannot overflow because `start < end` implies `start < usize::MAX`.
ops::Bound::Excluded(&start) => start + 1,
ops::Bound::Included(&start) if start > end => slice_index_fail(start, end, len),
ops::Bound::Included(&start) => start,
ops::Bound::Unbounded => 0,
};
ops::Range { start, end }
into_slice_range(len, (range.start_bound().copied(), range.end_bound().copied()))
}
/// Performs bounds checking of a range without panicking.
@ -972,20 +950,8 @@ where
R: ops::RangeBounds<usize>,
{
let len = bounds.end;
let start = match range.start_bound() {
ops::Bound::Included(&start) => start,
ops::Bound::Excluded(start) => start.checked_add(1)?,
ops::Bound::Unbounded => 0,
};
let end = match range.end_bound() {
ops::Bound::Included(end) => end.checked_add(1)?,
ops::Bound::Excluded(&end) => end,
ops::Bound::Unbounded => len,
};
if start > end || end > len { None } else { Some(ops::Range { start, end }) }
let r = into_range(len, (range.start_bound().copied(), range.end_bound().copied()))?;
if r.start > r.end || r.end > len { None } else { Some(r) }
}
/// Converts a pair of `ops::Bound`s into `ops::Range` without performing any
@ -1011,6 +977,7 @@ pub(crate) const fn into_range_unchecked(
/// Converts pair of `ops::Bound`s into `ops::Range`.
/// Returns `None` on overflowing indices.
#[rustc_const_unstable(feature = "const_range", issue = "none")]
#[inline]
pub(crate) const fn into_range(
len: usize,
(start, end): (ops::Bound<usize>, ops::Bound<usize>),
@ -1036,7 +1003,8 @@ pub(crate) const fn into_range(
/// Converts pair of `ops::Bound`s into `ops::Range`.
/// Panics on overflowing indices.
pub(crate) fn into_slice_range(
#[inline]
pub(crate) const fn into_slice_range(
len: usize,
(start, end): (ops::Bound<usize>, ops::Bound<usize>),
) -> ops::Range<usize> {

View file

@ -3939,6 +3939,219 @@ impl<T> [T] {
}
}
/// Moves the elements of this slice `N` places to the left, returning the ones
/// that "fall off" the front, and putting `inserted` at the end.
///
/// Equivalently, you can think of concatenating `self` and `inserted` into one
/// long sequence, then returning the left-most `N` items and putting the rest
/// into `self`:
///
/// ```text
/// self (before)   inserted
/// vvvvvvvvvvvvvvv vvv
/// [1, 2, 3, 4, 5] [9]
///  ↙  ↙  ↙  ↙  ↙  ↙
/// [1] [2, 3, 4, 5, 9]
/// ^^^ ^^^^^^^^^^^^^^^
/// returned  self (after)
/// ```
///
/// See also [`Self::shift_right`] and compare [`Self::rotate_left`].
///
/// # Examples
///
/// ```
/// #![feature(slice_shift)]
///
/// // Same as the diagram above
/// let mut a = [1, 2, 3, 4, 5];
/// let inserted = [9];
/// let returned = a.shift_left(inserted);
/// assert_eq!(returned, [1]);
/// assert_eq!(a, [2, 3, 4, 5, 9]);
///
/// // You can shift multiple items at a time
/// let mut a = *b"Hello world";
/// assert_eq!(a.shift_left(*b" peace"), *b"Hello ");
/// assert_eq!(a, *b"world peace");
///
/// // The name comes from this operation's similarity to bitshifts
/// let mut a: u8 = 0b10010110;
/// a <<= 3;
/// assert_eq!(a, 0b10110000_u8);
/// let mut a: [_; 8] = [1, 0, 0, 1, 0, 1, 1, 0];
/// a.shift_left([0; 3]);
/// assert_eq!(a, [1, 0, 1, 1, 0, 0, 0, 0]);
///
/// // Remember you can sub-slice to affect less than the whole slice.
/// // For example, this is similar to `.remove(1)` + `.insert(4, 'Z')`
/// let mut a = ['a', 'b', 'c', 'd', 'e', 'f'];
/// assert_eq!(a[1..=4].shift_left(['Z']), ['b']);
/// assert_eq!(a, ['a', 'c', 'd', 'e', 'Z', 'f']);
///
/// // If the size matches it's equivalent to `mem::replace`
/// let mut a = [1, 2, 3];
/// assert_eq!(a.shift_left([7, 8, 9]), [1, 2, 3]);
/// assert_eq!(a, [7, 8, 9]);
///
/// // Some of the "inserted" elements end up returned if the slice is too short
/// let mut a = [];
/// assert_eq!(a.shift_left([1, 2, 3]), [1, 2, 3]);
/// let mut a = [9];
/// assert_eq!(a.shift_left([1, 2, 3]), [9, 1, 2]);
/// assert_eq!(a, [3]);
/// ```
#[unstable(feature = "slice_shift", issue = "151772")]
pub const fn shift_left<const N: usize>(&mut self, inserted: [T; N]) -> [T; N] {
    // `shift` is the number of original elements that survive in the slice,
    // i.e. `self.len() - N`; `checked_sub` distinguishes the "slice is at
    // least as long as the arrays" case from the "slice is shorter" case.
    if let Some(shift) = self.len().checked_sub(N) {
        // SAFETY: Having just checked that the inserted/returned arrays are
        // shorter than (or the same length as) the slice:
        // 1. The read for the items to return is in-bounds
        // 2. We can `memmove` the slice over to cover the items we're returning
        //    to ensure those aren't double-dropped
        // 3. Then we write (in-bounds for the same reason as the read) the
        //    inserted items atop the items of the slice that we just duplicated
        //
        // And none of this can panic, so there's no risk of intermediate unwinds.
        unsafe {
            let ptr = self.as_mut_ptr();
            // Read out the first `N` elements before they're overwritten.
            let returned = ptr.cast_array::<N>().read();
            // memmove the surviving `shift` elements down to the front.
            ptr.copy_from(ptr.add(N), shift);
            // Write the inserted elements over the now-duplicated tail.
            ptr.add(shift).cast_array::<N>().write(inserted);
            returned
        }
    } else {
        // SAFETY: Having checked that the slice is strictly shorter than the
        // inserted/returned arrays, it means we'll be copying the whole slice
        // into the returned array, but that's not enough on its own. We also
        // need to copy some of the inserted array into the returned array,
        // with the rest going into the slice. Because `&mut` is exclusive
        // and we own both `inserted` and `returned`, they're all disjoint
        // allocations from each other so we can use `nonoverlapping` copies.
        //
        // We avoid double-frees by `ManuallyDrop`ing the inserted items,
        // since we always copy them to other locations that will drop them
        // instead. Plus nothing in here can panic -- it's just memcpy three
        // times -- so there's no intermediate unwinds to worry about.
        unsafe {
            let len = self.len();
            let slice = self.as_mut_ptr();
            let inserted = mem::ManuallyDrop::new(inserted);
            let inserted = (&raw const inserted).cast::<T>();
            let mut returned = MaybeUninit::<[T; N]>::uninit();
            let ptr = returned.as_mut_ptr().cast::<T>();
            // Whole slice (len < N elements) becomes the front of `returned`...
            ptr.copy_from_nonoverlapping(slice, len);
            // ...followed by the first `N - len` inserted elements...
            ptr.add(len).copy_from_nonoverlapping(inserted, N - len);
            // ...and the last `len` inserted elements refill the slice.
            slice.copy_from_nonoverlapping(inserted.add(N - len), len);
            returned.assume_init()
        }
    }
}
/// Moves the elements of this slice `N` places to the right, returning the ones
/// that "fall off" the back, and putting `inserted` at the beginning.
///
/// Equivalently, you can think of concatenating `inserted` and `self` into one
/// long sequence, then returning the right-most `N` items and putting the rest
/// into `self`:
///
/// ```text
/// inserted  self (before)
/// vvv vvvvvvvvvvvvvvv
/// [0] [5, 6, 7, 8, 9]
///  ↘  ↘  ↘  ↘  ↘  ↘
/// [0, 5, 6, 7, 8] [9]
/// ^^^^^^^^^^^^^^^ ^^^
/// self (after)  returned
/// ```
///
/// See also [`Self::shift_left`] and compare [`Self::rotate_right`].
///
/// # Examples
///
/// ```
/// #![feature(slice_shift)]
///
/// // Same as the diagram above
/// let mut a = [5, 6, 7, 8, 9];
/// let inserted = [0];
/// let returned = a.shift_right(inserted);
/// assert_eq!(returned, [9]);
/// assert_eq!(a, [0, 5, 6, 7, 8]);
///
/// // The name comes from this operation's similarity to bitshifts
/// let mut a: u8 = 0b10010110;
/// a >>= 3;
/// assert_eq!(a, 0b00010010_u8);
/// let mut a: [_; 8] = [1, 0, 0, 1, 0, 1, 1, 0];
/// a.shift_right([0; 3]);
/// assert_eq!(a, [0, 0, 0, 1, 0, 0, 1, 0]);
///
/// // Remember you can sub-slice to affect less than the whole slice.
/// // For example, this is similar to `.remove(4)` + `.insert(1, 'Z')`
/// let mut a = ['a', 'b', 'c', 'd', 'e', 'f'];
/// assert_eq!(a[1..=4].shift_right(['Z']), ['e']);
/// assert_eq!(a, ['a', 'Z', 'b', 'c', 'd', 'f']);
///
/// // If the size matches it's equivalent to `mem::replace`
/// let mut a = [1, 2, 3];
/// assert_eq!(a.shift_right([7, 8, 9]), [1, 2, 3]);
/// assert_eq!(a, [7, 8, 9]);
///
/// // Some of the "inserted" elements end up returned if the slice is too short
/// let mut a = [];
/// assert_eq!(a.shift_right([1, 2, 3]), [1, 2, 3]);
/// let mut a = [9];
/// assert_eq!(a.shift_right([1, 2, 3]), [2, 3, 9]);
/// assert_eq!(a, [1]);
/// ```
#[unstable(feature = "slice_shift", issue = "151772")]
pub const fn shift_right<const N: usize>(&mut self, inserted: [T; N]) -> [T; N] {
    // `shift` is the number of original elements that survive in the slice,
    // i.e. `self.len() - N`; `checked_sub` distinguishes the "slice is at
    // least as long as the arrays" case from the "slice is shorter" case.
    if let Some(shift) = self.len().checked_sub(N) {
        // SAFETY: Having just checked that the inserted/returned arrays are
        // shorter than (or the same length as) the slice:
        // 1. The read for the items to return is in-bounds
        // 2. We can `memmove` the slice over to cover the items we're returning
        //    to ensure those aren't double-dropped
        // 3. Then we write (in-bounds for the same reason as the read) the
        //    inserted items atop the items of the slice that we just duplicated
        //
        // And none of this can panic, so there's no risk of intermediate unwinds.
        unsafe {
            let ptr = self.as_mut_ptr();
            // Read out the last `N` elements before they're overwritten.
            let returned = ptr.add(shift).cast_array::<N>().read();
            // memmove the surviving `shift` elements up towards the back.
            ptr.add(N).copy_from(ptr, shift);
            // Write the inserted elements over the now-duplicated front.
            ptr.cast_array::<N>().write(inserted);
            returned
        }
    } else {
        // SAFETY: Having checked that the slice is strictly shorter than the
        // inserted/returned arrays, it means we'll be copying the whole slice
        // into the returned array, but that's not enough on its own. We also
        // need to copy some of the inserted array into the returned array,
        // with the rest going into the slice. Because `&mut` is exclusive
        // and we own both `inserted` and `returned`, they're all disjoint
        // allocations from each other so we can use `nonoverlapping` copies.
        //
        // We avoid double-frees by `ManuallyDrop`ing the inserted items,
        // since we always copy them to other locations that will drop them
        // instead. Plus nothing in here can panic -- it's just memcpy three
        // times -- so there's no intermediate unwinds to worry about.
        unsafe {
            let len = self.len();
            let slice = self.as_mut_ptr();
            let inserted = mem::ManuallyDrop::new(inserted);
            let inserted = (&raw const inserted).cast::<T>();
            let mut returned = MaybeUninit::<[T; N]>::uninit();
            let ptr = returned.as_mut_ptr().cast::<T>();
            // Whole slice (len < N elements) becomes the back of `returned`...
            ptr.add(N - len).copy_from_nonoverlapping(slice, len);
            // ...preceded by the last `N - len` inserted elements...
            ptr.copy_from_nonoverlapping(inserted.add(len), N - len);
            // ...and the first `len` inserted elements refill the slice.
            slice.copy_from_nonoverlapping(inserted, len);
            returned.assume_init()
        }
    }
}
/// Fills `self` with elements by cloning `value`.
///
/// # Examples

View file

@ -99,3 +99,18 @@ pub fn extend_for_unit() {
}
assert_eq!(x, 5);
}
#[test]
pub fn test_const_iter() {
    // Evaluated at compile time: iterating an `Option` in a const block
    // exercises the `const Iterator`/`IntoIterator` impls.
    const RAN: bool = {
        let opt = Some(42);
        let mut seen = false;
        #[expect(for_loops_over_fallibles)]
        for value in opt {
            assert!(value == 42);
            seen = true;
        }
        seen
    };
    assert!(RAN);
}

View file

@ -27,6 +27,7 @@
#![feature(const_drop_in_place)]
#![feature(const_eval_select)]
#![feature(const_index)]
#![feature(const_iter)]
#![feature(const_ops)]
#![feature(const_option_ops)]
#![feature(const_ref_cell)]
@ -101,6 +102,7 @@
#![feature(slice_index_methods)]
#![feature(slice_internals)]
#![feature(slice_partition_dedup)]
#![feature(slice_shift)]
#![feature(slice_split_once)]
#![feature(sliceindex_wrappers)]
#![feature(split_array)]

View file

@ -2507,3 +2507,41 @@ fn test_slice_from_raw_parts_in_const() {
assert_eq!(EMPTY_SLICE.as_ptr().addr(), 123456);
assert_eq!(EMPTY_SLICE.len(), 0);
}
#[test]
fn test_shift_left() {
    // Runs one shift and checks both the returned array and the slice's
    // final contents in a single assertion.
    #[track_caller]
    fn check<const M: usize, const N: usize>(
        mut slice: [i32; M],
        pushed: [i32; N],
        expect_returned: [i32; N],
        expect_slice: [i32; M],
    ) {
        let returned = slice.shift_left(pushed);
        assert_eq!((returned, slice), (expect_returned, expect_slice));
    }
    check([], [1, 2, 3, 4, 5], [1, 2, 3, 4, 5], []);
    check([1], [2, 3, 4, 5], [1, 2, 3, 4], [5]);
    check([1, 2], [3, 4, 5], [1, 2, 3], [4, 5]);
    check([1, 2, 3], [4, 5], [1, 2], [3, 4, 5]);
    check([1, 2, 3, 4], [5], [1], [2, 3, 4, 5]);
    check([1, 2, 3, 4, 5], [], [], [1, 2, 3, 4, 5]);
}
#[test]
fn test_shift_right() {
    // Runs one shift and checks both the returned array and the slice's
    // final contents in a single assertion.
    #[track_caller]
    fn check<const M: usize, const N: usize>(
        pushed: [i32; N],
        mut slice: [i32; M],
        expect_slice: [i32; M],
        expect_returned: [i32; N],
    ) {
        let returned = slice.shift_right(pushed);
        assert_eq!((returned, slice), (expect_returned, expect_slice));
    }
    check([], [1, 2, 3, 4, 5], [1, 2, 3, 4, 5], []);
    check([1], [2, 3, 4, 5], [1, 2, 3, 4], [5]);
    check([1, 2], [3, 4, 5], [1, 2, 3], [4, 5]);
    check([1, 2, 3], [4, 5], [1, 2], [3, 4, 5]);
    check([1, 2, 3, 4], [5], [1], [2, 3, 4, 5]);
    check([1, 2, 3, 4, 5], [], [], [1, 2, 3, 4, 5]);
}

View file

@ -1299,19 +1299,19 @@ impl Step for Tidy {
/// for the `dev` or `nightly` channels.
fn run(self, builder: &Builder<'_>) {
let mut cmd = builder.tool_cmd(Tool::Tidy);
cmd.arg(&builder.src);
cmd.arg(&builder.initial_cargo);
cmd.arg(&builder.out);
cmd.arg(format!("--root-path={}", &builder.src.display()));
cmd.arg(format!("--cargo-path={}", &builder.initial_cargo.display()));
cmd.arg(format!("--output-dir={}", &builder.out.display()));
// Tidy is heavily IO constrained. Still respect `-j`, but use a higher limit if `jobs` hasn't been configured.
let jobs = builder.config.jobs.unwrap_or_else(|| {
8 * std::thread::available_parallelism().map_or(1, std::num::NonZeroUsize::get) as u32
});
cmd.arg(jobs.to_string());
cmd.arg(format!("--concurrency={jobs}"));
// pass the path to the yarn command used for installing js deps.
if let Some(yarn) = &builder.config.yarn {
cmd.arg(yarn);
cmd.arg(format!("--npm-path={}", yarn.display()));
} else {
cmd.arg("yarn");
cmd.arg("--npm-path=yarn");
}
if builder.is_verbose() {
cmd.arg("--verbose");

View file

@ -14,7 +14,7 @@ jobs:
if: github.repository == 'rust-lang/rustc-dev-guide'
runs-on: ubuntu-latest
env:
MDBOOK_VERSION: 0.5.1
MDBOOK_VERSION: 0.5.2
MDBOOK_LINKCHECK2_VERSION: 0.11.0
MDBOOK_MERMAID_VERSION: 0.17.0
MDBOOK_OUTPUT__LINKCHECK__FOLLOW_WEB_LINKS: ${{ github.event_name != 'pull_request' }}

View file

@ -57,11 +57,39 @@ cache-timeout = 90000
warning-policy = "error"
[output.html.redirect]
"/borrow_check.html" = "borrow-check.html"
"/borrow_check/drop_check.html" = "/borrow-check/drop-check.html"
"/borrow_check/moves_and_initialization.html" = "/borrow-check/moves-and-initialization.html"
"/borrow_check/moves_and_initialization/move_paths.html" = "/borrow-check/moves-and-initialization/move-paths.html"
"/borrow_check/opaque-types-region-inference-restrictions.html" = "/borrow-check/opaque-types-region-inference-restrictions.html"
"/borrow_check/region_inference.html" = "/borrow-check/region-inference.html"
"/borrow_check/region_inference/closure_constraints.html" = "/borrow-check/region-inference/closure-constraints.html"
"/borrow_check/region_inference/constraint_propagation.html" = "/borrow-check/region-inference/constraint-propagation.html"
"/borrow_check/region_inference/error_reporting.html" = "/borrow-check/region-inference/error-reporting.html"
"/borrow_check/region_inference/lifetime_parameters.html" = "/borrow-check/region-inference/lifetime-parameters.html"
"/borrow_check/region_inference/member_constraints.html" = "/borrow-check/region-inference/member-constraints.html"
"/borrow_check/region_inference/placeholders_and_universes.html" = "/borrow-check/region-inference/placeholders-and-universes.html"
"/borrow_check/two_phase_borrows.html" = "/borrow-check/two-phase-borrows.html"
"/borrow_check/type_check.html" = "/borrow-check/type-check.html"
"/compiletest.html" = "tests/compiletest.html"
"/diagnostics/sessiondiagnostic.html" = "diagnostic-structs.html"
"/diagnostics/diagnostic-codes.html" = "error-codes.html"
"/diagnostics/sessiondiagnostic.html" = "diagnostic-structs.html"
"/early_late_parameters.html" = "early-late-parameters.html"
"/generic_parameters_summary.html" = "generic-parameters-summary.html"
"/implementing_new_features.html" = "implementing-new-features.html"
"/miri.html" = "const-eval/interpret.html"
"/profiling/with_perf.html" = "with-perf.html"
"/profiling/with_rustc_perf.html" = "with-rustc-perf.html"
"/profiling/wpa_profiling.html" = "wpa-profiling.html"
"/stabilization_guide.html" = "stabilization-guide.html"
"/stabilization_report_template.html" = "stabilization-report-template.html"
"/tests/fuchsia.html" = "ecosystem-test-jobs/fuchsia.html"
"/tests/headers.html" = "directives.html"
"/tests/integration.html" = "ecosystem.html"
"/tests/rust-for-linux.html" = "ecosystem-test-jobs/rust-for-linux.html"
"/ty_module/binders.html" = "/ty-module/binders.html"
"/ty_module/early_binder.html" = "/ty-module/early-binder.html"
"/ty_module/generic_arguments.html" = "/ty-module/generic-arguments.html"
"/ty_module/instantiating_binders.html" = "/ty-module/instantiating-binders.html"
"/ty_module/param_ty_const_regions.html" = "/ty-module/param-ty-const-regions.html"
"/typing_parameter_envs.html" = "typing-parameter-envs.html"

View file

@ -24,7 +24,7 @@ static REGEX_IGNORE_END: LazyLock<Regex> =
static REGEX_IGNORE_LINK_TARGETS: LazyLock<Regex> =
LazyLock::new(|| Regex::new(r"^\[.+\]: ").unwrap());
static REGEX_SPLIT: LazyLock<Regex> =
LazyLock::new(|| Regex::new(r"([^\.\d\-\*]\.|[^r]\?|!)\s").unwrap());
LazyLock::new(|| Regex::new(r"([^\.\d\-\*]\.|[^r\~]\?|!)\s").unwrap());
// list elements, numbered (1.) or not (- and *)
static REGEX_LIST_ENTRY: LazyLock<Regex> =
LazyLock::new(|| Regex::new(r"^\s*(\d\.|\-|\*|\d\))\s+").unwrap());
@ -83,6 +83,8 @@ fn ignore(line: &str, in_code_block: bool) -> bool {
|| line.contains(" etc.")
|| line.contains("i.e.")
|| line.contains("et. al")
|| line.contains("<!--")
|| line.contains("-->")
|| line.contains('|')
|| line.trim_start().starts_with('>')
|| line.starts_with('#')
@ -204,6 +206,7 @@ git log main.. compiler
o? whatever
r? @reviewer
r? @reviewer
~? diagnostic
";
let expected = "
# some. heading
@ -236,6 +239,7 @@ o?
whatever
r? @reviewer
r? @reviewer
~? diagnostic
";
assert_eq!(expected, comply(original));
}
@ -263,6 +267,11 @@ leave the
text alone
```
<!-- ignore
html comment opening
--> ignore
html comment closing
handle the
indented well
@ -289,6 +298,11 @@ leave the
text alone
```
<!-- ignore
html comment opening
--> ignore
html comment closing
handle the indented well
[a target]: https://example.com

View file

@ -1 +1 @@
44a5b55557c26353f388400d7da95527256fe260
370143facfb348ad3b29749c0393402d76b280c3

View file

@ -39,9 +39,9 @@
- [Debugging the compiler](./compiler-debugging.md)
- [Using the tracing/logging instrumentation](./tracing.md)
- [Profiling the compiler](./profiling.md)
- [with the linux perf tool](./profiling/with_perf.md)
- [with Windows Performance Analyzer](./profiling/wpa_profiling.md)
- [with the Rust benchmark suite](./profiling/with_rustc_perf.md)
- [with the linux perf tool](./profiling/with-perf.md)
- [with Windows Performance Analyzer](./profiling/wpa-profiling.md)
- [with the Rust benchmark suite](./profiling/with-rustc-perf.md)
- [crates.io dependencies](./crates-io.md)
# Contributing to Rust
@ -51,11 +51,11 @@
- [Using Git](./git.md)
- [Mastering @rustbot](./rustbot.md)
- [Walkthrough: a typical contribution](./walkthrough.md)
- [Implementing new language features](./implementing_new_features.md)
- [Implementing new language features](./implementing-new-features.md)
- [Stability guarantees](./stability-guarantees.md)
- [Stability attributes](./stability.md)
- [Stabilizing language features](./stabilization_guide.md)
- [Stabilization report template](./stabilization_report_template.md)
- [Stabilizing language features](./stabilization-guide.md)
- [Stabilization report template](./stabilization-report-template.md)
- [Feature Gates](./feature-gates.md)
- [Coding conventions](./conventions.md)
- [Procedures for breaking changes](./bug-fix-procedure.md)
@ -106,6 +106,7 @@
- [GPU offload internals](./offload/internals.md)
- [Installation](./offload/installation.md)
- [Usage](./offload/usage.md)
- [Contributing](./offload/contributing.md)
- [Autodiff internals](./autodiff/internals.md)
- [Installation](./autodiff/installation.md)
- [How to debug](./autodiff/debugging.md)
@ -154,17 +155,17 @@
# Analysis
- [Prologue](./part-4-intro.md)
- [Generic parameter definitions](./generic_parameters_summary.md)
- [`EarlyBinder` and instantiating parameters](./ty_module/early_binder.md)
- [Binders and Higher ranked regions](./ty_module/binders.md)
- [Instantiating binders](./ty_module/instantiating_binders.md)
- [Early vs Late bound parameters](./early_late_parameters.md)
- [Generic parameter definitions](./generic-parameters-summary.md)
- [`EarlyBinder` and instantiating parameters](./ty-module/early-binder.md)
- [Binders and Higher ranked regions](./ty-module/binders.md)
- [Instantiating binders](./ty-module/instantiating-binders.md)
- [Early vs Late bound parameters](./early-late-parameters.md)
- [The `ty` module: representing types](./ty.md)
- [ADTs and Generic Arguments](./ty_module/generic_arguments.md)
- [Parameter types/consts/regions](./ty_module/param_ty_const_regions.md)
- [ADTs and Generic Arguments](./ty-module/generic-arguments.md)
- [Parameter types/consts/regions](./ty-module/param-ty-const-regions.md)
- [`TypeFolder` and `TypeFoldable`](./ty-fold.md)
- [Aliases and Normalization](./normalization.md)
- [Typing/Param Envs](./typing_parameter_envs.md)
- [Typing/Param Envs](./typing-parameter-envs.md)
- [Type inference](./type-inference.md)
- [Trait solving](./traits/resolution.md)
- [Higher-ranked trait bounds](./traits/hrtb.md)
@ -197,25 +198,25 @@
- [Opaque types](./opaque-types-type-alias-impl-trait.md)
- [Inference details](./opaque-types-impl-trait-inference.md)
- [Return Position Impl Trait In Trait](./return-position-impl-trait-in-trait.md)
- [Region inference restrictions][opaque-infer]
- [Const condition checking](./effects.md)
- [Region inference restrictions](./borrow-check/opaque-types-region-inference-restrictions.md)
- [Const traits and const condition checking](./effects.md)
- [Pattern and exhaustiveness checking](./pat-exhaustive-checking.md)
- [Unsafety checking](./unsafety-checking.md)
- [MIR dataflow](./mir/dataflow.md)
- [Drop elaboration](./mir/drop-elaboration.md)
- [The borrow checker](./borrow_check.md)
- [Tracking moves and initialization](./borrow_check/moves_and_initialization.md)
- [Move paths](./borrow_check/moves_and_initialization/move_paths.md)
- [MIR type checker](./borrow_check/type_check.md)
- [Drop check](./borrow_check/drop_check.md)
- [Region inference](./borrow_check/region_inference.md)
- [Constraint propagation](./borrow_check/region_inference/constraint_propagation.md)
- [Lifetime parameters](./borrow_check/region_inference/lifetime_parameters.md)
- [Member constraints](./borrow_check/region_inference/member_constraints.md)
- [Placeholders and universes][pau]
- [Closure constraints](./borrow_check/region_inference/closure_constraints.md)
- [Error reporting](./borrow_check/region_inference/error_reporting.md)
- [Two-phase-borrows](./borrow_check/two_phase_borrows.md)
- [The borrow checker](./borrow-check.md)
- [Tracking moves and initialization](./borrow-check/moves-and-initialization.md)
- [Move paths](./borrow-check/moves-and-initialization/move-paths.md)
- [MIR type checker](./borrow-check/type-check.md)
- [Drop check](./borrow-check/drop-check.md)
- [Region inference](./borrow-check/region-inference.md)
- [Constraint propagation](./borrow-check/region-inference/constraint-propagation.md)
- [Lifetime parameters](./borrow-check/region-inference/lifetime-parameters.md)
- [Member constraints](./borrow-check/region-inference/member-constraints.md)
- [Placeholders and universes](./borrow-check/region-inference/placeholders-and-universes.md)
- [Closure constraints](./borrow-check/region-inference/closure-constraints.md)
- [Error reporting](./borrow-check/region-inference/error-reporting.md)
- [Two-phase-borrows](./borrow-check/two-phase-borrows.md)
- [Closure capture inference](./closure.md)
- [Async closures/"coroutine-closures"](coroutine-closures.md)
@ -263,8 +264,3 @@
[Appendix E: Bibliography](./appendix/bibliography.md)
[Appendix Z: HumorRust](./appendix/humorust.md)
---
[pau]: ./borrow_check/region_inference/placeholders_and_universes.md
[opaque-infer]: ./borrow_check/opaque-types-region-inference-restrictions.md

View file

@ -243,8 +243,7 @@ use in lambda calculus evaluation (see [this Wikipedia article][wikideb] for
more). In `rustc`, we use de Bruijn indices to [represent generic types][sub].
[wikideb]: https://en.wikipedia.org/wiki/De_Bruijn_index
[sub]: ../ty_module/generic_arguments.md
[sub]: ../ty-module/generic-arguments.md
Here is a basic example of how de Bruijn indices might be used for closures (we
don't actually do this in `rustc` though!):

View file

@ -39,5 +39,5 @@ Item | Kind | Short description | Chapter |
[Emitting Diagnostics]: ../diagnostics.html
[Macro expansion]: ../macro-expansion.html
[Name resolution]: ../name-resolution.html
[Parameter Environment]: ../typing_parameter_envs.html
[Parameter Environment]: ../typing-parameter-envs.html
[Trait Solving: Goals and Clauses]: ../traits/goals-and-clauses.html#domain-goals

View file

@ -53,10 +53,10 @@ Term | Meaning
<span id="normalize">normalize</span> | A general term for converting to a more canonical form, but in the case of rustc typically refers to [associated type normalization](../traits/goals-and-clauses.md#normalizeprojection---type).
<span id="newtype">newtype</span> | A wrapper around some other type (e.g., `struct Foo(T)` is a "newtype" for `T`). This is commonly used in Rust to give a stronger type for indices.
<span id="niche">niche</span> | Invalid bit patterns for a type _that can be used_ for layout optimizations. Some types cannot have certain bit patterns. For example, the `NonZero*` integers or the reference `&T` cannot be represented by a 0 bitstring. This means the compiler can perform layout optimizations by taking advantage of the invalid "niche value". An example application for this is the [*Discriminant elision on `Option`-like enums*](https://rust-lang.github.io/unsafe-code-guidelines/layout/enums.html#discriminant-elision-on-option-like-enums), which allows using a type's niche as the ["tag"](#tag) for an `enum` without requiring a separate field.
<span id="nll">NLL</span> | Short for [non-lexical lifetimes](../borrow_check/region_inference.md), this is an extension to Rust's borrowing system to make it be based on the control-flow graph.
<span id="nll">NLL</span> | Short for [non-lexical lifetimes](../borrow-check/region-inference.md), this is an extension to Rust's borrowing system to make it be based on the control-flow graph.
<span id="node-id">node-id or `NodeId`</span> | An index identifying a particular node in the AST or HIR; gradually being phased out and replaced with `HirId`. See [the HIR chapter for more](../hir.md#identifiers-in-the-hir).
<span id="obligation">obligation</span> | Something that must be proven by the trait system. ([see more](../traits/resolution.md))
<span id="placeholder">placeholder</span> | **NOTE: skolemization is deprecated by placeholder** a way of handling subtyping around "for-all" types (e.g., `for<'a> fn(&'a u32)`) as well as solving higher-ranked trait bounds (e.g., `for<'a> T: Trait<'a>`). See [the chapter on placeholder and universes](../borrow_check/region_inference/placeholders_and_universes.md) for more details.
<span id="placeholder">placeholder</span> | **NOTE: skolemization is deprecated by placeholder** a way of handling subtyping around "for-all" types (e.g., `for<'a> fn(&'a u32)`) as well as solving higher-ranked trait bounds (e.g., `for<'a> T: Trait<'a>`). See [the chapter on placeholder and universes](../borrow-check/region-inference/placeholders-and-universes.md) for more details.
<span id="point">point</span> | Used in the NLL analysis to refer to some particular location in the MIR; typically used to refer to a node in the control-flow graph.
<span id="projection">projection</span> | A general term for a "relative path", e.g. `x.f` is a "field projection", and `T::Item` is an ["associated type projection"](../traits/goals-and-clauses.md#trait-ref).
<span id="pc">promoted constants</span> | Constants extracted from a function and lifted to static scope; see [this section](../mir/index.md#promoted) for more details.

View file

@ -28,6 +28,7 @@ You can then run our test cases:
./x test --stage 1 tests/codegen-llvm/autodiff
./x test --stage 1 tests/pretty/autodiff
./x test --stage 1 tests/ui/autodiff
./x test --stage 1 tests/run-make/autodiff
./x test --stage 1 tests/ui/feature-gates/feature-gate-autodiff.rs
```

View file

@ -42,10 +42,10 @@ the [`mir_borrowck`] query.
- Next, we perform a number of
[dataflow analyses](./appendix/background.md#dataflow) that
compute what data is moved and when.
- We then do a [second type check](borrow_check/type_check.md) across the MIR:
- We then do a [second type check](borrow-check/type-check.md) across the MIR:
the purpose of this type check is to determine all of the constraints between
different regions.
- Next, we do [region inference](borrow_check/region_inference.md), which computes
- Next, we do [region inference](borrow-check/region-inference.md), which computes
the values of each region — basically, the points in the control-flow graph where
each lifetime must be valid according to the constraints we collected.
- At this point, we can compute the "borrows in scope" at each point.

View file

@ -20,15 +20,15 @@ Consider this example:
```rust,ignore
fn foo() {
let a: Vec<u32>;
// a is not initialized yet
a = vec![22];
// a is initialized here
std::mem::drop(a); // a is moved here
// a is no longer initialized here
let l = a.len(); //~ ERROR
@ -44,7 +44,7 @@ moves `a` into the call, and hence it becomes uninitialized again.
To make it easier to peruse, this section is broken into a number of
subsections:
- [Move paths](./moves_and_initialization/move_paths.html) the
- [Move paths](./moves-and-initialization/move-paths.md) the
*move path* concept that we use to track which local variables (or parts of
local variables, in some cases) are initialized.
- TODO *Rest not yet written* =)

View file

@ -158,7 +158,7 @@ See [#113971] for how we used to conflate the difference.
[#113971]: https://github.com/rust-lang/rust/issues/113971
[SCC]: https://en.wikipedia.org/wiki/Strongly_connected_component
[member constraints]: ./region_inference/member_constraints.md
[member constraints]: region-inference/member-constraints.md
**interaction with "once modulo regions" restriction**
In the example above, note the opaque type in the signature is `Opaque<'a>` and the one in the
@ -195,7 +195,7 @@ fn test<'a>() -> Opaque<'a> {
}
```
**Motivation:**
**Motivation:**
In closure bodies, external lifetimes, although being categorized as "universal" lifetimes,
behave more like existential lifetimes in that the relations between them are not known ahead of
time, instead their values are inferred just like existential lifetimes and the requirements are
@ -208,7 +208,7 @@ Here is an example that details how :
```rust
type Opaque<'x, 'y> = impl Sized;
//
//
fn test<'a, 'b>(s: &'a str) -> impl FnOnce() -> Opaque<'a, 'b> {
move || { s }
//~^ ERROR hidden type for `Opaque<'_, '_>` captures lifetime that does not appear in bounds

View file

@ -34,14 +34,14 @@ The MIR-based region analysis consists of two major functions:
- The [NLL RFC] also includes fairly thorough (and hopefully readable)
coverage.
[cp]: ./region_inference/constraint_propagation.md
[cp]: ./region-inference/constraint-propagation.md
[fvb]: ../appendix/background.md#free-vs-bound
[`replace_regions_in_mir`]: https://doc.rust-lang.org/nightly/nightly-rustc/rustc_borrowck/nll/fn.replace_regions_in_mir.html
[`compute_regions`]: https://doc.rust-lang.org/nightly/nightly-rustc/rustc_borrowck/nll/fn.compute_regions.html
[`RegionInferenceContext`]: https://doc.rust-lang.org/nightly/nightly-rustc/rustc_borrowck/region_infer/struct.RegionInferenceContext.html
[`solve`]: https://doc.rust-lang.org/nightly/nightly-rustc/rustc_borrowck/region_infer/struct.RegionInferenceContext.html#method.solve
[NLL RFC]: https://rust-lang.github.io/rfcs/2094-nll.html
[MIR type checker]: ./type_check.md
[MIR type checker]: ./type-check.md
## Universal regions
@ -97,7 +97,7 @@ The kinds of region elements are as follows:
- There is an element `!1` for each placeholder region `!1`. This
corresponds (intuitively) to some unknown set of other elements
for details on placeholders, see the section
[placeholders and universes](region_inference/placeholders_and_universes.md).
[placeholders and universes](region-inference/placeholders-and-universes.md).
## Constraints

Some files were not shown because too many files have changed in this diff Show more