Merge pull request #4404 from rust-lang/rustup-2025-06-18

Automatic Rustup
This commit is contained in:
Ralf Jung 2025-06-18 08:00:38 +00:00 committed by GitHub
commit e8fccabe55
No known key found for this signature in database
GPG key ID: B5690EEEBB952194
531 changed files with 10413 additions and 5373 deletions

View file

@ -710,6 +710,12 @@ impl Pat {
}
}
impl From<P<Pat>> for Pat {
fn from(value: P<Pat>) -> Self {
*value
}
}
/// A single field in a struct pattern.
///
/// Patterns like the fields of `Foo { x, ref y, ref mut z }`
@ -1553,17 +1559,23 @@ impl Expr {
)
}
/// Creates a dummy `P<Expr>`.
/// Creates a dummy `Expr`.
///
/// Should only be used when it will be replaced afterwards or as a return value when an error was encountered.
pub fn dummy() -> P<Expr> {
P(Expr {
pub fn dummy() -> Expr {
Expr {
id: DUMMY_NODE_ID,
kind: ExprKind::Dummy,
span: DUMMY_SP,
attrs: ThinVec::new(),
tokens: None,
})
}
}
}
impl From<P<Expr>> for Expr {
fn from(value: P<Expr>) -> Self {
*value
}
}
@ -2374,6 +2386,12 @@ impl Clone for Ty {
}
}
impl From<P<Ty>> for Ty {
fn from(value: P<Ty>) -> Self {
*value
}
}
impl Ty {
pub fn peel_refs(&self) -> &Self {
let mut final_ty = self;

View file

@ -168,7 +168,7 @@ pub trait MutVisitor: Sized + MutVisitorResult<Result = ()> {
walk_flat_map_arm(self, arm)
}
fn visit_pat(&mut self, p: &mut P<Pat>) {
fn visit_pat(&mut self, p: &mut Pat) {
walk_pat(self, p);
}
@ -176,7 +176,7 @@ pub trait MutVisitor: Sized + MutVisitorResult<Result = ()> {
walk_anon_const(self, c);
}
fn visit_expr(&mut self, e: &mut P<Expr>) {
fn visit_expr(&mut self, e: &mut Expr) {
walk_expr(self, e);
}
@ -194,7 +194,7 @@ pub trait MutVisitor: Sized + MutVisitorResult<Result = ()> {
walk_generic_arg(self, arg);
}
fn visit_ty(&mut self, t: &mut P<Ty>) {
fn visit_ty(&mut self, t: &mut Ty) {
walk_ty(self, t);
}

View file

@ -1085,6 +1085,7 @@ pub enum NtExprKind {
Expr2021 { inferred: bool },
}
/// A macro nonterminal, known in documentation as a fragment specifier.
#[derive(Debug, Copy, Clone, PartialEq, Eq, Encodable, Decodable, Hash, HashStable_Generic)]
pub enum NonterminalKind {
Item,

View file

@ -884,7 +884,7 @@ macro_rules! common_visitor_and_walkers {
TyKind::BareFn(function_declaration) => {
let BareFnTy { safety, ext: _, generic_params, decl, decl_span } =
&$($mut)? **function_declaration;
visit_safety(vis, safety);
try_visit!(visit_safety(vis, safety));
try_visit!(visit_generic_params(vis, generic_params));
try_visit!(vis.visit_fn_decl(decl));
try_visit!(visit_span(vis, decl_span));
@ -1235,7 +1235,7 @@ macro_rules! common_visitor_and_walkers {
bounds,
bound_generic_params,
}) => {
visit_generic_params(vis, bound_generic_params);
try_visit!(visit_generic_params(vis, bound_generic_params));
try_visit!(vis.visit_ty(bounded_ty));
walk_list!(vis, visit_param_bound, bounds, BoundKind::Bound);
}
@ -1420,7 +1420,7 @@ macro_rules! common_visitor_and_walkers {
let StructExpr { qself, path, fields, rest } = &$($mut)?**se;
try_visit!(vis.visit_qself(qself));
try_visit!(vis.visit_path(path));
visit_expr_fields(vis, fields);
try_visit!(visit_expr_fields(vis, fields));
match rest {
StructRest::Base(expr) => try_visit!(vis.visit_expr(expr)),
StructRest::Rest(_span) => {}

View file

@ -179,6 +179,8 @@ ast_lowering_underscore_expr_lhs_assign =
in expressions, `_` can only be used on the left-hand side of an assignment
.label = `_` not allowed here
ast_lowering_union_default_field_values = unions cannot have default field values
ast_lowering_unstable_inline_assembly = inline assembly is not stable yet on this architecture
ast_lowering_unstable_inline_assembly_label_operand_with_outputs =
using both label and output operands for inline assembly is unstable

View file

@ -475,3 +475,10 @@ pub(crate) struct UseConstGenericArg {
#[suggestion_part(code = "{other_args}")]
pub call_args: Span,
}
#[derive(Diagnostic)]
#[diag(ast_lowering_union_default_field_values)]
pub(crate) struct UnionWithDefault {
#[primary_span]
pub span: Span,
}

View file

@ -17,6 +17,7 @@ use tracing::instrument;
use super::errors::{
InvalidAbi, InvalidAbiSuggestion, MisplacedRelaxTraitBound, TupleStructWithDefault,
UnionWithDefault,
};
use super::stability::{enabled_names, gate_unstable_abi};
use super::{
@ -316,7 +317,7 @@ impl<'hir> LoweringContext<'_, 'hir> {
ImplTraitContext::Disallowed(ImplTraitPosition::Generic),
|this| {
this.arena.alloc_from_iter(
enum_definition.variants.iter().map(|x| this.lower_variant(x)),
enum_definition.variants.iter().map(|x| this.lower_variant(i, x)),
)
},
);
@ -328,7 +329,7 @@ impl<'hir> LoweringContext<'_, 'hir> {
generics,
id,
ImplTraitContext::Disallowed(ImplTraitPosition::Generic),
|this| this.lower_variant_data(hir_id, struct_def),
|this| this.lower_variant_data(hir_id, i, struct_def),
);
hir::ItemKind::Struct(ident, generics, struct_def)
}
@ -338,7 +339,7 @@ impl<'hir> LoweringContext<'_, 'hir> {
generics,
id,
ImplTraitContext::Disallowed(ImplTraitPosition::Generic),
|this| this.lower_variant_data(hir_id, vdata),
|this| this.lower_variant_data(hir_id, i, vdata),
);
hir::ItemKind::Union(ident, generics, vdata)
}
@ -714,13 +715,13 @@ impl<'hir> LoweringContext<'_, 'hir> {
}
}
fn lower_variant(&mut self, v: &Variant) -> hir::Variant<'hir> {
fn lower_variant(&mut self, item_kind: &ItemKind, v: &Variant) -> hir::Variant<'hir> {
let hir_id = self.lower_node_id(v.id);
self.lower_attrs(hir_id, &v.attrs, v.span);
hir::Variant {
hir_id,
def_id: self.local_def_id(v.id),
data: self.lower_variant_data(hir_id, &v.data),
data: self.lower_variant_data(hir_id, item_kind, &v.data),
disr_expr: v.disr_expr.as_ref().map(|e| self.lower_anon_const_to_anon_const(e)),
ident: self.lower_ident(v.ident),
span: self.lower_span(v.span),
@ -730,15 +731,36 @@ impl<'hir> LoweringContext<'_, 'hir> {
fn lower_variant_data(
&mut self,
parent_id: hir::HirId,
item_kind: &ItemKind,
vdata: &VariantData,
) -> hir::VariantData<'hir> {
match vdata {
VariantData::Struct { fields, recovered } => hir::VariantData::Struct {
fields: self
VariantData::Struct { fields, recovered } => {
let fields = self
.arena
.alloc_from_iter(fields.iter().enumerate().map(|f| self.lower_field_def(f))),
recovered: *recovered,
},
.alloc_from_iter(fields.iter().enumerate().map(|f| self.lower_field_def(f)));
if let ItemKind::Union(..) = item_kind {
for field in &fields[..] {
if let Some(default) = field.default {
// Unions cannot derive `Default`, and it's not clear how to use default
// field values of unions if that was supported. Therefore, blanket reject
// trying to use field values with unions.
if self.tcx.features().default_field_values() {
self.dcx().emit_err(UnionWithDefault { span: default.span });
} else {
let _ = self.dcx().span_delayed_bug(
default.span,
"expected union default field values feature gate error but none \
was produced",
);
}
}
}
}
hir::VariantData::Struct { fields, recovered: *recovered }
}
VariantData::Tuple(fields, id) => {
let ctor_id = self.lower_node_id(*id);
self.alias_attrs(ctor_id, parent_id);

View file

@ -224,20 +224,6 @@ impl<'a> AstValidator<'a> {
}
}
fn visit_struct_field_def(&mut self, field: &'a FieldDef) {
if let Some(ref ident) = field.ident
&& ident.name == kw::Underscore
{
self.visit_vis(&field.vis);
self.visit_ident(ident);
self.visit_ty_common(&field.ty);
self.walk_ty(&field.ty);
walk_list!(self, visit_attribute, &field.attrs);
} else {
self.visit_field_def(field);
}
}
fn dcx(&self) -> DiagCtxtHandle<'a> {
self.sess.dcx()
}
@ -1135,8 +1121,7 @@ impl<'a> Visitor<'a> for AstValidator<'a> {
VariantData::Struct { fields, .. } => {
self.visit_attrs_vis_ident(&item.attrs, &item.vis, ident);
self.visit_generics(generics);
// Permit `Anon{Struct,Union}` as field type.
walk_list!(self, visit_struct_field_def, fields);
walk_list!(self, visit_field_def, fields);
}
_ => visit::walk_item(self, item),
},
@ -1148,8 +1133,7 @@ impl<'a> Visitor<'a> for AstValidator<'a> {
VariantData::Struct { fields, .. } => {
self.visit_attrs_vis_ident(&item.attrs, &item.vis, ident);
self.visit_generics(generics);
// Permit `Anon{Struct,Union}` as field type.
walk_list!(self, visit_struct_field_def, fields);
walk_list!(self, visit_field_def, fields);
}
_ => visit::walk_item(self, item),
}

View file

@ -342,10 +342,7 @@ impl<'tcx> BorrowExplanation<'tcx> {
}
}
} else if let LocalInfo::BlockTailTemp(info) = local_decl.local_info() {
let sp = info
.span
.find_ancestor_in_same_ctxt(local_decl.source_info.span)
.unwrap_or(info.span);
let sp = info.span.find_oldest_ancestor_in_same_ctxt();
if info.tail_result_is_ignored {
// #85581: If the first mutable borrow's scope contains
// the second borrow, this suggestion isn't helpful.

View file

@ -155,7 +155,7 @@ impl CfgEval<'_> {
impl MutVisitor for CfgEval<'_> {
#[instrument(level = "trace", skip(self))]
fn visit_expr(&mut self, expr: &mut P<ast::Expr>) {
fn visit_expr(&mut self, expr: &mut ast::Expr) {
self.0.configure_expr(expr, false);
mut_visit::walk_expr(self, expr);
}

View file

@ -1,5 +1,4 @@
use ast::HasAttrs;
use ast::ptr::P;
use rustc_ast::mut_visit::MutVisitor;
use rustc_ast::visit::BoundKind;
use rustc_ast::{
@ -378,11 +377,11 @@ struct TypeSubstitution<'a> {
}
impl<'a> ast::mut_visit::MutVisitor for TypeSubstitution<'a> {
fn visit_ty(&mut self, ty: &mut P<ast::Ty>) {
fn visit_ty(&mut self, ty: &mut ast::Ty) {
if let Some(name) = ty.kind.is_simple_path()
&& name == self.from_name
{
**ty = self.to_ty.clone();
*ty = self.to_ty.clone();
self.rewritten = true;
} else {
ast::mut_visit::walk_ty(self, ty);

View file

@ -14,8 +14,14 @@
#![no_core]
#![allow(dead_code, internal_features, ambiguous_wide_pointer_comparisons)]
#[lang = "pointee_sized"]
pub trait PointeeSized {}
#[lang = "meta_sized"]
pub trait MetaSized: PointeeSized {}
#[lang = "sized"]
pub trait Sized {}
pub trait Sized: MetaSized {}
#[lang = "destruct"]
pub trait Destruct {}
@ -24,35 +30,35 @@ pub trait Destruct {}
pub trait Tuple {}
#[lang = "unsize"]
pub trait Unsize<T: ?Sized> {}
pub trait Unsize<T: PointeeSized>: PointeeSized {}
#[lang = "coerce_unsized"]
pub trait CoerceUnsized<T> {}
impl<'a, 'b: 'a, T: ?Sized + Unsize<U>, U: ?Sized> CoerceUnsized<&'a U> for &'b T {}
impl<'a, T: ?Sized + Unsize<U>, U: ?Sized> CoerceUnsized<&'a mut U> for &'a mut T {}
impl<T: ?Sized + Unsize<U>, U: ?Sized> CoerceUnsized<*const U> for *const T {}
impl<T: ?Sized + Unsize<U>, U: ?Sized> CoerceUnsized<*mut U> for *mut T {}
impl<'a, 'b: 'a, T: PointeeSized + Unsize<U>, U: PointeeSized> CoerceUnsized<&'a U> for &'b T {}
impl<'a, T: PointeeSized + Unsize<U>, U: PointeeSized> CoerceUnsized<&'a mut U> for &'a mut T {}
impl<T: PointeeSized + Unsize<U>, U: PointeeSized> CoerceUnsized<*const U> for *const T {}
impl<T: PointeeSized + Unsize<U>, U: PointeeSized> CoerceUnsized<*mut U> for *mut T {}
#[lang = "dispatch_from_dyn"]
pub trait DispatchFromDyn<T> {}
// &T -> &U
impl<'a, T: ?Sized + Unsize<U>, U: ?Sized> DispatchFromDyn<&'a U> for &'a T {}
impl<'a, T: PointeeSized + Unsize<U>, U: PointeeSized> DispatchFromDyn<&'a U> for &'a T {}
// &mut T -> &mut U
impl<'a, T: ?Sized + Unsize<U>, U: ?Sized> DispatchFromDyn<&'a mut U> for &'a mut T {}
impl<'a, T: PointeeSized + Unsize<U>, U: PointeeSized> DispatchFromDyn<&'a mut U> for &'a mut T {}
// *const T -> *const U
impl<T: ?Sized + Unsize<U>, U: ?Sized> DispatchFromDyn<*const U> for *const T {}
impl<T: PointeeSized + Unsize<U>, U: PointeeSized> DispatchFromDyn<*const U> for *const T {}
// *mut T -> *mut U
impl<T: ?Sized + Unsize<U>, U: ?Sized> DispatchFromDyn<*mut U> for *mut T {}
impl<T: ?Sized + Unsize<U>, U: ?Sized> DispatchFromDyn<Box<U>> for Box<T> {}
impl<T: PointeeSized + Unsize<U>, U: PointeeSized> DispatchFromDyn<*mut U> for *mut T {}
impl<T: MetaSized + Unsize<U>, U: MetaSized> DispatchFromDyn<Box<U>> for Box<T> {}
#[lang = "legacy_receiver"]
pub trait LegacyReceiver {}
impl<T: ?Sized> LegacyReceiver for &T {}
impl<T: ?Sized> LegacyReceiver for &mut T {}
impl<T: ?Sized> LegacyReceiver for Box<T> {}
impl<T: PointeeSized> LegacyReceiver for &T {}
impl<T: PointeeSized> LegacyReceiver for &mut T {}
impl<T: MetaSized> LegacyReceiver for Box<T> {}
#[lang = "copy"]
pub trait Copy {}
@ -74,9 +80,9 @@ impl Copy for isize {}
impl Copy for f32 {}
impl Copy for f64 {}
impl Copy for char {}
impl<'a, T: ?Sized> Copy for &'a T {}
impl<T: ?Sized> Copy for *const T {}
impl<T: ?Sized> Copy for *mut T {}
impl<'a, T: PointeeSized> Copy for &'a T {}
impl<T: PointeeSized> Copy for *const T {}
impl<T: PointeeSized> Copy for *mut T {}
impl<T: Copy> Copy for Option<T> {}
#[lang = "sync"]
@ -94,17 +100,17 @@ unsafe impl Sync for i32 {}
unsafe impl Sync for isize {}
unsafe impl Sync for char {}
unsafe impl Sync for f32 {}
unsafe impl<'a, T: ?Sized> Sync for &'a T {}
unsafe impl<'a, T: PointeeSized> Sync for &'a T {}
unsafe impl<T: Sync, const N: usize> Sync for [T; N] {}
#[lang = "freeze"]
unsafe auto trait Freeze {}
unsafe impl<T: ?Sized> Freeze for PhantomData<T> {}
unsafe impl<T: ?Sized> Freeze for *const T {}
unsafe impl<T: ?Sized> Freeze for *mut T {}
unsafe impl<T: ?Sized> Freeze for &T {}
unsafe impl<T: ?Sized> Freeze for &mut T {}
unsafe impl<T: PointeeSized> Freeze for PhantomData<T> {}
unsafe impl<T: PointeeSized> Freeze for *const T {}
unsafe impl<T: PointeeSized> Freeze for *mut T {}
unsafe impl<T: PointeeSized> Freeze for &T {}
unsafe impl<T: PointeeSized> Freeze for &mut T {}
#[lang = "structural_peq"]
pub trait StructuralPartialEq {}
@ -443,7 +449,7 @@ pub enum Option<T> {
pub use Option::*;
#[lang = "phantom_data"]
pub struct PhantomData<T: ?Sized>;
pub struct PhantomData<T: PointeeSized>;
#[lang = "fn_once"]
#[rustc_paren_sugar]
@ -564,18 +570,18 @@ pub trait Deref {
#[repr(transparent)]
#[rustc_layout_scalar_valid_range_start(1)]
#[rustc_nonnull_optimization_guaranteed]
pub struct NonNull<T: ?Sized>(pub *const T);
pub struct NonNull<T: PointeeSized>(pub *const T);
impl<T: ?Sized, U: ?Sized> CoerceUnsized<NonNull<U>> for NonNull<T> where T: Unsize<U> {}
impl<T: ?Sized, U: ?Sized> DispatchFromDyn<NonNull<U>> for NonNull<T> where T: Unsize<U> {}
impl<T: PointeeSized, U: PointeeSized> CoerceUnsized<NonNull<U>> for NonNull<T> where T: Unsize<U> {}
impl<T: PointeeSized, U: PointeeSized> DispatchFromDyn<NonNull<U>> for NonNull<T> where T: Unsize<U> {}
pub struct Unique<T: ?Sized> {
pub struct Unique<T: PointeeSized> {
pub pointer: NonNull<T>,
pub _marker: PhantomData<T>,
}
impl<T: ?Sized, U: ?Sized> CoerceUnsized<Unique<U>> for Unique<T> where T: Unsize<U> {}
impl<T: ?Sized, U: ?Sized> DispatchFromDyn<Unique<U>> for Unique<T> where T: Unsize<U> {}
impl<T: PointeeSized, U: PointeeSized> CoerceUnsized<Unique<U>> for Unique<T> where T: Unsize<U> {}
impl<T: PointeeSized, U: PointeeSized> DispatchFromDyn<Unique<U>> for Unique<T> where T: Unsize<U> {}
#[lang = "global_alloc_ty"]
pub struct Global;

View file

@ -19,8 +19,14 @@ unsafe extern "C" fn _Unwind_Resume() {
intrinsics::unreachable();
}
#[lang = "pointee_sized"]
pub trait PointeeSized {}
#[lang = "meta_sized"]
pub trait MetaSized: PointeeSized {}
#[lang = "sized"]
pub trait Sized {}
pub trait Sized: MetaSized {}
#[lang = "destruct"]
pub trait Destruct {}
@ -29,35 +35,35 @@ pub trait Destruct {}
pub trait Tuple {}
#[lang = "unsize"]
pub trait Unsize<T: ?Sized> {}
pub trait Unsize<T: PointeeSized>: PointeeSized {}
#[lang = "coerce_unsized"]
pub trait CoerceUnsized<T> {}
impl<'a, 'b: 'a, T: ?Sized + Unsize<U>, U: ?Sized> CoerceUnsized<&'a U> for &'b T {}
impl<'a, T: ?Sized + Unsize<U>, U: ?Sized> CoerceUnsized<&'a mut U> for &'a mut T {}
impl<T: ?Sized + Unsize<U>, U: ?Sized> CoerceUnsized<*const U> for *const T {}
impl<T: ?Sized + Unsize<U>, U: ?Sized> CoerceUnsized<*mut U> for *mut T {}
impl<'a, 'b: 'a, T: PointeeSized + Unsize<U>, U: PointeeSized> CoerceUnsized<&'a U> for &'b T {}
impl<'a, T: PointeeSized + Unsize<U>, U: PointeeSized> CoerceUnsized<&'a mut U> for &'a mut T {}
impl<T: PointeeSized + Unsize<U>, U: PointeeSized> CoerceUnsized<*const U> for *const T {}
impl<T: PointeeSized + Unsize<U>, U: PointeeSized> CoerceUnsized<*mut U> for *mut T {}
#[lang = "dispatch_from_dyn"]
pub trait DispatchFromDyn<T> {}
// &T -> &U
impl<'a, T: ?Sized + Unsize<U>, U: ?Sized> DispatchFromDyn<&'a U> for &'a T {}
impl<'a, T: PointeeSized + Unsize<U>, U: PointeeSized> DispatchFromDyn<&'a U> for &'a T {}
// &mut T -> &mut U
impl<'a, T: ?Sized + Unsize<U>, U: ?Sized> DispatchFromDyn<&'a mut U> for &'a mut T {}
impl<'a, T: PointeeSized + Unsize<U>, U: PointeeSized> DispatchFromDyn<&'a mut U> for &'a mut T {}
// *const T -> *const U
impl<T: ?Sized + Unsize<U>, U: ?Sized> DispatchFromDyn<*const U> for *const T {}
impl<T: PointeeSized + Unsize<U>, U: PointeeSized> DispatchFromDyn<*const U> for *const T {}
// *mut T -> *mut U
impl<T: ?Sized + Unsize<U>, U: ?Sized> DispatchFromDyn<*mut U> for *mut T {}
impl<T: ?Sized + Unsize<U>, U: ?Sized> DispatchFromDyn<Box<U, ()>> for Box<T, ()> {}
impl<T: PointeeSized + Unsize<U>, U: PointeeSized> DispatchFromDyn<*mut U> for *mut T {}
impl<T: MetaSized + Unsize<U>, U: MetaSized> DispatchFromDyn<Box<U, ()>> for Box<T, ()> {}
#[lang = "legacy_receiver"]
pub trait LegacyReceiver {}
impl<T: ?Sized> LegacyReceiver for &T {}
impl<T: ?Sized> LegacyReceiver for &mut T {}
impl<T: ?Sized, A: Allocator> LegacyReceiver for Box<T, A> {}
impl<T: PointeeSized> LegacyReceiver for &T {}
impl<T: PointeeSized> LegacyReceiver for &mut T {}
impl<T: MetaSized> LegacyReceiver for Box<T> {}
#[lang = "receiver"]
trait Receiver {}
@ -84,9 +90,9 @@ impl Copy for i128 {}
impl Copy for f32 {}
impl Copy for f64 {}
impl Copy for char {}
impl<'a, T: ?Sized> Copy for &'a T {}
impl<T: ?Sized> Copy for *const T {}
impl<T: ?Sized> Copy for *mut T {}
impl<'a, T: PointeeSized> Copy for &'a T {}
impl<T: PointeeSized> Copy for *const T {}
impl<T: PointeeSized> Copy for *mut T {}
#[lang = "sync"]
pub unsafe trait Sync {}
@ -102,17 +108,17 @@ unsafe impl Sync for i16 {}
unsafe impl Sync for i32 {}
unsafe impl Sync for isize {}
unsafe impl Sync for char {}
unsafe impl<'a, T: ?Sized> Sync for &'a T {}
unsafe impl<'a, T: PointeeSized> Sync for &'a T {}
unsafe impl Sync for [u8; 16] {}
#[lang = "freeze"]
unsafe auto trait Freeze {}
unsafe impl<T: ?Sized> Freeze for PhantomData<T> {}
unsafe impl<T: ?Sized> Freeze for *const T {}
unsafe impl<T: ?Sized> Freeze for *mut T {}
unsafe impl<T: ?Sized> Freeze for &T {}
unsafe impl<T: ?Sized> Freeze for &mut T {}
unsafe impl<T: PointeeSized> Freeze for PhantomData<T> {}
unsafe impl<T: PointeeSized> Freeze for *const T {}
unsafe impl<T: PointeeSized> Freeze for *mut T {}
unsafe impl<T: PointeeSized> Freeze for &T {}
unsafe impl<T: PointeeSized> Freeze for &mut T {}
#[lang = "structural_peq"]
pub trait StructuralPartialEq {}
@ -456,7 +462,7 @@ pub enum Option<T> {
pub use Option::*;
#[lang = "phantom_data"]
pub struct PhantomData<T: ?Sized>;
pub struct PhantomData<T: PointeeSized>;
#[lang = "fn_once"]
#[rustc_paren_sugar]
@ -576,18 +582,18 @@ impl Allocator for Global {}
#[repr(transparent)]
#[rustc_layout_scalar_valid_range_start(1)]
#[rustc_nonnull_optimization_guaranteed]
pub struct NonNull<T: ?Sized>(pub *const T);
pub struct NonNull<T: PointeeSized>(pub *const T);
impl<T: ?Sized, U: ?Sized> CoerceUnsized<NonNull<U>> for NonNull<T> where T: Unsize<U> {}
impl<T: ?Sized, U: ?Sized> DispatchFromDyn<NonNull<U>> for NonNull<T> where T: Unsize<U> {}
impl<T: PointeeSized, U: PointeeSized> CoerceUnsized<NonNull<U>> for NonNull<T> where T: Unsize<U> {}
impl<T: PointeeSized, U: PointeeSized> DispatchFromDyn<NonNull<U>> for NonNull<T> where T: Unsize<U> {}
pub struct Unique<T: ?Sized> {
pub struct Unique<T: PointeeSized> {
pub pointer: NonNull<T>,
pub _marker: PhantomData<T>,
}
impl<T: ?Sized, U: ?Sized> CoerceUnsized<Unique<U>> for Unique<T> where T: Unsize<U> {}
impl<T: ?Sized, U: ?Sized> DispatchFromDyn<Unique<U>> for Unique<T> where T: Unsize<U> {}
impl<T: PointeeSized, U: PointeeSized> CoerceUnsized<Unique<U>> for Unique<T> where T: Unsize<U> {}
impl<T: PointeeSized, U: PointeeSized> DispatchFromDyn<Unique<U>> for Unique<T> where T: Unsize<U> {}
#[lang = "owned_box"]
pub struct Box<T: ?Sized, A: Allocator = Global>(Unique<T>, A);

View file

@ -114,7 +114,7 @@ fn match_args_from_caller_to_enzyme<'ll>(
let mul = unsafe {
llvm::LLVMBuildMul(
builder.llbuilder,
cx.get_const_i64(elem_bytes_size),
cx.get_const_int(cx.type_i64(), elem_bytes_size),
next_outer_arg,
UNNAMED,
)
@ -385,7 +385,7 @@ fn generate_enzyme_call<'ll>(
if attrs.width > 1 {
let enzyme_width = cx.create_metadata("enzyme_width".to_string()).unwrap();
args.push(cx.get_metadata_value(enzyme_width));
args.push(cx.get_const_i64(attrs.width as u64));
args.push(cx.get_const_int(cx.type_i64(), attrs.width as u64));
}
let has_sret = has_sret(outer_fn);

View file

@ -99,14 +99,14 @@ impl<'ll, CX: Borrow<SCx<'ll>>> BackendTypes for GenericCx<'ll, CX> {
type DIVariable = &'ll llvm::debuginfo::DIVariable;
}
impl<'ll> CodegenCx<'ll, '_> {
impl<'ll, CX: Borrow<SCx<'ll>>> GenericCx<'ll, CX> {
pub(crate) fn const_array(&self, ty: &'ll Type, elts: &[&'ll Value]) -> &'ll Value {
let len = u64::try_from(elts.len()).expect("LLVMConstArray2 elements len overflow");
unsafe { llvm::LLVMConstArray2(ty, elts.as_ptr(), len) }
}
pub(crate) fn const_bytes(&self, bytes: &[u8]) -> &'ll Value {
bytes_in_context(self.llcx, bytes)
bytes_in_context(self.llcx(), bytes)
}
pub(crate) fn const_get_elt(&self, v: &'ll Value, idx: u64) -> &'ll Value {

View file

@ -679,11 +679,8 @@ impl<'ll, CX: Borrow<SCx<'ll>>> GenericCx<'ll, CX> {
llvm::LLVMMetadataAsValue(self.llcx(), metadata)
}
// FIXME(autodiff): We should split `ConstCodegenMethods` to pull the reusable parts
// onto a trait that is also implemented for GenericCx.
pub(crate) fn get_const_i64(&self, n: u64) -> &'ll Value {
let ty = unsafe { llvm::LLVMInt64TypeInContext(self.llcx()) };
unsafe { llvm::LLVMConstInt(ty, n, llvm::False) }
pub(crate) fn get_const_int(&self, ty: &'ll Type, val: u64) -> &'ll Value {
unsafe { llvm::LLVMConstInt(ty, val, llvm::False) }
}
pub(crate) fn get_function(&self, name: &str) -> Option<&'ll Value> {

View file

@ -69,23 +69,6 @@ pub fn ensure_removed(dcx: DiagCtxtHandle<'_>, path: &Path) {
}
}
fn check_link_info_print_request(sess: &Session, crate_types: &[CrateType]) {
let print_native_static_libs =
sess.opts.prints.iter().any(|p| p.kind == PrintKind::NativeStaticLibs);
let has_staticlib = crate_types.iter().any(|ct| *ct == CrateType::Staticlib);
if print_native_static_libs {
if !has_staticlib {
sess.dcx()
.warn(format!("cannot output linkage information without staticlib crate-type"));
sess.dcx()
.note(format!("consider `--crate-type staticlib` to print linkage information"));
} else if !sess.opts.output_types.should_link() {
sess.dcx()
.warn(format!("cannot output linkage information when --emit link is not passed"));
}
}
}
/// Performs the linkage portion of the compilation phase. This will generate all
/// of the requested outputs for this compilation session.
pub fn link_binary(
@ -208,8 +191,6 @@ pub fn link_binary(
}
}
check_link_info_print_request(sess, &codegen_results.crate_info.crate_types);
// Remove the temporary object file and metadata if we aren't saving temps.
sess.time("link_binary_remove_temps", || {
// If the user requests that temporaries are saved, don't delete any.

View file

@ -379,6 +379,24 @@ pub(super) fn elf_e_flags(architecture: Architecture, sess: &Session) -> u32 {
};
e_flags
}
Architecture::PowerPc64 => {
const EF_PPC64_ABI_UNKNOWN: u32 = 0;
const EF_PPC64_ABI_ELF_V1: u32 = 1;
const EF_PPC64_ABI_ELF_V2: u32 = 2;
match sess.target.options.llvm_abiname.as_ref() {
// If the flags do not correctly indicate the ABI,
// linkers such as ld.lld assume that the ppc64 object files are always ELFv2
// which leads to broken binaries if ELFv1 is used for the object files.
"elfv1" => EF_PPC64_ABI_ELF_V1,
"elfv2" => EF_PPC64_ABI_ELF_V2,
"" if sess.target.options.binary_format.to_object() == BinaryFormat::Elf => {
bug!("No ABI specified for this PPC64 ELF target");
}
// Fall back
_ => EF_PPC64_ABI_UNKNOWN,
}
}
_ => 0,
}
}

View file

@ -1,5 +1,7 @@
use std::ptr::Alignment;
use rustc_serialize::PointeeSized;
/// Returns the ABI-required minimum alignment of a type in bytes.
///
/// This is equivalent to [`align_of`], but also works for some unsized
@ -17,7 +19,7 @@ pub const fn align_of<T: ?Sized + Aligned>() -> Alignment {
/// example `[T]` has alignment of `T`.
///
/// [`align_of::<Self>()`]: align_of
pub unsafe trait Aligned {
pub unsafe trait Aligned: PointeeSized {
/// Alignment of `Self`.
const ALIGN: Alignment;
}

View file

@ -12,6 +12,7 @@
#![allow(rustc::potential_query_instability)]
#![cfg_attr(bootstrap, feature(cfg_match))]
#![cfg_attr(not(bootstrap), feature(cfg_select))]
#![cfg_attr(not(bootstrap), feature(sized_hierarchy))]
#![deny(unsafe_op_in_unsafe_fn)]
#![doc(html_root_url = "https://doc.rust-lang.org/nightly/nightly-rustc/")]
#![doc(rust_logo)]
@ -43,6 +44,9 @@ use std::fmt;
pub use atomic_ref::AtomicRef;
pub use ena::{snapshot_vec, undo_log, unify};
pub use rustc_index::static_assert_size;
// re-exported for `rustc_smir`
// FIXME(sized_hierarchy): remove with `cfg(bootstrap)`, see `rustc_serialize/src/lib.rs`
pub use rustc_serialize::PointeeSized;
pub mod aligned;
pub mod base_n;

View file

@ -1,5 +1,7 @@
use std::alloc::Allocator;
use rustc_serialize::PointeeSized;
#[diagnostic::on_unimplemented(message = "`{Self}` doesn't implement `DynSend`. \
Add it to `rustc_data_structures::marker` or use `IntoDynSyncSend` if it's already `Send`")]
// This is an auto trait for types which can be sent across threads if `sync::is_dyn_thread_safe()`
@ -15,7 +17,7 @@ pub unsafe auto trait DynSend {}
pub unsafe auto trait DynSync {}
// Same with `Sync` and `Send`.
unsafe impl<T: DynSync + ?Sized> DynSend for &T {}
unsafe impl<T: DynSync + ?Sized + PointeeSized> DynSend for &T {}
macro_rules! impls_dyn_send_neg {
($([$t1: ty $(where $($generics1: tt)*)?])*) => {
@ -27,9 +29,9 @@ macro_rules! impls_dyn_send_neg {
impls_dyn_send_neg!(
[std::env::Args]
[std::env::ArgsOs]
[*const T where T: ?Sized]
[*mut T where T: ?Sized]
[std::ptr::NonNull<T> where T: ?Sized]
[*const T where T: ?Sized + PointeeSized]
[*mut T where T: ?Sized + PointeeSized]
[std::ptr::NonNull<T> where T: ?Sized + PointeeSized]
[std::rc::Rc<T, A> where T: ?Sized, A: Allocator]
[std::rc::Weak<T, A> where T: ?Sized, A: Allocator]
[std::sync::MutexGuard<'_, T> where T: ?Sized]
@ -100,12 +102,12 @@ macro_rules! impls_dyn_sync_neg {
impls_dyn_sync_neg!(
[std::env::Args]
[std::env::ArgsOs]
[*const T where T: ?Sized]
[*mut T where T: ?Sized]
[*const T where T: ?Sized + PointeeSized]
[*mut T where T: ?Sized + PointeeSized]
[std::cell::Cell<T> where T: ?Sized]
[std::cell::RefCell<T> where T: ?Sized]
[std::cell::UnsafeCell<T> where T: ?Sized]
[std::ptr::NonNull<T> where T: ?Sized]
[std::ptr::NonNull<T> where T: ?Sized + PointeeSized]
[std::rc::Rc<T, A> where T: ?Sized, A: Allocator]
[std::rc::Weak<T, A> where T: ?Sized, A: Allocator]
[std::cell::OnceCell<T> where T]
@ -175,10 +177,10 @@ impl_dyn_sync!(
[thin_vec::ThinVec<T> where T: DynSync]
);
pub fn assert_dyn_sync<T: ?Sized + DynSync>() {}
pub fn assert_dyn_send<T: ?Sized + DynSend>() {}
pub fn assert_dyn_send_val<T: ?Sized + DynSend>(_t: &T) {}
pub fn assert_dyn_send_sync_val<T: ?Sized + DynSync + DynSend>(_t: &T) {}
pub fn assert_dyn_sync<T: ?Sized + PointeeSized + DynSync>() {}
pub fn assert_dyn_send<T: ?Sized + PointeeSized + DynSend>() {}
pub fn assert_dyn_send_val<T: ?Sized + PointeeSized + DynSend>(_t: &T) {}
pub fn assert_dyn_send_sync_val<T: ?Sized + PointeeSized + DynSync + DynSend>(_t: &T) {}
#[derive(Copy, Clone)]
pub struct FromDyn<T>(T);
@ -231,10 +233,10 @@ impl<T> std::ops::DerefMut for FromDyn<T> {
// an instance of `DynSend` and `DynSync`, since the compiler cannot infer
// it automatically in some cases. (e.g. Box<dyn Send / Sync>)
#[derive(Copy, Clone)]
pub struct IntoDynSyncSend<T: ?Sized>(pub T);
pub struct IntoDynSyncSend<T: ?Sized + PointeeSized>(pub T);
unsafe impl<T: ?Sized + Send> DynSend for IntoDynSyncSend<T> {}
unsafe impl<T: ?Sized + Sync> DynSync for IntoDynSyncSend<T> {}
unsafe impl<T: ?Sized + PointeeSized + Send> DynSend for IntoDynSyncSend<T> {}
unsafe impl<T: ?Sized + PointeeSized + Sync> DynSync for IntoDynSyncSend<T> {}
impl<T> std::ops::Deref for IntoDynSyncSend<T> {
type Target = T;

View file

@ -444,7 +444,7 @@ pub(crate) struct InvalidFragmentSpecifier {
#[primary_span]
pub span: Span,
pub fragment: Ident,
pub help: String,
pub help: &'static str,
}
#[derive(Diagnostic)]

View file

@ -1768,7 +1768,7 @@ impl InvocationCollectorNode for ast::Crate {
}
}
impl InvocationCollectorNode for P<ast::Ty> {
impl InvocationCollectorNode for ast::Ty {
type OutputTy = P<ast::Ty>;
const KIND: AstFragmentKind = AstFragmentKind::Ty;
fn to_annotatable(self) -> Annotatable {
@ -1791,7 +1791,7 @@ impl InvocationCollectorNode for P<ast::Ty> {
}
}
impl InvocationCollectorNode for P<ast::Pat> {
impl InvocationCollectorNode for ast::Pat {
type OutputTy = P<ast::Pat>;
const KIND: AstFragmentKind = AstFragmentKind::Pat;
fn to_annotatable(self) -> Annotatable {
@ -1814,11 +1814,11 @@ impl InvocationCollectorNode for P<ast::Pat> {
}
}
impl InvocationCollectorNode for P<ast::Expr> {
impl InvocationCollectorNode for ast::Expr {
type OutputTy = P<ast::Expr>;
const KIND: AstFragmentKind = AstFragmentKind::Expr;
fn to_annotatable(self) -> Annotatable {
Annotatable::Expr(self)
Annotatable::Expr(P(self))
}
fn fragment_to_output(fragment: AstFragment) -> Self::OutputTy {
fragment.make_expr()
@ -1955,29 +1955,29 @@ impl DummyAstNode for ast::Crate {
}
}
impl DummyAstNode for P<ast::Ty> {
impl DummyAstNode for ast::Ty {
fn dummy() -> Self {
P(ast::Ty {
ast::Ty {
id: DUMMY_NODE_ID,
kind: TyKind::Dummy,
span: Default::default(),
tokens: Default::default(),
})
}
}
}
impl DummyAstNode for P<ast::Pat> {
impl DummyAstNode for ast::Pat {
fn dummy() -> Self {
P(ast::Pat {
ast::Pat {
id: DUMMY_NODE_ID,
kind: PatKind::Wild,
span: Default::default(),
tokens: Default::default(),
})
}
}
}
impl DummyAstNode for P<ast::Expr> {
impl DummyAstNode for ast::Expr {
fn dummy() -> Self {
ast::Expr::dummy()
}
@ -1985,7 +1985,7 @@ impl DummyAstNode for P<ast::Expr> {
impl DummyAstNode for AstNodeWrapper<P<ast::Expr>, MethodReceiverTag> {
fn dummy() -> Self {
AstNodeWrapper::new(ast::Expr::dummy(), MethodReceiverTag)
AstNodeWrapper::new(P(ast::Expr::dummy()), MethodReceiverTag)
}
}
@ -2272,7 +2272,7 @@ impl<'a, 'b> InvocationCollector<'a, 'b> {
}
}
fn visit_node<Node: InvocationCollectorNode<OutputTy = Node> + DummyAstNode>(
fn visit_node<Node: InvocationCollectorNode<OutputTy: Into<Node>> + DummyAstNode>(
&mut self,
node: &mut Node,
) {
@ -2297,6 +2297,7 @@ impl<'a, 'b> InvocationCollector<'a, 'b> {
*node = self
.collect_attr((attr, pos, derives), n.to_annotatable(), Node::KIND)
.make_ast::<Node>()
.into()
}
},
None if node.is_mac_call() => {
@ -2304,7 +2305,7 @@ impl<'a, 'b> InvocationCollector<'a, 'b> {
let (mac, attrs, _) = n.take_mac_call();
self.check_attributes(&attrs, &mac);
*node = self.collect_bang(mac, Node::KIND).make_ast::<Node>()
*node = self.collect_bang(mac, Node::KIND).make_ast::<Node>().into()
}
None if node.delegation().is_some() => unreachable!(),
None => {
@ -2414,15 +2415,15 @@ impl<'a, 'b> MutVisitor for InvocationCollector<'a, 'b> {
self.visit_node(node)
}
fn visit_ty(&mut self, node: &mut P<ast::Ty>) {
fn visit_ty(&mut self, node: &mut ast::Ty) {
self.visit_node(node)
}
fn visit_pat(&mut self, node: &mut P<ast::Pat>) {
fn visit_pat(&mut self, node: &mut ast::Pat) {
self.visit_node(node)
}
fn visit_expr(&mut self, node: &mut P<ast::Expr>) {
fn visit_expr(&mut self, node: &mut ast::Expr) {
// FIXME: Feature gating is performed inconsistently between `Expr` and `OptExpr`.
if let Some(attr) = node.attrs.first() {
self.cfg().maybe_emit_expr_attr_err(attr);

View file

@ -78,7 +78,13 @@ enum TokenTree {
/// only covers the ident, e.g. `var`.)
MetaVar(Span, Ident),
/// e.g., `$var:expr`. Only appears on the LHS.
MetaVarDecl(Span, Ident /* name to bind */, Option<NonterminalKind>),
MetaVarDecl {
span: Span,
/// Name to bind.
name: Ident,
/// The fragment specifier.
kind: NonterminalKind,
},
/// A meta-variable expression inside `${...}`.
MetaVarExpr(DelimSpan, MetaVarExpr),
}
@ -102,7 +108,7 @@ impl TokenTree {
match *self {
TokenTree::Token(Token { span, .. })
| TokenTree::MetaVar(span, _)
| TokenTree::MetaVarDecl(span, _, _) => span,
| TokenTree::MetaVarDecl { span, .. } => span,
TokenTree::Delimited(span, ..)
| TokenTree::MetaVarExpr(span, _)
| TokenTree::Sequence(span, _) => span.entire(),

View file

@ -24,6 +24,7 @@ pub(super) fn failed_to_match_macro(
arg: TokenStream,
lhses: &[Vec<MatcherLoc>],
) -> (Span, ErrorGuaranteed) {
debug!("failed to match macro");
// An error occurred, try the expansion again, tracking the expansion closely for better
// diagnostics.
let mut tracker = CollectTrackerAndEmitter::new(psess.dcx(), sp);

View file

@ -117,7 +117,6 @@ use rustc_session::parse::ParseSess;
use rustc_span::{ErrorGuaranteed, MacroRulesNormalizedIdent, Span, kw};
use smallvec::SmallVec;
use super::quoted::VALID_FRAGMENT_NAMES_MSG;
use crate::errors;
use crate::mbe::{KleeneToken, TokenTree};
@ -263,14 +262,7 @@ fn check_binders(
}
}
// Similarly, this can only happen when checking a toplevel macro.
TokenTree::MetaVarDecl(span, name, kind) => {
if kind.is_none() && node_id != DUMMY_NODE_ID {
psess.dcx().emit_err(errors::MissingFragmentSpecifier {
span,
add_span: span.shrink_to_hi(),
valid: VALID_FRAGMENT_NAMES_MSG,
});
}
TokenTree::MetaVarDecl { span, name, .. } => {
if !macros.is_empty() {
psess.dcx().span_bug(span, "unexpected MetaVarDecl in nested lhs");
}
@ -339,7 +331,7 @@ fn check_occurrences(
) {
match *rhs {
TokenTree::Token(..) => {}
TokenTree::MetaVarDecl(span, _name, _kind) => {
TokenTree::MetaVarDecl { span, .. } => {
psess.dcx().span_bug(span, "unexpected MetaVarDecl in rhs")
}
TokenTree::MetaVar(span, name) => {

View file

@ -122,7 +122,7 @@ pub(crate) enum MatcherLoc {
MetaVarDecl {
span: Span,
bind: Ident,
kind: Option<NonterminalKind>,
kind: NonterminalKind,
next_metavar: usize,
seq_depth: usize,
},
@ -151,12 +151,7 @@ impl Display for MatcherLoc {
write!(f, "{}", token_descr(token))
}
MatcherLoc::MetaVarDecl { bind, kind, .. } => {
write!(f, "meta-variable `${bind}")?;
if let Some(kind) = kind {
write!(f, ":{kind}")?;
}
write!(f, "`")?;
Ok(())
write!(f, "meta-variable `${bind}:{kind}`")
}
MatcherLoc::Eof => f.write_str("end of macro"),
@ -220,7 +215,7 @@ pub(super) fn compute_locs(matcher: &[TokenTree]) -> Vec<MatcherLoc> {
seq_depth,
};
}
&TokenTree::MetaVarDecl(span, bind, kind) => {
&TokenTree::MetaVarDecl { span, name: bind, kind } => {
locs.push(MatcherLoc::MetaVarDecl {
span,
bind,
@ -330,7 +325,7 @@ pub(super) fn count_metavar_decls(matcher: &[TokenTree]) -> usize {
matcher
.iter()
.map(|tt| match tt {
TokenTree::MetaVarDecl(..) => 1,
TokenTree::MetaVarDecl { .. } => 1,
TokenTree::Sequence(_, seq) => seq.num_captures,
TokenTree::Delimited(.., delim) => count_metavar_decls(&delim.tts),
TokenTree::Token(..) => 0,
@ -551,18 +546,12 @@ impl TtParser {
mp.idx = idx_first;
self.cur_mps.push(mp);
}
&MatcherLoc::MetaVarDecl { span, kind, .. } => {
&MatcherLoc::MetaVarDecl { kind, .. } => {
// Built-in nonterminals never start with these tokens, so we can eliminate
// them from consideration. We use the span of the metavariable declaration
// to determine any edition-specific matching behavior for non-terminals.
if let Some(kind) = kind {
if Parser::nonterminal_may_begin_with(kind, token) {
self.bb_mps.push(mp);
}
} else {
// E.g. `$e` instead of `$e:expr`, reported as a hard error if actually used.
// Both this check and the one in `nameize` are necessary, surprisingly.
return Some(Error(span, "missing fragment specifier".to_string()));
if Parser::nonterminal_may_begin_with(kind, token) {
self.bb_mps.push(mp);
}
}
MatcherLoc::Eof => {
@ -666,11 +655,7 @@ impl TtParser {
let mut mp = self.bb_mps.pop().unwrap();
let loc = &matcher[mp.idx];
if let &MatcherLoc::MetaVarDecl {
span,
kind: Some(kind),
next_metavar,
seq_depth,
..
span, kind, next_metavar, seq_depth, ..
} = loc
{
// We use the span of the metavariable declaration to determine any
@ -715,7 +700,7 @@ impl TtParser {
.bb_mps
.iter()
.map(|mp| match &matcher[mp.idx] {
MatcherLoc::MetaVarDecl { bind, kind: Some(kind), .. } => {
MatcherLoc::MetaVarDecl { bind, kind, .. } => {
format!("{kind} ('{bind}')")
}
_ => unreachable!(),
@ -745,19 +730,13 @@ impl TtParser {
// `NamedParseResult`. Otherwise, it's an error.
let mut ret_val = FxHashMap::default();
for loc in matcher {
if let &MatcherLoc::MetaVarDecl { span, bind, kind, .. } = loc {
if kind.is_some() {
match ret_val.entry(MacroRulesNormalizedIdent::new(bind)) {
Vacant(spot) => spot.insert(res.next().unwrap()),
Occupied(..) => {
return Error(span, format!("duplicated bind name: {bind}"));
}
};
} else {
// E.g. `$e` instead of `$e:expr`, reported as a hard error if actually used.
// Both this check and the one in `parse_tt_inner` are necessary, surprisingly.
return Error(span, "missing fragment specifier".to_string());
}
if let &MatcherLoc::MetaVarDecl { span, bind, .. } = loc {
match ret_val.entry(MacroRulesNormalizedIdent::new(bind)) {
Vacant(spot) => spot.insert(res.next().unwrap()),
Occupied(..) => {
return Error(span, format!("duplicated bind name: {bind}"));
}
};
}
}
Success(ret_val)

View file

@ -392,7 +392,7 @@ pub fn compile_declarative_macro(
let lhs_nm = Ident::new(sym::lhs, span);
let rhs_nm = Ident::new(sym::rhs, span);
let tt_spec = Some(NonterminalKind::TT);
let tt_spec = NonterminalKind::TT;
let macro_rules = macro_def.macro_rules;
// Parse the macro_rules! invocation
@ -407,9 +407,9 @@ pub fn compile_declarative_macro(
DelimSpan::dummy(),
mbe::SequenceRepetition {
tts: vec![
mbe::TokenTree::MetaVarDecl(span, lhs_nm, tt_spec),
mbe::TokenTree::MetaVarDecl { span, name: lhs_nm, kind: tt_spec },
mbe::TokenTree::token(token::FatArrow, span),
mbe::TokenTree::MetaVarDecl(span, rhs_nm, tt_spec),
mbe::TokenTree::MetaVarDecl { span, name: rhs_nm, kind: tt_spec },
],
separator: Some(Token::new(
if macro_rules { token::Semi } else { token::Comma },
@ -448,6 +448,7 @@ pub fn compile_declarative_macro(
match tt_parser.parse_tt(&mut Cow::Owned(parser), &argument_gram, &mut NoopTracker) {
Success(m) => m,
Failure(()) => {
debug!("failed to parse macro tt");
// The fast `NoopTracker` doesn't have any info on failure, so we need to retry it
// with another one that gives us the information we need.
// For this we need to reclone the macro body as the previous parser consumed it.
@ -616,7 +617,7 @@ fn is_empty_token_tree(sess: &Session, seq: &mbe::SequenceRepetition) -> bool {
let mut iter = seq.tts.iter().peekable();
while let Some(tt) = iter.next() {
match tt {
mbe::TokenTree::MetaVarDecl(_, _, Some(NonterminalKind::Vis)) => {}
mbe::TokenTree::MetaVarDecl { kind: NonterminalKind::Vis, .. } => {}
mbe::TokenTree::Token(t @ Token { kind: DocComment(..), .. }) => {
let mut now = t;
while let Some(&mbe::TokenTree::Token(
@ -651,7 +652,7 @@ fn check_redundant_vis_repetition(
) {
let is_zero_or_one: bool = seq.kleene.op == KleeneOp::ZeroOrOne;
let is_vis = seq.tts.first().map_or(false, |tt| {
matches!(tt, mbe::TokenTree::MetaVarDecl(_, _, Some(NonterminalKind::Vis)))
matches!(tt, mbe::TokenTree::MetaVarDecl { kind: NonterminalKind::Vis, .. })
});
if is_vis && is_zero_or_one {
@ -678,7 +679,7 @@ fn check_lhs_no_empty_seq(sess: &Session, tts: &[mbe::TokenTree]) -> Result<(),
match tt {
TokenTree::Token(..)
| TokenTree::MetaVar(..)
| TokenTree::MetaVarDecl(..)
| TokenTree::MetaVarDecl { .. }
| TokenTree::MetaVarExpr(..) => (),
TokenTree::Delimited(.., del) => check_lhs_no_empty_seq(sess, &del.tts)?,
TokenTree::Sequence(span, seq) => {
@ -777,7 +778,7 @@ impl<'tt> FirstSets<'tt> {
match tt {
TokenTree::Token(..)
| TokenTree::MetaVar(..)
| TokenTree::MetaVarDecl(..)
| TokenTree::MetaVarDecl { .. }
| TokenTree::MetaVarExpr(..) => {
first.replace_with(TtHandle::TtRef(tt));
}
@ -845,7 +846,7 @@ impl<'tt> FirstSets<'tt> {
match tt {
TokenTree::Token(..)
| TokenTree::MetaVar(..)
| TokenTree::MetaVarDecl(..)
| TokenTree::MetaVarDecl { .. }
| TokenTree::MetaVarExpr(..) => {
first.add_one(TtHandle::TtRef(tt));
return first;
@ -1084,7 +1085,7 @@ fn check_matcher_core<'tt>(
match token {
TokenTree::Token(..)
| TokenTree::MetaVar(..)
| TokenTree::MetaVarDecl(..)
| TokenTree::MetaVarDecl { .. }
| TokenTree::MetaVarExpr(..) => {
if token_can_be_followed_by_any(token) {
// don't need to track tokens that work with any,
@ -1152,7 +1153,7 @@ fn check_matcher_core<'tt>(
// Now `last` holds the complete set of NT tokens that could
// end the sequence before SUFFIX. Check that every one works with `suffix`.
for tt in &last.tokens {
if let &TokenTree::MetaVarDecl(span, name, Some(kind)) = tt.get() {
if let &TokenTree::MetaVarDecl { span, name, kind } = tt.get() {
for next_token in &suffix_first.tokens {
let next_token = next_token.get();
@ -1172,11 +1173,11 @@ fn check_matcher_core<'tt>(
)
{
// It is suggestion to use pat_param, for example: $x:pat -> $x:pat_param.
let suggestion = quoted_tt_to_string(&TokenTree::MetaVarDecl(
let suggestion = quoted_tt_to_string(&TokenTree::MetaVarDecl {
span,
name,
Some(NonterminalKind::Pat(PatParam { inferred: false })),
));
kind: NonterminalKind::Pat(PatParam { inferred: false }),
});
sess.psess.buffer_lint(
RUST_2021_INCOMPATIBLE_OR_PATTERNS,
span,
@ -1212,11 +1213,11 @@ fn check_matcher_core<'tt>(
&& sess.psess.edition.at_least_rust_2021()
&& next_token.is_token(&token::Or)
{
let suggestion = quoted_tt_to_string(&TokenTree::MetaVarDecl(
let suggestion = quoted_tt_to_string(&TokenTree::MetaVarDecl {
span,
name,
Some(NonterminalKind::Pat(PatParam { inferred: false })),
));
kind: NonterminalKind::Pat(PatParam { inferred: false }),
});
err.span_suggestion(
span,
"try a `pat_param` fragment specifier instead",
@ -1254,7 +1255,7 @@ fn check_matcher_core<'tt>(
}
fn token_can_be_followed_by_any(tok: &mbe::TokenTree) -> bool {
if let mbe::TokenTree::MetaVarDecl(_, _, Some(kind)) = *tok {
if let mbe::TokenTree::MetaVarDecl { kind, .. } = *tok {
frag_can_be_followed_by_any(kind)
} else {
// (Non NT's can always be followed by anything in matchers.)
@ -1367,7 +1368,7 @@ fn is_in_follow(tok: &mbe::TokenTree, kind: NonterminalKind) -> IsInFollow {
}
_ => IsInFollow::No(TOKENS),
},
TokenTree::MetaVarDecl(_, _, Some(NonterminalKind::Block)) => IsInFollow::Yes,
TokenTree::MetaVarDecl { kind: NonterminalKind::Block, .. } => IsInFollow::Yes,
_ => IsInFollow::No(TOKENS),
}
}
@ -1400,11 +1401,10 @@ fn is_in_follow(tok: &mbe::TokenTree, kind: NonterminalKind) -> IsInFollow {
}
}
},
TokenTree::MetaVarDecl(
_,
_,
Some(NonterminalKind::Ident | NonterminalKind::Ty | NonterminalKind::Path),
) => IsInFollow::Yes,
TokenTree::MetaVarDecl {
kind: NonterminalKind::Ident | NonterminalKind::Ty | NonterminalKind::Path,
..
} => IsInFollow::Yes,
_ => IsInFollow::No(TOKENS),
}
}
@ -1416,8 +1416,7 @@ fn quoted_tt_to_string(tt: &mbe::TokenTree) -> String {
match tt {
mbe::TokenTree::Token(token) => pprust::token_to_string(token).into(),
mbe::TokenTree::MetaVar(_, name) => format!("${name}"),
mbe::TokenTree::MetaVarDecl(_, name, Some(kind)) => format!("${name}:{kind}"),
mbe::TokenTree::MetaVarDecl(_, name, None) => format!("${name}:"),
mbe::TokenTree::MetaVarDecl { name, kind, .. } => format!("${name}:{kind}"),
_ => panic!(
"{}",
"unexpected mbe::TokenTree::{Sequence or Delimited} \

View file

@ -54,66 +54,78 @@ pub(super) fn parse(
// Given the parsed tree, if there is a metavar and we are expecting matchers, actually
// parse out the matcher (i.e., in `$id:ident` this would parse the `:` and `ident`).
let tree = parse_tree(tree, &mut iter, parsing_patterns, sess, node_id, features, edition);
match tree {
TokenTree::MetaVar(start_sp, ident) if parsing_patterns => {
// Not consuming the next token immediately, as it may not be a colon
let span = match iter.peek() {
Some(&tokenstream::TokenTree::Token(
Token { kind: token::Colon, span: colon_span },
_,
)) => {
// Consume the colon first
iter.next();
// It's ok to consume the next tree no matter how,
// since if it's not a token then it will be an invalid declaration.
match iter.next() {
Some(tokenstream::TokenTree::Token(token, _)) => match token.ident() {
Some((fragment, _)) => {
let span = token.span.with_lo(start_sp.lo());
let edition = || {
// FIXME(#85708) - once we properly decode a foreign
// crate's `SyntaxContext::root`, then we can replace
// this with just `span.edition()`. A
// `SyntaxContext::root()` from the current crate will
// have the edition of the current crate, and a
// `SyntaxContext::root()` from a foreign crate will
// have the edition of that crate (which we manually
// retrieve via the `edition` parameter).
if !span.from_expansion() {
edition
} else {
span.edition()
}
};
let kind = NonterminalKind::from_symbol(fragment.name, edition)
.unwrap_or_else(|| {
sess.dcx().emit_err(errors::InvalidFragmentSpecifier {
span,
fragment,
help: VALID_FRAGMENT_NAMES_MSG.into(),
});
NonterminalKind::Ident
});
result.push(TokenTree::MetaVarDecl(span, ident, Some(kind)));
continue;
}
_ => token.span,
},
// Invalid, return a nice source location
_ => colon_span.with_lo(start_sp.lo()),
}
}
// Whether it's none or some other tree, it doesn't belong to
// the current meta variable, returning the original span.
_ => start_sp,
};
if !parsing_patterns {
// No matchers allowed, nothing to process here
result.push(tree);
continue;
}
result.push(TokenTree::MetaVarDecl(span, ident, None));
}
let TokenTree::MetaVar(start_sp, ident) = tree else {
// Not a metavariable, just return the tree
result.push(tree);
continue;
};
// Not a metavar or no matchers allowed, so just return the tree
_ => result.push(tree),
// Push a metavariable with no fragment specifier at the given span
let mut missing_fragment_specifier = |span| {
sess.dcx().emit_err(errors::MissingFragmentSpecifier {
span,
add_span: span.shrink_to_hi(),
valid: VALID_FRAGMENT_NAMES_MSG,
});
// Fall back to a `TokenTree` since that will match anything if we continue expanding.
result.push(TokenTree::MetaVarDecl { span, name: ident, kind: NonterminalKind::TT });
};
// Not consuming the next token immediately, as it may not be a colon
if let Some(peek) = iter.peek()
&& let tokenstream::TokenTree::Token(token, _spacing) = peek
&& let Token { kind: token::Colon, span: colon_span } = token
{
// Next token is a colon; consume it
iter.next();
// It's ok to consume the next tree no matter how,
// since if it's not a token then it will be an invalid declaration.
let Some(tokenstream::TokenTree::Token(token, _)) = iter.next() else {
// Invalid, return a nice source location as `var:`
missing_fragment_specifier(colon_span.with_lo(start_sp.lo()));
continue;
};
let Some((fragment, _)) = token.ident() else {
// No identifier for the fragment specifier;
missing_fragment_specifier(token.span);
continue;
};
let span = token.span.with_lo(start_sp.lo());
let edition = || {
// FIXME(#85708) - once we properly decode a foreign
// crate's `SyntaxContext::root`, then we can replace
// this with just `span.edition()`. A
// `SyntaxContext::root()` from the current crate will
// have the edition of the current crate, and a
// `SyntaxContext::root()` from a foreign crate will
// have the edition of that crate (which we manually
// retrieve via the `edition` parameter).
if !span.from_expansion() { edition } else { span.edition() }
};
let kind = NonterminalKind::from_symbol(fragment.name, edition).unwrap_or_else(|| {
sess.dcx().emit_err(errors::InvalidFragmentSpecifier {
span,
fragment,
help: VALID_FRAGMENT_NAMES_MSG,
});
NonterminalKind::TT
});
result.push(TokenTree::MetaVarDecl { span, name: ident, kind });
} else {
// Whether it's none or some other tree, it doesn't belong to
// the current meta variable, returning the original span.
missing_fragment_specifier(start_sp);
}
}
result

View file

@ -445,7 +445,7 @@ pub(super) fn transcribe<'a>(
}
// There should be no meta-var declarations in the invocation of a macro.
mbe::TokenTree::MetaVarDecl(..) => panic!("unexpected `TokenTree::MetaVarDecl`"),
mbe::TokenTree::MetaVarDecl { .. } => panic!("unexpected `TokenTree::MetaVarDecl`"),
}
}
}
@ -639,7 +639,7 @@ fn lockstep_iter_size(
size.with(lockstep_iter_size(tt, interpolations, repeats))
})
}
TokenTree::MetaVar(_, name) | TokenTree::MetaVarDecl(_, name, _) => {
TokenTree::MetaVar(_, name) | TokenTree::MetaVarDecl { name, .. } => {
let name = MacroRulesNormalizedIdent::new(*name);
match lookup_cur_matched(name, interpolations, repeats) {
Some(matched) => match matched {

View file

@ -332,9 +332,9 @@ impl MutVisitor for PlaceholderExpander {
}
}
fn visit_expr(&mut self, expr: &mut P<ast::Expr>) {
fn visit_expr(&mut self, expr: &mut ast::Expr) {
match expr.kind {
ast::ExprKind::MacCall(_) => *expr = self.remove(expr.id).make_expr(),
ast::ExprKind::MacCall(_) => *expr = *self.remove(expr.id).make_expr(),
_ => walk_expr(self, expr),
}
}
@ -399,16 +399,16 @@ impl MutVisitor for PlaceholderExpander {
stmts
}
fn visit_pat(&mut self, pat: &mut P<ast::Pat>) {
fn visit_pat(&mut self, pat: &mut ast::Pat) {
match pat.kind {
ast::PatKind::MacCall(_) => *pat = self.remove(pat.id).make_pat(),
ast::PatKind::MacCall(_) => *pat = *self.remove(pat.id).make_pat(),
_ => walk_pat(self, pat),
}
}
fn visit_ty(&mut self, ty: &mut P<ast::Ty>) {
fn visit_ty(&mut self, ty: &mut ast::Ty) {
match ty.kind {
ast::TyKind::MacCall(_) => *ty = self.remove(ty.id).make_ty(),
ast::TyKind::MacCall(_) => *ty = *self.remove(ty.id).make_ty(),
_ => walk_ty(self, ty),
}
}

View file

@ -237,6 +237,8 @@ declare_features! (
(internal, profiler_runtime, "1.18.0", None),
/// Allows using `rustc_*` attributes (RFC 572).
(internal, rustc_attrs, "1.0.0", None),
/// Introduces a hierarchy of `Sized` traits (RFC 3729).
(unstable, sized_hierarchy, "CURRENT_RUSTC_VERSION", None),
/// Allows using the `#[stable]` and `#[unstable]` attributes.
(internal, staged_api, "1.0.0", None),
/// Added for testing unstable lints; perma-unstable.

View file

@ -165,6 +165,8 @@ pub fn extract(attrs: &[impl AttributeExt]) -> Option<(Symbol, Span)> {
language_item_table! {
// Variant name, Name, Getter method name, Target Generic requirements;
Sized, sym::sized, sized_trait, Target::Trait, GenericRequirement::Exact(0);
MetaSized, sym::meta_sized, meta_sized_trait, Target::Trait, GenericRequirement::Exact(0);
PointeeSized, sym::pointee_sized, pointee_sized_trait, Target::Trait, GenericRequirement::Exact(0);
Unsize, sym::unsize, unsize_trait, Target::Trait, GenericRequirement::Minimum(1);
/// Trait injected by `#[derive(PartialEq)]`, (i.e. "Partial EQ").
StructuralPeq, sym::structural_peq, structural_peq_trait, Target::Trait, GenericRequirement::None;

View file

@ -76,6 +76,7 @@ pub use check::{check_abi, check_abi_fn_ptr, check_custom_abi};
use rustc_abi::{ExternAbi, VariantIdx};
use rustc_data_structures::fx::{FxHashSet, FxIndexMap};
use rustc_errors::{Diag, ErrorGuaranteed, pluralize, struct_span_code_err};
use rustc_hir::LangItem;
use rustc_hir::def_id::{DefId, LocalDefId};
use rustc_hir::intravisit::Visitor;
use rustc_index::bit_set::DenseBitSet;
@ -339,7 +340,7 @@ fn bounds_from_generic_predicates<'tcx>(
ty::ClauseKind::Trait(trait_predicate) => {
let entry = types.entry(trait_predicate.self_ty()).or_default();
let def_id = trait_predicate.def_id();
if !tcx.is_default_trait(def_id) {
if !tcx.is_default_trait(def_id) && !tcx.is_lang_item(def_id, LangItem::Sized) {
// Do not add that restriction to the list if it is a positive requirement.
entry.push(trait_predicate.def_id());
}

View file

@ -1258,6 +1258,11 @@ fn check_trait(tcx: TyCtxt<'_>, item: &hir::Item<'_>) -> Result<(), ErrorGuarant
debug!(?item.owner_id);
let def_id = item.owner_id.def_id;
if tcx.is_lang_item(def_id.into(), LangItem::PointeeSized) {
// `PointeeSized` is removed during lowering.
return Ok(());
}
let trait_def = tcx.trait_def(def_id);
if trait_def.is_marker
|| matches!(trait_def.specialization_kind, TraitSpecializationKind::Marker)

View file

@ -44,6 +44,14 @@ fn associated_type_bounds<'tcx>(
| PredicateFilter::SelfOnly
| PredicateFilter::SelfTraitThatDefines(_)
| PredicateFilter::SelfAndAssociatedTypeBounds => {
icx.lowerer().add_sizedness_bounds(
&mut bounds,
item_ty,
hir_bounds,
None,
None,
span,
);
icx.lowerer().add_default_traits(&mut bounds, item_ty, hir_bounds, None, span);
}
// `ConstIfConst` is only interested in `~const` bounds.
@ -333,6 +341,14 @@ fn opaque_type_bounds<'tcx>(
| PredicateFilter::SelfOnly
| PredicateFilter::SelfTraitThatDefines(_)
| PredicateFilter::SelfAndAssociatedTypeBounds => {
icx.lowerer().add_sizedness_bounds(
&mut bounds,
item_ty,
hir_bounds,
None,
None,
span,
);
icx.lowerer().add_default_traits(&mut bounds, item_ty, hir_bounds, None, span);
}
//`ConstIfConst` is only interested in `~const` bounds.

View file

@ -162,7 +162,6 @@ fn gather_explicit_predicates_of(tcx: TyCtxt<'_>, def_id: LocalDefId) -> ty::Gen
.map(|t| ty::Binder::dummy(t.instantiate_identity()));
}
}
ItemKind::Trait(_, _, _, _, self_bounds, ..)
| ItemKind::TraitAlias(_, _, self_bounds) => {
is_trait = Some((self_bounds, item.span));
@ -183,21 +182,29 @@ fn gather_explicit_predicates_of(tcx: TyCtxt<'_>, def_id: LocalDefId) -> ty::Gen
// and the explicit where-clauses, but to get the full set of predicates
// on a trait we must also consider the bounds that follow the trait's name,
// like `trait Foo: A + B + C`.
if let Some(self_bounds) = is_trait {
if let Some((self_bounds, span)) = is_trait {
let mut bounds = Vec::new();
icx.lowerer().lower_bounds(
tcx.types.self_param,
self_bounds.0,
self_bounds,
&mut bounds,
ty::List::empty(),
PredicateFilter::All,
);
icx.lowerer().add_sizedness_bounds(
&mut bounds,
tcx.types.self_param,
self_bounds,
None,
Some(def_id),
span,
);
icx.lowerer().add_default_super_traits(
def_id,
&mut bounds,
self_bounds.0,
self_bounds,
hir_generics,
self_bounds.1,
span,
);
predicates.extend(bounds);
}
@ -224,6 +231,14 @@ fn gather_explicit_predicates_of(tcx: TyCtxt<'_>, def_id: LocalDefId) -> ty::Gen
let param_ty = icx.lowerer().lower_ty_param(param.hir_id);
let mut bounds = Vec::new();
// Implicit bounds are added to type params unless a `?Trait` bound is found
icx.lowerer().add_sizedness_bounds(
&mut bounds,
param_ty,
&[],
Some((param.def_id, hir_generics.predicates)),
None,
param.span,
);
icx.lowerer().add_default_traits(
&mut bounds,
param_ty,

View file

@ -4,15 +4,15 @@ use rustc_data_structures::fx::{FxIndexMap, FxIndexSet};
use rustc_errors::codes::*;
use rustc_errors::struct_span_code_err;
use rustc_hir as hir;
use rustc_hir::AmbigArg;
use rustc_hir::def::{DefKind, Res};
use rustc_hir::def_id::{DefId, LocalDefId};
use rustc_hir::{AmbigArg, LangItem, PolyTraitRef};
use rustc_middle::bug;
use rustc_middle::ty::{
self as ty, IsSuggestable, Ty, TyCtxt, TypeSuperVisitable, TypeVisitable, TypeVisitableExt,
TypeVisitor, Upcast,
};
use rustc_span::{ErrorGuaranteed, Ident, Span, Symbol, kw, sym};
use rustc_span::{ErrorGuaranteed, Ident, Span, Symbol, kw};
use rustc_trait_selection::traits;
use smallvec::SmallVec;
use tracing::{debug, instrument};
@ -23,23 +23,213 @@ use crate::hir_ty_lowering::{
AssocItemQSelf, FeedConstTy, HirTyLowerer, PredicateFilter, RegionInferReason,
};
#[derive(Debug, Default)]
struct CollectedBound {
/// `Trait`
positive: bool,
/// `?Trait`
maybe: bool,
/// `!Trait`
negative: bool,
}
impl CollectedBound {
/// Returns `true` if any of `Trait`, `?Trait` or `!Trait` were encountered.
fn any(&self) -> bool {
self.positive || self.maybe || self.negative
}
}
#[derive(Debug)]
struct CollectedSizednessBounds {
// Collected `Sized` bounds
sized: CollectedBound,
// Collected `MetaSized` bounds
meta_sized: CollectedBound,
// Collected `PointeeSized` bounds
pointee_sized: CollectedBound,
}
impl CollectedSizednessBounds {
/// Returns `true` if any of `Trait`, `?Trait` or `!Trait` were encountered for `Sized`,
/// `MetaSized` or `PointeeSized`.
fn any(&self) -> bool {
self.sized.any() || self.meta_sized.any() || self.pointee_sized.any()
}
}
fn search_bounds_for<'tcx>(
hir_bounds: &'tcx [hir::GenericBound<'tcx>],
self_ty_where_predicates: Option<(LocalDefId, &'tcx [hir::WherePredicate<'tcx>])>,
mut f: impl FnMut(&'tcx PolyTraitRef<'tcx>),
) {
let mut search_bounds = |hir_bounds: &'tcx [hir::GenericBound<'tcx>]| {
for hir_bound in hir_bounds {
let hir::GenericBound::Trait(ptr) = hir_bound else {
continue;
};
f(ptr)
}
};
search_bounds(hir_bounds);
if let Some((self_ty, where_clause)) = self_ty_where_predicates {
for clause in where_clause {
if let hir::WherePredicateKind::BoundPredicate(pred) = clause.kind
&& pred.is_param_bound(self_ty.to_def_id())
{
search_bounds(pred.bounds);
}
}
}
}
fn collect_unbounds<'tcx>(
hir_bounds: &'tcx [hir::GenericBound<'tcx>],
self_ty_where_predicates: Option<(LocalDefId, &'tcx [hir::WherePredicate<'tcx>])>,
) -> SmallVec<[&'tcx PolyTraitRef<'tcx>; 1]> {
let mut unbounds: SmallVec<[_; 1]> = SmallVec::new();
search_bounds_for(hir_bounds, self_ty_where_predicates, |ptr| {
if matches!(ptr.modifiers.polarity, hir::BoundPolarity::Maybe(_)) {
unbounds.push(ptr);
}
});
unbounds
}
fn collect_bounds<'a, 'tcx>(
hir_bounds: &'a [hir::GenericBound<'tcx>],
self_ty_where_predicates: Option<(LocalDefId, &'tcx [hir::WherePredicate<'tcx>])>,
target_did: DefId,
) -> CollectedBound {
let mut collect_into = CollectedBound::default();
search_bounds_for(hir_bounds, self_ty_where_predicates, |ptr| {
if !matches!(ptr.trait_ref.path.res, Res::Def(DefKind::Trait, did) if did == target_did) {
return;
}
match ptr.modifiers.polarity {
hir::BoundPolarity::Maybe(_) => collect_into.maybe = true,
hir::BoundPolarity::Negative(_) => collect_into.negative = true,
hir::BoundPolarity::Positive => collect_into.positive = true,
}
});
collect_into
}
fn collect_sizedness_bounds<'tcx>(
tcx: TyCtxt<'tcx>,
hir_bounds: &'tcx [hir::GenericBound<'tcx>],
self_ty_where_predicates: Option<(LocalDefId, &'tcx [hir::WherePredicate<'tcx>])>,
span: Span,
) -> CollectedSizednessBounds {
let sized_did = tcx.require_lang_item(LangItem::Sized, span);
let sized = collect_bounds(hir_bounds, self_ty_where_predicates, sized_did);
let meta_sized_did = tcx.require_lang_item(LangItem::MetaSized, span);
let meta_sized = collect_bounds(hir_bounds, self_ty_where_predicates, meta_sized_did);
let pointee_sized_did = tcx.require_lang_item(LangItem::PointeeSized, span);
let pointee_sized = collect_bounds(hir_bounds, self_ty_where_predicates, pointee_sized_did);
CollectedSizednessBounds { sized, meta_sized, pointee_sized }
}
/// Add a trait bound for `did`.
fn add_trait_bound<'tcx>(
tcx: TyCtxt<'tcx>,
bounds: &mut Vec<(ty::Clause<'tcx>, Span)>,
self_ty: Ty<'tcx>,
did: DefId,
span: Span,
) {
let trait_ref = ty::TraitRef::new(tcx, did, [self_ty]);
// Preferable to put sizedness obligations first, since we report better errors for `Sized`
// ambiguity.
bounds.insert(0, (trait_ref.upcast(tcx), span));
}
impl<'tcx> dyn HirTyLowerer<'tcx> + '_ {
pub(crate) fn add_default_traits(
/// Skip `PointeeSized` bounds.
///
/// `PointeeSized` is a "fake bound" insofar as anywhere a `PointeeSized` bound exists, there
/// is actually the absence of any bounds. This avoids limitations around non-global where
/// clauses being preferred over item bounds (where `PointeeSized` bounds would be
/// proven) - which can result in errors when a `PointeeSized` supertrait/bound/predicate is
/// added to some items.
pub(crate) fn should_skip_sizedness_bound<'hir>(
&self,
bound: &'hir hir::GenericBound<'tcx>,
) -> bool {
bound
.trait_ref()
.and_then(|tr| tr.trait_def_id())
.map(|did| self.tcx().is_lang_item(did, LangItem::PointeeSized))
.unwrap_or(false)
}
/// Adds sizedness bounds to a trait, trait alias, parameter, opaque type or associated type.
///
/// - On parameters, opaque type and associated types, add default `Sized` bound if no explicit
/// sizedness bounds are present.
/// - On traits and trait aliases, add default `MetaSized` supertrait if no explicit sizedness
/// bounds are present.
/// - On parameters, opaque type, associated types and trait aliases, add a `MetaSized` bound if
/// a `?Sized` bound is present.
pub(crate) fn add_sizedness_bounds(
&self,
bounds: &mut Vec<(ty::Clause<'tcx>, Span)>,
self_ty: Ty<'tcx>,
hir_bounds: &[hir::GenericBound<'tcx>],
hir_bounds: &'tcx [hir::GenericBound<'tcx>],
self_ty_where_predicates: Option<(LocalDefId, &'tcx [hir::WherePredicate<'tcx>])>,
trait_did: Option<LocalDefId>,
span: Span,
) {
self.add_default_traits_with_filter(
bounds,
self_ty,
hir_bounds,
self_ty_where_predicates,
span,
|_| true,
);
let tcx = self.tcx();
let meta_sized_did = tcx.require_lang_item(LangItem::MetaSized, span);
let pointee_sized_did = tcx.require_lang_item(LangItem::PointeeSized, span);
// If adding sizedness bounds to a trait, then there are some relevant early exits
if let Some(trait_did) = trait_did {
let trait_did = trait_did.to_def_id();
// Never add a default supertrait to `PointeeSized`.
if trait_did == pointee_sized_did {
return;
}
// Don't add default sizedness supertraits to auto traits because it isn't possible to
// relax an automatically added supertrait on the defn itself.
if tcx.trait_is_auto(trait_did) {
return;
}
} else {
// Report invalid unbounds on sizedness-bounded generic parameters.
let unbounds = collect_unbounds(hir_bounds, self_ty_where_predicates);
self.check_and_report_invalid_unbounds_on_param(unbounds);
}
let collected = collect_sizedness_bounds(tcx, hir_bounds, self_ty_where_predicates, span);
if (collected.sized.maybe || collected.sized.negative)
&& !collected.sized.positive
&& !collected.meta_sized.any()
&& !collected.pointee_sized.any()
{
// `?Sized` is equivalent to `MetaSized` (but only add the bound if there aren't any
// other explicit ones) - this can happen for trait aliases as well as bounds.
add_trait_bound(tcx, bounds, self_ty, meta_sized_did, span);
} else if !collected.any() {
if trait_did.is_some() {
// If there are no explicit sizedness bounds on a trait then add a default
// `MetaSized` supertrait.
add_trait_bound(tcx, bounds, self_ty, meta_sized_did, span);
} else {
// If there are no explicit sizedness bounds on a parameter then add a default
// `Sized` bound.
let sized_did = tcx.require_lang_item(LangItem::Sized, span);
add_trait_bound(tcx, bounds, self_ty, sized_did, span);
}
}
}
/// Checks whether `Self: DefaultAutoTrait` bounds should be added on trait super bounds
@ -146,13 +336,12 @@ impl<'tcx> dyn HirTyLowerer<'tcx> + '_ {
if !self.requires_default_supertraits(trait_bounds, trait_generics) {
let self_ty_where_predicates = (parent, trait_item.generics.predicates);
self.add_default_traits_with_filter(
self.add_default_traits(
bounds,
tcx.types.self_param,
&[],
Some(self_ty_where_predicates),
trait_item.span,
|tr| tr != hir::LangItem::Sized,
);
}
}
@ -174,41 +363,37 @@ impl<'tcx> dyn HirTyLowerer<'tcx> + '_ {
assert!(matches!(self.tcx().def_kind(trait_def_id), DefKind::Trait | DefKind::TraitAlias));
if self.requires_default_supertraits(hir_bounds, hir_generics) {
let self_ty_where_predicates = (trait_def_id, hir_generics.predicates);
self.add_default_traits_with_filter(
self.add_default_traits(
bounds,
self.tcx().types.self_param,
hir_bounds,
Some(self_ty_where_predicates),
span,
|default_trait| default_trait != hir::LangItem::Sized,
);
}
}
pub(crate) fn add_default_traits_with_filter(
pub(crate) fn add_default_traits(
&self,
bounds: &mut Vec<(ty::Clause<'tcx>, Span)>,
self_ty: Ty<'tcx>,
hir_bounds: &[hir::GenericBound<'tcx>],
self_ty_where_predicates: Option<(LocalDefId, &'tcx [hir::WherePredicate<'tcx>])>,
span: Span,
f: impl Fn(hir::LangItem) -> bool,
) {
self.tcx().default_traits().iter().filter(|&&default_trait| f(default_trait)).for_each(
|default_trait| {
self.add_default_trait(
*default_trait,
bounds,
self_ty,
hir_bounds,
self_ty_where_predicates,
span,
);
},
);
self.tcx().default_traits().iter().for_each(|default_trait| {
self.add_default_trait(
*default_trait,
bounds,
self_ty,
hir_bounds,
self_ty_where_predicates,
span,
);
});
}
/// Add a `Sized` or `experimental_default_bounds` bounds to the `bounds` if appropriate.
/// Add a `experimental_default_bounds` bound to the `bounds` if appropriate.
///
/// Doesn't add the bound if the HIR bounds contain any of `Trait`, `?Trait` or `!Trait`.
pub(crate) fn add_default_trait(
@ -220,7 +405,8 @@ impl<'tcx> dyn HirTyLowerer<'tcx> + '_ {
self_ty_where_predicates: Option<(LocalDefId, &'tcx [hir::WherePredicate<'tcx>])>,
span: Span,
) {
let trait_id = self.tcx().lang_items().get(trait_);
let tcx = self.tcx();
let trait_id = tcx.lang_items().get(trait_);
if let Some(trait_id) = trait_id
&& self.do_not_provide_default_trait_bound(
trait_id,
@ -228,11 +414,7 @@ impl<'tcx> dyn HirTyLowerer<'tcx> + '_ {
self_ty_where_predicates,
)
{
// There was no `?Trait` or `!Trait` bound;
// add `Trait` if it's available.
let trait_ref = ty::TraitRef::new(self.tcx(), trait_id, [self_ty]);
// Preferable to put this obligation first, since we report better errors for sized ambiguity.
bounds.insert(0, (trait_ref.upcast(self.tcx()), span));
add_trait_bound(tcx, bounds, self_ty, trait_id, span);
}
}
@ -242,90 +424,8 @@ impl<'tcx> dyn HirTyLowerer<'tcx> + '_ {
hir_bounds: &'a [hir::GenericBound<'tcx>],
self_ty_where_predicates: Option<(LocalDefId, &'tcx [hir::WherePredicate<'tcx>])>,
) -> bool {
let tcx = self.tcx();
let mut seen_negative_bound = false;
let mut seen_positive_bound = false;
// Try to find an unbound in bounds.
let mut unbounds: SmallVec<[_; 1]> = SmallVec::new();
let mut search_bounds = |hir_bounds: &'a [hir::GenericBound<'tcx>]| {
for hir_bound in hir_bounds {
let hir::GenericBound::Trait(ptr) = hir_bound else {
continue;
};
match ptr.modifiers.polarity {
hir::BoundPolarity::Maybe(_) => unbounds.push(ptr),
hir::BoundPolarity::Negative(_) => {
if ptr.trait_ref.path.res == Res::Def(DefKind::Trait, trait_def_id) {
seen_negative_bound = true;
}
}
hir::BoundPolarity::Positive => {
if ptr.trait_ref.path.res == Res::Def(DefKind::Trait, trait_def_id) {
seen_positive_bound = true;
}
}
}
}
};
search_bounds(hir_bounds);
if let Some((self_ty, where_clause)) = self_ty_where_predicates {
for clause in where_clause {
if let hir::WherePredicateKind::BoundPredicate(pred) = clause.kind
&& pred.is_param_bound(self_ty.to_def_id())
{
search_bounds(pred.bounds);
}
}
}
let mut unique_bounds = FxIndexSet::default();
let mut seen_repeat = false;
for unbound in &unbounds {
if let Res::Def(DefKind::Trait, unbound_def_id) = unbound.trait_ref.path.res {
seen_repeat |= !unique_bounds.insert(unbound_def_id);
}
}
if unbounds.len() > 1 {
let err = errors::MultipleRelaxedDefaultBounds {
spans: unbounds.iter().map(|ptr| ptr.span).collect(),
};
if seen_repeat {
self.dcx().emit_err(err);
} else if !tcx.features().more_maybe_bounds() {
self.tcx().sess.create_feature_err(err, sym::more_maybe_bounds).emit();
};
}
let mut seen_unbound = false;
for unbound in unbounds {
let unbound_def_id = unbound.trait_ref.trait_def_id();
if unbound_def_id == Some(trait_def_id) {
seen_unbound = true;
}
let emit_relax_err = || {
let unbound_traits = match tcx.sess.opts.unstable_opts.experimental_default_bounds {
true => "`?Sized` and `experimental_default_bounds`",
false => "`?Sized`",
};
// There was a `?Trait` bound, but it was neither `?Sized` nor `experimental_default_bounds`.
self.dcx().span_err(
unbound.span,
format!(
"relaxing a default bound only does something for {}; \
all other traits are not bound by default",
unbound_traits
),
);
};
match unbound_def_id {
Some(def_id) if !tcx.is_default_trait(def_id) => emit_relax_err(),
None => emit_relax_err(),
_ => {}
}
}
!(seen_unbound || seen_negative_bound || seen_positive_bound)
let collected = collect_bounds(hir_bounds, self_ty_where_predicates, trait_def_id);
!collected.any()
}
/// Lower HIR bounds into `bounds` given the self type `param_ty` and the overarching late-bound vars if any.
@ -361,6 +461,10 @@ impl<'tcx> dyn HirTyLowerer<'tcx> + '_ {
'tcx: 'hir,
{
for hir_bound in hir_bounds {
if self.should_skip_sizedness_bound(hir_bound) {
continue;
}
// In order to avoid cycles, when we're lowering `SelfTraitThatDefines`,
// we skip over any traits that don't define the given associated type.
if let PredicateFilter::SelfTraitThatDefines(assoc_ident) = predicate_filter {

View file

@ -61,14 +61,7 @@ impl<'tcx> dyn HirTyLowerer<'tcx> + '_ {
let ast_bounds: Vec<_> =
hir_bounds.iter().map(|&trait_ref| hir::GenericBound::Trait(trait_ref)).collect();
self.add_default_traits_with_filter(
&mut user_written_bounds,
dummy_self,
&ast_bounds,
None,
span,
|tr| tr != hir::LangItem::Sized,
);
self.add_default_traits(&mut user_written_bounds, dummy_self, &ast_bounds, None, span);
let (elaborated_trait_bounds, elaborated_projection_bounds) =
traits::expand_trait_aliases(tcx, user_written_bounds.iter().copied());

View file

@ -8,7 +8,7 @@ use rustc_errors::{
};
use rustc_hir::def::{CtorOf, DefKind, Res};
use rustc_hir::def_id::DefId;
use rustc_hir::{self as hir, HirId};
use rustc_hir::{self as hir, HirId, LangItem, PolyTraitRef};
use rustc_middle::bug;
use rustc_middle::ty::fast_reject::{TreatParams, simplify_type};
use rustc_middle::ty::print::{PrintPolyTraitRefExt as _, PrintTraitRefExt as _};
@ -34,6 +34,57 @@ use crate::fluent_generated as fluent;
use crate::hir_ty_lowering::{AssocItemQSelf, HirTyLowerer};
impl<'tcx> dyn HirTyLowerer<'tcx> + '_ {
/// Check for multiple relaxed default bounds and relaxed bounds of non-sizedness traits.
pub(crate) fn check_and_report_invalid_unbounds_on_param(
&self,
unbounds: SmallVec<[&PolyTraitRef<'_>; 1]>,
) {
let tcx = self.tcx();
let sized_did = tcx.require_lang_item(LangItem::Sized, DUMMY_SP);
let mut unique_bounds = FxIndexSet::default();
let mut seen_repeat = false;
for unbound in &unbounds {
if let Res::Def(DefKind::Trait, unbound_def_id) = unbound.trait_ref.path.res {
seen_repeat |= !unique_bounds.insert(unbound_def_id);
}
}
if unbounds.len() > 1 {
let err = errors::MultipleRelaxedDefaultBounds {
spans: unbounds.iter().map(|ptr| ptr.span).collect(),
};
if seen_repeat {
tcx.dcx().emit_err(err);
} else if !tcx.features().more_maybe_bounds() {
tcx.sess.create_feature_err(err, sym::more_maybe_bounds).emit();
};
}
for unbound in unbounds {
if let Res::Def(DefKind::Trait, did) = unbound.trait_ref.path.res
&& ((did == sized_did) || tcx.is_default_trait(did))
{
continue;
}
let unbound_traits = match tcx.sess.opts.unstable_opts.experimental_default_bounds {
true => "`?Sized` and `experimental_default_bounds`",
false => "`?Sized`",
};
self.dcx().span_err(
unbound.span,
format!(
"relaxing a default bound only does something for {}; all other traits are \
not bound by default",
unbound_traits
),
);
}
}
/// On missing type parameters, emit an E0393 error and provide a structured suggestion using
/// the type parameter's name as a placeholder.
pub(crate) fn report_missing_type_params(

View file

@ -22,8 +22,9 @@ use rustc_infer::infer::{DefineOpaqueTypes, InferResult};
use rustc_lint::builtin::SELF_CONSTRUCTOR_FROM_OUTER_ITEM;
use rustc_middle::ty::adjustment::{Adjust, Adjustment, AutoBorrow, AutoBorrowMutability};
use rustc_middle::ty::{
self, AdtKind, CanonicalUserType, GenericArgsRef, GenericParamDefKind, IsIdentity, Ty, TyCtxt,
TypeFoldable, TypeVisitable, TypeVisitableExt, UserArgs, UserSelfTy,
self, AdtKind, CanonicalUserType, GenericArgsRef, GenericParamDefKind, IsIdentity,
SizedTraitKind, Ty, TyCtxt, TypeFoldable, TypeVisitable, TypeVisitableExt, UserArgs,
UserSelfTy,
};
use rustc_middle::{bug, span_bug};
use rustc_session::lint;
@ -439,7 +440,9 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
|| {},
);
// Sized types have static alignment, and so do slices.
if tail.is_trivially_sized(self.tcx) || matches!(tail.kind(), ty::Slice(..)) {
if tail.has_trivial_sizedness(self.tcx, SizedTraitKind::Sized)
|| matches!(tail.kind(), ty::Slice(..))
{
// Nothing else is required here.
} else {
// We can't be sure, let's required full `Sized`.

View file

@ -738,7 +738,8 @@ lint_redundant_semicolons =
[true] semicolons
*[false] semicolon
}
.suggestion = remove {$multiple ->
lint_redundant_semicolons_suggestion = remove {$multiple_semicolons ->
[true] these semicolons
*[false] this semicolon
}

View file

@ -1538,8 +1538,16 @@ pub(crate) struct PassByValueDiag {
#[diag(lint_redundant_semicolons)]
pub(crate) struct RedundantSemicolonsDiag {
pub multiple: bool,
#[suggestion(code = "", applicability = "maybe-incorrect")]
pub suggestion: Span,
#[subdiagnostic]
pub suggestion: Option<RedundantSemicolonsSuggestion>,
}
#[derive(Subdiagnostic)]
#[suggestion(lint_redundant_semicolons_suggestion, code = "", applicability = "maybe-incorrect")]
pub(crate) struct RedundantSemicolonsSuggestion {
pub multiple_semicolons: bool,
#[primary_span]
pub span: Span,
}
// traits.rs

View file

@ -46,7 +46,8 @@ impl<'tcx> LateLintPass<'tcx> for MultipleSupertraitUpcastable {
.tcx
.explicit_super_predicates_of(def_id)
.iter_identity_copied()
.filter_map(|(pred, _)| pred.as_trait_clause());
.filter_map(|(pred, _)| pred.as_trait_clause())
.filter(|pred| !cx.tcx.is_lang_item(pred.def_id(), hir::LangItem::MetaSized));
if direct_super_traits_iter.count() > 1 {
cx.emit_span_lint(
MULTIPLE_SUPERTRAIT_UPCASTABLE,

View file

@ -2,7 +2,7 @@ use rustc_ast::{Block, StmtKind};
use rustc_session::{declare_lint, declare_lint_pass};
use rustc_span::Span;
use crate::lints::RedundantSemicolonsDiag;
use crate::lints::{RedundantSemicolonsDiag, RedundantSemicolonsSuggestion};
use crate::{EarlyContext, EarlyLintPass, LintContext};
declare_lint! {
@ -44,16 +44,21 @@ impl EarlyLintPass for RedundantSemicolons {
fn maybe_lint_redundant_semis(cx: &EarlyContext<'_>, seq: &mut Option<(Span, bool)>) {
if let Some((span, multiple)) = seq.take() {
// FIXME: Find a better way of ignoring the trailing
// semicolon from macro expansion
if span == rustc_span::DUMMY_SP {
return;
}
// Ignore redundant semicolons inside macro expansion.(issue #142143)
let suggestion = if span.from_expansion() {
None
} else {
Some(RedundantSemicolonsSuggestion { multiple_semicolons: multiple, span })
};
cx.emit_span_lint(
REDUNDANT_SEMICOLONS,
span,
RedundantSemicolonsDiag { multiple, suggestion: span },
RedundantSemicolonsDiag { multiple, suggestion },
);
}
}

View file

@ -28,6 +28,7 @@
#![allow(internal_features)]
#![allow(rustc::diagnostic_outside_of_impl)]
#![allow(rustc::untranslatable_diagnostic)]
#![cfg_attr(not(bootstrap), feature(sized_hierarchy))]
#![doc(html_root_url = "https://doc.rust-lang.org/nightly/nightly-rustc/")]
#![doc(rust_logo)]
#![feature(allocator_api)]

View file

@ -328,6 +328,14 @@ impl Key for (DefId, SimplifiedType) {
}
}
impl Key for (DefId, ty::SizedTraitKind) {
type Cache<V> = DefaultCache<Self, V>;
fn default_span(&self, tcx: TyCtxt<'_>) -> Span {
self.0.default_span(tcx)
}
}
impl<'tcx> Key for GenericArgsRef<'tcx> {
type Cache<V> = DefaultCache<Self, V>;

View file

@ -137,8 +137,8 @@ use crate::ty::layout::ValidityRequirement;
use crate::ty::print::{PrintTraitRefExt, describe_as_module};
use crate::ty::util::AlwaysRequiresDrop;
use crate::ty::{
self, CrateInherentImpls, GenericArg, GenericArgsRef, PseudoCanonicalInput, Ty, TyCtxt,
TyCtxtFeed,
self, CrateInherentImpls, GenericArg, GenericArgsRef, PseudoCanonicalInput, SizedTraitKind, Ty,
TyCtxt, TyCtxtFeed,
};
use crate::{dep_graph, mir, thir};
@ -910,9 +910,10 @@ rustc_queries! {
cache_on_disk_if { key.is_local() }
separate_provide_extern
}
query adt_sized_constraint(key: DefId) -> Option<ty::EarlyBinder<'tcx, Ty<'tcx>>> {
desc { |tcx| "computing the `Sized` constraint for `{}`", tcx.def_path_str(key) }
query adt_sizedness_constraint(
key: (DefId, SizedTraitKind)
) -> Option<ty::EarlyBinder<'tcx, Ty<'tcx>>> {
desc { |tcx| "computing the sizedness constraint for `{}`", tcx.def_path_str(key.0) }
}
query adt_dtorck_constraint(

View file

@ -229,8 +229,12 @@ impl<'tcx> rustc_type_ir::inherent::AdtDef<TyCtxt<'tcx>> for AdtDef<'tcx> {
)
}
fn sized_constraint(self, tcx: TyCtxt<'tcx>) -> Option<ty::EarlyBinder<'tcx, Ty<'tcx>>> {
self.sized_constraint(tcx)
fn sizedness_constraint(
self,
tcx: TyCtxt<'tcx>,
sizedness: ty::SizedTraitKind,
) -> Option<ty::EarlyBinder<'tcx, Ty<'tcx>>> {
self.sizedness_constraint(tcx, sizedness)
}
fn is_fundamental(self) -> bool {
@ -634,10 +638,15 @@ impl<'tcx> AdtDef<'tcx> {
tcx.adt_async_destructor(self.did())
}
/// Returns a type such that `Self: Sized` if and only if that type is `Sized`,
/// or `None` if the type is always sized.
pub fn sized_constraint(self, tcx: TyCtxt<'tcx>) -> Option<ty::EarlyBinder<'tcx, Ty<'tcx>>> {
if self.is_struct() { tcx.adt_sized_constraint(self.did()) } else { None }
/// If this ADT is a struct, returns a type such that `Self: {Meta,Pointee,}Sized` if and only
/// if that type is `{Meta,Pointee,}Sized`, or `None` if this ADT is always
/// `{Meta,Pointee,}Sized`.
pub fn sizedness_constraint(
self,
tcx: TyCtxt<'tcx>,
sizedness: ty::SizedTraitKind,
) -> Option<ty::EarlyBinder<'tcx, Ty<'tcx>>> {
if self.is_struct() { tcx.adt_sizedness_constraint((self.did(), sizedness)) } else { None }
}
}

View file

@ -13,7 +13,7 @@ use std::marker::DiscriminantKind;
use rustc_abi::{FieldIdx, VariantIdx};
use rustc_data_structures::fx::FxHashMap;
use rustc_hir::def_id::LocalDefId;
use rustc_serialize::{Decodable, Encodable};
use rustc_serialize::{Decodable, Encodable, PointeeSized};
use rustc_span::source_map::Spanned;
use rustc_span::{Span, SpanDecoder, SpanEncoder};
@ -96,7 +96,7 @@ impl<'tcx, E: TyEncoder<'tcx>> EncodableWithShorthand<'tcx, E> for ty::Predicate
///
/// `Decodable` can still be implemented in cases where `Decodable` is required
/// by a trait bound.
pub trait RefDecodable<'tcx, D: TyDecoder<'tcx>> {
pub trait RefDecodable<'tcx, D: TyDecoder<'tcx>>: PointeeSized {
fn decode(d: &mut D) -> &'tcx Self;
}

View file

@ -43,6 +43,7 @@ use rustc_macros::{HashStable, TyDecodable, TyEncodable};
use rustc_query_system::cache::WithDepNode;
use rustc_query_system::dep_graph::DepNodeIndex;
use rustc_query_system::ich::StableHashingContext;
use rustc_serialize::PointeeSized;
use rustc_serialize::opaque::{FileEncodeResult, FileEncoder};
use rustc_session::config::CrateType;
use rustc_session::cstore::{CrateStoreDyn, Untracked};
@ -774,7 +775,9 @@ bidirectional_lang_item_map! {
FutureOutput,
Iterator,
Metadata,
MetaSized,
Option,
PointeeSized,
PointeeTrait,
Poll,
Sized,
@ -1616,16 +1619,17 @@ impl<'tcx> TyCtxt<'tcx> {
self.reserve_and_set_memory_dedup(alloc, salt)
}
/// Traits added on all bounds by default, excluding `Sized` which is treated separately.
pub fn default_traits(self) -> &'static [rustc_hir::LangItem] {
match self.sess.opts.unstable_opts.experimental_default_bounds {
true => &[
LangItem::Sized,
if self.sess.opts.unstable_opts.experimental_default_bounds {
&[
LangItem::DefaultTrait1,
LangItem::DefaultTrait2,
LangItem::DefaultTrait3,
LangItem::DefaultTrait4,
],
false => &[LangItem::Sized],
]
} else {
&[]
}
}
@ -2538,17 +2542,17 @@ impl<'tcx> TyCtxt<'tcx> {
// this type just holds a pointer to it, but it still effectively owns it. It
// impls `Borrow` so that it can be looked up using the original
// (non-arena-memory-owning) types.
struct InternedInSet<'tcx, T: ?Sized>(&'tcx T);
struct InternedInSet<'tcx, T: ?Sized + PointeeSized>(&'tcx T);
impl<'tcx, T: 'tcx + ?Sized> Clone for InternedInSet<'tcx, T> {
impl<'tcx, T: 'tcx + ?Sized + PointeeSized> Clone for InternedInSet<'tcx, T> {
fn clone(&self) -> Self {
InternedInSet(self.0)
}
}
impl<'tcx, T: 'tcx + ?Sized> Copy for InternedInSet<'tcx, T> {}
impl<'tcx, T: 'tcx + ?Sized + PointeeSized> Copy for InternedInSet<'tcx, T> {}
impl<'tcx, T: 'tcx + ?Sized> IntoPointer for InternedInSet<'tcx, T> {
impl<'tcx, T: 'tcx + ?Sized + PointeeSized> IntoPointer for InternedInSet<'tcx, T> {
fn into_pointer(&self) -> *const () {
self.0 as *const _ as *const ()
}

View file

@ -60,6 +60,7 @@ pub use rustc_type_ir::fast_reject::DeepRejectCtxt;
)]
use rustc_type_ir::inherent;
pub use rustc_type_ir::relate::VarianceDiagInfo;
pub use rustc_type_ir::solve::SizedTraitKind;
pub use rustc_type_ir::*;
#[allow(hidden_glob_reexports, unused_imports)]
use rustc_type_ir::{InferCtxtLike, Interner};

View file

@ -1069,24 +1069,35 @@ pub trait PrettyPrinter<'tcx>: Printer<'tcx> + fmt::Write {
let mut traits = FxIndexMap::default();
let mut fn_traits = FxIndexMap::default();
let mut lifetimes = SmallVec::<[ty::Region<'tcx>; 1]>::new();
let mut has_sized_bound = false;
let mut has_negative_sized_bound = false;
let mut lifetimes = SmallVec::<[ty::Region<'tcx>; 1]>::new();
let mut has_meta_sized_bound = false;
for (predicate, _) in bounds.iter_instantiated_copied(tcx, args) {
let bound_predicate = predicate.kind();
match bound_predicate.skip_binder() {
ty::ClauseKind::Trait(pred) => {
// Don't print `+ Sized`, but rather `+ ?Sized` if absent.
if tcx.is_lang_item(pred.def_id(), LangItem::Sized) {
match pred.polarity {
// With `feature(sized_hierarchy)`, don't print `?Sized` as an alias for
// `MetaSized`, and skip sizedness bounds to be added at the end.
match tcx.as_lang_item(pred.def_id()) {
Some(LangItem::Sized) => match pred.polarity {
ty::PredicatePolarity::Positive => {
has_sized_bound = true;
continue;
}
ty::PredicatePolarity::Negative => has_negative_sized_bound = true,
},
Some(LangItem::MetaSized) => {
has_meta_sized_bound = true;
continue;
}
Some(LangItem::PointeeSized) => {
bug!("`PointeeSized` is removed during lowering");
}
_ => (),
}
self.insert_trait_and_projection(
@ -1255,8 +1266,13 @@ pub trait PrettyPrinter<'tcx>: Printer<'tcx> + fmt::Write {
})?;
}
let using_sized_hierarchy = self.tcx().features().sized_hierarchy();
let add_sized = has_sized_bound && (first || has_negative_sized_bound);
let add_maybe_sized = !has_sized_bound && !has_negative_sized_bound;
let add_maybe_sized =
has_meta_sized_bound && !has_negative_sized_bound && !using_sized_hierarchy;
// Set `has_pointee_sized_bound` if there were no `Sized` or `MetaSized` bounds.
let has_pointee_sized_bound =
!has_sized_bound && !has_meta_sized_bound && !has_negative_sized_bound;
if add_sized || add_maybe_sized {
if !first {
write!(self, " + ")?;
@ -1265,6 +1281,16 @@ pub trait PrettyPrinter<'tcx>: Printer<'tcx> + fmt::Write {
write!(self, "?")?;
}
write!(self, "Sized")?;
} else if has_meta_sized_bound && using_sized_hierarchy {
if !first {
write!(self, " + ")?;
}
write!(self, "MetaSized")?;
} else if has_pointee_sized_bound && using_sized_hierarchy {
if !first {
write!(self, " + ")?;
}
write!(self, "PointeeSized")?;
}
if !with_forced_trimmed_paths() {

View file

@ -15,6 +15,7 @@ use rustc_hir::def_id::DefId;
use rustc_macros::{HashStable, TyDecodable, TyEncodable, TypeFoldable, extension};
use rustc_span::{DUMMY_SP, Span, Symbol, sym};
use rustc_type_ir::TyKind::*;
use rustc_type_ir::solve::SizedTraitKind;
use rustc_type_ir::walk::TypeWalker;
use rustc_type_ir::{self as ir, BoundVar, CollectAndApply, DynKind, TypeVisitableExt, elaborate};
use tracing::instrument;
@ -1677,7 +1678,7 @@ impl<'tcx> Ty<'tcx> {
let Some(pointee_ty) = self.builtin_deref(true) else {
bug!("Type {self:?} is not a pointer or reference type")
};
if pointee_ty.is_trivially_sized(tcx) {
if pointee_ty.has_trivial_sizedness(tcx, SizedTraitKind::Sized) {
tcx.types.unit
} else {
match pointee_ty.ptr_metadata_ty_or_tail(tcx, |x| x) {
@ -1778,17 +1779,17 @@ impl<'tcx> Ty<'tcx> {
}
}
/// Fast path helper for testing if a type is `Sized`.
/// Fast path helper for testing if a type is `Sized` or `MetaSized`.
///
/// Returning true means the type is known to implement `Sized`. Returning `false` means
/// nothing -- could be sized, might not be.
/// Returning true means the type is known to implement the sizedness trait. Returning `false`
/// means nothing -- could be sized, might not be.
///
/// Note that we could never rely on the fact that a type such as `[_]` is trivially `!Sized`
/// because we could be in a type environment with a bound such as `[_]: Copy`. A function with
/// such a bound obviously never can be called, but that doesn't mean it shouldn't typecheck.
/// This is why this method doesn't return `Option<bool>`.
#[instrument(skip(tcx), level = "debug")]
pub fn is_trivially_sized(self, tcx: TyCtxt<'tcx>) -> bool {
pub fn has_trivial_sizedness(self, tcx: TyCtxt<'tcx>, sizedness: SizedTraitKind) -> bool {
match self.kind() {
ty::Infer(ty::IntVar(_) | ty::FloatVar(_))
| ty::Uint(_)
@ -1811,13 +1812,20 @@ impl<'tcx> Ty<'tcx> {
| ty::Error(_)
| ty::Dynamic(_, _, ty::DynStar) => true,
ty::Str | ty::Slice(_) | ty::Dynamic(_, _, ty::Dyn) | ty::Foreign(..) => false,
ty::Str | ty::Slice(_) | ty::Dynamic(_, _, ty::Dyn) => match sizedness {
SizedTraitKind::Sized => false,
SizedTraitKind::MetaSized => true,
},
ty::Tuple(tys) => tys.last().is_none_or(|ty| ty.is_trivially_sized(tcx)),
ty::Foreign(..) => match sizedness {
SizedTraitKind::Sized | SizedTraitKind::MetaSized => false,
},
ty::Tuple(tys) => tys.last().is_none_or(|ty| ty.has_trivial_sizedness(tcx, sizedness)),
ty::Adt(def, args) => def
.sized_constraint(tcx)
.is_none_or(|ty| ty.instantiate(tcx, args).is_trivially_sized(tcx)),
.sizedness_constraint(tcx, sizedness)
.is_none_or(|ty| ty.instantiate(tcx, args).has_trivial_sizedness(tcx, sizedness)),
ty::Alias(..) | ty::Param(_) | ty::Placeholder(..) | ty::Bound(..) => false,

View file

@ -16,6 +16,7 @@ use rustc_index::bit_set::GrowableBitSet;
use rustc_macros::{HashStable, TyDecodable, TyEncodable, extension};
use rustc_session::Limit;
use rustc_span::sym;
use rustc_type_ir::solve::SizedTraitKind;
use smallvec::{SmallVec, smallvec};
use tracing::{debug, instrument};
@ -1132,7 +1133,8 @@ impl<'tcx> Ty<'tcx> {
/// strange rules like `<T as Foo<'static>>::Bar: Sized` that
/// actually carry lifetime requirements.
pub fn is_sized(self, tcx: TyCtxt<'tcx>, typing_env: ty::TypingEnv<'tcx>) -> bool {
self.is_trivially_sized(tcx) || tcx.is_sized_raw(typing_env.as_query_input(self))
self.has_trivial_sizedness(tcx, SizedTraitKind::Sized)
|| tcx.is_sized_raw(typing_env.as_query_input(self))
}
/// Checks whether values of this type `T` implement the `Freeze`

View file

@ -74,7 +74,11 @@ impl SimplifyCfg {
}
pub(super) fn simplify_cfg<'tcx>(tcx: TyCtxt<'tcx>, body: &mut Body<'tcx>) {
CfgSimplifier::new(tcx, body).simplify();
if CfgSimplifier::new(tcx, body).simplify() {
// `simplify` returns that it changed something. We must invalidate the CFG caches as they
// are not consistent with the modified CFG any more.
body.basic_blocks.invalidate_cfg_cache();
}
remove_dead_blocks(body);
// FIXME: Should probably be moved into some kind of pass manager
@ -121,12 +125,16 @@ impl<'a, 'tcx> CfgSimplifier<'a, 'tcx> {
// Preserve `SwitchInt` reads on built and analysis MIR, or if `-Zmir-preserve-ub`.
let preserve_switch_reads = matches!(body.phase, MirPhase::Built | MirPhase::Analysis(_))
|| tcx.sess.opts.unstable_opts.mir_preserve_ub;
let basic_blocks = body.basic_blocks_mut();
// Do not clear caches yet. The caller to `simplify` will do it if anything changed.
let basic_blocks = body.basic_blocks.as_mut_preserves_cfg();
CfgSimplifier { preserve_switch_reads, basic_blocks, pred_count }
}
fn simplify(mut self) {
/// Returns whether we actually simplified anything. In that case, the caller *must* invalidate
/// the CFG caches of the MIR body.
#[must_use]
fn simplify(mut self) -> bool {
self.strip_nops();
// Vec of the blocks that should be merged. We store the indices here, instead of the
@ -134,6 +142,7 @@ impl<'a, 'tcx> CfgSimplifier<'a, 'tcx> {
// We do not push the statements directly into the target block (`bb`) as that is slower
// due to additional reallocations
let mut merged_blocks = Vec::new();
let mut outer_changed = false;
loop {
let mut changed = false;
@ -177,7 +186,11 @@ impl<'a, 'tcx> CfgSimplifier<'a, 'tcx> {
if !changed {
break;
}
outer_changed = true;
}
outer_changed
}
/// This function will return `None` if

View file

@ -8,6 +8,7 @@ use std::ops::ControlFlow;
use derive_where::derive_where;
use rustc_type_ir::inherent::*;
use rustc_type_ir::lang_items::TraitSolverLangItem;
use rustc_type_ir::solve::SizedTraitKind;
use rustc_type_ir::{
self as ty, Interner, TypeFoldable, TypeSuperVisitable, TypeVisitable, TypeVisitableExt as _,
TypeVisitor, TypingMode, Upcast as _, elaborate,
@ -203,13 +204,15 @@ where
goal: Goal<I, Self>,
) -> Result<Candidate<I>, NoSolution>;
/// A type is `Sized` if its tail component is `Sized`.
/// A type is `Sized` if its tail component is `Sized` and a type is `MetaSized` if its tail
/// component is `MetaSized`.
///
/// These components are given by built-in rules from
/// [`structural_traits::instantiate_constituent_tys_for_sized_trait`].
fn consider_builtin_sized_candidate(
/// [`structural_traits::instantiate_constituent_tys_for_sizedness_trait`].
fn consider_builtin_sizedness_candidates(
ecx: &mut EvalCtxt<'_, D>,
goal: Goal<I, Self>,
sizedness: SizedTraitKind,
) -> Result<Candidate<I>, NoSolution>;
/// A type is `Copy` or `Clone` if its components are `Copy` or `Clone`.
@ -466,7 +469,15 @@ where
G::consider_trait_alias_candidate(self, goal)
} else {
match cx.as_lang_item(trait_def_id) {
Some(TraitSolverLangItem::Sized) => G::consider_builtin_sized_candidate(self, goal),
Some(TraitSolverLangItem::Sized) => {
G::consider_builtin_sizedness_candidates(self, goal, SizedTraitKind::Sized)
}
Some(TraitSolverLangItem::MetaSized) => {
G::consider_builtin_sizedness_candidates(self, goal, SizedTraitKind::MetaSized)
}
Some(TraitSolverLangItem::PointeeSized) => {
unreachable!("`PointeeSized` is removed during lowering");
}
Some(TraitSolverLangItem::Copy | TraitSolverLangItem::Clone) => {
G::consider_builtin_copy_clone_candidate(self, goal)
}

View file

@ -5,6 +5,7 @@ use derive_where::derive_where;
use rustc_type_ir::data_structures::HashMap;
use rustc_type_ir::inherent::*;
use rustc_type_ir::lang_items::TraitSolverLangItem;
use rustc_type_ir::solve::SizedTraitKind;
use rustc_type_ir::solve::inspect::ProbeKind;
use rustc_type_ir::{
self as ty, FallibleTypeFolder, Interner, Movability, Mutability, TypeFoldable,
@ -104,8 +105,9 @@ where
}
#[instrument(level = "trace", skip(ecx), ret)]
pub(in crate::solve) fn instantiate_constituent_tys_for_sized_trait<D, I>(
pub(in crate::solve) fn instantiate_constituent_tys_for_sizedness_trait<D, I>(
ecx: &EvalCtxt<'_, D>,
sizedness: SizedTraitKind,
ty: I::Ty,
) -> Result<ty::Binder<I, Vec<I::Ty>>, NoSolution>
where
@ -113,8 +115,9 @@ where
I: Interner,
{
match ty.kind() {
// impl Sized for u*, i*, bool, f*, FnDef, FnPtr, *(const/mut) T, char, &mut? T, [T; N], dyn* Trait, !
// impl Sized for Coroutine, CoroutineWitness, Closure, CoroutineClosure
// impl {Meta,}Sized for u*, i*, bool, f*, FnDef, FnPtr, *(const/mut) T, char
// impl {Meta,}Sized for &mut? T, [T; N], dyn* Trait, !, Coroutine, CoroutineWitness
// impl {Meta,}Sized for Closure, CoroutineClosure
ty::Infer(ty::IntVar(_) | ty::FloatVar(_))
| ty::Uint(_)
| ty::Int(_)
@ -135,13 +138,16 @@ where
| ty::Dynamic(_, _, ty::DynStar)
| ty::Error(_) => Ok(ty::Binder::dummy(vec![])),
ty::Str
| ty::Slice(_)
| ty::Dynamic(..)
| ty::Foreign(..)
| ty::Alias(..)
| ty::Param(_)
| ty::Placeholder(..) => Err(NoSolution),
// impl {Meta,}Sized for str, [T], dyn Trait
ty::Str | ty::Slice(_) | ty::Dynamic(..) => match sizedness {
SizedTraitKind::Sized => Err(NoSolution),
SizedTraitKind::MetaSized => Ok(ty::Binder::dummy(vec![])),
},
// impl {} for extern type
ty::Foreign(..) => Err(NoSolution),
ty::Alias(..) | ty::Param(_) | ty::Placeholder(..) => Err(NoSolution),
ty::Bound(..)
| ty::Infer(ty::TyVar(_) | ty::FreshTy(_) | ty::FreshIntTy(_) | ty::FreshFloatTy(_)) => {
@ -150,22 +156,27 @@ where
ty::UnsafeBinder(bound_ty) => Ok(bound_ty.map_bound(|ty| vec![ty])),
// impl Sized for ()
// impl Sized for (T1, T2, .., Tn) where Tn: Sized if n >= 1
// impl {Meta,}Sized for ()
// impl {Meta,}Sized for (T1, T2, .., Tn) where Tn: {Meta,}Sized if n >= 1
ty::Tuple(tys) => Ok(ty::Binder::dummy(tys.last().map_or_else(Vec::new, |ty| vec![ty]))),
// impl Sized for Adt<Args...> where sized_constraint(Adt)<Args...>: Sized
// `sized_constraint(Adt)` is the deepest struct trail that can be determined
// by the definition of `Adt`, independent of the generic args.
// impl Sized for Adt<Args...> if sized_constraint(Adt) == None
// As a performance optimization, `sized_constraint(Adt)` can return `None`
// if the ADTs definition implies that it is sized by for all possible args.
// impl {Meta,}Sized for Adt<Args...>
// where {meta,pointee,}sized_constraint(Adt)<Args...>: {Meta,}Sized
//
// `{meta,pointee,}sized_constraint(Adt)` is the deepest struct trail that can be
// determined by the definition of `Adt`, independent of the generic args.
//
// impl {Meta,}Sized for Adt<Args...>
// if {meta,pointee,}sized_constraint(Adt) == None
//
// As a performance optimization, `{meta,pointee,}sized_constraint(Adt)` can return `None`
// if the ADTs definition implies that it is {meta,}sized by for all possible args.
// In this case, the builtin impl will have no nested subgoals. This is a
// "best effort" optimization and `sized_constraint` may return `Some`, even
// if the ADT is sized for all possible args.
// "best effort" optimization and `{meta,pointee,}sized_constraint` may return `Some`,
// even if the ADT is {meta,pointee,}sized for all possible args.
ty::Adt(def, args) => {
if let Some(sized_crit) = def.sized_constraint(ecx.cx()) {
Ok(ty::Binder::dummy(vec![sized_crit.instantiate(ecx.cx(), args)]))
if let Some(crit) = def.sizedness_constraint(ecx.cx(), sizedness) {
Ok(ty::Binder::dummy(vec![crit.instantiate(ecx.cx(), args)]))
} else {
Ok(ty::Binder::dummy(vec![]))
}

View file

@ -4,6 +4,7 @@
use rustc_type_ir::fast_reject::DeepRejectCtxt;
use rustc_type_ir::inherent::*;
use rustc_type_ir::lang_items::TraitSolverLangItem;
use rustc_type_ir::solve::SizedTraitKind;
use rustc_type_ir::solve::inspect::ProbeKind;
use rustc_type_ir::{self as ty, Interner, elaborate};
use tracing::instrument;
@ -198,11 +199,12 @@ where
unreachable!("trait aliases are never const")
}
fn consider_builtin_sized_candidate(
fn consider_builtin_sizedness_candidates(
_ecx: &mut EvalCtxt<'_, D>,
_goal: Goal<I, Self>,
_sizedness: SizedTraitKind,
) -> Result<Candidate<I>, NoSolution> {
unreachable!("Sized is never const")
unreachable!("Sized/MetaSized is never const")
}
fn consider_builtin_copy_clone_candidate(

View file

@ -6,6 +6,7 @@ mod opaque_types;
use rustc_type_ir::fast_reject::DeepRejectCtxt;
use rustc_type_ir::inherent::*;
use rustc_type_ir::lang_items::TraitSolverLangItem;
use rustc_type_ir::solve::SizedTraitKind;
use rustc_type_ir::{self as ty, Interner, NormalizesTo, PredicateKind, Upcast as _};
use tracing::instrument;
@ -413,11 +414,12 @@ where
panic!("trait aliases do not have associated types: {:?}", goal);
}
fn consider_builtin_sized_candidate(
fn consider_builtin_sizedness_candidates(
_ecx: &mut EvalCtxt<'_, D>,
goal: Goal<I, Self>,
_sizedness: SizedTraitKind,
) -> Result<Candidate<I>, NoSolution> {
panic!("`Sized` does not have an associated type: {:?}", goal);
panic!("`Sized`/`MetaSized` does not have an associated type: {:?}", goal);
}
fn consider_builtin_copy_clone_candidate(

View file

@ -4,9 +4,9 @@ use rustc_type_ir::data_structures::IndexSet;
use rustc_type_ir::fast_reject::DeepRejectCtxt;
use rustc_type_ir::inherent::*;
use rustc_type_ir::lang_items::TraitSolverLangItem;
use rustc_type_ir::solve::CanonicalResponse;
use rustc_type_ir::solve::{CanonicalResponse, SizedTraitKind};
use rustc_type_ir::{
self as ty, Interner, Movability, TraitPredicate, TypeVisitableExt as _, TypingMode,
self as ty, Interner, Movability, TraitPredicate, TraitRef, TypeVisitableExt as _, TypingMode,
Upcast as _, elaborate,
};
use tracing::{debug, instrument, trace};
@ -131,9 +131,11 @@ where
assumption: I::Clause,
) -> Result<(), NoSolution> {
if let Some(trait_clause) = assumption.as_trait_clause() {
if trait_clause.def_id() == goal.predicate.def_id()
&& trait_clause.polarity() == goal.predicate.polarity
{
if trait_clause.polarity() != goal.predicate.polarity {
return Err(NoSolution);
}
if trait_clause.def_id() == goal.predicate.def_id() {
if DeepRejectCtxt::relate_rigid_rigid(ecx.cx()).args_may_unify(
goal.predicate.trait_ref.args,
trait_clause.skip_binder().trait_ref.args,
@ -141,6 +143,17 @@ where
return Ok(());
}
}
// PERF(sized-hierarchy): Sizedness supertraits aren't elaborated to improve perf, so
// check for a `Sized` subtrait when looking for `MetaSized`. `PointeeSized` bounds
// are syntactic sugar for a lack of bounds so don't need this.
if ecx.cx().is_lang_item(goal.predicate.def_id(), TraitSolverLangItem::MetaSized)
&& ecx.cx().is_lang_item(trait_clause.def_id(), TraitSolverLangItem::Sized)
{
let meta_sized_clause =
trait_predicate_with_def_id(ecx.cx(), trait_clause, goal.predicate.def_id());
return Self::fast_reject_assumption(ecx, goal, meta_sized_clause);
}
}
Err(NoSolution)
@ -154,6 +167,17 @@ where
) -> QueryResult<I> {
let trait_clause = assumption.as_trait_clause().unwrap();
// PERF(sized-hierarchy): Sizedness supertraits aren't elaborated to improve perf, so
// check for a `Sized` subtrait when looking for `MetaSized`. `PointeeSized` bounds
// are syntactic sugar for a lack of bounds so don't need this.
if ecx.cx().is_lang_item(goal.predicate.def_id(), TraitSolverLangItem::MetaSized)
&& ecx.cx().is_lang_item(trait_clause.def_id(), TraitSolverLangItem::Sized)
{
let meta_sized_clause =
trait_predicate_with_def_id(ecx.cx(), trait_clause, goal.predicate.def_id());
return Self::match_assumption(ecx, goal, meta_sized_clause, then);
}
let assumption_trait_pred = ecx.instantiate_binder_with_infer(trait_clause);
ecx.eq(goal.param_env, goal.predicate.trait_ref, assumption_trait_pred.trait_ref)?;
@ -245,9 +269,10 @@ where
})
}
fn consider_builtin_sized_candidate(
fn consider_builtin_sizedness_candidates(
ecx: &mut EvalCtxt<'_, D>,
goal: Goal<I, Self>,
sizedness: SizedTraitKind,
) -> Result<Candidate<I>, NoSolution> {
if goal.predicate.polarity != ty::PredicatePolarity::Positive {
return Err(NoSolution);
@ -256,7 +281,11 @@ where
ecx.probe_and_evaluate_goal_for_constituent_tys(
CandidateSource::BuiltinImpl(BuiltinImplSource::Trivial),
goal,
structural_traits::instantiate_constituent_tys_for_sized_trait,
|ecx, ty| {
structural_traits::instantiate_constituent_tys_for_sizedness_trait(
ecx, sizedness, ty,
)
},
)
}
@ -812,6 +841,25 @@ where
}
}
/// Small helper function to change the `def_id` of a trait predicate - this is not normally
/// something that you want to do, as different traits will require different args and so making
/// it easy to change the trait is something of a footgun, but it is useful in the narrow
/// circumstance of changing from `MetaSized` to `Sized`, which happens as part of the lazy
/// elaboration of sizedness candidates.
#[inline(always)]
fn trait_predicate_with_def_id<I: Interner>(
cx: I,
clause: ty::Binder<I, ty::TraitPredicate<I>>,
did: I::DefId,
) -> I::Clause {
clause
.map_bound(|c| TraitPredicate {
trait_ref: TraitRef::new_from_args(cx, did, c.trait_ref.args),
polarity: c.polarity,
})
.upcast(cx)
}
impl<D, I> EvalCtxt<'_, D>
where
D: SolverDelegate<Interner = I>,

View file

@ -4087,7 +4087,7 @@ impl<'a> CondChecker<'a> {
}
impl MutVisitor for CondChecker<'_> {
fn visit_expr(&mut self, e: &mut P<Expr>) {
fn visit_expr(&mut self, e: &mut Expr) {
self.depth += 1;
use ForbiddenLetReason::*;

View file

@ -1094,7 +1094,7 @@ impl<'a> Parser<'a> {
fn make_all_value_bindings_mutable(pat: &mut P<Pat>) -> bool {
struct AddMut(bool);
impl MutVisitor for AddMut {
fn visit_pat(&mut self, pat: &mut P<Pat>) {
fn visit_pat(&mut self, pat: &mut Pat) {
if let PatKind::Ident(BindingMode(ByRef::No, m @ Mutability::Not), ..) =
&mut pat.kind
{

View file

@ -3,6 +3,7 @@
// tidy-alphabetical-start
#![allow(internal_features)]
#![allow(rustc::internal)]
#![cfg_attr(not(bootstrap), feature(sized_hierarchy))]
#![cfg_attr(test, feature(test))]
#![doc(
html_root_url = "https://doc.rust-lang.org/nightly/nightly-rustc/",
@ -27,3 +28,19 @@ mod serialize;
pub mod int_overflow;
pub mod leb128;
pub mod opaque;
// This has nothing to do with `rustc_serialize` but it is convenient to define it in one place
// for the rest of the compiler so that `cfg(bootstrap)` doesn't need to be littered throughout
// the compiler wherever `PointeeSized` would be used. `rustc_serialize` happens to be the deepest
// crate in the crate graph which uses `PointeeSized`.
//
// When bootstrap bumps, remove both the `cfg(not(bootstrap))` and `cfg(bootstrap)` lines below
// and just import `std::marker::PointeeSized` wherever this item was used.
#[cfg(not(bootstrap))]
pub use std::marker::PointeeSized;
#[cfg(bootstrap)]
pub trait PointeeSized {}
#[cfg(bootstrap)]
impl<T: ?Sized> PointeeSized for T {}

View file

@ -142,7 +142,7 @@ pub trait Decoder {
/// `rustc_metadata::rmeta::Lazy`.
/// * `TyEncodable` should be used for types that are only serialized in crate
/// metadata or the incremental cache. This is most types in `rustc_middle`.
pub trait Encodable<S: Encoder> {
pub trait Encodable<S: Encoder>: crate::PointeeSized {
fn encode(&self, s: &mut S);
}
@ -198,7 +198,7 @@ direct_serialize_impls! {
char emit_char read_char
}
impl<S: Encoder, T: ?Sized> Encodable<S> for &T
impl<S: Encoder, T: ?Sized + crate::PointeeSized> Encodable<S> for &T
where
T: Encodable<S>,
{

View file

@ -3118,6 +3118,7 @@ pub(crate) mod dep_tracking {
}
impl_dep_tracking_hash_via_hash!(
(),
AutoDiff,
bool,
usize,

View file

@ -802,6 +802,7 @@ mod desc {
"either a boolean (`yes`, `no`, `on`, `off`, etc), or a non-negative number";
pub(crate) const parse_llvm_module_flag: &str = "<key>:<type>:<value>:<behavior>. Type must currently be `u32`. Behavior should be one of (`error`, `warning`, `require`, `override`, `append`, `appendunique`, `max`, `min`)";
pub(crate) const parse_function_return: &str = "`keep` or `thunk-extern`";
pub(crate) const parse_wasm_c_abi: &str = "`spec`";
pub(crate) const parse_mir_include_spans: &str =
"either a boolean (`yes`, `no`, `on`, `off`, etc), or `nll` (default: `nll`)";
pub(crate) const parse_align: &str = "a number that is a power of 2 between 1 and 2^29";
@ -1897,6 +1898,10 @@ pub mod parse {
true
}
pub(crate) fn parse_wasm_c_abi(_slot: &mut (), v: Option<&str>) -> bool {
v == Some("spec")
}
pub(crate) fn parse_mir_include_spans(slot: &mut MirIncludeSpans, v: Option<&str>) -> bool {
*slot = match v {
Some("on" | "yes" | "y" | "true") | None => MirIncludeSpans::On,
@ -2631,6 +2636,11 @@ written to standard error output)"),
Requires `-Clto[=[fat,yes]]`"),
wasi_exec_model: Option<WasiExecModel> = (None, parse_wasi_exec_model, [TRACKED],
"whether to build a wasi command or reactor"),
// This option only still exists to provide a more gradual transition path for people who need
// the spec-compliant C ABI to be used.
// FIXME remove this after a couple releases
wasm_c_abi: () = ((), parse_wasm_c_abi, [TRACKED],
"use spec-compliant C ABI for `wasm32-unknown-unknown` (deprecated, always enabled)"),
write_long_types_to_disk: bool = (true, parse_bool, [UNTRACKED],
"whether long type names should be written to files instead of being printed in errors"),
// tidy-alphabetical-end

View file

@ -9,6 +9,7 @@
// tidy-alphabetical-start
#![allow(internal_features)]
#![allow(rustc::usage_of_ty_tykind)]
#![cfg_attr(not(bootstrap), feature(sized_hierarchy))]
#![doc(
html_root_url = "https://doc.rust-lang.org/nightly/nightly-rustc/",
test(attr(allow(unused_variables), deny(warnings)))

View file

@ -9,6 +9,7 @@
use std::ops::RangeInclusive;
use rustc_data_structures::PointeeSized;
use rustc_hir::def::DefKind;
use rustc_middle::mir;
use rustc_middle::mir::interpret::AllocId;
@ -158,7 +159,7 @@ pub(crate) fn new_item_kind(kind: DefKind) -> ItemKind {
}
/// Trait used to convert between an internal MIR type to a Stable MIR type.
pub trait Stable<'cx> {
pub trait Stable<'cx>: PointeeSized {
/// The stable representation of the type implementing Stable.
type T;
/// Converts an object to the equivalent Stable MIR representation.

View file

@ -1355,6 +1355,7 @@ symbols! {
memtag,
message,
meta,
meta_sized,
metadata_type,
min_const_fn,
min_const_generics,
@ -1613,6 +1614,7 @@ symbols! {
plugin_registrar,
plugins,
pointee,
pointee_sized,
pointee_trait,
pointer,
pointer_like,
@ -2016,6 +2018,7 @@ symbols! {
size_of,
size_of_val,
sized,
sized_hierarchy,
skip,
slice,
slice_from_raw_parts,

View file

@ -6,33 +6,36 @@
//! to a new platform, and allows for an unprecedented level of control over how
//! the compiler works.
//!
//! # Using custom targets
//! # Using targets and target.json
//!
//! A target tuple, as passed via `rustc --target=TUPLE`, will first be
//! compared against the list of built-in targets. This is to ease distributing
//! rustc (no need for configuration files) and also to hold these built-in
//! targets as immutable and sacred. If `TUPLE` is not one of the built-in
//! targets, rustc will check if a file named `TUPLE` exists. If it does, it
//! will be loaded as the target configuration. If the file does not exist,
//! rustc will search each directory in the environment variable
//! `RUST_TARGET_PATH` for a file named `TUPLE.json`. The first one found will
//! be loaded. If no file is found in any of those directories, a fatal error
//! will be given.
//! Invoking "rustc --target=${TUPLE}" will result in rustc initiating the [`Target::search`] by
//! - checking if "$TUPLE" is a complete path to a json (ending with ".json") and loading if so
//! - checking builtin targets for "${TUPLE}"
//! - checking directories in "${RUST_TARGET_PATH}" for "${TUPLE}.json"
//! - checking for "${RUSTC_SYSROOT}/lib/rustlib/${TUPLE}/target.json"
//!
//! Projects defining their own targets should use
//! `--target=path/to/my-awesome-platform.json` instead of adding to
//! `RUST_TARGET_PATH`.
//! Code will then be compiled using the first discovered target spec.
//!
//! # Defining a new target
//!
//! Targets are defined using [JSON](https://json.org/). The `Target` struct in
//! this module defines the format the JSON file should take, though each
//! underscore in the field names should be replaced with a hyphen (`-`) in the
//! JSON file. Some fields are required in every target specification, such as
//! `llvm-target`, `target-endian`, `target-pointer-width`, `data-layout`,
//! `arch`, and `os`. In general, options passed to rustc with `-C` override
//! the target's settings, though `target-feature` and `link-args` will *add*
//! to the list specified by the target, rather than replace.
//! Targets are defined using a struct which additionally has serialization to and from [JSON].
//! The `Target` struct in this module loosely corresponds with the format the JSON takes.
//! We usually try to make the fields equivalent but we have given up on a 1:1 correspondence
//! between the JSON and the actual structure itself.
//!
//! Some fields are required in every target spec, and they should be embedded in Target directly.
//! Optional keys are in TargetOptions, but Target derefs to it, for no practical difference.
//! Most notable is the "data-layout" field which specifies Rust's notion of sizes and alignments
//! for several key types, such as f64, pointers, and so on.
//!
//! At one point we felt `-C` options should override the target's settings, like in C compilers,
//! but that was an essentially-unmarked route for making code incorrect and Rust unsound.
//! Confronted with programmers who prefer a compiler with a good UX instead of a lethal weapon,
//! we have almost-entirely recanted that notion, though we hope "target modifiers" will offer
//! a way to have a decent UX yet still extend the necessary compiler controls, without
//! requiring a new target spec for each and every single possible target micro-variant.
//!
//! [JSON]: https://json.org
use std::borrow::Cow;
use std::collections::BTreeMap;

View file

@ -10,6 +10,7 @@ pub(crate) fn target() -> Target {
base.add_pre_link_args(LinkerFlavor::Gnu(Cc::Yes, Lld::No), &["-m64"]);
base.max_atomic_width = Some(64);
base.stack_probes = StackProbeType::Inline;
base.llvm_abiname = "elfv2".into();
Target {
llvm_target: "powerpc64-unknown-freebsd".into(),

View file

@ -10,6 +10,7 @@ pub(crate) fn target() -> Target {
base.add_pre_link_args(LinkerFlavor::Gnu(Cc::Yes, Lld::No), &["-m64"]);
base.max_atomic_width = Some(64);
base.stack_probes = StackProbeType::Inline;
base.llvm_abiname = "elfv1".into();
Target {
llvm_target: "powerpc64-unknown-linux-gnu".into(),

View file

@ -12,6 +12,7 @@ pub(crate) fn target() -> Target {
base.stack_probes = StackProbeType::Inline;
// FIXME(compiler-team#422): musl targets should be dynamically linked by default.
base.crt_static_default = true;
base.llvm_abiname = "elfv2".into();
Target {
llvm_target: "powerpc64-unknown-linux-musl".into(),

View file

@ -10,6 +10,7 @@ pub(crate) fn target() -> Target {
base.add_pre_link_args(LinkerFlavor::Gnu(Cc::Yes, Lld::No), &["-m64"]);
base.max_atomic_width = Some(64);
base.stack_probes = StackProbeType::Inline;
base.llvm_abiname = "elfv2".into();
Target {
llvm_target: "powerpc64-unknown-openbsd".into(),

View file

@ -10,6 +10,7 @@ pub(crate) fn target() -> Target {
base.add_pre_link_args(LinkerFlavor::Gnu(Cc::Yes, Lld::No), &["-m64"]);
base.max_atomic_width = Some(64);
base.stack_probes = StackProbeType::Inline;
base.llvm_abiname = "elfv1".into();
Target {
llvm_target: "powerpc64-unknown-linux-gnu".into(),

View file

@ -8,6 +8,7 @@ pub(crate) fn target() -> Target {
base.add_pre_link_args(LinkerFlavor::Gnu(Cc::Yes, Lld::No), &["-m64"]);
base.max_atomic_width = Some(64);
base.stack_probes = StackProbeType::Inline;
base.llvm_abiname = "elfv2".into();
Target {
llvm_target: "powerpc64le-unknown-freebsd".into(),

View file

@ -8,6 +8,7 @@ pub(crate) fn target() -> Target {
base.add_pre_link_args(LinkerFlavor::Gnu(Cc::Yes, Lld::No), &["-m64"]);
base.max_atomic_width = Some(64);
base.stack_probes = StackProbeType::Inline;
base.llvm_abiname = "elfv2".into();
Target {
llvm_target: "powerpc64le-unknown-linux-gnu".into(),

View file

@ -10,6 +10,7 @@ pub(crate) fn target() -> Target {
base.stack_probes = StackProbeType::Inline;
// FIXME(compiler-team#422): musl targets should be dynamically linked by default.
base.crt_static_default = true;
base.llvm_abiname = "elfv2".into();
Target {
llvm_target: "powerpc64le-unknown-linux-musl".into(),

View file

@ -199,7 +199,10 @@ impl<'a, 'tcx> TypeErrCtxt<'a, 'tcx> {
// avoid inundating the user with unnecessary errors, but we now
// check upstream for type errors and don't add the obligations to
// begin with in those cases.
if self.tcx.is_lang_item(trait_pred.def_id(), LangItem::Sized) {
if matches!(
self.tcx.as_lang_item(trait_pred.def_id()),
Some(LangItem::Sized | LangItem::MetaSized)
) {
match self.tainted_by_errors() {
None => {
let err = self.emit_inference_failure_err(

View file

@ -13,7 +13,7 @@ use rustc_data_structures::fx::{FxIndexMap, FxIndexSet};
use rustc_errors::{Applicability, Diag, E0038, E0276, MultiSpan, struct_span_code_err};
use rustc_hir::def_id::{DefId, LocalDefId};
use rustc_hir::intravisit::Visitor;
use rustc_hir::{self as hir, AmbigArg, LangItem};
use rustc_hir::{self as hir, AmbigArg};
use rustc_infer::traits::solve::Goal;
use rustc_infer::traits::{
DynCompatibilityViolation, Obligation, ObligationCause, ObligationCauseCode,
@ -160,17 +160,24 @@ impl<'a, 'tcx> TypeErrCtxt<'a, 'tcx> {
})
.collect();
// Ensure `T: Sized` and `T: WF` obligations come last. This lets us display diagnostics
// with more relevant type information and hide redundant E0282 errors.
errors.sort_by_key(|e| match e.obligation.predicate.kind().skip_binder() {
ty::PredicateKind::Clause(ty::ClauseKind::Trait(pred))
if self.tcx.is_lang_item(pred.def_id(), LangItem::Sized) =>
{
1
// Ensure `T: Sized`, `T: MetaSized`, `T: PointeeSized` and `T: WF` obligations come last.
// This lets us display diagnostics with more relevant type information and hide redundant
// E0282 errors.
errors.sort_by_key(|e| {
let maybe_sizedness_did = match e.obligation.predicate.kind().skip_binder() {
ty::PredicateKind::Clause(ty::ClauseKind::Trait(pred)) => Some(pred.def_id()),
ty::PredicateKind::Clause(ty::ClauseKind::HostEffect(pred)) => Some(pred.def_id()),
_ => None,
};
match e.obligation.predicate.kind().skip_binder() {
_ if maybe_sizedness_did == self.tcx.lang_items().sized_trait() => 1,
_ if maybe_sizedness_did == self.tcx.lang_items().meta_sized_trait() => 2,
_ if maybe_sizedness_did == self.tcx.lang_items().pointee_sized_trait() => 3,
ty::PredicateKind::Coerce(_) => 4,
ty::PredicateKind::Clause(ty::ClauseKind::WellFormed(_)) => 5,
_ => 0,
}
ty::PredicateKind::Coerce(_) => 2,
ty::PredicateKind::Clause(ty::ClauseKind::WellFormed(_)) => 3,
_ => 0,
});
for (index, error) in errors.iter().enumerate() {
@ -327,19 +334,26 @@ pub(crate) fn to_pretty_impl_header(tcx: TyCtxt<'_>, impl_def_id: DefId) -> Opti
let trait_ref = tcx.impl_trait_ref(impl_def_id)?.instantiate_identity();
let mut w = "impl".to_owned();
let args = ty::GenericArgs::identity_for_item(tcx, impl_def_id);
#[derive(Debug, Default)]
struct SizednessFound {
sized: bool,
meta_sized: bool,
}
// FIXME: Currently only handles ?Sized.
// Needs to support ?Move and ?DynSized when they are implemented.
let mut types_without_default_bounds = FxIndexSet::default();
let sized_trait = tcx.lang_items().sized_trait();
let mut types_with_sizedness_bounds = FxIndexMap::<_, SizednessFound>::default();
let args = ty::GenericArgs::identity_for_item(tcx, impl_def_id);
let arg_names = args.iter().map(|k| k.to_string()).filter(|k| k != "'_").collect::<Vec<_>>();
if !arg_names.is_empty() {
types_without_default_bounds.extend(args.types());
w.push('<');
w.push_str(&arg_names.join(", "));
w.push('>');
for ty in args.types() {
// `PointeeSized` params might have no predicates.
types_with_sizedness_bounds.insert(ty, SizednessFound::default());
}
}
write!(
@ -351,24 +365,47 @@ pub(crate) fn to_pretty_impl_header(tcx: TyCtxt<'_>, impl_def_id: DefId) -> Opti
)
.unwrap();
// The predicates will contain default bounds like `T: Sized`. We need to
// remove these bounds, and add `T: ?Sized` to any untouched type parameters.
let predicates = tcx.predicates_of(impl_def_id).predicates;
let mut pretty_predicates =
Vec::with_capacity(predicates.len() + types_without_default_bounds.len());
let mut pretty_predicates = Vec::with_capacity(predicates.len());
let sized_trait = tcx.lang_items().sized_trait();
let meta_sized_trait = tcx.lang_items().meta_sized_trait();
for (p, _) in predicates {
if let Some(poly_trait_ref) = p.as_trait_clause() {
if Some(poly_trait_ref.def_id()) == sized_trait {
// FIXME(#120456) - is `swap_remove` correct?
types_without_default_bounds.swap_remove(&poly_trait_ref.self_ty().skip_binder());
// Accumulate the sizedness bounds for each self ty.
if let Some(trait_clause) = p.as_trait_clause() {
let self_ty = trait_clause.self_ty().skip_binder();
let sizedness_of = types_with_sizedness_bounds.entry(self_ty).or_default();
if Some(trait_clause.def_id()) == sized_trait {
sizedness_of.sized = true;
continue;
} else if Some(trait_clause.def_id()) == meta_sized_trait {
sizedness_of.meta_sized = true;
continue;
}
}
pretty_predicates.push(p.to_string());
}
pretty_predicates.extend(types_without_default_bounds.iter().map(|ty| format!("{ty}: ?Sized")));
for (ty, sizedness) in types_with_sizedness_bounds {
if !tcx.features().sized_hierarchy() {
if sizedness.sized {
// Maybe a default bound, don't write anything.
} else {
pretty_predicates.push(format!("{ty}: ?Sized"));
}
} else {
if sizedness.sized {
// Maybe a default bound, don't write anything.
pretty_predicates.push(format!("{ty}: Sized"));
} else if sizedness.meta_sized {
pretty_predicates.push(format!("{ty}: MetaSized"));
} else {
pretty_predicates.push(format!("{ty}: PointeeSized"));
}
}
}
if !pretty_predicates.is_empty() {
write!(w, "\n where {}", pretty_predicates.join(", ")).unwrap();
@ -519,7 +556,7 @@ fn attempt_dyn_to_enum_suggestion(
let Some(impl_type) = tcx.type_of(*impl_id).no_bound_vars() else { return None };
// Obviously unsized impl types won't be usable in an enum.
// Note: this doesn't use `Ty::is_trivially_sized` because that function
// Note: this doesn't use `Ty::has_trivial_sizedness` because that function
// defaults to assuming that things are *not* sized, whereas we want to
// fall back to assuming that things may be sized.
match impl_type.kind() {

View file

@ -12,7 +12,7 @@ use rustc_infer::traits::solve::Goal;
use rustc_middle::traits::query::NoSolution;
use rustc_middle::traits::solve::Certainty;
use rustc_middle::ty::{
self, Ty, TyCtxt, TypeFlags, TypeFoldable, TypeVisitableExt as _, TypingMode,
self, SizedTraitKind, Ty, TyCtxt, TypeFlags, TypeFoldable, TypeVisitableExt as _, TypingMode,
};
use rustc_span::{DUMMY_SP, ErrorGuaranteed, Span};
@ -79,7 +79,14 @@ impl<'tcx> rustc_next_trait_solver::delegate::SolverDelegate for SolverDelegate<
Some(LangItem::Sized)
if self
.resolve_vars_if_possible(trait_pred.self_ty().skip_binder())
.is_trivially_sized(self.0.tcx) =>
.has_trivial_sizedness(self.0.tcx, SizedTraitKind::Sized) =>
{
return Some(Certainty::Yes);
}
Some(LangItem::MetaSized)
if self
.resolve_vars_if_possible(trait_pred.self_ty().skip_binder())
.has_trivial_sizedness(self.0.tcx, SizedTraitKind::MetaSized) =>
{
return Some(Certainty::Yes);
}

View file

@ -462,6 +462,7 @@ fn impl_intersection_has_negative_obligation(
let param_env = infcx.resolve_vars_if_possible(param_env);
util::elaborate(tcx, tcx.predicates_of(impl2_def_id).instantiate(tcx, impl2_header.impl_args))
.elaborate_sized()
.any(|(clause, _)| try_prove_negated_where_clause(infcx, clause, param_env))
}

View file

@ -7,15 +7,15 @@
use std::ops::ControlFlow;
use rustc_errors::FatalError;
use rustc_hir as hir;
use rustc_hir::def_id::DefId;
use rustc_hir::{self as hir, LangItem};
use rustc_middle::query::Providers;
use rustc_middle::ty::{
self, EarlyBinder, GenericArgs, Ty, TyCtxt, TypeFoldable, TypeFolder, TypeSuperFoldable,
TypeSuperVisitable, TypeVisitable, TypeVisitableExt, TypeVisitor, TypingMode, Upcast,
elaborate,
};
use rustc_span::Span;
use rustc_span::{DUMMY_SP, Span};
use smallvec::SmallVec;
use tracing::{debug, instrument};
@ -543,11 +543,11 @@ fn receiver_for_self_ty<'tcx>(
/// In practice, we cannot use `dyn Trait` explicitly in the obligation because it would result in
/// a new check that `Trait` is dyn-compatible, creating a cycle.
/// Instead, we emulate a placeholder by introducing a new type parameter `U` such that
/// `Self: Unsize<U>` and `U: Trait + ?Sized`, and use `U` in place of `dyn Trait`.
/// `Self: Unsize<U>` and `U: Trait + MetaSized`, and use `U` in place of `dyn Trait`.
///
/// Written as a chalk-style query:
/// ```ignore (not-rust)
/// forall (U: Trait + ?Sized) {
/// forall (U: Trait + MetaSized) {
/// if (Self: Unsize<U>) {
/// Receiver: DispatchFromDyn<Receiver[Self => U]>
/// }
@ -567,9 +567,10 @@ fn receiver_is_dispatchable<'tcx>(
) -> bool {
debug!("receiver_is_dispatchable: method = {:?}, receiver_ty = {:?}", method, receiver_ty);
let traits = (tcx.lang_items().unsize_trait(), tcx.lang_items().dispatch_from_dyn_trait());
let (Some(unsize_did), Some(dispatch_from_dyn_did)) = traits else {
debug!("receiver_is_dispatchable: Missing Unsize or DispatchFromDyn traits");
let (Some(unsize_did), Some(dispatch_from_dyn_did)) =
(tcx.lang_items().unsize_trait(), tcx.lang_items().dispatch_from_dyn_trait())
else {
debug!("receiver_is_dispatchable: Missing `Unsize` or `DispatchFromDyn` traits");
return false;
};
@ -583,7 +584,7 @@ fn receiver_is_dispatchable<'tcx>(
receiver_for_self_ty(tcx, receiver_ty, unsized_self_ty, method.def_id);
// create a modified param env, with `Self: Unsize<U>` and `U: Trait` (and all of
// its supertraits) added to caller bounds. `U: ?Sized` is already implied here.
// its supertraits) added to caller bounds. `U: MetaSized` is already implied here.
let param_env = {
// N.B. We generally want to emulate the construction of the `unnormalized_param_env`
// in the param-env query here. The fact that we don't just start with the clauses
@ -612,6 +613,12 @@ fn receiver_is_dispatchable<'tcx>(
let trait_predicate = ty::TraitRef::new_from_args(tcx, trait_def_id, args);
predicates.push(trait_predicate.upcast(tcx));
let meta_sized_predicate = {
let meta_sized_did = tcx.require_lang_item(LangItem::MetaSized, DUMMY_SP);
ty::TraitRef::new(tcx, meta_sized_did, [unsized_self_ty]).upcast(tcx)
};
predicates.push(meta_sized_predicate);
normalize_param_env_or_error(
tcx,
ty::ParamEnv::new(tcx.mk_clauses(&predicates)),

View file

@ -14,7 +14,7 @@ use rustc_data_structures::fx::{FxHashSet, FxIndexSet};
use rustc_hir::{self as hir, CoroutineDesugaring, CoroutineKind};
use rustc_infer::traits::{Obligation, PolyTraitObligation, SelectionError};
use rustc_middle::ty::fast_reject::DeepRejectCtxt;
use rustc_middle::ty::{self, Ty, TypeVisitableExt, TypingMode, elaborate};
use rustc_middle::ty::{self, SizedTraitKind, Ty, TypeVisitableExt, TypingMode, elaborate};
use rustc_middle::{bug, span_bug};
use tracing::{debug, instrument, trace};
@ -87,7 +87,21 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> {
candidates.vec.push(BuiltinCandidate { has_nested: false });
}
Some(LangItem::Sized) => {
self.assemble_builtin_sized_candidate(obligation, &mut candidates);
self.assemble_builtin_sized_candidate(
obligation,
&mut candidates,
SizedTraitKind::Sized,
);
}
Some(LangItem::MetaSized) => {
self.assemble_builtin_sized_candidate(
obligation,
&mut candidates,
SizedTraitKind::MetaSized,
);
}
Some(LangItem::PointeeSized) => {
bug!("`PointeeSized` is removed during lowering");
}
Some(LangItem::Unsize) => {
self.assemble_candidates_for_unsizing(obligation, &mut candidates);
@ -201,6 +215,12 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> {
}
selcx.infcx.probe(|_| {
let bound = util::lazily_elaborate_sizedness_candidate(
selcx.infcx,
obligation,
bound,
);
// We checked the polarity already
match selcx.match_normalize_trait_ref(
obligation,
@ -245,14 +265,21 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> {
.caller_bounds()
.iter()
.filter_map(|p| p.as_trait_clause())
// Micro-optimization: filter out predicates relating to different traits.
.filter(|p| p.def_id() == stack.obligation.predicate.def_id())
// Micro-optimization: filter out predicates with different polarities.
.filter(|p| p.polarity() == stack.obligation.predicate.polarity());
let drcx = DeepRejectCtxt::relate_rigid_rigid(self.tcx());
let obligation_args = stack.obligation.predicate.skip_binder().trait_ref.args;
// Keep only those bounds which may apply, and propagate overflow if it occurs.
for bound in bounds {
let bound =
util::lazily_elaborate_sizedness_candidate(self.infcx, stack.obligation, bound);
// Micro-optimization: filter out predicates relating to different traits.
if bound.def_id() != stack.obligation.predicate.def_id() {
continue;
}
let bound_trait_ref = bound.map_bound(|t| t.trait_ref);
if !drcx.args_may_unify(obligation_args, bound_trait_ref.skip_binder().args) {
continue;
@ -1086,15 +1113,15 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> {
}
}
/// Assembles the trait which are built-in to the language itself:
/// `Copy`, `Clone` and `Sized`.
/// Assembles the `Sized` and `MetaSized` traits which are built-in to the language itself.
#[instrument(level = "debug", skip(self, candidates))]
fn assemble_builtin_sized_candidate(
&mut self,
obligation: &PolyTraitObligation<'tcx>,
candidates: &mut SelectionCandidateSet<'tcx>,
sizedness: SizedTraitKind,
) {
match self.sized_conditions(obligation) {
match self.sizedness_conditions(obligation, sizedness) {
BuiltinImplConditions::Where(nested) => {
candidates
.vec

View file

@ -14,7 +14,9 @@ use rustc_hir::lang_items::LangItem;
use rustc_infer::infer::{DefineOpaqueTypes, HigherRankedType, InferOk};
use rustc_infer::traits::ObligationCauseCode;
use rustc_middle::traits::{BuiltinImplSource, SignatureMismatchData};
use rustc_middle::ty::{self, GenericArgsRef, Region, Ty, TyCtxt, Upcast, elaborate};
use rustc_middle::ty::{
self, GenericArgsRef, Region, SizedTraitKind, Ty, TyCtxt, Upcast, elaborate,
};
use rustc_middle::{bug, span_bug};
use rustc_span::def_id::DefId;
use thin_vec::thin_vec;
@ -164,10 +166,13 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> {
)
.break_value()
.expect("expected to index into clause that exists");
let candidate = candidate_predicate
let candidate_predicate = candidate_predicate
.as_trait_clause()
.expect("projection candidate is not a trait predicate")
.map_bound(|t| t.trait_ref);
.expect("projection candidate is not a trait predicate");
let candidate_predicate =
util::lazily_elaborate_sizedness_candidate(self.infcx, obligation, candidate_predicate);
let candidate = candidate_predicate.map_bound(|t| t.trait_ref);
let candidate = self.infcx.instantiate_binder_with_fresh_vars(
obligation.cause.span,
@ -224,6 +229,13 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> {
) -> PredicateObligations<'tcx> {
debug!(?obligation, ?param, "confirm_param_candidate");
let param = util::lazily_elaborate_sizedness_candidate(
self.infcx,
obligation,
param.upcast(self.infcx.tcx),
)
.map_bound(|p| p.trait_ref);
// During evaluation, we already checked that this
// where-clause trait-ref could be unified with the obligation
// trait-ref. Repeat that unification now without any
@ -251,7 +263,15 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> {
let obligations = if has_nested {
let trait_def = obligation.predicate.def_id();
let conditions = match tcx.as_lang_item(trait_def) {
Some(LangItem::Sized) => self.sized_conditions(obligation),
Some(LangItem::Sized) => {
self.sizedness_conditions(obligation, SizedTraitKind::Sized)
}
Some(LangItem::MetaSized) => {
self.sizedness_conditions(obligation, SizedTraitKind::MetaSized)
}
Some(LangItem::PointeeSized) => {
bug!("`PointeeSized` is removing during lowering");
}
Some(LangItem::Copy | LangItem::Clone) => self.copy_clone_conditions(obligation),
Some(LangItem::FusedIterator) => self.fused_iterator_conditions(obligation),
other => bug!("unexpected builtin trait {trait_def:?} ({other:?})"),

View file

@ -27,8 +27,8 @@ use rustc_middle::ty::abstract_const::NotConstEvaluatable;
use rustc_middle::ty::error::TypeErrorToStringExt;
use rustc_middle::ty::print::{PrintTraitRefExt as _, with_no_trimmed_paths};
use rustc_middle::ty::{
self, DeepRejectCtxt, GenericArgsRef, PolyProjectionPredicate, Ty, TyCtxt, TypeFoldable,
TypeVisitableExt, TypingMode, Upcast, elaborate,
self, DeepRejectCtxt, GenericArgsRef, PolyProjectionPredicate, SizedTraitKind, Ty, TyCtxt,
TypeFoldable, TypeVisitableExt, TypingMode, Upcast, elaborate,
};
use rustc_span::{Symbol, sym};
use tracing::{debug, instrument, trace};
@ -2094,9 +2094,10 @@ impl<'tcx> SelectionContext<'_, 'tcx> {
}
impl<'tcx> SelectionContext<'_, 'tcx> {
fn sized_conditions(
fn sizedness_conditions(
&mut self,
obligation: &PolyTraitObligation<'tcx>,
sizedness: SizedTraitKind,
) -> BuiltinImplConditions<'tcx> {
use self::BuiltinImplConditions::{Ambiguous, None, Where};
@ -2126,7 +2127,12 @@ impl<'tcx> SelectionContext<'_, 'tcx> {
Where(ty::Binder::dummy(Vec::new()))
}
ty::Str | ty::Slice(_) | ty::Dynamic(..) | ty::Foreign(..) => None,
ty::Str | ty::Slice(_) | ty::Dynamic(..) => match sizedness {
SizedTraitKind::Sized => None,
SizedTraitKind::MetaSized => Where(ty::Binder::dummy(Vec::new())),
},
ty::Foreign(..) => None,
ty::Tuple(tys) => Where(
obligation.predicate.rebind(tys.last().map_or_else(Vec::new, |&last| vec![last])),
@ -2135,11 +2141,9 @@ impl<'tcx> SelectionContext<'_, 'tcx> {
ty::Pat(ty, _) => Where(obligation.predicate.rebind(vec![*ty])),
ty::Adt(def, args) => {
if let Some(sized_crit) = def.sized_constraint(self.tcx()) {
if let Some(crit) = def.sizedness_constraint(self.tcx(), sizedness) {
// (*) binder moved here
Where(
obligation.predicate.rebind(vec![sized_crit.instantiate(self.tcx(), args)]),
)
Where(obligation.predicate.rebind(vec![crit.instantiate(self.tcx(), args)]))
} else {
Where(ty::Binder::dummy(Vec::new()))
}

View file

@ -4,10 +4,13 @@ use rustc_data_structures::fx::{FxHashSet, FxIndexMap};
use rustc_hir::LangItem;
use rustc_hir::def_id::DefId;
use rustc_infer::infer::InferCtxt;
use rustc_infer::traits::PolyTraitObligation;
pub use rustc_infer::traits::util::*;
use rustc_middle::bug;
use rustc_middle::ty::fast_reject::DeepRejectCtxt;
use rustc_middle::ty::{
self, Ty, TyCtxt, TypeFoldable, TypeFolder, TypeSuperFoldable, TypeVisitableExt,
self, PolyTraitPredicate, SizedTraitKind, TraitPredicate, TraitRef, Ty, TyCtxt, TypeFoldable,
TypeFolder, TypeSuperFoldable, TypeVisitableExt,
};
pub use rustc_next_trait_solver::placeholder::BoundVarReplacer;
use rustc_span::Span;
@ -362,15 +365,19 @@ impl<'tcx> TypeFolder<TyCtxt<'tcx>> for PlaceholderReplacer<'_, 'tcx> {
}
pub fn sizedness_fast_path<'tcx>(tcx: TyCtxt<'tcx>, predicate: ty::Predicate<'tcx>) -> bool {
// Proving `Sized` very often on "obviously sized" types like `&T`, accounts for about 60%
// percentage of the predicates we have to prove. No need to canonicalize and all that for
// such cases.
// Proving `Sized`/`MetaSized`, very often on "obviously sized" types like
// `&T`, accounts for about 60% of the predicates we have to prove. No need to
// canonicalize and all that for such cases.
if let ty::PredicateKind::Clause(ty::ClauseKind::Trait(trait_ref)) =
predicate.kind().skip_binder()
{
if tcx.is_lang_item(trait_ref.def_id(), LangItem::Sized)
&& trait_ref.self_ty().is_trivially_sized(tcx)
{
let sizedness = match tcx.as_lang_item(trait_ref.def_id()) {
Some(LangItem::Sized) => SizedTraitKind::Sized,
Some(LangItem::MetaSized) => SizedTraitKind::MetaSized,
_ => return false,
};
if trait_ref.self_ty().has_trivial_sizedness(tcx, sizedness) {
debug!("fast path -- trivial sizedness");
return true;
}
@ -378,3 +385,39 @@ pub fn sizedness_fast_path<'tcx>(tcx: TyCtxt<'tcx>, predicate: ty::Predicate<'tc
false
}
/// To improve performance, sizedness traits are not elaborated and so special-casing is required
/// in the trait solver to find a `Sized` candidate for a `MetaSized` obligation. Returns the
/// predicate to use in the candidate for such an `obligation`, given a `candidate`.
pub(crate) fn lazily_elaborate_sizedness_candidate<'tcx>(
infcx: &InferCtxt<'tcx>,
obligation: &PolyTraitObligation<'tcx>,
candidate: PolyTraitPredicate<'tcx>,
) -> PolyTraitPredicate<'tcx> {
// Only applies when the obligation is `MetaSized` and the candidate is `Sized`;
// any other combination is returned unchanged.
if !infcx.tcx.is_lang_item(obligation.predicate.def_id(), LangItem::MetaSized)
|| !infcx.tcx.is_lang_item(candidate.def_id(), LangItem::Sized)
{
return candidate;
}
// A candidate of differing polarity cannot be used to satisfy the obligation.
if obligation.predicate.polarity() != candidate.polarity() {
return candidate;
}
// Fast structural reject: if the two trait refs' args can never unify, keep the
// candidate as-is rather than rewriting it.
let drcx = DeepRejectCtxt::relate_rigid_rigid(infcx.tcx);
if !drcx.args_may_unify(
obligation.predicate.skip_binder().trait_ref.args,
candidate.skip_binder().trait_ref.args,
) {
return candidate;
}
// Rewrite the `Sized` candidate into a predicate for the obligation's trait
// (`MetaSized`), reusing the candidate's own args and polarity.
candidate.map_bound(|c| TraitPredicate {
trait_ref: TraitRef::new_from_args(
infcx.tcx,
obligation.predicate.def_id(),
c.trait_ref.args,
),
polarity: c.polarity,
})
}

View file

@ -567,6 +567,14 @@ impl<'a, 'tcx> WfPredicates<'a, 'tcx> {
def_id: DefId,
args: GenericArgsRef<'tcx>,
) -> PredicateObligations<'tcx> {
// PERF: `Sized`'s predicates include `MetaSized`, but both are compiler implemented marker
// traits, so `MetaSized` will always be WF if `Sized` is WF and vice-versa. Determining
// the nominal obligations of `Sized` would in-effect just elaborate `MetaSized` and make
// the compiler do a bunch of work needlessly.
if self.tcx().is_lang_item(def_id, LangItem::Sized) {
return Default::default();
}
let predicates = self.tcx().predicates_of(def_id);
let mut origins = vec![def_id; predicates.predicates.len()];
let mut head = predicates;

View file

@ -1,23 +1,29 @@
use rustc_data_structures::fx::FxHashSet;
use rustc_hir as hir;
use rustc_hir::LangItem;
use rustc_hir::def::DefKind;
use rustc_index::bit_set::DenseBitSet;
use rustc_infer::infer::TyCtxtInferExt;
use rustc_middle::bug;
use rustc_middle::query::Providers;
use rustc_middle::ty::{
self, Ty, TyCtxt, TypeSuperVisitable, TypeVisitable, TypeVisitor, Upcast, fold_regions,
self, SizedTraitKind, Ty, TyCtxt, TypeSuperVisitable, TypeVisitable, TypeVisitor, Upcast,
fold_regions,
};
use rustc_span::DUMMY_SP;
use rustc_span::def_id::{CRATE_DEF_ID, DefId, LocalDefId};
use rustc_trait_selection::traits;
use tracing::instrument;
/// If `ty` implements the given `sizedness` trait, returns `None`. Otherwise, returns the type
/// that must implement the given `sizedness` for `ty` to implement it.
#[instrument(level = "debug", skip(tcx), ret)]
fn sized_constraint_for_ty<'tcx>(tcx: TyCtxt<'tcx>, ty: Ty<'tcx>) -> Option<Ty<'tcx>> {
fn sizedness_constraint_for_ty<'tcx>(
tcx: TyCtxt<'tcx>,
sizedness: SizedTraitKind,
ty: Ty<'tcx>,
) -> Option<Ty<'tcx>> {
match ty.kind() {
// these are always sized
// Always `Sized` or `MetaSized`
ty::Bool
| ty::Char
| ty::Int(..)
@ -35,31 +41,40 @@ fn sized_constraint_for_ty<'tcx>(tcx: TyCtxt<'tcx>, ty: Ty<'tcx>) -> Option<Ty<'
| ty::Never
| ty::Dynamic(_, _, ty::DynStar) => None,
// these are never sized
ty::Str | ty::Slice(..) | ty::Dynamic(_, _, ty::Dyn) | ty::Foreign(..) => Some(ty),
ty::Str | ty::Slice(..) | ty::Dynamic(_, _, ty::Dyn) => match sizedness {
// Never `Sized`
SizedTraitKind::Sized => Some(ty),
// Always `MetaSized`
SizedTraitKind::MetaSized => None,
},
ty::Pat(ty, _) => sized_constraint_for_ty(tcx, *ty),
ty::Tuple(tys) => tys.last().and_then(|&ty| sized_constraint_for_ty(tcx, ty)),
// recursive case
ty::Adt(adt, args) => adt.sized_constraint(tcx).and_then(|intermediate| {
let ty = intermediate.instantiate(tcx, args);
sized_constraint_for_ty(tcx, ty)
}),
// these can be sized or unsized.
// Maybe `Sized` or `MetaSized`
ty::Param(..) | ty::Alias(..) | ty::Error(_) => Some(ty),
// We cannot instantiate the binder, so just return the *original* type back,
// but only if the inner type has a sized constraint. Thus we skip the binder,
// but don't actually use the result from `sized_constraint_for_ty`.
ty::UnsafeBinder(inner_ty) => {
sized_constraint_for_ty(tcx, inner_ty.skip_binder()).map(|_| ty)
sizedness_constraint_for_ty(tcx, sizedness, inner_ty.skip_binder()).map(|_| ty)
}
// Never `MetaSized` or `Sized`
ty::Foreign(..) => Some(ty),
// Recursive cases
ty::Pat(ty, _) => sizedness_constraint_for_ty(tcx, sizedness, *ty),
ty::Tuple(tys) => {
tys.last().and_then(|&ty| sizedness_constraint_for_ty(tcx, sizedness, ty))
}
ty::Adt(adt, args) => adt.sizedness_constraint(tcx, sizedness).and_then(|intermediate| {
let ty = intermediate.instantiate(tcx, args);
sizedness_constraint_for_ty(tcx, sizedness, ty)
}),
ty::Placeholder(..) | ty::Bound(..) | ty::Infer(..) => {
bug!("unexpected type `{ty:?}` in sized_constraint_for_ty")
bug!("unexpected type `{ty:?}` in `sizedness_constraint_for_ty`")
}
}
}
@ -75,15 +90,22 @@ fn defaultness(tcx: TyCtxt<'_>, def_id: LocalDefId) -> hir::Defaultness {
}
}
/// Calculates the `Sized` constraint.
/// Returns the type of the last field of a struct ("the constraint") which must implement the
/// `sizedness` trait for the whole ADT to be considered to implement that `sizedness` trait.
/// `def_id` is assumed to be the `AdtDef` of a struct and will panic otherwise.
///
/// In fact, there are only a few options for the types in the constraint:
/// - an obviously-unsized type
/// For `Sized`, there are only a few options for the types in the constraint:
/// - a meta-sized type (str, slices, trait objects, etc)
/// - a pointee-sized type (extern types)
/// - a type parameter or projection whose sizedness can't be known
///
/// For `MetaSized`, there are only a few options for the types in the constraint:
/// - a pointee-sized type (extern types)
/// - a type parameter or projection whose sizedness can't be known
#[instrument(level = "debug", skip(tcx), ret)]
fn adt_sized_constraint<'tcx>(
fn adt_sizedness_constraint<'tcx>(
tcx: TyCtxt<'tcx>,
def_id: DefId,
(def_id, sizedness): (DefId, SizedTraitKind),
) -> Option<ty::EarlyBinder<'tcx, Ty<'tcx>>> {
if let Some(def_id) = def_id.as_local() {
if let ty::Representability::Infinite(_) = tcx.representability(def_id) {
@ -93,21 +115,21 @@ fn adt_sized_constraint<'tcx>(
let def = tcx.adt_def(def_id);
if !def.is_struct() {
bug!("`adt_sized_constraint` called on non-struct type: {def:?}");
bug!("`adt_sizedness_constraint` called on non-struct type: {def:?}");
}
let tail_def = def.non_enum_variant().tail_opt()?;
let tail_ty = tcx.type_of(tail_def.did).instantiate_identity();
let constraint_ty = sized_constraint_for_ty(tcx, tail_ty)?;
let constraint_ty = sizedness_constraint_for_ty(tcx, sizedness, tail_ty)?;
// perf hack: if there is a `constraint_ty: Sized` bound, then we know
// perf hack: if there is a `constraint_ty: {Meta,}Sized` bound, then we know
// that the type is sized and do not need to check it on the impl.
let sized_trait_def_id = tcx.require_lang_item(LangItem::Sized, DUMMY_SP);
let sizedness_trait_def_id = sizedness.require_lang_item(tcx);
let predicates = tcx.predicates_of(def.did()).predicates;
if predicates.iter().any(|(p, _)| {
p.as_trait_clause().is_some_and(|trait_pred| {
trait_pred.def_id() == sized_trait_def_id
trait_pred.def_id() == sizedness_trait_def_id
&& trait_pred.self_ty().skip_binder() == constraint_ty
})
}) {
@ -369,7 +391,7 @@ fn impl_self_is_guaranteed_unsized<'tcx>(tcx: TyCtxt<'tcx>, impl_def_id: DefId)
pub(crate) fn provide(providers: &mut Providers) {
*providers = Providers {
asyncness,
adt_sized_constraint,
adt_sizedness_constraint,
param_env,
typing_env_normalized_for_post_analysis,
defaultness,

View file

@ -4,6 +4,7 @@ use smallvec::smallvec;
use crate::data_structures::HashSet;
use crate::inherent::*;
use crate::lang_items::TraitSolverLangItem;
use crate::outlives::{Component, push_outlives_components};
use crate::{self as ty, Interner, Upcast as _};
@ -18,6 +19,7 @@ pub struct Elaborator<I: Interner, O> {
stack: Vec<O>,
visited: HashSet<ty::Binder<I, ty::PredicateKind<I>>>,
mode: Filter,
elaborate_sized: ElaborateSized,
}
enum Filter {
@ -25,6 +27,12 @@ enum Filter {
OnlySelf,
}
/// Whether `Sized` bounds should themselves be elaborated. Normally `No`, as a compiler
/// performance optimisation; see `Elaborator::elaborate_sized`.
#[derive(Eq, PartialEq)]
enum ElaborateSized {
Yes,
No,
}
/// Describes how to elaborate an obligation into a sub-obligation.
pub trait Elaboratable<I: Interner> {
fn predicate(&self) -> I::Predicate;
@ -77,13 +85,19 @@ pub fn elaborate<I: Interner, O: Elaboratable<I>>(
cx: I,
obligations: impl IntoIterator<Item = O>,
) -> Elaborator<I, O> {
let mut elaborator =
Elaborator { cx, stack: Vec::new(), visited: HashSet::default(), mode: Filter::All };
let mut elaborator = Elaborator {
cx,
stack: Vec::new(),
visited: HashSet::default(),
mode: Filter::All,
elaborate_sized: ElaborateSized::No,
};
elaborator.extend_deduped(obligations);
elaborator
}
impl<I: Interner, O: Elaboratable<I>> Elaborator<I, O> {
/// Adds `obligations` to the stack.
fn extend_deduped(&mut self, obligations: impl IntoIterator<Item = O>) {
// Only keep those bounds that we haven't already seen.
// This is necessary to prevent infinite recursion in some
@ -103,6 +117,13 @@ impl<I: Interner, O: Elaboratable<I>> Elaborator<I, O> {
self
}
/// Start elaborating `Sized` - required during coherence checking, normally skipped to
/// improve compiler performance.
pub fn elaborate_sized(mut self) -> Self {
self.elaborate_sized = ElaborateSized::Yes;
self
}
fn elaborate(&mut self, elaboratable: &O) {
let cx = self.cx;
@ -111,6 +132,19 @@ impl<I: Interner, O: Elaboratable<I>> Elaborator<I, O> {
return;
};
// PERF(sized-hierarchy): To avoid iterating over sizedness supertraits in
// parameter environments, as an optimisation, sizedness supertraits aren't
// elaborated, so check if a `Sized` obligation is being elaborated to a
// `MetaSized` obligation and emit it. Candidate assembly and confirmation
// are modified to check for the `Sized` subtrait when a `MetaSized` obligation
// is present.
if self.elaborate_sized == ElaborateSized::No
&& let Some(did) = clause.as_trait_clause().map(|c| c.def_id())
&& self.cx.is_lang_item(did, TraitSolverLangItem::Sized)
{
return;
}
let bound_clause = clause.kind();
match bound_clause.skip_binder() {
ty::ClauseKind::Trait(data) => {

View file

@ -11,7 +11,7 @@ use rustc_ast_ir::Mutability;
use crate::elaborate::Elaboratable;
use crate::fold::{TypeFoldable, TypeSuperFoldable};
use crate::relate::Relate;
use crate::solve::AdtDestructorKind;
use crate::solve::{AdtDestructorKind, SizedTraitKind};
use crate::visit::{Flags, TypeSuperVisitable, TypeVisitable};
use crate::{self as ty, CollectAndApply, Interner, UpcastFrom};
@ -571,7 +571,11 @@ pub trait AdtDef<I: Interner>: Copy + Debug + Hash + Eq {
// FIXME: perhaps use `all_fields` and expose `FieldDef`.
fn all_field_tys(self, interner: I) -> ty::EarlyBinder<I, impl IntoIterator<Item = I::Ty>>;
fn sized_constraint(self, interner: I) -> Option<ty::EarlyBinder<I, I::Ty>>;
fn sizedness_constraint(
self,
interner: I,
sizedness: SizedTraitKind,
) -> Option<ty::EarlyBinder<I, I::Ty>>;
fn is_fundamental(self) -> bool;

View file

@ -30,7 +30,9 @@ pub enum TraitSolverLangItem {
FutureOutput,
Iterator,
Metadata,
MetaSized,
Option,
PointeeSized,
PointeeTrait,
Poll,
Sized,

View file

@ -8,6 +8,7 @@ use derive_where::derive_where;
use rustc_macros::{Decodable_NoContext, Encodable_NoContext, HashStable_NoContext};
use rustc_type_ir_macros::{Lift_Generic, TypeFoldable_Generic, TypeVisitable_Generic};
use crate::lang_items::TraitSolverLangItem;
use crate::search_graph::PathKind;
use crate::{self as ty, Canonical, CanonicalVarValues, Interner, Upcast};
@ -366,3 +367,24 @@ pub enum AdtDestructorKind {
NotConst,
Const,
}
/// Which sizedness trait - `Sized` or `MetaSized`? `PointeeSized` is omitted as it is
/// removed during lowering.
#[derive(Copy, Clone, Debug, Eq, Hash, PartialEq)]
#[cfg_attr(feature = "nightly", derive(HashStable_NoContext))]
pub enum SizedTraitKind {
/// `Sized` trait
Sized,
/// `MetaSized` trait
MetaSized,
}
impl SizedTraitKind {
    /// Returns the `DefId` of the language item corresponding to this sizedness trait.
    pub fn require_lang_item<I: Interner>(self, cx: I) -> I::DefId {
        // Map the sizedness kind onto its trait-solver lang item, then resolve
        // that lang item through the interner.
        let lang_item = match self {
            Self::Sized => TraitSolverLangItem::Sized,
            Self::MetaSized => TraitSolverLangItem::MetaSized,
        };
        cx.require_lang_item(lang_item)
    }
}

View file

@ -1,6 +1,4 @@
// FIXME(static_mut_refs): Do not allow `static_mut_refs` lint
#![allow(static_mut_refs)]
use std::cell::Cell;
use std::panic::{AssertUnwindSafe, catch_unwind};
use std::thread;
@ -58,48 +56,33 @@ fn list_from<T: Clone>(v: &[T]) -> LinkedList<T> {
v.iter().cloned().collect()
}
/// Starting from the head of the LinkedList,
/// follow the next links, while checking the prev links,
/// and check that length equals the count of visited nodes.
fn check_links<T>(list: &LinkedList<T>) {
unsafe {
let mut len = 0;
let mut last_ptr: Option<&Node<T>> = None;
let mut node_ptr: &Node<T>;
match list.head {
None => {
// tail node should also be None.
assert!(list.tail.is_none());
assert_eq!(0, list.len);
return;
}
Some(node) => node_ptr = &*node.as_ptr(),
}
loop {
match (last_ptr, node_ptr.prev) {
(None, None) => {}
(None, _) => panic!("prev link for head"),
(Some(p), Some(pptr)) => {
assert_eq!(p as *const Node<T>, pptr.as_ptr() as *const Node<T>);
}
_ => panic!("prev link is none, not good"),
}
match node_ptr.next {
Some(next) => {
last_ptr = Some(node_ptr);
node_ptr = &*next.as_ptr();
len += 1;
}
None => {
len += 1;
break;
}
}
}
let mut node: &Node<T> = if let Some(node) = list.head {
// SAFETY: depends on correctness of LinkedList
unsafe { &*node.as_ptr() }
} else {
assert!(list.tail.is_none(), "empty list should have no tail node");
assert_eq!(list.len, 0, "empty list should have length 0");
return;
};
// verify that the tail node points to the last node.
let tail = list.tail.as_ref().expect("some tail node").as_ref();
assert_eq!(tail as *const Node<T>, node_ptr as *const Node<T>);
// check that len matches interior links.
assert_eq!(len, list.len);
assert!(node.prev.is_none(), "head node should not have a prev link");
let mut prev;
let mut len = 1;
while let Some(next) = node.next {
prev = node;
// SAFETY: depends on correctness of LinkedList
node = unsafe { &*next.as_ptr() };
len += 1;
assert_eq!(node.prev.expect("missing prev link"), prev.into(), "bad prev link");
}
let tail = list.tail.expect("list is non-empty, so there should be a tail node");
assert_eq!(tail, node.into(), "tail node points to the last node");
assert_eq!(len, list.len, "len matches interior links");
}
#[test]
@ -1027,21 +1010,26 @@ fn extract_if_drop_panic_leak() {
assert_eq!(d7.dropped(), 1);
}
// Declares a unit struct (or, with `($elt_ty)`, a one-field tuple struct) whose `Drop`
// impl increments a thread-local `Cell<u32>` counter named `$drop_counter`, optionally
// running an extra `$drop_stmt` closure/expression on `self` during each drop.
// E.g. `struct_with_counted_drop!(D(u32), DROPS)` declares `struct D(u32);`, the
// `DROPS` counter, and a counting `Drop` impl.
macro_rules! struct_with_counted_drop {
($struct_name:ident$(($elt_ty:ty))?, $drop_counter:ident $(=> $drop_stmt:expr)?) => {
thread_local! {static $drop_counter: Cell<u32> = Cell::new(0);}
struct $struct_name$(($elt_ty))?;
impl Drop for $struct_name {
fn drop(&mut self) {
// Count this drop, then run the optional user-supplied statement
// (e.g. a conditional panic) with `self`.
$drop_counter.set($drop_counter.get() + 1);
$($drop_stmt(self))?
}
}
};
}
#[test]
#[cfg_attr(not(panic = "unwind"), ignore = "test requires unwinding support")]
fn extract_if_pred_panic_leak() {
static mut DROPS: i32 = 0;
#[derive(Debug)]
struct D(u32);
impl Drop for D {
fn drop(&mut self) {
unsafe {
DROPS += 1;
}
}
}
struct_with_counted_drop!(D(u32), DROPS);
let mut q = LinkedList::new();
q.push_back(D(3));
@ -1053,26 +1041,17 @@ fn extract_if_pred_panic_leak() {
q.push_front(D(1));
q.push_front(D(0));
catch_unwind(AssertUnwindSafe(|| {
_ = catch_unwind(AssertUnwindSafe(|| {
q.extract_if(|item| if item.0 >= 2 { panic!() } else { true }).for_each(drop)
}))
.ok();
}));
assert_eq!(unsafe { DROPS }, 2); // 0 and 1
assert_eq!(DROPS.get(), 2); // 0 and 1
assert_eq!(q.len(), 6);
}
#[test]
fn test_drop() {
static mut DROPS: i32 = 0;
struct Elem;
impl Drop for Elem {
fn drop(&mut self) {
unsafe {
DROPS += 1;
}
}
}
struct_with_counted_drop!(Elem, DROPS);
let mut ring = LinkedList::new();
ring.push_back(Elem);
@ -1081,20 +1060,12 @@ fn test_drop() {
ring.push_front(Elem);
drop(ring);
assert_eq!(unsafe { DROPS }, 4);
assert_eq!(DROPS.get(), 4);
}
#[test]
fn test_drop_with_pop() {
static mut DROPS: i32 = 0;
struct Elem;
impl Drop for Elem {
fn drop(&mut self) {
unsafe {
DROPS += 1;
}
}
}
struct_with_counted_drop!(Elem, DROPS);
let mut ring = LinkedList::new();
ring.push_back(Elem);
@ -1104,23 +1075,15 @@ fn test_drop_with_pop() {
drop(ring.pop_back());
drop(ring.pop_front());
assert_eq!(unsafe { DROPS }, 2);
assert_eq!(DROPS.get(), 2);
drop(ring);
assert_eq!(unsafe { DROPS }, 4);
assert_eq!(DROPS.get(), 4);
}
#[test]
fn test_drop_clear() {
static mut DROPS: i32 = 0;
struct Elem;
impl Drop for Elem {
fn drop(&mut self) {
unsafe {
DROPS += 1;
}
}
}
struct_with_counted_drop!(Elem, DROPS);
let mut ring = LinkedList::new();
ring.push_back(Elem);
@ -1128,30 +1091,16 @@ fn test_drop_clear() {
ring.push_back(Elem);
ring.push_front(Elem);
ring.clear();
assert_eq!(unsafe { DROPS }, 4);
assert_eq!(DROPS.get(), 4);
drop(ring);
assert_eq!(unsafe { DROPS }, 4);
assert_eq!(DROPS.get(), 4);
}
#[test]
#[cfg_attr(not(panic = "unwind"), ignore = "test requires unwinding support")]
fn test_drop_panic() {
static mut DROPS: i32 = 0;
struct D(bool);
impl Drop for D {
fn drop(&mut self) {
unsafe {
DROPS += 1;
}
if self.0 {
panic!("panic in `drop`");
}
}
}
struct_with_counted_drop!(D(bool), DROPS => |this: &D| if this.0 { panic!("panic in `drop`"); } );
let mut q = LinkedList::new();
q.push_back(D(false));
@ -1165,7 +1114,7 @@ fn test_drop_panic() {
catch_unwind(move || drop(q)).ok();
assert_eq!(unsafe { DROPS }, 8);
assert_eq!(DROPS.get(), 8);
}
#[test]

View file

@ -1489,10 +1489,11 @@ impl String {
Some(ch)
}
/// Removes a [`char`] from this `String` at a byte position and returns it.
/// Removes a [`char`] from this `String` at byte position `idx` and returns it.
///
/// This is an *O*(*n*) operation, as it requires copying every element in the
/// buffer.
/// Copies all bytes after the removed char to new positions.
///
/// Note that calling this in a loop can result in quadratic behavior.
///
/// # Panics
///
@ -1678,10 +1679,13 @@ impl String {
drop(guard);
}
/// Inserts a character into this `String` at a byte position.
/// Inserts a character into this `String` at byte position `idx`.
///
/// This is an *O*(*n*) operation as it requires copying every element in the
/// buffer.
/// Reallocates if `self.capacity()` is insufficient, which may involve copying all
/// `self.capacity()` bytes. Makes space for the insertion by copying all bytes of
/// `&self[idx..]` to new positions.
///
/// Note that calling this in a loop can result in quadratic behavior.
///
/// # Panics
///
@ -1733,10 +1737,13 @@ impl String {
}
}
/// Inserts a string slice into this `String` at a byte position.
/// Inserts a string slice into this `String` at byte position `idx`.
///
/// This is an *O*(*n*) operation as it requires copying every element in the
/// buffer.
/// Reallocates if `self.capacity()` is insufficient, which may involve copying all
/// `self.capacity()` bytes. Makes space for the insertion by copying all bytes of
/// `&self[idx..]` to new positions.
///
/// Note that calling this in a loop can result in quadratic behavior.
///
/// # Panics
///

View file

@ -224,6 +224,13 @@ impl<T, const N: usize> IntoIter<T, N> {
}
}
#[stable(feature = "array_value_iter_default", since = "CURRENT_RUSTC_VERSION")]
impl<T, const N: usize> Default for IntoIter<T, N> {
/// Returns an empty array iterator, equivalent to [`IntoIter::empty()`].
fn default() -> Self {
IntoIter::empty()
}
}
#[stable(feature = "array_value_iter_impls", since = "1.40.0")]
impl<T, const N: usize> Iterator for IntoIter<T, N> {
type Item = T;

View file

@ -719,7 +719,7 @@ impl<T, const N: usize> Cell<[T; N]> {
#[rustc_diagnostic_item = "RefCell"]
#[stable(feature = "rust1", since = "1.0.0")]
pub struct RefCell<T: ?Sized> {
borrow: Cell<BorrowFlag>,
borrow: Cell<BorrowCounter>,
// Stores the location of the earliest currently active borrow.
// This gets updated whenever we go from having zero borrows
// to having a single borrow. When a borrow occurs, this gets included
@ -732,54 +732,48 @@ pub struct RefCell<T: ?Sized> {
/// An error returned by [`RefCell::try_borrow`].
#[stable(feature = "try_borrow", since = "1.13.0")]
#[non_exhaustive]
#[derive(Debug)]
pub struct BorrowError {
#[cfg(feature = "debug_refcell")]
location: &'static crate::panic::Location<'static>,
}
#[stable(feature = "try_borrow", since = "1.13.0")]
impl Debug for BorrowError {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
let mut builder = f.debug_struct("BorrowError");
#[cfg(feature = "debug_refcell")]
builder.field("location", self.location);
builder.finish()
}
}
#[stable(feature = "try_borrow", since = "1.13.0")]
impl Display for BorrowError {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
Display::fmt("already mutably borrowed", f)
#[cfg(feature = "debug_refcell")]
let res = write!(
f,
"RefCell already mutably borrowed; a previous borrow was at {}",
self.location
);
#[cfg(not(feature = "debug_refcell"))]
let res = Display::fmt("RefCell already mutably borrowed", f);
res
}
}
/// An error returned by [`RefCell::try_borrow_mut`].
#[stable(feature = "try_borrow", since = "1.13.0")]
#[non_exhaustive]
#[derive(Debug)]
pub struct BorrowMutError {
#[cfg(feature = "debug_refcell")]
location: &'static crate::panic::Location<'static>,
}
#[stable(feature = "try_borrow", since = "1.13.0")]
impl Debug for BorrowMutError {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
let mut builder = f.debug_struct("BorrowMutError");
#[cfg(feature = "debug_refcell")]
builder.field("location", self.location);
builder.finish()
}
}
#[stable(feature = "try_borrow", since = "1.13.0")]
impl Display for BorrowMutError {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
Display::fmt("already borrowed", f)
#[cfg(feature = "debug_refcell")]
let res = write!(f, "RefCell already borrowed; a previous borrow was at {}", self.location);
#[cfg(not(feature = "debug_refcell"))]
let res = Display::fmt("RefCell already borrowed", f);
res
}
}
@ -788,7 +782,7 @@ impl Display for BorrowMutError {
#[track_caller]
#[cold]
fn panic_already_borrowed(err: BorrowMutError) -> ! {
panic!("already borrowed: {:?}", err)
panic!("{err}")
}
// This ensures the panicking code is outlined from `borrow` for `RefCell`.
@ -796,7 +790,7 @@ fn panic_already_borrowed(err: BorrowMutError) -> ! {
#[track_caller]
#[cold]
fn panic_already_mutably_borrowed(err: BorrowError) -> ! {
panic!("already mutably borrowed: {:?}", err)
panic!("{err}")
}
// Positive values represent the number of `Ref` active. Negative values
@ -806,22 +800,22 @@ fn panic_already_mutably_borrowed(err: BorrowError) -> ! {
//
// `Ref` and `RefMut` are both two words in size, and so there will likely never
// be enough `Ref`s or `RefMut`s in existence to overflow half of the `usize`
// range. Thus, a `BorrowFlag` will probably never overflow or underflow.
// range. Thus, a `BorrowCounter` will probably never overflow or underflow.
// However, this is not a guarantee, as a pathological program could repeatedly
// create and then mem::forget `Ref`s or `RefMut`s. Thus, all code must
// explicitly check for overflow and underflow in order to avoid unsafety, or at
// least behave correctly in the event that overflow or underflow happens (e.g.,
// see BorrowRef::new).
type BorrowFlag = isize;
const UNUSED: BorrowFlag = 0;
type BorrowCounter = isize;
const UNUSED: BorrowCounter = 0;
#[inline(always)]
fn is_writing(x: BorrowFlag) -> bool {
fn is_writing(x: BorrowCounter) -> bool {
x < UNUSED
}
#[inline(always)]
fn is_reading(x: BorrowFlag) -> bool {
fn is_reading(x: BorrowCounter) -> bool {
x > UNUSED
}
@ -1401,12 +1395,12 @@ impl<T> From<T> for RefCell<T> {
impl<T: CoerceUnsized<U>, U> CoerceUnsized<RefCell<U>> for RefCell<T> {}
struct BorrowRef<'b> {
borrow: &'b Cell<BorrowFlag>,
borrow: &'b Cell<BorrowCounter>,
}
impl<'b> BorrowRef<'b> {
#[inline]
fn new(borrow: &'b Cell<BorrowFlag>) -> Option<BorrowRef<'b>> {
fn new(borrow: &'b Cell<BorrowCounter>) -> Option<BorrowRef<'b>> {
let b = borrow.get().wrapping_add(1);
if !is_reading(b) {
// Incrementing borrow can result in a non-reading value (<= 0) in these cases:
@ -1447,7 +1441,7 @@ impl Clone for BorrowRef<'_> {
debug_assert!(is_reading(borrow));
// Prevent the borrow counter from overflowing into
// a writing borrow.
assert!(borrow != BorrowFlag::MAX);
assert!(borrow != BorrowCounter::MAX);
self.borrow.set(borrow + 1);
BorrowRef { borrow: self.borrow }
}
@ -1795,7 +1789,7 @@ impl<'b, T: ?Sized> RefMut<'b, T> {
}
struct BorrowRefMut<'b> {
borrow: &'b Cell<BorrowFlag>,
borrow: &'b Cell<BorrowCounter>,
}
impl Drop for BorrowRefMut<'_> {
@ -1809,7 +1803,7 @@ impl Drop for BorrowRefMut<'_> {
impl<'b> BorrowRefMut<'b> {
#[inline]
fn new(borrow: &'b Cell<BorrowFlag>) -> Option<BorrowRefMut<'b>> {
fn new(borrow: &'b Cell<BorrowCounter>) -> Option<BorrowRefMut<'b>> {
// NOTE: Unlike BorrowRefMut::clone, new is called to create the initial
// mutable reference, and so there must currently be no existing
// references. Thus, while clone increments the mutable refcount, here
@ -1833,7 +1827,7 @@ impl<'b> BorrowRefMut<'b> {
let borrow = self.borrow.get();
debug_assert!(is_writing(borrow));
// Prevent the borrow counter from underflowing.
assert!(borrow != BorrowFlag::MIN);
assert!(borrow != BorrowCounter::MIN);
self.borrow.set(borrow - 1);
BorrowRefMut { borrow: self.borrow }
}

View file

@ -36,6 +36,8 @@
#![stable(feature = "rust1", since = "1.0.0")]
use crate::marker::PointeeSized;
mod uninit;
/// A common trait that allows explicit creation of a duplicate value.
@ -283,7 +285,7 @@ impl_use_cloned! {
reason = "deriving hack, should not be public",
issue = "none"
)]
pub struct AssertParamIsClone<T: Clone + ?Sized> {
pub struct AssertParamIsClone<T: Clone + PointeeSized> {
_field: crate::marker::PhantomData<T>,
}
#[doc(hidden)]
@ -293,7 +295,7 @@ pub struct AssertParamIsClone<T: Clone + ?Sized> {
reason = "deriving hack, should not be public",
issue = "none"
)]
pub struct AssertParamIsCopy<T: Copy + ?Sized> {
pub struct AssertParamIsCopy<T: Copy + PointeeSized> {
_field: crate::marker::PhantomData<T>,
}
@ -530,6 +532,8 @@ unsafe impl CloneToUninit for crate::bstr::ByteStr {
/// are implemented in `traits::SelectionContext::copy_clone_conditions()`
/// in `rustc_trait_selection`.
mod impls {
use crate::marker::PointeeSized;
macro_rules! impl_clone {
($($t:ty)*) => {
$(
@ -560,7 +564,7 @@ mod impls {
}
#[stable(feature = "rust1", since = "1.0.0")]
impl<T: ?Sized> Clone for *const T {
impl<T: PointeeSized> Clone for *const T {
#[inline(always)]
fn clone(&self) -> Self {
*self
@ -568,7 +572,7 @@ mod impls {
}
#[stable(feature = "rust1", since = "1.0.0")]
impl<T: ?Sized> Clone for *mut T {
impl<T: PointeeSized> Clone for *mut T {
#[inline(always)]
fn clone(&self) -> Self {
*self
@ -577,7 +581,7 @@ mod impls {
/// Shared references can be cloned, but mutable references *cannot*!
#[stable(feature = "rust1", since = "1.0.0")]
impl<T: ?Sized> Clone for &T {
impl<T: PointeeSized> Clone for &T {
#[inline(always)]
#[rustc_diagnostic_item = "noop_method_clone"]
fn clone(&self) -> Self {
@ -587,5 +591,5 @@ mod impls {
/// Shared references can be cloned, but mutable references *cannot*!
#[stable(feature = "rust1", since = "1.0.0")]
impl<T: ?Sized> !Clone for &mut T {}
impl<T: PointeeSized> !Clone for &mut T {}
}

View file

@ -29,6 +29,7 @@ mod bytewise;
pub(crate) use bytewise::BytewiseEq;
use self::Ordering::*;
use crate::marker::PointeeSized;
use crate::ops::ControlFlow;
/// Trait for comparisons using the equality operator.
@ -246,7 +247,7 @@ use crate::ops::ControlFlow;
append_const_msg
)]
#[rustc_diagnostic_item = "PartialEq"]
pub trait PartialEq<Rhs: ?Sized = Self> {
pub trait PartialEq<Rhs: PointeeSized = Self>: PointeeSized {
/// Tests for `self` and `other` values to be equal, and is used by `==`.
#[must_use]
#[stable(feature = "rust1", since = "1.0.0")]
@ -332,7 +333,7 @@ pub macro PartialEq($item:item) {
#[doc(alias = "!=")]
#[stable(feature = "rust1", since = "1.0.0")]
#[rustc_diagnostic_item = "Eq"]
pub trait Eq: PartialEq<Self> {
pub trait Eq: PartialEq<Self> + PointeeSized {
// this method is used solely by `impl Eq or #[derive(Eq)]` to assert that every component of a
// type implements `Eq` itself. The current deriving infrastructure means doing this assertion
// without using a method on this trait is nearly impossible.
@ -361,7 +362,7 @@ pub macro Eq($item:item) {
#[doc(hidden)]
#[allow(missing_debug_implementations)]
#[unstable(feature = "derive_eq", reason = "deriving hack, should not be public", issue = "none")]
pub struct AssertParamIsEq<T: Eq + ?Sized> {
pub struct AssertParamIsEq<T: Eq + PointeeSized> {
_field: crate::marker::PhantomData<T>,
}
@ -954,7 +955,7 @@ impl<T: Clone> Clone for Reverse<T> {
#[doc(alias = ">=")]
#[stable(feature = "rust1", since = "1.0.0")]
#[rustc_diagnostic_item = "Ord"]
pub trait Ord: Eq + PartialOrd<Self> {
pub trait Ord: Eq + PartialOrd<Self> + PointeeSized {
/// This method returns an [`Ordering`] between `self` and `other`.
///
/// By convention, `self.cmp(&other)` returns the ordering matching the expression
@ -1337,7 +1338,8 @@ pub macro Ord($item:item) {
append_const_msg
)]
#[rustc_diagnostic_item = "PartialOrd"]
pub trait PartialOrd<Rhs: ?Sized = Self>: PartialEq<Rhs> {
#[allow(multiple_supertrait_upcastable)] // FIXME(sized_hierarchy): remove this
pub trait PartialOrd<Rhs: PointeeSized = Self>: PartialEq<Rhs> + PointeeSized {
/// This method returns an ordering between `self` and `other` values if one exists.
///
/// # Examples
@ -1481,7 +1483,7 @@ pub trait PartialOrd<Rhs: ?Sized = Self>: PartialEq<Rhs> {
}
}
fn default_chaining_impl<T: ?Sized, U: ?Sized>(
fn default_chaining_impl<T: PointeeSized, U: PointeeSized>(
lhs: &T,
rhs: &U,
p: impl FnOnce(Ordering) -> bool,
@ -1803,6 +1805,7 @@ where
mod impls {
use crate::cmp::Ordering::{self, Equal, Greater, Less};
use crate::hint::unreachable_unchecked;
use crate::marker::PointeeSized;
use crate::ops::ControlFlow::{self, Break, Continue};
macro_rules! partial_eq_impl {
@ -2015,7 +2018,7 @@ mod impls {
// & pointers
#[stable(feature = "rust1", since = "1.0.0")]
impl<A: ?Sized, B: ?Sized> PartialEq<&B> for &A
impl<A: PointeeSized, B: PointeeSized> PartialEq<&B> for &A
where
A: PartialEq<B>,
{
@ -2029,7 +2032,7 @@ mod impls {
}
}
#[stable(feature = "rust1", since = "1.0.0")]
impl<A: ?Sized, B: ?Sized> PartialOrd<&B> for &A
impl<A: PointeeSized, B: PointeeSized> PartialOrd<&B> for &A
where
A: PartialOrd<B>,
{
@ -2071,7 +2074,7 @@ mod impls {
}
}
#[stable(feature = "rust1", since = "1.0.0")]
impl<A: ?Sized> Ord for &A
impl<A: PointeeSized> Ord for &A
where
A: Ord,
{
@ -2081,12 +2084,12 @@ mod impls {
}
}
#[stable(feature = "rust1", since = "1.0.0")]
impl<A: ?Sized> Eq for &A where A: Eq {}
impl<A: PointeeSized> Eq for &A where A: Eq {}
// &mut pointers
#[stable(feature = "rust1", since = "1.0.0")]
impl<A: ?Sized, B: ?Sized> PartialEq<&mut B> for &mut A
impl<A: PointeeSized, B: PointeeSized> PartialEq<&mut B> for &mut A
where
A: PartialEq<B>,
{
@ -2100,7 +2103,7 @@ mod impls {
}
}
#[stable(feature = "rust1", since = "1.0.0")]
impl<A: ?Sized, B: ?Sized> PartialOrd<&mut B> for &mut A
impl<A: PointeeSized, B: PointeeSized> PartialOrd<&mut B> for &mut A
where
A: PartialOrd<B>,
{
@ -2142,7 +2145,7 @@ mod impls {
}
}
#[stable(feature = "rust1", since = "1.0.0")]
impl<A: ?Sized> Ord for &mut A
impl<A: PointeeSized> Ord for &mut A
where
A: Ord,
{
@ -2152,10 +2155,10 @@ mod impls {
}
}
#[stable(feature = "rust1", since = "1.0.0")]
impl<A: ?Sized> Eq for &mut A where A: Eq {}
impl<A: PointeeSized> Eq for &mut A where A: Eq {}
#[stable(feature = "rust1", since = "1.0.0")]
impl<A: ?Sized, B: ?Sized> PartialEq<&mut B> for &A
impl<A: PointeeSized, B: PointeeSized> PartialEq<&mut B> for &A
where
A: PartialEq<B>,
{
@ -2170,7 +2173,7 @@ mod impls {
}
#[stable(feature = "rust1", since = "1.0.0")]
impl<A: ?Sized, B: ?Sized> PartialEq<&B> for &mut A
impl<A: PointeeSized, B: PointeeSized> PartialEq<&B> for &mut A
where
A: PartialEq<B>,
{

Some files were not shown because too many files have changed in this diff Show more