Merge pull request #4440 from RalfJung/rustup

Rustup
This commit is contained in:
Ralf Jung 2025-07-03 08:56:11 +00:00 committed by GitHub
commit 3c3f1e461c
No known key found for this signature in database
GPG key ID: B5690EEEBB952194
160 changed files with 2682 additions and 1464 deletions

View file

@ -467,6 +467,15 @@
# Whether to use the precompiled stage0 libtest with compiletest.
#build.compiletest-use-stage0-libtest = true
# Default value for the `--extra-checks` flag of tidy.
#
# See `./x test tidy --help` for details.
#
# Note that if any value is manually given to bootstrap such as
# `./x test tidy --extra-checks=js`, this value is ignored.
# Use `--extra-checks=''` to temporarily disable all extra checks.
#build.tidy-extra-checks = ""
# Indicates whether ccache is used when building certain artifacts (e.g. LLVM).
# Set to `true` to use the first `ccache` in PATH, or set an absolute path to use
# a specific version.

View file

@ -385,8 +385,8 @@ pub enum GenericParamKind {
},
Const {
ty: P<Ty>,
/// Span of the `const` keyword.
kw_span: Span,
/// Span of the whole parameter definition, including default.
span: Span,
/// Optional default value for the const generic param.
default: Option<AnonConst>,
},
@ -410,10 +410,7 @@ impl GenericParam {
self.ident.span
}
GenericParamKind::Type { default: Some(ty) } => self.ident.span.to(ty.span),
GenericParamKind::Const { kw_span, default: Some(default), .. } => {
kw_span.to(default.value.span)
}
GenericParamKind::Const { kw_span, default: None, ty } => kw_span.to(ty.span),
GenericParamKind::Const { span, .. } => *span,
}
}
}
@ -1390,6 +1387,7 @@ impl Expr {
path.clone(),
TraitBoundModifiers::NONE,
self.span,
Parens::No,
))),
_ => None,
}
@ -3366,6 +3364,13 @@ pub struct TraitRef {
pub ref_id: NodeId,
}
/// Whether enclosing parentheses are present or not.
#[derive(Clone, Encodable, Decodable, Debug)]
pub enum Parens {
/// The bound is wrapped in parentheses, e.g. `(for<'a> Trait<'a>)`.
Yes,
/// No enclosing parentheses are present.
No,
}
#[derive(Clone, Encodable, Decodable, Debug)]
pub struct PolyTraitRef {
/// The `'a` in `for<'a> Foo<&'a T>`.
@ -3378,6 +3383,10 @@ pub struct PolyTraitRef {
pub trait_ref: TraitRef,
pub span: Span,
/// When `Yes`, the first and last character of `span` are an opening
/// and a closing paren respectively.
pub parens: Parens,
}
impl PolyTraitRef {
@ -3386,12 +3395,14 @@ impl PolyTraitRef {
path: Path,
modifiers: TraitBoundModifiers,
span: Span,
parens: Parens,
) -> Self {
PolyTraitRef {
bound_generic_params: generic_params,
modifiers,
trait_ref: TraitRef { path, ref_id: DUMMY_NODE_ID },
span,
parens,
}
}
}

View file

@ -1142,7 +1142,7 @@ macro_rules! common_visitor_and_walkers {
vis: &mut V,
p: &$($lt)? $($mut)? PolyTraitRef,
) -> V::Result {
let PolyTraitRef { bound_generic_params, modifiers, trait_ref, span } = p;
let PolyTraitRef { bound_generic_params, modifiers, trait_ref, span, parens: _ } = p;
try_visit!(visit_modifiers(vis, modifiers));
try_visit!(visit_generic_params(vis, bound_generic_params));
try_visit!(vis.visit_trait_ref(trait_ref));
@ -1350,9 +1350,10 @@ macro_rules! common_visitor_and_walkers {
match kind {
GenericParamKind::Lifetime => (),
GenericParamKind::Type { default } => visit_opt!(vis, visit_ty, default),
GenericParamKind::Const { ty, default, kw_span: _ } => {
GenericParamKind::Const { ty, default, span } => {
try_visit!(vis.visit_ty(ty));
visit_opt!(vis, visit_anon_const, default);
try_visit!(visit_span(vis, span));
}
}
if let Some(sp) = colon_span {

View file

@ -1209,6 +1209,7 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> {
modifiers: TraitBoundModifiers::NONE,
trait_ref: TraitRef { path: path.clone(), ref_id: t.id },
span: t.span,
parens: ast::Parens::No,
},
itctx,
);
@ -1959,7 +1960,7 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> {
(hir::ParamName::Plain(self.lower_ident(param.ident)), kind)
}
GenericParamKind::Const { ty, kw_span: _, default } => {
GenericParamKind::Const { ty, span: _, default } => {
let ty = self
.lower_ty(ty, ImplTraitContext::Disallowed(ImplTraitPosition::GenericDefault));

View file

@ -909,11 +909,11 @@ fn validate_generic_param_order(dcx: DiagCtxtHandle<'_>, generics: &[GenericPara
}
GenericParamKind::Type { default: None } => (),
GenericParamKind::Lifetime => (),
GenericParamKind::Const { ty: _, kw_span: _, default: Some(default) } => {
GenericParamKind::Const { ty: _, span: _, default: Some(default) } => {
ordered_params += " = ";
ordered_params += &pprust::expr_to_string(&default.value);
}
GenericParamKind::Const { ty: _, kw_span: _, default: None } => (),
GenericParamKind::Const { ty: _, span: _, default: None } => (),
}
first = false;
}

View file

@ -3,7 +3,10 @@ use rustc_feature::{AttributeTemplate, template};
use rustc_session::parse::feature_err;
use rustc_span::{Span, Symbol, sym};
use super::{AcceptMapping, AttributeOrder, AttributeParser, OnDuplicate, SingleAttributeParser};
use super::{
AcceptMapping, AttributeOrder, AttributeParser, NoArgsAttributeParser, OnDuplicate,
SingleAttributeParser,
};
use crate::context::{AcceptContext, FinalizeContext, Stage};
use crate::parser::ArgParser;
use crate::session_diagnostics::{NakedFunctionIncompatibleAttribute, NullOnExport};
@ -43,20 +46,10 @@ impl<S: Stage> SingleAttributeParser<S> for OptimizeParser {
pub(crate) struct ColdParser;
impl<S: Stage> SingleAttributeParser<S> for ColdParser {
impl<S: Stage> NoArgsAttributeParser<S> for ColdParser {
const PATH: &[Symbol] = &[sym::cold];
const ATTRIBUTE_ORDER: AttributeOrder = AttributeOrder::KeepLast;
const ON_DUPLICATE: OnDuplicate<S> = OnDuplicate::Warn;
const TEMPLATE: AttributeTemplate = template!(Word);
fn convert(cx: &mut AcceptContext<'_, '_, S>, args: &ArgParser<'_>) -> Option<AttributeKind> {
if let Err(span) = args.no_args() {
cx.expected_no_args(span);
return None;
}
Some(AttributeKind::Cold(cx.attr_span))
}
const CREATE: fn(Span) -> AttributeKind = AttributeKind::Cold;
}
pub(crate) struct ExportNameParser;
@ -194,39 +187,17 @@ impl<S: Stage> AttributeParser<S> for NakedParser {
}
pub(crate) struct TrackCallerParser;
impl<S: Stage> SingleAttributeParser<S> for TrackCallerParser {
impl<S: Stage> NoArgsAttributeParser<S> for TrackCallerParser {
const PATH: &[Symbol] = &[sym::track_caller];
const ATTRIBUTE_ORDER: AttributeOrder = AttributeOrder::KeepLast;
const ON_DUPLICATE: OnDuplicate<S> = OnDuplicate::Warn;
const TEMPLATE: AttributeTemplate = template!(Word);
fn convert(cx: &mut AcceptContext<'_, '_, S>, args: &ArgParser<'_>) -> Option<AttributeKind> {
if let Err(span) = args.no_args() {
cx.expected_no_args(span);
return None;
}
Some(AttributeKind::TrackCaller(cx.attr_span))
}
const CREATE: fn(Span) -> AttributeKind = AttributeKind::TrackCaller;
}
pub(crate) struct NoMangleParser;
impl<S: Stage> SingleAttributeParser<S> for NoMangleParser {
const PATH: &[rustc_span::Symbol] = &[sym::no_mangle];
const ATTRIBUTE_ORDER: AttributeOrder = AttributeOrder::KeepLast;
impl<S: Stage> NoArgsAttributeParser<S> for NoMangleParser {
const PATH: &[Symbol] = &[sym::no_mangle];
const ON_DUPLICATE: OnDuplicate<S> = OnDuplicate::Warn;
const TEMPLATE: AttributeTemplate = template!(Word);
fn convert(cx: &mut AcceptContext<'_, '_, S>, args: &ArgParser<'_>) -> Option<AttributeKind> {
if let Err(span) = args.no_args() {
cx.expected_no_args(span);
return None;
}
Some(AttributeKind::NoMangle(cx.attr_span))
}
const CREATE: fn(Span) -> AttributeKind = AttributeKind::NoMangle;
}
#[derive(Default)]

View file

@ -1,38 +1,19 @@
use rustc_attr_data_structures::AttributeKind;
use rustc_feature::{AttributeTemplate, template};
use rustc_span::{Symbol, sym};
use rustc_span::{Span, Symbol, sym};
use crate::attributes::{AttributeOrder, OnDuplicate, SingleAttributeParser};
use crate::context::{AcceptContext, Stage};
use crate::parser::ArgParser;
use crate::attributes::{NoArgsAttributeParser, OnDuplicate};
use crate::context::Stage;
pub(crate) struct AsPtrParser;
impl<S: Stage> SingleAttributeParser<S> for AsPtrParser {
impl<S: Stage> NoArgsAttributeParser<S> for AsPtrParser {
const PATH: &[Symbol] = &[sym::rustc_as_ptr];
const ATTRIBUTE_ORDER: AttributeOrder = AttributeOrder::KeepFirst;
const ON_DUPLICATE: OnDuplicate<S> = OnDuplicate::Error;
const TEMPLATE: AttributeTemplate = template!(Word);
fn convert(cx: &mut AcceptContext<'_, '_, S>, args: &ArgParser<'_>) -> Option<AttributeKind> {
if let Err(span) = args.no_args() {
cx.expected_no_args(span);
}
Some(AttributeKind::AsPtr(cx.attr_span))
}
const CREATE: fn(Span) -> AttributeKind = AttributeKind::AsPtr;
}
pub(crate) struct PubTransparentParser;
impl<S: Stage> SingleAttributeParser<S> for PubTransparentParser {
impl<S: Stage> NoArgsAttributeParser<S> for PubTransparentParser {
const PATH: &[Symbol] = &[sym::rustc_pub_transparent];
const ATTRIBUTE_ORDER: AttributeOrder = AttributeOrder::KeepFirst;
const ON_DUPLICATE: OnDuplicate<S> = OnDuplicate::Error;
const TEMPLATE: AttributeTemplate = template!(Word);
fn convert(cx: &mut AcceptContext<'_, '_, S>, args: &ArgParser<'_>) -> Option<AttributeKind> {
if let Err(span) = args.no_args() {
cx.expected_no_args(span);
}
Some(AttributeKind::PubTransparent(cx.attr_span))
}
const CREATE: fn(Span) -> AttributeKind = AttributeKind::PubTransparent;
}

View file

@ -1,31 +1,19 @@
use rustc_attr_data_structures::AttributeKind;
use rustc_feature::{AttributeTemplate, template};
use rustc_span::{Symbol, sym};
use rustc_span::{Span, Symbol, sym};
use crate::attributes::{AttributeOrder, OnDuplicate, SingleAttributeParser};
use crate::context::{AcceptContext, Stage};
use crate::parser::ArgParser;
use crate::attributes::{NoArgsAttributeParser, OnDuplicate};
use crate::context::Stage;
pub(crate) struct LoopMatchParser;
impl<S: Stage> SingleAttributeParser<S> for LoopMatchParser {
impl<S: Stage> NoArgsAttributeParser<S> for LoopMatchParser {
const PATH: &[Symbol] = &[sym::loop_match];
const ATTRIBUTE_ORDER: AttributeOrder = AttributeOrder::KeepFirst;
const ON_DUPLICATE: OnDuplicate<S> = OnDuplicate::Warn;
const TEMPLATE: AttributeTemplate = template!(Word);
fn convert(cx: &mut AcceptContext<'_, '_, S>, _args: &ArgParser<'_>) -> Option<AttributeKind> {
Some(AttributeKind::LoopMatch(cx.attr_span))
}
const CREATE: fn(Span) -> AttributeKind = AttributeKind::LoopMatch;
}
pub(crate) struct ConstContinueParser;
impl<S: Stage> SingleAttributeParser<S> for ConstContinueParser {
impl<S: Stage> NoArgsAttributeParser<S> for ConstContinueParser {
const PATH: &[Symbol] = &[sym::const_continue];
const ATTRIBUTE_ORDER: AttributeOrder = AttributeOrder::KeepFirst;
const ON_DUPLICATE: OnDuplicate<S> = OnDuplicate::Warn;
const TEMPLATE: AttributeTemplate = template!(Word);
fn convert(cx: &mut AcceptContext<'_, '_, S>, _args: &ArgParser<'_>) -> Option<AttributeKind> {
Some(AttributeKind::ConstContinue(cx.attr_span))
}
const CREATE: fn(Span) -> AttributeKind = AttributeKind::ConstContinue;
}

View file

@ -17,7 +17,7 @@
use std::marker::PhantomData;
use rustc_attr_data_structures::AttributeKind;
use rustc_feature::AttributeTemplate;
use rustc_feature::{AttributeTemplate, template};
use rustc_span::{Span, Symbol};
use thin_vec::ThinVec;
@ -229,6 +229,41 @@ pub(crate) enum AttributeOrder {
KeepLast,
}
/// An even simpler version of [`SingleAttributeParser`]:
/// it automatically checks that there are no arguments provided to the attribute.
///
/// [`WithoutArgs<T> where T: NoArgsAttributeParser`](WithoutArgs) implements [`SingleAttributeParser`].
pub(crate) trait NoArgsAttributeParser<S: Stage>: 'static {
/// The path of the attribute this parser accepts, e.g. `&[sym::cold]`.
const PATH: &[Symbol];
/// How to react when the attribute occurs more than once on an item.
const ON_DUPLICATE: OnDuplicate<S>;
/// Create the [`AttributeKind`] given attribute's [`Span`].
const CREATE: fn(Span) -> AttributeKind;
}
/// Adapter that lifts a [`NoArgsAttributeParser`] into a [`SingleAttributeParser`].
///
/// Holds no runtime data: the type parameters merely select which parser and
/// which stage the blanket `SingleAttributeParser` impl applies to.
pub(crate) struct WithoutArgs<T: NoArgsAttributeParser<S>, S: Stage>(PhantomData<(S, T)>);
impl<T: NoArgsAttributeParser<S>, S: Stage> Default for WithoutArgs<T, S> {
fn default() -> Self {
// `PhantomData` is a unit value; constructing it directly is equivalent
// to `Default::default()`.
WithoutArgs(PhantomData)
}
}
// Blanket impl: any `NoArgsAttributeParser` becomes a full `SingleAttributeParser`
// that rejects arguments and forwards path/duplicate policy to `T`.
impl<T: NoArgsAttributeParser<S>, S: Stage> SingleAttributeParser<S> for WithoutArgs<T, S> {
const PATH: &[Symbol] = T::PATH;
// When the attribute is repeated, the last occurrence wins.
const ATTRIBUTE_ORDER: AttributeOrder = AttributeOrder::KeepLast;
const ON_DUPLICATE: OnDuplicate<S> = T::ON_DUPLICATE;
// No-args attributes are always the bare word form, e.g. `#[cold]`.
const TEMPLATE: AttributeTemplate = template!(Word);
fn convert(cx: &mut AcceptContext<'_, '_, S>, args: &ArgParser<'_>) -> Option<AttributeKind> {
// Report stray arguments as a diagnostic, but still produce the attribute
// (error recovery: the attribute is meaningful even without valid args).
if let Err(span) = args.no_args() {
cx.expected_no_args(span);
}
Some(T::CREATE(cx.attr_span))
}
}
type ConvertFn<E> = fn(ThinVec<E>) -> AttributeKind;
/// Alternative to [`AttributeParser`] that automatically handles state management.

View file

@ -1,22 +1,12 @@
use rustc_attr_data_structures::AttributeKind;
use rustc_feature::{AttributeTemplate, template};
use rustc_span::{Symbol, sym};
use rustc_span::{Span, Symbol, sym};
use crate::attributes::{AttributeOrder, OnDuplicate, SingleAttributeParser};
use crate::context::{AcceptContext, Stage};
use crate::parser::ArgParser;
use crate::attributes::{NoArgsAttributeParser, OnDuplicate};
use crate::context::Stage;
pub(crate) struct MayDangleParser;
impl<S: Stage> SingleAttributeParser<S> for MayDangleParser {
impl<S: Stage> NoArgsAttributeParser<S> for MayDangleParser {
const PATH: &[Symbol] = &[sym::may_dangle];
const ATTRIBUTE_ORDER: AttributeOrder = AttributeOrder::KeepFirst;
const ON_DUPLICATE: OnDuplicate<S> = OnDuplicate::Warn;
const TEMPLATE: AttributeTemplate = template!(Word);
fn convert(cx: &mut AcceptContext<'_, '_, S>, args: &ArgParser<'_>) -> Option<AttributeKind> {
if let Err(span) = args.no_args() {
cx.expected_no_args(span);
}
Some(AttributeKind::MayDangle(cx.attr_span))
}
const CREATE: fn(span: Span) -> AttributeKind = AttributeKind::MayDangle;
}

View file

@ -5,11 +5,12 @@ use rustc_attr_data_structures::{
StableSince, UnstableReason, VERSION_PLACEHOLDER,
};
use rustc_errors::ErrorGuaranteed;
use rustc_feature::{AttributeTemplate, template};
use rustc_feature::template;
use rustc_span::{Ident, Span, Symbol, sym};
use super::util::parse_version;
use super::{AcceptMapping, AttributeOrder, AttributeParser, OnDuplicate, SingleAttributeParser};
use super::{AcceptMapping, AttributeParser, OnDuplicate};
use crate::attributes::NoArgsAttributeParser;
use crate::context::{AcceptContext, FinalizeContext, Stage};
use crate::parser::{ArgParser, MetaItemParser};
use crate::session_diagnostics::{self, UnsupportedLiteralReason};
@ -132,19 +133,10 @@ impl<S: Stage> AttributeParser<S> for BodyStabilityParser {
}
pub(crate) struct ConstStabilityIndirectParser;
// FIXME(jdonszelmann): single word attribute group when we have these
impl<S: Stage> SingleAttributeParser<S> for ConstStabilityIndirectParser {
impl<S: Stage> NoArgsAttributeParser<S> for ConstStabilityIndirectParser {
const PATH: &[Symbol] = &[sym::rustc_const_stable_indirect];
const ATTRIBUTE_ORDER: AttributeOrder = AttributeOrder::KeepFirst;
const ON_DUPLICATE: OnDuplicate<S> = OnDuplicate::Ignore;
const TEMPLATE: AttributeTemplate = template!(Word);
fn convert(cx: &mut AcceptContext<'_, '_, S>, args: &ArgParser<'_>) -> Option<AttributeKind> {
if let Err(span) = args.no_args() {
cx.expected_no_args(span);
}
Some(AttributeKind::ConstStabilityIndirect)
}
const CREATE: fn(Span) -> AttributeKind = |_| AttributeKind::ConstStabilityIndirect;
}
#[derive(Default)]

View file

@ -37,7 +37,7 @@ use crate::attributes::stability::{
};
use crate::attributes::traits::SkipDuringMethodDispatchParser;
use crate::attributes::transparency::TransparencyParser;
use crate::attributes::{AttributeParser as _, Combine, Single};
use crate::attributes::{AttributeParser as _, Combine, Single, WithoutArgs};
use crate::parser::{ArgParser, MetaItemParser, PathParser};
use crate::session_diagnostics::{AttributeParseError, AttributeParseErrorReason, UnknownMetaItem};
@ -58,6 +58,7 @@ macro_rules! attribute_parsers {
use super::*;
type Combine<T> = super::Combine<T, Early>;
type Single<T> = super::Single<T, Early>;
type WithoutArgs<T> = super::WithoutArgs<T, Early>;
attribute_parsers!(@[Early] pub(crate) static $name = [$($names),*];);
}
@ -65,6 +66,7 @@ macro_rules! attribute_parsers {
use super::*;
type Combine<T> = super::Combine<T, Late>;
type Single<T> = super::Single<T, Late>;
type WithoutArgs<T> = super::WithoutArgs<T, Late>;
attribute_parsers!(@[Late] pub(crate) static $name = [$($names),*];);
}
@ -119,28 +121,28 @@ attribute_parsers!(
// tidy-alphabetical-end
// tidy-alphabetical-start
Single<AsPtrParser>,
Single<ColdParser>,
Single<ConstContinueParser>,
Single<ConstStabilityIndirectParser>,
Single<DeprecationParser>,
Single<ExportNameParser>,
Single<InlineParser>,
Single<LinkNameParser>,
Single<LinkSectionParser>,
Single<LoopMatchParser>,
Single<MayDangleParser>,
Single<MustUseParser>,
Single<NoMangleParser>,
Single<OptimizeParser>,
Single<PubTransparentParser>,
Single<RustcForceInlineParser>,
Single<RustcLayoutScalarValidRangeEnd>,
Single<RustcLayoutScalarValidRangeStart>,
Single<RustcObjectLifetimeDefaultParser>,
Single<SkipDuringMethodDispatchParser>,
Single<TrackCallerParser>,
Single<TransparencyParser>,
Single<WithoutArgs<AsPtrParser>>,
Single<WithoutArgs<ColdParser>>,
Single<WithoutArgs<ConstContinueParser>>,
Single<WithoutArgs<ConstStabilityIndirectParser>>,
Single<WithoutArgs<LoopMatchParser>>,
Single<WithoutArgs<MayDangleParser>>,
Single<WithoutArgs<NoMangleParser>>,
Single<WithoutArgs<PubTransparentParser>>,
Single<WithoutArgs<TrackCallerParser>>,
// tidy-alphabetical-end
];
);

View file

@ -124,7 +124,7 @@ pub(crate) fn expand_deriving_coerce_pointee(
GenericParamKind::Type { default: _ } => {
cx.typaram(p.span(), p.ident, p.bounds.clone(), None)
}
GenericParamKind::Const { ty, kw_span: _, default: _ } => cx
GenericParamKind::Const { ty, span: _, default: _ } => cx
.const_param(
p.span(),
p.ident,

View file

@ -664,10 +664,10 @@ impl<'a> TraitDef<'a> {
cx.typaram(param.ident.span.with_ctxt(ctxt), param.ident, bounds, None)
}
GenericParamKind::Const { ty, kw_span, .. } => {
GenericParamKind::Const { ty, span, .. } => {
let const_nodefault_kind = GenericParamKind::Const {
ty: ty.clone(),
kw_span: kw_span.with_ctxt(ctxt),
span: span.with_ctxt(ctxt),
// We can't have default values inside impl block
default: None,

View file

@ -1117,7 +1117,7 @@ pub(super) fn transmute_immediate<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>>(
// While optimizations will remove no-op transmutes, they might still be
// there in debug or things that aren't no-op in MIR because they change
// the Rust type but not the underlying layout/niche.
if from_scalar == to_scalar {
if from_scalar == to_scalar && from_backend_ty == to_backend_ty {
return imm;
}
@ -1136,13 +1136,7 @@ pub(super) fn transmute_immediate<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>>(
assume_scalar_range(bx, imm, from_scalar, from_backend_ty);
imm = match (from_scalar.primitive(), to_scalar.primitive()) {
(Int(..) | Float(_), Int(..) | Float(_)) => {
if from_backend_ty == to_backend_ty {
imm
} else {
bx.bitcast(imm, to_backend_ty)
}
}
(Int(..) | Float(_), Int(..) | Float(_)) => bx.bitcast(imm, to_backend_ty),
(Pointer(..), Pointer(..)) => bx.pointercast(imm, to_backend_ty),
(Int(..), Pointer(..)) => bx.ptradd(bx.const_null(bx.type_ptr()), imm),
(Pointer(..), Int(..)) => {

View file

@ -10,7 +10,7 @@ use rustc_hir::{self as hir, CRATE_HIR_ID, LangItem};
use rustc_middle::mir::AssertMessage;
use rustc_middle::mir::interpret::ReportedErrorInfo;
use rustc_middle::query::TyCtxtAt;
use rustc_middle::ty::layout::{HasTypingEnv, TyAndLayout};
use rustc_middle::ty::layout::{HasTypingEnv, TyAndLayout, ValidityRequirement};
use rustc_middle::ty::{self, Ty, TyCtxt};
use rustc_middle::{bug, mir};
use rustc_span::{Span, Symbol, sym};
@ -23,8 +23,8 @@ use crate::fluent_generated as fluent;
use crate::interpret::{
self, AllocId, AllocInit, AllocRange, ConstAllocation, CtfeProvenance, FnArg, Frame,
GlobalAlloc, ImmTy, InterpCx, InterpResult, OpTy, PlaceTy, Pointer, RangeSet, Scalar,
compile_time_machine, interp_ok, throw_exhaust, throw_inval, throw_ub, throw_ub_custom,
throw_unsup, throw_unsup_format,
compile_time_machine, err_inval, interp_ok, throw_exhaust, throw_inval, throw_ub,
throw_ub_custom, throw_unsup, throw_unsup_format,
};
/// When hitting this many interpreted terminators we emit a deny by default lint
@ -462,6 +462,44 @@ impl<'tcx> interpret::Machine<'tcx> for CompileTimeMachine<'tcx> {
// (We know the value here in the machine of course, but this is the runtime of that code,
// not the optimization stage.)
sym::is_val_statically_known => ecx.write_scalar(Scalar::from_bool(false), dest)?,
// We handle these here since Miri does not want to have them.
sym::assert_inhabited
| sym::assert_zero_valid
| sym::assert_mem_uninitialized_valid => {
let ty = instance.args.type_at(0);
let requirement = ValidityRequirement::from_intrinsic(intrinsic_name).unwrap();
let should_panic = !ecx
.tcx
.check_validity_requirement((requirement, ecx.typing_env().as_query_input(ty)))
.map_err(|_| err_inval!(TooGeneric))?;
if should_panic {
let layout = ecx.layout_of(ty)?;
let msg = match requirement {
// For *all* intrinsics we first check `is_uninhabited` to give a more specific
// error message.
_ if layout.is_uninhabited() => format!(
"aborted execution: attempted to instantiate uninhabited type `{ty}`"
),
ValidityRequirement::Inhabited => bug!("handled earlier"),
ValidityRequirement::Zero => format!(
"aborted execution: attempted to zero-initialize type `{ty}`, which is invalid"
),
ValidityRequirement::UninitMitigated0x01Fill => format!(
"aborted execution: attempted to leave type `{ty}` uninitialized, which is invalid"
),
ValidityRequirement::Uninit => bug!("assert_uninit_valid doesn't exist"),
};
Self::panic_nounwind(ecx, &msg)?;
// Skip the `return_to_block` at the end (we panicked, we do not return).
return interp_ok(None);
}
}
_ => {
// We haven't handled the intrinsic, let's see if we can use a fallback body.
if ecx.tcx.intrinsic(instance.def_id()).unwrap().must_be_overridden {

View file

@ -7,7 +7,7 @@ use std::assert_matches::assert_matches;
use rustc_abi::Size;
use rustc_apfloat::ieee::{Double, Half, Quad, Single};
use rustc_middle::mir::{self, BinOp, ConstValue, NonDivergingIntrinsic};
use rustc_middle::ty::layout::{TyAndLayout, ValidityRequirement};
use rustc_middle::ty::layout::TyAndLayout;
use rustc_middle::ty::{Ty, TyCtxt};
use rustc_middle::{bug, ty};
use rustc_span::{Symbol, sym};
@ -17,8 +17,8 @@ use super::memory::MemoryKind;
use super::util::ensure_monomorphic_enough;
use super::{
Allocation, CheckInAllocMsg, ConstAllocation, ImmTy, InterpCx, InterpResult, Machine, OpTy,
PlaceTy, Pointer, PointerArithmetic, Provenance, Scalar, err_inval, err_ub_custom,
err_unsup_format, interp_ok, throw_inval, throw_ub_custom, throw_ub_format,
PlaceTy, Pointer, PointerArithmetic, Provenance, Scalar, err_ub_custom, err_unsup_format,
interp_ok, throw_inval, throw_ub_custom, throw_ub_format,
};
use crate::fluent_generated as fluent;
@ -372,41 +372,6 @@ impl<'tcx, M: Machine<'tcx>> InterpCx<'tcx, M> {
self.exact_div(&val, &size, dest)?;
}
sym::assert_inhabited
| sym::assert_zero_valid
| sym::assert_mem_uninitialized_valid => {
let ty = instance.args.type_at(0);
let requirement = ValidityRequirement::from_intrinsic(intrinsic_name).unwrap();
let should_panic = !self
.tcx
.check_validity_requirement((requirement, self.typing_env.as_query_input(ty)))
.map_err(|_| err_inval!(TooGeneric))?;
if should_panic {
let layout = self.layout_of(ty)?;
let msg = match requirement {
// For *all* intrinsics we first check `is_uninhabited` to give a more specific
// error message.
_ if layout.is_uninhabited() => format!(
"aborted execution: attempted to instantiate uninhabited type `{ty}`"
),
ValidityRequirement::Inhabited => bug!("handled earlier"),
ValidityRequirement::Zero => format!(
"aborted execution: attempted to zero-initialize type `{ty}`, which is invalid"
),
ValidityRequirement::UninitMitigated0x01Fill => format!(
"aborted execution: attempted to leave type `{ty}` uninitialized, which is invalid"
),
ValidityRequirement::Uninit => bug!("assert_uninit_valid doesn't exist"),
};
M::panic_nounwind(self, &msg)?;
// Skip the `return_to_block` at the end (we panicked, we do not return).
return interp_ok(true);
}
}
sym::simd_insert => {
let index = u64::from(self.read_scalar(&args[1])?.to_u32()?);
let elem = &args[2];

View file

@ -655,7 +655,7 @@ impl<'tcx, M: Machine<'tcx>> InterpCx<'tcx, M> {
/// The caller is responsible for calling the access hooks!
///
/// You almost certainly want to use `get_ptr_alloc`/`get_ptr_alloc_mut` instead.
fn get_alloc_raw(
pub fn get_alloc_raw(
&self,
id: AllocId,
) -> InterpResult<'tcx, &Allocation<M::Provenance, M::AllocExtra, M::Bytes>> {
@ -757,7 +757,9 @@ impl<'tcx, M: Machine<'tcx>> InterpCx<'tcx, M> {
///
/// Also returns a ptr to `self.extra` so that the caller can use it in parallel with the
/// allocation.
fn get_alloc_raw_mut(
///
/// You almost certainly want to use `get_ptr_alloc`/`get_ptr_alloc_mut` instead.
pub fn get_alloc_raw_mut(
&mut self,
id: AllocId,
) -> InterpResult<'tcx, (&mut Allocation<M::Provenance, M::AllocExtra, M::Bytes>, &mut M)> {
@ -976,15 +978,15 @@ impl<'tcx, M: Machine<'tcx>> InterpCx<'tcx, M> {
interp_ok(())
}
/// Handle the effect an FFI call might have on the state of allocations.
/// This overapproximates the modifications which external code might make to memory:
/// We set all reachable allocations as initialized, mark all reachable provenances as exposed
/// and overwrite them with `Provenance::WILDCARD`.
///
/// The allocations in `ids` are assumed to be already exposed.
pub fn prepare_for_native_call(&mut self, ids: Vec<AllocId>) -> InterpResult<'tcx> {
/// Visit all allocations reachable from the given start set, by recursively traversing the
/// provenance information of those allocations.
pub fn visit_reachable_allocs(
&mut self,
start: Vec<AllocId>,
mut visit: impl FnMut(&mut Self, AllocId, &AllocInfo) -> InterpResult<'tcx>,
) -> InterpResult<'tcx> {
let mut done = FxHashSet::default();
let mut todo = ids;
let mut todo = start;
while let Some(id) = todo.pop() {
if !done.insert(id) {
// We already saw this allocation before, don't process it again.
@ -992,31 +994,20 @@ impl<'tcx, M: Machine<'tcx>> InterpCx<'tcx, M> {
}
let info = self.get_alloc_info(id);
// If there is no data behind this pointer, skip this.
if !matches!(info.kind, AllocKind::LiveData) {
continue;
}
// Expose all provenances in this allocation, and add them to `todo`.
let alloc = self.get_alloc_raw(id)?;
for prov in alloc.provenance().provenances() {
M::expose_provenance(self, prov)?;
if let Some(id) = prov.get_alloc_id() {
todo.push(id);
// Recurse, if there is data here.
// Do this *before* invoking the callback, as the callback might mutate the
// allocation and e.g. replace all provenance by wildcards!
if matches!(info.kind, AllocKind::LiveData) {
let alloc = self.get_alloc_raw(id)?;
for prov in alloc.provenance().provenances() {
if let Some(id) = prov.get_alloc_id() {
todo.push(id);
}
}
}
// Also expose the provenance of the interpreter-level allocation, so it can
// be read by FFI. The `black_box` is defensive programming as LLVM likes
// to (incorrectly) optimize away ptr2int casts whose result is unused.
std::hint::black_box(alloc.get_bytes_unchecked_raw().expose_provenance());
// Prepare for possible write from native code if mutable.
if info.mutbl.is_mut() {
self.get_alloc_raw_mut(id)?
.0
.prepare_for_native_write()
.map_err(|e| e.to_interp_error(id))?;
}
// Call the callback.
visit(self, id, &info)?;
}
interp_ok(())
}
@ -1073,7 +1064,9 @@ impl<'tcx, M: Machine<'tcx>> InterpCx<'tcx, M> {
todo.extend(static_roots(self));
while let Some(id) = todo.pop() {
if reachable.insert(id) {
// This is a new allocation, add the allocation it points to `todo`.
// This is a new allocation, add the allocations it points to `todo`.
// We only need to care about `alloc_map` memory here, as entirely unchanged
// global memory cannot point to memory relevant for the leak check.
if let Some((_, alloc)) = self.memory.alloc_map.get(id) {
todo.extend(
alloc.provenance().provenances().filter_map(|prov| prov.get_alloc_id()),

View file

@ -88,6 +88,7 @@ use std::fmt::Display;
use std::intrinsics::unlikely;
use std::path::Path;
use std::sync::Arc;
use std::sync::atomic::Ordering;
use std::time::{Duration, Instant};
use std::{fs, process};
@ -99,12 +100,15 @@ use tracing::warn;
use crate::fx::FxHashMap;
use crate::outline;
use crate::sync::AtomicU64;
bitflags::bitflags! {
#[derive(Clone, Copy)]
struct EventFilter: u16 {
const GENERIC_ACTIVITIES = 1 << 0;
const QUERY_PROVIDERS = 1 << 1;
/// Store detailed instant events, including timestamp and thread ID,
/// per each query cache hit. Note that this is quite expensive.
const QUERY_CACHE_HITS = 1 << 2;
const QUERY_BLOCKED = 1 << 3;
const INCR_CACHE_LOADS = 1 << 4;
@ -113,16 +117,20 @@ bitflags::bitflags! {
const FUNCTION_ARGS = 1 << 6;
const LLVM = 1 << 7;
const INCR_RESULT_HASHING = 1 << 8;
const ARTIFACT_SIZES = 1 << 9;
const ARTIFACT_SIZES = 1 << 9;
/// Store aggregated counts of cache hits per query invocation.
const QUERY_CACHE_HIT_COUNTS = 1 << 10;
const DEFAULT = Self::GENERIC_ACTIVITIES.bits() |
Self::QUERY_PROVIDERS.bits() |
Self::QUERY_BLOCKED.bits() |
Self::INCR_CACHE_LOADS.bits() |
Self::INCR_RESULT_HASHING.bits() |
Self::ARTIFACT_SIZES.bits();
Self::ARTIFACT_SIZES.bits() |
Self::QUERY_CACHE_HIT_COUNTS.bits();
const ARGS = Self::QUERY_KEYS.bits() | Self::FUNCTION_ARGS.bits();
const QUERY_CACHE_HIT_COMBINED = Self::QUERY_CACHE_HITS.bits() | Self::QUERY_CACHE_HIT_COUNTS.bits();
}
}
@ -134,6 +142,7 @@ const EVENT_FILTERS_BY_NAME: &[(&str, EventFilter)] = &[
("generic-activity", EventFilter::GENERIC_ACTIVITIES),
("query-provider", EventFilter::QUERY_PROVIDERS),
("query-cache-hit", EventFilter::QUERY_CACHE_HITS),
("query-cache-hit-count", EventFilter::QUERY_CACHE_HIT_COUNTS),
("query-blocked", EventFilter::QUERY_BLOCKED),
("incr-cache-load", EventFilter::INCR_CACHE_LOADS),
("query-keys", EventFilter::QUERY_KEYS),
@ -411,13 +420,24 @@ impl SelfProfilerRef {
#[inline(never)]
#[cold]
fn cold_call(profiler_ref: &SelfProfilerRef, query_invocation_id: QueryInvocationId) {
profiler_ref.instant_query_event(
|profiler| profiler.query_cache_hit_event_kind,
query_invocation_id,
);
if profiler_ref.event_filter_mask.contains(EventFilter::QUERY_CACHE_HIT_COUNTS) {
profiler_ref
.profiler
.as_ref()
.unwrap()
.increment_query_cache_hit_counters(QueryInvocationId(query_invocation_id.0));
}
if unlikely(profiler_ref.event_filter_mask.contains(EventFilter::QUERY_CACHE_HITS)) {
profiler_ref.instant_query_event(
|profiler| profiler.query_cache_hit_event_kind,
query_invocation_id,
);
}
}
if unlikely(self.event_filter_mask.contains(EventFilter::QUERY_CACHE_HITS)) {
// We check both kinds of query cache hit events at once, to reduce overhead in the
// common case (with self-profile disabled).
if unlikely(self.event_filter_mask.intersects(EventFilter::QUERY_CACHE_HIT_COMBINED)) {
cold_call(self, query_invocation_id);
}
}
@ -489,6 +509,35 @@ impl SelfProfilerRef {
self.profiler.as_ref().map(|p| p.get_or_alloc_cached_string(s))
}
/// Store query cache hits to the self-profile log.
/// Should be called once at the end of the compilation session.
///
/// The cache hits are stored per **query invocation**, not **per query kind/type**.
/// `analyzeme` can later deduplicate individual query labels from the QueryInvocationId event
/// IDs.
pub fn store_query_cache_hits(&self) {
if self.event_filter_mask.contains(EventFilter::QUERY_CACHE_HIT_COUNTS) {
let profiler = self.profiler.as_ref().unwrap();
let query_hits = profiler.query_hits.read();
let builder = EventIdBuilder::new(&profiler.profiler);
let thread_id = get_thread_id();
for (query_invocation, hit_count) in query_hits.iter().enumerate() {
let hit_count = hit_count.load(Ordering::Relaxed);
// No need to record empty cache hit counts
if hit_count > 0 {
let event_id =
builder.from_label(StringId::new_virtual(query_invocation as u64));
profiler.profiler.record_integer_event(
profiler.query_cache_hit_count_event_kind,
event_id,
thread_id,
hit_count,
);
}
}
}
}
#[inline]
pub fn enabled(&self) -> bool {
self.profiler.is_some()
@ -537,6 +586,19 @@ pub struct SelfProfiler {
string_cache: RwLock<FxHashMap<String, StringId>>,
/// Recording individual query cache hits as "instant" measureme events
/// is incredibly expensive. Instead of doing that, we simply aggregate
/// cache hit *counts* per query invocation, and then store the final count
/// of cache hits per invocation at the end of the compilation session.
///
/// With this approach, we don't know the individual thread IDs and timestamps
/// of cache hits, but it has very little overhead on top of `-Zself-profile`.
/// Recording the cache hits as individual events made compilation 3-5x slower.
///
/// Query invocation IDs should be monotonic integers, so we can store them in a vec,
/// rather than using a hashmap.
query_hits: RwLock<Vec<AtomicU64>>,
query_event_kind: StringId,
generic_activity_event_kind: StringId,
incremental_load_result_event_kind: StringId,
@ -544,6 +606,8 @@ pub struct SelfProfiler {
query_blocked_event_kind: StringId,
query_cache_hit_event_kind: StringId,
artifact_size_event_kind: StringId,
/// Total cache hits per query invocation
query_cache_hit_count_event_kind: StringId,
}
impl SelfProfiler {
@ -573,6 +637,7 @@ impl SelfProfiler {
let query_blocked_event_kind = profiler.alloc_string("QueryBlocked");
let query_cache_hit_event_kind = profiler.alloc_string("QueryCacheHit");
let artifact_size_event_kind = profiler.alloc_string("ArtifactSize");
let query_cache_hit_count_event_kind = profiler.alloc_string("QueryCacheHitCount");
let mut event_filter_mask = EventFilter::empty();
@ -618,6 +683,8 @@ impl SelfProfiler {
query_blocked_event_kind,
query_cache_hit_event_kind,
artifact_size_event_kind,
query_cache_hit_count_event_kind,
query_hits: Default::default(),
})
}
@ -627,6 +694,25 @@ impl SelfProfiler {
self.profiler.alloc_string(s)
}
/// Store a cache hit of a query invocation
pub fn increment_query_cache_hit_counters(&self, id: QueryInvocationId) {
// Fast path: assume that the query was already encountered before, and just record
// a cache hit.
let mut guard = self.query_hits.upgradable_read();
let query_hits = &guard;
let index = id.0 as usize;
if index < query_hits.len() {
// We only want to increment the count, no other synchronization is required
query_hits[index].fetch_add(1, Ordering::Relaxed);
} else {
// If not, we need to extend the query hit map to the highest observed ID
guard.with_upgraded(|vec| {
vec.resize_with(index + 1, || AtomicU64::new(0));
vec[index] = AtomicU64::from(1);
});
}
}
/// Gets a `StringId` for the given string. This method makes sure that
/// any strings going through it will only be allocated once in the
/// profiling data.

View file

@ -172,7 +172,7 @@ impl<'a> ExtCtxt<'a> {
attrs: AttrVec::new(),
bounds,
is_placeholder: false,
kind: ast::GenericParamKind::Const { ty, kw_span: DUMMY_SP, default },
kind: ast::GenericParamKind::Const { ty, span: DUMMY_SP, default },
colon_span: None,
}
}
@ -195,6 +195,7 @@ impl<'a> ExtCtxt<'a> {
},
trait_ref: self.trait_ref(path),
span,
parens: ast::Parens::No,
}
}

View file

@ -195,38 +195,6 @@ impl<'dcx> CollectTrackerAndEmitter<'dcx, '_> {
}
}
/// Currently used by macro_rules! compilation to extract a little information from the `Failure`
/// case.
pub(crate) struct FailureForwarder<'matcher> {
expected_token: Option<&'matcher Token>,
}
impl<'matcher> FailureForwarder<'matcher> {
pub(crate) fn new() -> Self {
Self { expected_token: None }
}
}
impl<'matcher> Tracker<'matcher> for FailureForwarder<'matcher> {
type Failure = (Token, u32, &'static str);
fn build_failure(tok: Token, position: u32, msg: &'static str) -> Self::Failure {
(tok, position, msg)
}
fn description() -> &'static str {
"failure-forwarder"
}
fn set_expected_token(&mut self, tok: &'matcher Token) {
self.expected_token = Some(tok);
}
fn get_expected_token(&self) -> Option<&'matcher Token> {
self.expected_token
}
}
pub(super) fn emit_frag_parse_err(
mut e: Diag<'_>,
parser: &Parser<'_>,
@ -321,7 +289,7 @@ enum ExplainDocComment {
},
}
pub(super) fn annotate_doc_comment(err: &mut Diag<'_>, sm: &SourceMap, span: Span) {
fn annotate_doc_comment(err: &mut Diag<'_>, sm: &SourceMap, span: Span) {
if let Ok(src) = sm.span_to_snippet(span) {
if src.starts_with("///") || src.starts_with("/**") {
err.subdiagnostic(ExplainDocComment::Outer { span });
@ -333,7 +301,7 @@ pub(super) fn annotate_doc_comment(err: &mut Diag<'_>, sm: &SourceMap, span: Spa
/// Generates an appropriate parsing failure message. For EOF, this is "unexpected end...". For
/// other tokens, this is "unexpected token...".
pub(super) fn parse_failure_msg(tok: &Token, expected_token: Option<&Token>) -> Cow<'static, str> {
fn parse_failure_msg(tok: &Token, expected_token: Option<&Token>) -> Cow<'static, str> {
if let Some(expected_token) = expected_token {
Cow::from(format!("expected {}, found {}", token_descr(expected_token), token_descr(tok)))
} else {

View file

@ -105,8 +105,6 @@
//! stored when entering a macro definition starting from the state in which the meta-variable is
//! bound.
use std::iter;
use rustc_ast::token::{Delimiter, IdentIsRaw, Token, TokenKind};
use rustc_ast::{DUMMY_NODE_ID, NodeId};
use rustc_data_structures::fx::FxHashMap;
@ -190,29 +188,22 @@ struct MacroState<'a> {
ops: SmallVec<[KleeneToken; 1]>,
}
/// Checks that meta-variables are used correctly in a macro definition.
/// Checks that meta-variables are used correctly in one rule of a macro definition.
///
/// Arguments:
/// - `psess` is used to emit diagnostics and lints
/// - `node_id` is used to emit lints
/// - `span` is used when no spans are available
/// - `lhses` and `rhses` should have the same length and represent the macro definition
/// - `lhs` and `rhs` represent the rule
pub(super) fn check_meta_variables(
psess: &ParseSess,
node_id: NodeId,
span: Span,
lhses: &[TokenTree],
rhses: &[TokenTree],
lhs: &TokenTree,
rhs: &TokenTree,
) -> Result<(), ErrorGuaranteed> {
if lhses.len() != rhses.len() {
psess.dcx().span_bug(span, "length mismatch between LHSes and RHSes")
}
let mut guar = None;
for (lhs, rhs) in iter::zip(lhses, rhses) {
let mut binders = Binders::default();
check_binders(psess, node_id, lhs, &Stack::Empty, &mut binders, &Stack::Empty, &mut guar);
check_occurrences(psess, node_id, rhs, &Stack::Empty, &binders, &Stack::Empty, &mut guar);
}
let mut binders = Binders::default();
check_binders(psess, node_id, lhs, &Stack::Empty, &mut binders, &Stack::Empty, &mut guar);
check_occurrences(psess, node_id, rhs, &Stack::Empty, &binders, &Stack::Empty, &mut guar);
guar.map_or(Ok(()), Err)
}

View file

@ -536,8 +536,6 @@ impl TtParser {
// The separator matches the current token. Advance past it.
mp.idx += 1;
self.next_mps.push(mp);
} else {
track.set_expected_token(separator);
}
}
&MatcherLoc::SequenceKleeneOpAfterSep { idx_first } => {

View file

@ -19,12 +19,13 @@ use rustc_lint_defs::BuiltinLintDiag;
use rustc_lint_defs::builtin::{
RUST_2021_INCOMPATIBLE_OR_PATTERNS, SEMICOLON_IN_EXPRESSIONS_FROM_MACROS,
};
use rustc_parse::parser::{ParseNtResult, Parser, Recovery};
use rustc_parse::exp;
use rustc_parse::parser::{Parser, Recovery};
use rustc_session::Session;
use rustc_session::parse::ParseSess;
use rustc_span::edition::Edition;
use rustc_span::hygiene::Transparency;
use rustc_span::{Ident, MacroRulesNormalizedIdent, Span, kw, sym};
use rustc_span::{Ident, Span, kw, sym};
use tracing::{debug, instrument, trace, trace_span};
use super::macro_parser::{NamedMatches, NamedParseResult};
@ -34,8 +35,6 @@ use crate::base::{
SyntaxExtensionKind, TTMacroExpander,
};
use crate::expand::{AstFragment, AstFragmentKind, ensure_complete_parse, parse_ast_fragment};
use crate::mbe::diagnostics::{annotate_doc_comment, parse_failure_msg};
use crate::mbe::macro_parser::NamedMatch::*;
use crate::mbe::macro_parser::{Error, ErrorReported, Failure, MatcherLoc, Success, TtParser};
use crate::mbe::transcribe::transcribe;
use crate::mbe::{self, KleeneOp, macro_check};
@ -168,11 +167,6 @@ pub(super) trait Tracker<'matcher> {
fn recovery() -> Recovery {
Recovery::Forbidden
}
fn set_expected_token(&mut self, _tok: &'matcher Token) {}
fn get_expected_token(&self) -> Option<&'matcher Token> {
None
}
}
/// A noop tracker that is used in the hot path of the expansion, has zero overhead thanks to
@ -360,11 +354,6 @@ pub(super) fn try_match_macro<'matcher, T: Tracker<'matcher>>(
Err(CanRetry::Yes)
}
// Note that macro-by-example's input is also matched against a token tree:
// $( $lhs:tt => $rhs:tt );+
//
// Holy self-referential!
/// Converts a macro item into a syntax extension.
pub fn compile_declarative_macro(
sess: &Session,
@ -390,157 +379,66 @@ pub fn compile_declarative_macro(
};
let dummy_syn_ext = |guar| (mk_syn_ext(Arc::new(DummyExpander(guar))), Vec::new());
let lhs_nm = Ident::new(sym::lhs, span);
let rhs_nm = Ident::new(sym::rhs, span);
let tt_spec = NonterminalKind::TT;
let macro_rules = macro_def.macro_rules;
let exp_sep = if macro_rules { exp!(Semi) } else { exp!(Comma) };
// Parse the macro_rules! invocation
// The pattern that macro_rules matches.
// The grammar for macro_rules! is:
// $( $lhs:tt => $rhs:tt );+
// ...quasiquoting this would be nice.
// These spans won't matter, anyways
let argument_gram = vec![
mbe::TokenTree::Sequence(
DelimSpan::dummy(),
mbe::SequenceRepetition {
tts: vec![
mbe::TokenTree::MetaVarDecl { span, name: lhs_nm, kind: tt_spec },
mbe::TokenTree::token(token::FatArrow, span),
mbe::TokenTree::MetaVarDecl { span, name: rhs_nm, kind: tt_spec },
],
separator: Some(Token::new(
if macro_rules { token::Semi } else { token::Comma },
span,
)),
kleene: mbe::KleeneToken::new(mbe::KleeneOp::OneOrMore, span),
num_captures: 2,
},
),
// to phase into semicolon-termination instead of semicolon-separation
mbe::TokenTree::Sequence(
DelimSpan::dummy(),
mbe::SequenceRepetition {
tts: vec![mbe::TokenTree::token(
if macro_rules { token::Semi } else { token::Comma },
span,
)],
separator: None,
kleene: mbe::KleeneToken::new(mbe::KleeneOp::ZeroOrMore, span),
num_captures: 0,
},
),
];
// Convert it into `MatcherLoc` form.
let argument_gram = mbe::macro_parser::compute_locs(&argument_gram);
let create_parser = || {
let body = macro_def.body.tokens.clone();
Parser::new(&sess.psess, body, rustc_parse::MACRO_ARGUMENTS)
};
let parser = create_parser();
let mut tt_parser =
TtParser::new(Ident::with_dummy_span(if macro_rules { kw::MacroRules } else { kw::Macro }));
let argument_map =
match tt_parser.parse_tt(&mut Cow::Owned(parser), &argument_gram, &mut NoopTracker) {
Success(m) => m,
Failure(()) => {
debug!("failed to parse macro tt");
// The fast `NoopTracker` doesn't have any info on failure, so we need to retry it
// with another one that gives us the information we need.
// For this we need to reclone the macro body as the previous parser consumed it.
let retry_parser = create_parser();
let mut track = diagnostics::FailureForwarder::new();
let parse_result =
tt_parser.parse_tt(&mut Cow::Owned(retry_parser), &argument_gram, &mut track);
let Failure((token, _, msg)) = parse_result else {
unreachable!("matcher returned something other than Failure after retry");
};
let s = parse_failure_msg(&token, track.get_expected_token());
let sp = token.span.substitute_dummy(span);
let mut err = sess.dcx().struct_span_err(sp, s);
err.span_label(sp, msg);
annotate_doc_comment(&mut err, sess.source_map(), sp);
let guar = err.emit();
return dummy_syn_ext(guar);
}
Error(sp, msg) => {
let guar = sess.dcx().span_err(sp.substitute_dummy(span), msg);
return dummy_syn_ext(guar);
}
ErrorReported(guar) => {
return dummy_syn_ext(guar);
}
};
let body = macro_def.body.tokens.clone();
let mut p = Parser::new(&sess.psess, body, rustc_parse::MACRO_ARGUMENTS);
// Don't abort iteration early, so that multiple errors can be reported.
let mut guar = None;
let mut check_emission = |ret: Result<(), ErrorGuaranteed>| guar = guar.or(ret.err());
// Extract the arguments:
let lhses = match &argument_map[&MacroRulesNormalizedIdent::new(lhs_nm)] {
MatchedSeq(s) => s
.iter()
.map(|m| {
if let MatchedSingle(ParseNtResult::Tt(tt)) = m {
let tt = mbe::quoted::parse(
&TokenStream::new(vec![tt.clone()]),
true,
sess,
node_id,
features,
edition,
)
.pop()
.unwrap();
// We don't handle errors here, the driver will abort
// after parsing/expansion. We can report every error in every macro this way.
check_emission(check_lhs_nt_follows(sess, node_id, &tt));
return tt;
}
sess.dcx().span_bug(span, "wrong-structured lhs")
})
.collect::<Vec<mbe::TokenTree>>(),
_ => sess.dcx().span_bug(span, "wrong-structured lhs"),
};
let mut lhses = Vec::new();
let mut rhses = Vec::new();
let rhses = match &argument_map[&MacroRulesNormalizedIdent::new(rhs_nm)] {
MatchedSeq(s) => s
.iter()
.map(|m| {
if let MatchedSingle(ParseNtResult::Tt(tt)) = m {
return mbe::quoted::parse(
&TokenStream::new(vec![tt.clone()]),
false,
sess,
node_id,
features,
edition,
)
.pop()
.unwrap();
}
sess.dcx().span_bug(span, "wrong-structured rhs")
})
.collect::<Vec<mbe::TokenTree>>(),
_ => sess.dcx().span_bug(span, "wrong-structured rhs"),
};
for rhs in &rhses {
check_emission(check_rhs(sess, rhs));
while p.token != token::Eof {
let lhs_tt = p.parse_token_tree();
let lhs_tt = mbe::quoted::parse(
&TokenStream::new(vec![lhs_tt]),
true, // LHS
sess,
node_id,
features,
edition,
)
.pop()
.unwrap();
// We don't handle errors here, the driver will abort after parsing/expansion. We can
// report every error in every macro this way.
check_emission(check_lhs_nt_follows(sess, node_id, &lhs_tt));
check_emission(check_lhs_no_empty_seq(sess, slice::from_ref(&lhs_tt)));
if let Err(e) = p.expect(exp!(FatArrow)) {
return dummy_syn_ext(e.emit());
}
let rhs_tt = p.parse_token_tree();
let rhs_tt = mbe::quoted::parse(
&TokenStream::new(vec![rhs_tt]),
false, // RHS
sess,
node_id,
features,
edition,
)
.pop()
.unwrap();
check_emission(check_rhs(sess, &rhs_tt));
check_emission(macro_check::check_meta_variables(&sess.psess, node_id, &lhs_tt, &rhs_tt));
lhses.push(lhs_tt);
rhses.push(rhs_tt);
if p.token == token::Eof {
break;
}
if let Err(e) = p.expect(exp_sep) {
return dummy_syn_ext(e.emit());
}
}
// Don't abort iteration early, so that errors for multiple lhses can be reported.
for lhs in &lhses {
check_emission(check_lhs_no_empty_seq(sess, slice::from_ref(lhs)));
if lhses.is_empty() {
let guar = sess.dcx().span_err(span, "macros must contain at least one rule");
return dummy_syn_ext(guar);
}
check_emission(macro_check::check_meta_variables(&sess.psess, node_id, span, &lhses, &rhses));
let transparency = find_attr!(attrs, AttributeKind::MacroTransparency(x) => *x)
.unwrap_or(Transparency::fallback(macro_rules));

View file

@ -1,12 +1,11 @@
use std::assert_matches::assert_matches;
use std::ops::ControlFlow;
use hir::intravisit::{self, Visitor};
use hir::{GenericParamKind, HirId, Node};
use rustc_hir::def::DefKind;
use rustc_hir::def_id::LocalDefId;
use rustc_hir::intravisit::VisitorExt;
use rustc_hir::{self as hir, AmbigArg};
use rustc_hir::intravisit::{self, Visitor, VisitorExt};
use rustc_hir::{self as hir, AmbigArg, GenericParamKind, HirId, Node};
use rustc_middle::span_bug;
use rustc_middle::ty::{self, TyCtxt};
use rustc_session::lint;
use rustc_span::{Span, Symbol, kw};
@ -212,7 +211,19 @@ pub(super) fn generics_of(tcx: TyCtxt<'_>, def_id: LocalDefId) -> ty::Generics {
// inherit the generics of the item.
Some(parent.to_def_id())
}
_ => None,
// All of these nodes have no parent from which to inherit generics.
Node::Item(_) | Node::ForeignItem(_) => None,
// Params don't really have generics, but we use it when instantiating their value paths.
Node::GenericParam(_) => None,
Node::Synthetic => span_bug!(
tcx.def_span(def_id),
"synthetic HIR should have its `generics_of` explicitly fed"
),
_ => span_bug!(tcx.def_span(def_id), "unhandled node {node:?}"),
};
enum Defaults {

View file

@ -2459,13 +2459,15 @@ impl<'tcx> dyn HirTyLowerer<'tcx> + '_ {
// type a projection.
let in_trait = match opaque_ty.origin {
hir::OpaqueTyOrigin::FnReturn {
parent,
in_trait_or_impl: Some(hir::RpitContext::Trait),
..
}
| hir::OpaqueTyOrigin::AsyncFn {
parent,
in_trait_or_impl: Some(hir::RpitContext::Trait),
..
} => true,
} => Some(parent),
hir::OpaqueTyOrigin::FnReturn {
in_trait_or_impl: None | Some(hir::RpitContext::TraitImpl),
..
@ -2474,7 +2476,7 @@ impl<'tcx> dyn HirTyLowerer<'tcx> + '_ {
in_trait_or_impl: None | Some(hir::RpitContext::TraitImpl),
..
}
| hir::OpaqueTyOrigin::TyAlias { .. } => false,
| hir::OpaqueTyOrigin::TyAlias { .. } => None,
};
self.lower_opaque_ty(opaque_ty.def_id, in_trait)
@ -2594,17 +2596,25 @@ impl<'tcx> dyn HirTyLowerer<'tcx> + '_ {
/// Lower an opaque type (i.e., an existential impl-Trait type) from the HIR.
#[instrument(level = "debug", skip(self), ret)]
fn lower_opaque_ty(&self, def_id: LocalDefId, in_trait: bool) -> Ty<'tcx> {
fn lower_opaque_ty(&self, def_id: LocalDefId, in_trait: Option<LocalDefId>) -> Ty<'tcx> {
let tcx = self.tcx();
let lifetimes = tcx.opaque_captured_lifetimes(def_id);
debug!(?lifetimes);
// If this is an RPITIT and we are using the new RPITIT lowering scheme, we
// generate the def_id of an associated type for the trait and return as
// type a projection.
let def_id = if in_trait {
tcx.associated_type_for_impl_trait_in_trait(def_id).to_def_id()
// If this is an RPITIT and we are using the new RPITIT lowering scheme,
// do a linear search to map this to the synthetic associated type that
// it will be lowered to.
let def_id = if let Some(parent_def_id) = in_trait {
*tcx.associated_types_for_impl_traits_in_associated_fn(parent_def_id)
.iter()
.find(|rpitit| match tcx.opt_rpitit_info(**rpitit) {
Some(ty::ImplTraitInTraitData::Trait { opaque_def_id, .. }) => {
opaque_def_id.expect_local() == def_id
}
_ => unreachable!(),
})
.unwrap()
} else {
def_id.to_def_id()
};
@ -2627,7 +2637,7 @@ impl<'tcx> dyn HirTyLowerer<'tcx> + '_ {
});
debug!(?args);
if in_trait {
if in_trait.is_some() {
Ty::new_projection_from_args(tcx, def_id, args)
} else {
Ty::new_opaque(tcx, def_id, args)

View file

@ -504,6 +504,7 @@ pub trait LintContext {
///
/// [`lint_level`]: rustc_middle::lint::lint_level#decorate-signature
#[rustc_lint_diagnostics]
#[track_caller]
fn opt_span_lint<S: Into<MultiSpan>>(
&self,
lint: &'static Lint,
@ -542,6 +543,7 @@ pub trait LintContext {
///
/// [`lint_level`]: rustc_middle::lint::lint_level#decorate-signature
#[rustc_lint_diagnostics]
#[track_caller]
fn span_lint<S: Into<MultiSpan>>(
&self,
lint: &'static Lint,

View file

@ -3,6 +3,7 @@ use std::iter;
use rustc_ast::util::{classify, parser};
use rustc_ast::{self as ast, ExprKind, HasAttrs as _, StmtKind};
use rustc_attr_data_structures::{AttributeKind, find_attr};
use rustc_data_structures::fx::FxHashMap;
use rustc_errors::{MultiSpan, pluralize};
use rustc_hir::def::{DefKind, Res};
use rustc_hir::def_id::DefId;
@ -10,6 +11,7 @@ use rustc_hir::{self as hir, LangItem};
use rustc_infer::traits::util::elaborate;
use rustc_middle::ty::{self, Ty, adjustment};
use rustc_session::{declare_lint, declare_lint_pass, impl_lint_pass};
use rustc_span::edition::Edition::Edition2015;
use rustc_span::{BytePos, Span, Symbol, kw, sym};
use tracing::instrument;
@ -1034,6 +1036,31 @@ pub(crate) struct UnusedParens {
/// `1 as (i32) < 2` parses to ExprKind::Lt
/// `1 as i32 < 2` parses to i32::<2[missing angle bracket]
parens_in_cast_in_lt: Vec<ast::NodeId>,
/// Ty nodes in this map are in TypeNoBounds position. Any bounds they
/// contain may be ambiguous w/r/t trailing `+` operators.
in_no_bounds_pos: FxHashMap<ast::NodeId, NoBoundsException>,
}
/// Whether parentheses may be omitted from a type without resulting in ambiguity.
///
/// ```
/// type Example = Box<dyn Fn() -> &'static (dyn Send) + Sync>;
/// ```
///
/// Here, `&'static (dyn Send) + Sync` is a `TypeNoBounds`. As such, it may not directly
/// contain `ImplTraitType` or `TraitObjectType` which is why `(dyn Send)` is parenthesized.
/// However, an exception is made for `ImplTraitTypeOneBound` and `TraitObjectTypeOneBound`.
/// The following is accepted because there is no `+`.
///
/// ```
/// type Example = Box<dyn Fn() -> &'static dyn Send>;
/// ```
enum NoBoundsException {
/// The type must be parenthesized.
None,
/// The type is the last bound of the containing type expression. If it has exactly one bound,
/// parentheses around the type are unnecessary.
OneBound,
}
impl_lint_pass!(UnusedParens => [UNUSED_PARENS]);
@ -1277,23 +1304,100 @@ impl EarlyLintPass for UnusedParens {
);
}
ast::TyKind::Paren(r) => {
match &r.kind {
ast::TyKind::TraitObject(..) => {}
ast::TyKind::BareFn(b)
if self.with_self_ty_parens && b.generic_params.len() > 0 => {}
ast::TyKind::ImplTrait(_, bounds) if bounds.len() > 1 => {}
_ => {
let spans = if !ty.span.from_expansion() {
let unused_parens = match &r.kind {
ast::TyKind::ImplTrait(_, bounds) | ast::TyKind::TraitObject(bounds, _) => {
match self.in_no_bounds_pos.get(&ty.id) {
Some(NoBoundsException::None) => false,
Some(NoBoundsException::OneBound) => bounds.len() <= 1,
None => true,
}
}
ast::TyKind::BareFn(b) => {
!self.with_self_ty_parens || b.generic_params.is_empty()
}
_ => true,
};
if unused_parens {
let spans = (!ty.span.from_expansion())
.then(|| {
r.span
.find_ancestor_inside(ty.span)
.map(|r| (ty.span.with_hi(r.lo()), ty.span.with_lo(r.hi())))
})
.flatten();
self.emit_unused_delims(cx, ty.span, spans, "type", (false, false), false);
}
self.with_self_ty_parens = false;
}
ast::TyKind::Ref(_, mut_ty) | ast::TyKind::Ptr(mut_ty) => {
self.in_no_bounds_pos.insert(mut_ty.ty.id, NoBoundsException::OneBound);
}
ast::TyKind::TraitObject(bounds, _) | ast::TyKind::ImplTrait(_, bounds) => {
for i in 0..bounds.len() {
let is_last = i == bounds.len() - 1;
if let ast::GenericBound::Trait(poly_trait_ref) = &bounds[i] {
let fn_with_explicit_ret_ty = if let [.., segment] =
&*poly_trait_ref.trait_ref.path.segments
&& let Some(args) = segment.args.as_ref()
&& let ast::GenericArgs::Parenthesized(paren_args) = &**args
&& let ast::FnRetTy::Ty(ret_ty) = &paren_args.output
{
self.in_no_bounds_pos.insert(
ret_ty.id,
if is_last {
NoBoundsException::OneBound
} else {
NoBoundsException::None
},
);
true
} else {
None
false
};
self.emit_unused_delims(cx, ty.span, spans, "type", (false, false), false);
// In edition 2015, dyn is a contextual keyword and `dyn::foo::Bar` is
// parsed as a path, so parens are necessary to disambiguate. See
// - tests/ui/lint/unused/unused-parens-trait-obj-e2015.rs and
// - https://doc.rust-lang.org/reference/types/trait-object.html#r-type.trait-object.syntax-edition2018
let dyn2015_exception = cx.sess().psess.edition == Edition2015
&& matches!(ty.kind, ast::TyKind::TraitObject(..))
&& i == 0
&& poly_trait_ref
.trait_ref
.path
.segments
.first()
.map(|s| s.ident.name == kw::PathRoot)
.unwrap_or(false);
if let ast::Parens::Yes = poly_trait_ref.parens
&& (is_last || !fn_with_explicit_ret_ty)
&& !dyn2015_exception
{
let s = poly_trait_ref.span;
let spans = (!s.from_expansion()).then(|| {
(
s.with_hi(s.lo() + rustc_span::BytePos(1)),
s.with_lo(s.hi() - rustc_span::BytePos(1)),
)
});
self.emit_unused_delims(
cx,
poly_trait_ref.span,
spans,
"type",
(false, false),
false,
);
}
}
}
self.with_self_ty_parens = false;
}
_ => {}
}
@ -1303,6 +1407,10 @@ impl EarlyLintPass for UnusedParens {
<Self as UnusedDelimLint>::check_item(self, cx, item)
}
fn check_item_post(&mut self, _: &EarlyContext<'_>, _: &rustc_ast::Item) {
self.in_no_bounds_pos.clear();
}
fn enter_where_predicate(&mut self, _: &EarlyContext<'_>, pred: &ast::WherePredicate) {
use rustc_ast::{WhereBoundPredicate, WherePredicateKind};
if let WherePredicateKind::BoundPredicate(WhereBoundPredicate {

View file

@ -413,7 +413,6 @@ pub(super) fn rustc_queries(input: TokenStream) -> TokenStream {
"Query {name} cannot be both `feedable` and `eval_always`."
);
feedable_queries.extend(quote! {
#(#doc_comments)*
[#attribute_stream] fn #name(#arg) #result,
});
}

View file

@ -799,7 +799,7 @@ impl<Prov: Provenance, Extra, Bytes: AllocBytes> Allocation<Prov, Extra, Bytes>
/// Initialize all previously uninitialized bytes in the entire allocation, and set
/// provenance of everything to `Wildcard`. Before calling this, make sure all
/// provenance in this allocation is exposed!
pub fn prepare_for_native_write(&mut self) -> AllocResult {
pub fn prepare_for_native_access(&mut self) {
let full_range = AllocRange { start: Size::ZERO, size: Size::from_bytes(self.len()) };
// Overwrite uninitialized bytes with 0, to ensure we don't leak whatever their value happens to be.
for chunk in self.init_mask.range_as_init_chunks(full_range) {
@ -814,13 +814,6 @@ impl<Prov: Provenance, Extra, Bytes: AllocBytes> Allocation<Prov, Extra, Bytes>
// Set provenance of all bytes to wildcard.
self.provenance.write_wildcards(self.len());
// Also expose the provenance of the interpreter-level allocation, so it can
// be written by FFI. The `black_box` is defensive programming as LLVM likes
// to (incorrectly) optimize away ptr2int casts whose result is unused.
std::hint::black_box(self.get_bytes_unchecked_raw_mut().expose_provenance());
Ok(())
}
/// Remove all provenance in the given memory range.

View file

@ -120,7 +120,7 @@ impl<Prov: Provenance> ProvenanceMap<Prov> {
}
}
/// Check if here is ptr-sized provenance at the given index.
/// Check if there is ptr-sized provenance at the given index.
/// Does not mean anything for bytewise provenance! But can be useful as an optimization.
pub fn get_ptr(&self, offset: Size) -> Option<Prov> {
self.ptrs.get(&offset).copied()

View file

@ -440,7 +440,6 @@ rustc_queries! {
query predicates_of(key: DefId) -> ty::GenericPredicates<'tcx> {
desc { |tcx| "computing predicates of `{}`", tcx.def_path_str(key) }
cache_on_disk_if { key.is_local() }
feedable
}
query opaque_types_defined_by(
@ -1093,13 +1092,6 @@ rustc_queries! {
separate_provide_extern
}
/// Given an impl trait in trait `opaque_ty_def_id`, create and return the corresponding
/// associated item.
query associated_type_for_impl_trait_in_trait(opaque_ty_def_id: LocalDefId) -> LocalDefId {
desc { |tcx| "creating the associated item corresponding to the opaque type `{}`", tcx.def_path_str(opaque_ty_def_id.to_def_id()) }
cache_on_disk_if { true }
}
/// Given an `impl_id`, return the trait it implements along with some header information.
/// Return `None` if this is an inherent impl.
query impl_trait_header(impl_id: DefId) -> Option<ty::ImplTraitHeader<'tcx>> {

View file

@ -380,11 +380,11 @@ pub enum ExprKind<'tcx> {
},
/// A `#[loop_match] loop { state = 'blk: { match state { ... } } }` expression.
LoopMatch {
/// The state variable that is updated, and also the scrutinee of the match.
/// The state variable that is updated.
/// The `match_data.scrutinee` is the same variable, but with a different span.
state: ExprId,
region_scope: region::Scope,
arms: Box<[ArmId]>,
match_span: Span,
match_data: Box<LoopMatchMatchData>,
},
/// Special expression representing the `let` part of an `if let` or similar construct
/// (including `if let` guards in match arms, and let-chains formed by `&&`).
@ -599,6 +599,14 @@ pub struct Arm<'tcx> {
pub span: Span,
}
/// The `match` part of a `#[loop_match]`
#[derive(Clone, Debug, HashStable)]
pub struct LoopMatchMatchData {
pub scrutinee: ExprId,
pub arms: Box<[ArmId]>,
pub span: Span,
}
#[derive(Copy, Clone, Debug, HashStable)]
pub enum LogicalOp {
/// The `&&` operator.

View file

@ -2,6 +2,7 @@ use super::{
AdtExpr, AdtExprBase, Arm, Block, ClosureExpr, Expr, ExprKind, InlineAsmExpr, InlineAsmOperand,
Pat, PatKind, Stmt, StmtKind, Thir,
};
use crate::thir::LoopMatchMatchData;
/// Every `walk_*` method uses deconstruction to access fields of structs and
/// enums. This will result in a compile error if a field is added, which makes
@ -83,7 +84,8 @@ pub fn walk_expr<'thir, 'tcx: 'thir, V: Visitor<'thir, 'tcx>>(
visitor.visit_pat(pat);
}
Loop { body } => visitor.visit_expr(&visitor.thir()[body]),
LoopMatch { state: scrutinee, ref arms, .. } | Match { scrutinee, ref arms, .. } => {
LoopMatch { match_data: box LoopMatchMatchData { scrutinee, ref arms, .. }, .. }
| Match { scrutinee, ref arms, .. } => {
visitor.visit_expr(&visitor.thir()[scrutinee]);
for &arm in &**arms {
visitor.visit_arm(&visitor.thir()[arm]);

View file

@ -245,7 +245,11 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
None
})
}
ExprKind::LoopMatch { state, region_scope, match_span, ref arms } => {
ExprKind::LoopMatch {
state,
region_scope,
match_data: box LoopMatchMatchData { box ref arms, span: match_span, scrutinee },
} => {
// Intuitively, this is a combination of a loop containing a labeled block
// containing a match.
//
@ -292,8 +296,8 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
// Logic for `match`.
let scrutinee_place_builder =
unpack!(body_block = this.as_place_builder(body_block, state));
let scrutinee_span = this.thir.exprs[state].span;
unpack!(body_block = this.as_place_builder(body_block, scrutinee));
let scrutinee_span = this.thir.exprs[scrutinee].span;
let match_start_span = match_span.shrink_to_lo().to(scrutinee_span);
let mut patterns = Vec::with_capacity(arms.len());
@ -335,7 +339,7 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
move |this| {
this.in_breakable_scope(None, state_place, expr_span, |this| {
Some(this.in_const_continuable_scope(
arms.clone(),
Box::from(arms),
built_tree.clone(),
state_place,
expr_span,

View file

@ -2970,6 +2970,9 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
}
Constructor::Wildcard => true,
// Opaque patterns must not be matched on structurally.
Constructor::Opaque(_) => false,
// These we may eventually support:
Constructor::Struct
| Constructor::Ref
@ -2980,8 +2983,7 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
| Constructor::Str(_) => bug!("unsupported pattern constructor {:?}", pat.ctor()),
// These should never occur here:
Constructor::Opaque(_)
| Constructor::Never
Constructor::Never
| Constructor::NonExhaustive
| Constructor::Hidden
| Constructor::Missing

View file

@ -1230,7 +1230,6 @@ pub(crate) struct ConstContinueMissingValue {
#[derive(Diagnostic)]
#[diag(mir_build_const_continue_unknown_jump_target)]
#[note]
pub(crate) struct ConstContinueUnknownJumpTarget {
#[primary_span]
pub span: Span,

View file

@ -983,8 +983,11 @@ impl<'tcx> ThirBuildCx<'tcx> {
data: region::ScopeData::Node,
},
arms: arms.iter().map(|a| self.convert_arm(a)).collect(),
match_span: block_body_expr.span,
match_data: Box::new(LoopMatchMatchData {
scrutinee: self.mirror_expr(scrutinee),
arms: arms.iter().map(|a| self.convert_arm(a)).collect(),
span: block_body_expr.span,
}),
}
} else {
let block_ty = self.typeck_results.node_type(body.hir_id);

View file

@ -6,7 +6,7 @@ use rustc_errors::codes::*;
use rustc_errors::{Applicability, ErrorGuaranteed, MultiSpan, struct_span_code_err};
use rustc_hir::def::*;
use rustc_hir::def_id::LocalDefId;
use rustc_hir::{self as hir, BindingMode, ByRef, HirId};
use rustc_hir::{self as hir, BindingMode, ByRef, HirId, MatchSource};
use rustc_infer::infer::TyCtxtInferExt;
use rustc_lint::Level;
use rustc_middle::bug;
@ -154,6 +154,12 @@ impl<'p, 'tcx> Visitor<'p, 'tcx> for MatchVisitor<'p, 'tcx> {
ExprKind::Match { scrutinee, box ref arms, match_source } => {
self.check_match(scrutinee, arms, match_source, ex.span);
}
ExprKind::LoopMatch {
match_data: box LoopMatchMatchData { scrutinee, box ref arms, span },
..
} => {
self.check_match(scrutinee, arms, MatchSource::Normal, span);
}
ExprKind::Let { box ref pat, expr } => {
self.check_let(pat, Some(expr), ex.span);
}

View file

@ -318,18 +318,23 @@ impl<'a, 'tcx> ThirPrinter<'a, 'tcx> {
self.print_expr(*body, depth_lvl + 2);
print_indented!(self, ")", depth_lvl);
}
LoopMatch { state, region_scope, match_span, arms } => {
LoopMatch { state, region_scope, match_data } => {
print_indented!(self, "LoopMatch {", depth_lvl);
print_indented!(self, "state:", depth_lvl + 1);
self.print_expr(*state, depth_lvl + 2);
print_indented!(self, format!("region_scope: {:?}", region_scope), depth_lvl + 1);
print_indented!(self, format!("match_span: {:?}", match_span), depth_lvl + 1);
print_indented!(self, "match_data:", depth_lvl + 1);
print_indented!(self, "LoopMatchMatchData {", depth_lvl + 2);
print_indented!(self, format!("span: {:?}", match_data.span), depth_lvl + 3);
print_indented!(self, "scrutinee:", depth_lvl + 3);
self.print_expr(match_data.scrutinee, depth_lvl + 4);
print_indented!(self, "arms: [", depth_lvl + 1);
for arm_id in arms.iter() {
self.print_arm(*arm_id, depth_lvl + 2);
print_indented!(self, "arms: [", depth_lvl + 3);
for arm_id in match_data.arms.iter() {
self.print_arm(*arm_id, depth_lvl + 4);
}
print_indented!(self, "]", depth_lvl + 1);
print_indented!(self, "]", depth_lvl + 3);
print_indented!(self, "}", depth_lvl + 2);
print_indented!(self, "}", depth_lvl);
}
Let { expr, pat } => {

View file

@ -120,6 +120,7 @@ enum EnumCheckType<'tcx> {
},
}
#[derive(Debug, Copy, Clone)]
struct TyAndSize<'tcx> {
pub ty: Ty<'tcx>,
pub size: Size,
@ -337,7 +338,7 @@ fn insert_direct_enum_check<'tcx>(
let invalid_discr_block_data = BasicBlockData::new(None, false);
let invalid_discr_block = basic_blocks.push(invalid_discr_block_data);
let block_data = &mut basic_blocks[current_block];
let discr = insert_discr_cast_to_u128(
let discr_place = insert_discr_cast_to_u128(
tcx,
local_decls,
block_data,
@ -348,13 +349,34 @@ fn insert_direct_enum_check<'tcx>(
source_info,
);
// Mask out the bits of the discriminant type.
let mask = discr.size.unsigned_int_max();
let discr_masked =
local_decls.push(LocalDecl::with_source_info(tcx.types.u128, source_info)).into();
let rvalue = Rvalue::BinaryOp(
BinOp::BitAnd,
Box::new((
Operand::Copy(discr_place),
Operand::Constant(Box::new(ConstOperand {
span: source_info.span,
user_ty: None,
const_: Const::Val(ConstValue::from_u128(mask), tcx.types.u128),
})),
)),
);
block_data
.statements
.push(Statement::new(source_info, StatementKind::Assign(Box::new((discr_masked, rvalue)))));
// Branch based on the discriminant value.
block_data.terminator = Some(Terminator {
source_info,
kind: TerminatorKind::SwitchInt {
discr: Operand::Copy(discr),
discr: Operand::Copy(discr_masked),
targets: SwitchTargets::new(
discriminants.into_iter().map(|discr| (discr, new_block)),
discriminants
.into_iter()
.map(|discr_val| (discr.size.truncate(discr_val), new_block)),
invalid_discr_block,
),
},
@ -371,7 +393,7 @@ fn insert_direct_enum_check<'tcx>(
})),
expected: true,
target: new_block,
msg: Box::new(AssertKind::InvalidEnumConstruction(Operand::Copy(discr))),
msg: Box::new(AssertKind::InvalidEnumConstruction(Operand::Copy(discr_masked))),
// This calls panic_invalid_enum_construction, which is #[rustc_nounwind].
// We never want to insert an unwind into unsafe code, because unwinding could
// make a failing UB check turn into much worse UB when we start unwinding.

View file

@ -239,7 +239,7 @@ pub(crate) fn coroutine_by_move_body_def_id<'tcx>(
body_def.explicit_predicates_of(tcx.explicit_predicates_of(coroutine_def_id));
body_def.generics_of(tcx.generics_of(coroutine_def_id).clone());
body_def.param_env(tcx.param_env(coroutine_def_id));
body_def.predicates_of(tcx.predicates_of(coroutine_def_id));
body_def.explicit_predicates_of(tcx.explicit_predicates_of(coroutine_def_id));
// The type of the coroutine is the `by_move_coroutine_ty`.
body_def.type_of(ty::EarlyBinder::bind(by_move_coroutine_ty));

View file

@ -114,13 +114,18 @@ impl<'a> Parser<'a> {
// Parse optional const generics default value.
let default = if self.eat(exp!(Eq)) { Some(self.parse_const_arg()?) } else { None };
let span = if let Some(ref default) = default {
const_span.to(default.value.span)
} else {
const_span.to(ty.span)
};
Ok(GenericParam {
ident,
id: ast::DUMMY_NODE_ID,
attrs: preceding_attrs,
bounds: Vec::new(),
kind: GenericParamKind::Const { ty, kw_span: const_span, default },
kind: GenericParamKind::Const { ty, span, default },
is_placeholder: false,
colon_span: None,
})
@ -137,6 +142,11 @@ impl<'a> Parser<'a> {
// Parse optional const generics default value.
let default = if self.eat(exp!(Eq)) { Some(self.parse_const_arg()?) } else { None };
let span = if let Some(ref default) = default {
mistyped_const_ident.span.to(default.value.span)
} else {
mistyped_const_ident.span.to(ty.span)
};
self.dcx()
.struct_span_err(
@ -156,7 +166,7 @@ impl<'a> Parser<'a> {
id: ast::DUMMY_NODE_ID,
attrs: preceding_attrs,
bounds: Vec::new(),
kind: GenericParamKind::Const { ty, kw_span: mistyped_const_ident.span, default },
kind: GenericParamKind::Const { ty, span, default },
is_placeholder: false,
colon_span: None,
})

View file

@ -305,8 +305,13 @@ impl<'a> Parser<'a> {
let removal_span = kw.span.with_hi(self.token.span.lo());
let path = self.parse_path(PathStyle::Type)?;
let parse_plus = allow_plus == AllowPlus::Yes && self.check_plus();
let kind =
self.parse_remaining_bounds_path(lifetime_defs, path, lo, parse_plus)?;
let kind = self.parse_remaining_bounds_path(
lifetime_defs,
path,
lo,
parse_plus,
ast::Parens::No,
)?;
let err = self.dcx().create_err(errors::TransposeDynOrImpl {
span: kw.span,
kw: kw.name.as_str(),
@ -333,7 +338,13 @@ impl<'a> Parser<'a> {
} else {
let path = self.parse_path(PathStyle::Type)?;
let parse_plus = allow_plus == AllowPlus::Yes && self.check_plus();
self.parse_remaining_bounds_path(lifetime_defs, path, lo, parse_plus)?
self.parse_remaining_bounds_path(
lifetime_defs,
path,
lo,
parse_plus,
ast::Parens::No,
)?
}
}
} else if self.eat_keyword(exp!(Impl)) {
@ -413,9 +424,13 @@ impl<'a> Parser<'a> {
let maybe_bounds = allow_plus == AllowPlus::Yes && self.token.is_like_plus();
match ty.kind {
// `(TY_BOUND_NOPAREN) + BOUND + ...`.
TyKind::Path(None, path) if maybe_bounds => {
self.parse_remaining_bounds_path(ThinVec::new(), path, lo, true)
}
TyKind::Path(None, path) if maybe_bounds => self.parse_remaining_bounds_path(
ThinVec::new(),
path,
lo,
true,
ast::Parens::Yes,
),
// For `('a) + …`, we know that `'a` in type position already lead to an error being
// emitted. To reduce output, let's indirectly suppress E0178 (bad `+` in type) and
// other irrelevant consequential errors.
@ -495,12 +510,14 @@ impl<'a> Parser<'a> {
path: ast::Path,
lo: Span,
parse_plus: bool,
parens: ast::Parens,
) -> PResult<'a, TyKind> {
let poly_trait_ref = PolyTraitRef::new(
generic_params,
path,
TraitBoundModifiers::NONE,
lo.to(self.prev_token.span),
parens,
);
let bounds = vec![GenericBound::Trait(poly_trait_ref)];
self.parse_remaining_bounds(bounds, parse_plus)
@ -826,7 +843,7 @@ impl<'a> Parser<'a> {
Ok(TyKind::MacCall(P(MacCall { path, args: self.parse_delim_args()? })))
} else if allow_plus == AllowPlus::Yes && self.check_plus() {
// `Trait1 + Trait2 + 'a`
self.parse_remaining_bounds_path(ThinVec::new(), path, lo, true)
self.parse_remaining_bounds_path(ThinVec::new(), path, lo, true, ast::Parens::No)
} else {
// Just a type path.
Ok(TyKind::Path(None, path))
@ -892,10 +909,10 @@ impl<'a> Parser<'a> {
fn parse_generic_bound(&mut self) -> PResult<'a, GenericBound> {
let lo = self.token.span;
let leading_token = self.prev_token;
let has_parens = self.eat(exp!(OpenParen));
let parens = if self.eat(exp!(OpenParen)) { ast::Parens::Yes } else { ast::Parens::No };
let bound = if self.token.is_lifetime() {
self.parse_generic_lt_bound(lo, has_parens)?
self.parse_generic_lt_bound(lo, parens)?
} else if self.eat_keyword(exp!(Use)) {
// parse precise captures, if any. This is `use<'lt, 'lt, P, P>`; a list of
// lifetimes and ident params (including SelfUpper). These are validated later
@ -904,7 +921,7 @@ impl<'a> Parser<'a> {
let (args, args_span) = self.parse_precise_capturing_args()?;
GenericBound::Use(args, use_span.to(args_span))
} else {
self.parse_generic_ty_bound(lo, has_parens, &leading_token)?
self.parse_generic_ty_bound(lo, parens, &leading_token)?
};
Ok(bound)
@ -914,10 +931,14 @@ impl<'a> Parser<'a> {
/// ```ebnf
/// LT_BOUND = LIFETIME
/// ```
fn parse_generic_lt_bound(&mut self, lo: Span, has_parens: bool) -> PResult<'a, GenericBound> {
fn parse_generic_lt_bound(
&mut self,
lo: Span,
parens: ast::Parens,
) -> PResult<'a, GenericBound> {
let lt = self.expect_lifetime();
let bound = GenericBound::Outlives(lt);
if has_parens {
if let ast::Parens::Yes = parens {
// FIXME(Centril): Consider not erroring here and accepting `('lt)` instead,
// possibly introducing `GenericBound::Paren(P<GenericBound>)`?
self.recover_paren_lifetime(lo)?;
@ -1090,7 +1111,7 @@ impl<'a> Parser<'a> {
fn parse_generic_ty_bound(
&mut self,
lo: Span,
has_parens: bool,
parens: ast::Parens,
leading_token: &Token,
) -> PResult<'a, GenericBound> {
let (mut lifetime_defs, binder_span) = self.parse_late_bound_lifetime_defs()?;
@ -1116,7 +1137,7 @@ impl<'a> Parser<'a> {
// e.g. `T: for<'a> 'a` or `T: [const] 'a`.
if self.token.is_lifetime() {
let _: ErrorGuaranteed = self.error_lt_bound_with_modifiers(modifiers, binder_span);
return self.parse_generic_lt_bound(lo, has_parens);
return self.parse_generic_lt_bound(lo, parens);
}
if let (more_lifetime_defs, Some(binder_span)) = self.parse_late_bound_lifetime_defs()? {
@ -1183,7 +1204,7 @@ impl<'a> Parser<'a> {
self.recover_fn_trait_with_lifetime_params(&mut path, &mut lifetime_defs)?;
}
if has_parens {
if let ast::Parens::Yes = parens {
// Someone has written something like `&dyn (Trait + Other)`. The correct code
// would be `&(dyn Trait + Other)`
if self.token.is_like_plus() && leading_token.is_keyword(kw::Dyn) {
@ -1203,7 +1224,7 @@ impl<'a> Parser<'a> {
}
let poly_trait =
PolyTraitRef::new(lifetime_defs, path, modifiers, lo.to(self.prev_token.span));
PolyTraitRef::new(lifetime_defs, path, modifiers, lo.to(self.prev_token.span), parens);
Ok(GenericBound::Trait(poly_trait))
}

View file

@ -259,4 +259,5 @@ pub fn alloc_self_profile_query_strings(tcx: TyCtxt<'_>) {
for alloc in super::ALLOC_SELF_PROFILE_QUERY_STRINGS.iter() {
alloc(tcx, &mut string_cache)
}
tcx.sess.prof.store_query_cache_hits();
}

View file

@ -432,6 +432,7 @@ resolve_undeclared_label =
resolve_underscore_lifetime_is_reserved = `'_` cannot be used here
.label = `'_` is a reserved lifetime name
.help = use another lifetime specifier
resolve_unexpected_res_change_ty_to_const_param_sugg =
you might have meant to write a const parameter here

View file

@ -934,6 +934,7 @@ pub(crate) struct ImplicitElidedLifetimeNotAllowedHere {
#[derive(Diagnostic)]
#[diag(resolve_underscore_lifetime_is_reserved, code = E0637)]
#[help]
pub(crate) struct UnderscoreLifetimeIsReserved {
#[primary_span]
#[label]

View file

@ -1656,7 +1656,7 @@ impl<'a, 'ast, 'ra, 'tcx> LateResolutionVisitor<'a, 'ast, 'ra, 'tcx> {
forward_ty_ban_rib.bindings.swap_remove(i);
forward_ty_ban_rib_const_param_ty.bindings.swap_remove(i);
}
GenericParamKind::Const { ref ty, kw_span: _, ref default } => {
GenericParamKind::Const { ref ty, span: _, ref default } => {
// Const parameters can't have param bounds.
assert!(param.bounds.is_empty());

View file

@ -2940,7 +2940,7 @@ impl<'ast, 'ra, 'tcx> LateResolutionVisitor<'_, 'ast, 'ra, 'tcx> {
let span = if let [.., bound] = &param.bounds[..] {
bound.span()
} else if let GenericParam {
kind: GenericParamKind::Const { ty, kw_span: _, default }, ..
kind: GenericParamKind::Const { ty, span: _, default }, ..
} = param {
default.as_ref().map(|def| def.value.span).unwrap_or(ty.span)
} else {
@ -3832,6 +3832,7 @@ fn mk_where_bound_predicate(
ref_id: DUMMY_NODE_ID,
},
span: DUMMY_SP,
parens: ast::Parens::No,
})],
};

View file

@ -1775,7 +1775,6 @@ symbols! {
resume,
return_position_impl_trait_in_trait,
return_type_notation,
rhs,
riscv_target_feature,
rlib,
ropi,

View file

@ -10,6 +10,7 @@ pub(crate) fn target() -> Target {
base.add_pre_link_args(LinkerFlavor::Gnu(Cc::Yes, Lld::No), &["-m64"]);
base.max_atomic_width = Some(64);
base.stack_probes = StackProbeType::Inline;
base.abi = "elfv2".into();
base.llvm_abiname = "elfv2".into();
Target {

View file

@ -10,6 +10,7 @@ pub(crate) fn target() -> Target {
base.add_pre_link_args(LinkerFlavor::Gnu(Cc::Yes, Lld::No), &["-m64"]);
base.max_atomic_width = Some(64);
base.stack_probes = StackProbeType::Inline;
base.abi = "elfv1".into();
base.llvm_abiname = "elfv1".into();
Target {

View file

@ -12,6 +12,7 @@ pub(crate) fn target() -> Target {
base.stack_probes = StackProbeType::Inline;
// FIXME(compiler-team#422): musl targets should be dynamically linked by default.
base.crt_static_default = true;
base.abi = "elfv2".into();
base.llvm_abiname = "elfv2".into();
Target {

View file

@ -10,6 +10,7 @@ pub(crate) fn target() -> Target {
base.add_pre_link_args(LinkerFlavor::Gnu(Cc::Yes, Lld::No), &["-m64"]);
base.max_atomic_width = Some(64);
base.stack_probes = StackProbeType::Inline;
base.abi = "elfv2".into();
base.llvm_abiname = "elfv2".into();
Target {

View file

@ -10,6 +10,7 @@ pub(crate) fn target() -> Target {
base.add_pre_link_args(LinkerFlavor::Gnu(Cc::Yes, Lld::No), &["-m64"]);
base.max_atomic_width = Some(64);
base.stack_probes = StackProbeType::Inline;
base.abi = "elfv1".into();
base.llvm_abiname = "elfv1".into();
Target {

View file

@ -8,6 +8,7 @@ pub(crate) fn target() -> Target {
base.add_pre_link_args(LinkerFlavor::Gnu(Cc::Yes, Lld::No), &["-m64"]);
base.max_atomic_width = Some(64);
base.stack_probes = StackProbeType::Inline;
base.abi = "elfv2".into();
base.llvm_abiname = "elfv2".into();
Target {

View file

@ -8,6 +8,7 @@ pub(crate) fn target() -> Target {
base.add_pre_link_args(LinkerFlavor::Gnu(Cc::Yes, Lld::No), &["-m64"]);
base.max_atomic_width = Some(64);
base.stack_probes = StackProbeType::Inline;
base.abi = "elfv2".into();
base.llvm_abiname = "elfv2".into();
Target {

View file

@ -10,6 +10,7 @@ pub(crate) fn target() -> Target {
base.stack_probes = StackProbeType::Inline;
// FIXME(compiler-team#422): musl targets should be dynamically linked by default.
base.crt_static_default = true;
base.abi = "elfv2".into();
base.llvm_abiname = "elfv2".into();
Target {

View file

@ -44,6 +44,12 @@ pub fn evaluate_host_effect_obligation<'tcx>(
Err(EvaluationFailure::NoSolution) => {}
}
match evaluate_host_effect_from_conditionally_const_item_bounds(selcx, obligation) {
Ok(result) => return Ok(result),
Err(EvaluationFailure::Ambiguous) => return Err(EvaluationFailure::Ambiguous),
Err(EvaluationFailure::NoSolution) => {}
}
match evaluate_host_effect_from_item_bounds(selcx, obligation) {
Ok(result) => return Ok(result),
Err(EvaluationFailure::Ambiguous) => return Err(EvaluationFailure::Ambiguous),
@ -153,7 +159,9 @@ fn evaluate_host_effect_from_bounds<'tcx>(
}
}
fn evaluate_host_effect_from_item_bounds<'tcx>(
/// Assembles constness bounds from `~const` item bounds on alias types, which only
/// hold if the `~const` where bounds also hold and the parent trait is `~const`.
fn evaluate_host_effect_from_conditionally_const_item_bounds<'tcx>(
selcx: &mut SelectionContext<'_, 'tcx>,
obligation: &HostEffectObligation<'tcx>,
) -> Result<ThinVec<PredicateObligation<'tcx>>, EvaluationFailure> {
@ -232,6 +240,63 @@ fn evaluate_host_effect_from_item_bounds<'tcx>(
}
}
/// Assembles constness bounds "normal" item bounds on aliases, which may include
/// unconditionally `const` bounds that are *not* conditional and thus always hold.
fn evaluate_host_effect_from_item_bounds<'tcx>(
selcx: &mut SelectionContext<'_, 'tcx>,
obligation: &HostEffectObligation<'tcx>,
) -> Result<ThinVec<PredicateObligation<'tcx>>, EvaluationFailure> {
let infcx = selcx.infcx;
let tcx = infcx.tcx;
let drcx = DeepRejectCtxt::relate_rigid_rigid(selcx.tcx());
let mut candidate = None;
let mut consider_ty = obligation.predicate.self_ty();
while let ty::Alias(kind @ (ty::Projection | ty::Opaque), alias_ty) = *consider_ty.kind() {
for clause in tcx.item_bounds(alias_ty.def_id).iter_instantiated(tcx, alias_ty.args) {
let bound_clause = clause.kind();
let ty::ClauseKind::HostEffect(data) = bound_clause.skip_binder() else {
continue;
};
let data = bound_clause.rebind(data);
if data.skip_binder().trait_ref.def_id != obligation.predicate.trait_ref.def_id {
continue;
}
if !drcx.args_may_unify(
obligation.predicate.trait_ref.args,
data.skip_binder().trait_ref.args,
) {
continue;
}
let is_match =
infcx.probe(|_| match_candidate(selcx, obligation, data, true, |_, _| {}).is_ok());
if is_match {
if candidate.is_some() {
return Err(EvaluationFailure::Ambiguous);
} else {
candidate = Some(data);
}
}
}
if kind != ty::Projection {
break;
}
consider_ty = alias_ty.self_ty();
}
if let Some(data) = candidate {
Ok(match_candidate(selcx, obligation, data, true, |_, _| {})
.expect("candidate matched before, so it should match again"))
} else {
Err(EvaluationFailure::NoSolution)
}
}
fn evaluate_host_effect_from_builtin_impls<'tcx>(
selcx: &mut SelectionContext<'_, 'tcx>,
obligation: &HostEffectObligation<'tcx>,

View file

@ -1,9 +1,8 @@
use rustc_data_structures::fx::FxIndexSet;
use rustc_hir as hir;
use rustc_hir::def::DefKind;
use rustc_hir::def_id::{DefId, DefIdMap, LocalDefId};
use rustc_hir::definitions::{DefPathData, DisambiguatorState};
use rustc_hir::intravisit::{self, Visitor};
use rustc_hir::{self as hir, AmbigArg};
use rustc_middle::query::Providers;
use rustc_middle::ty::{self, ImplTraitInTraitData, TyCtxt};
use rustc_middle::{bug, span_bug};
@ -14,7 +13,6 @@ pub(crate) fn provide(providers: &mut Providers) {
associated_item_def_ids,
associated_items,
associated_types_for_impl_traits_in_associated_fn,
associated_type_for_impl_trait_in_trait,
impl_item_implementor_ids,
..*providers
};
@ -160,20 +158,22 @@ fn associated_item_from_impl_item_ref(impl_item_ref: &hir::ImplItemRef) -> ty::A
container: ty::AssocItemContainer::Impl,
}
}
struct RPITVisitor {
rpits: FxIndexSet<LocalDefId>,
struct RPITVisitor<'tcx> {
tcx: TyCtxt<'tcx>,
synthetics: Vec<LocalDefId>,
data: DefPathData,
disambiguator: DisambiguatorState,
}
impl<'tcx> Visitor<'tcx> for RPITVisitor {
fn visit_ty(&mut self, ty: &'tcx hir::Ty<'tcx, AmbigArg>) {
if let hir::TyKind::OpaqueDef(opaq) = ty.kind
&& self.rpits.insert(opaq.def_id)
{
for bound in opaq.bounds {
intravisit::walk_param_bound(self, bound);
}
}
intravisit::walk_ty(self, ty)
impl<'tcx> Visitor<'tcx> for RPITVisitor<'tcx> {
fn visit_opaque_ty(&mut self, opaque: &'tcx hir::OpaqueTy<'tcx>) -> Self::Result {
self.synthetics.push(associated_type_for_impl_trait_in_trait(
self.tcx,
opaque.def_id,
self.data,
&mut self.disambiguator,
));
intravisit::walk_opaque_ty(self, opaque)
}
}
@ -194,14 +194,18 @@ fn associated_types_for_impl_traits_in_associated_fn(
match tcx.def_kind(parent_def_id) {
DefKind::Trait => {
let mut visitor = RPITVisitor { rpits: FxIndexSet::default() };
if let Some(output) = tcx.hir_get_fn_output(fn_def_id) {
let data = DefPathData::AnonAssocTy(tcx.item_name(fn_def_id.to_def_id()));
let mut visitor = RPITVisitor {
tcx,
synthetics: vec![],
data,
disambiguator: DisambiguatorState::with(parent_def_id, data, 0),
};
visitor.visit_fn_ret_ty(output);
tcx.arena.alloc_from_iter(visitor.rpits.iter().map(|opaque_ty_def_id| {
tcx.associated_type_for_impl_trait_in_trait(opaque_ty_def_id).to_def_id()
}))
tcx.arena.alloc_from_iter(
visitor.synthetics.into_iter().map(|def_id| def_id.to_def_id()),
)
} else {
&[]
}
@ -211,7 +215,6 @@ fn associated_types_for_impl_traits_in_associated_fn(
let Some(trait_fn_def_id) = tcx.associated_item(fn_def_id).trait_item_def_id else {
return &[];
};
tcx.arena.alloc_from_iter(
tcx.associated_types_for_impl_traits_in_associated_fn(trait_fn_def_id).iter().map(
move |&trait_assoc_def_id| {
@ -236,6 +239,8 @@ fn associated_types_for_impl_traits_in_associated_fn(
fn associated_type_for_impl_trait_in_trait(
tcx: TyCtxt<'_>,
opaque_ty_def_id: LocalDefId,
data: DefPathData,
disambiguator: &mut DisambiguatorState,
) -> LocalDefId {
let (hir::OpaqueTyOrigin::FnReturn { parent: fn_def_id, .. }
| hir::OpaqueTyOrigin::AsyncFn { parent: fn_def_id, .. }) =
@ -246,22 +251,15 @@ fn associated_type_for_impl_trait_in_trait(
let trait_def_id = tcx.local_parent(fn_def_id);
assert_eq!(tcx.def_kind(trait_def_id), DefKind::Trait);
// Collect all opaque types in return position for the method and use
// the index as the disambiguator to make an unique def path.
let mut visitor = RPITVisitor { rpits: FxIndexSet::default() };
visitor.visit_fn_ret_ty(tcx.hir_get_fn_output(fn_def_id).unwrap());
let disambiguator = visitor.rpits.get_index_of(&opaque_ty_def_id).unwrap().try_into().unwrap();
let span = tcx.def_span(opaque_ty_def_id);
// Also use the method name to create an unique def path.
let data = DefPathData::AnonAssocTy(tcx.item_name(fn_def_id.to_def_id()));
let trait_assoc_ty = tcx.at(span).create_def(
trait_def_id,
// No name because this is an anonymous associated type.
None,
DefKind::AssocTy,
Some(data),
&mut DisambiguatorState::with(trait_def_id, data, disambiguator),
disambiguator,
);
let local_def_id = trait_assoc_ty.def_id();

View file

@ -1098,6 +1098,124 @@ impl<T: ?Sized> Box<T> {
pub unsafe fn from_non_null(ptr: NonNull<T>) -> Self {
unsafe { Self::from_raw(ptr.as_ptr()) }
}
/// Consumes the `Box`, returning a wrapped raw pointer.
///
/// The pointer will be properly aligned and non-null.
///
/// After calling this function, the caller is responsible for the
/// memory previously managed by the `Box`. In particular, the
/// caller should properly destroy `T` and release the memory, taking
/// into account the [memory layout] used by `Box`. The easiest way to
/// do this is to convert the raw pointer back into a `Box` with the
/// [`Box::from_raw`] function, allowing the `Box` destructor to perform
/// the cleanup.
///
/// Note: this is an associated function, which means that you have
/// to call it as `Box::into_raw(b)` instead of `b.into_raw()`. This
/// is so that there is no conflict with a method on the inner type.
///
/// # Examples
/// Converting the raw pointer back into a `Box` with [`Box::from_raw`]
/// for automatic cleanup:
/// ```
/// let x = Box::new(String::from("Hello"));
/// let ptr = Box::into_raw(x);
/// let x = unsafe { Box::from_raw(ptr) };
/// ```
/// Manual cleanup by explicitly running the destructor and deallocating
/// the memory:
/// ```
/// use std::alloc::{dealloc, Layout};
/// use std::ptr;
///
/// let x = Box::new(String::from("Hello"));
/// let ptr = Box::into_raw(x);
/// unsafe {
/// ptr::drop_in_place(ptr);
/// dealloc(ptr as *mut u8, Layout::new::<String>());
/// }
/// ```
/// Note: This is equivalent to the following:
/// ```
/// let x = Box::new(String::from("Hello"));
/// let ptr = Box::into_raw(x);
/// unsafe {
/// drop(Box::from_raw(ptr));
/// }
/// ```
///
/// [memory layout]: self#memory-layout
#[must_use = "losing the pointer will leak memory"]
#[stable(feature = "box_raw", since = "1.4.0")]
#[inline]
pub fn into_raw(b: Self) -> *mut T {
// Avoid `into_raw_with_allocator` as that interacts poorly with Miri's Stacked Borrows.
let mut b = mem::ManuallyDrop::new(b);
// We go through the built-in deref for `Box`, which is crucial for Miri to recognize this
// operation for it's alias tracking.
&raw mut **b
}
/// Consumes the `Box`, returning a wrapped `NonNull` pointer.
///
/// The pointer will be properly aligned.
///
/// After calling this function, the caller is responsible for the
/// memory previously managed by the `Box`. In particular, the
/// caller should properly destroy `T` and release the memory, taking
/// into account the [memory layout] used by `Box`. The easiest way to
/// do this is to convert the `NonNull` pointer back into a `Box` with the
/// [`Box::from_non_null`] function, allowing the `Box` destructor to
/// perform the cleanup.
///
/// Note: this is an associated function, which means that you have
/// to call it as `Box::into_non_null(b)` instead of `b.into_non_null()`.
/// This is so that there is no conflict with a method on the inner type.
///
/// # Examples
/// Converting the `NonNull` pointer back into a `Box` with [`Box::from_non_null`]
/// for automatic cleanup:
/// ```
/// #![feature(box_vec_non_null)]
///
/// let x = Box::new(String::from("Hello"));
/// let non_null = Box::into_non_null(x);
/// let x = unsafe { Box::from_non_null(non_null) };
/// ```
/// Manual cleanup by explicitly running the destructor and deallocating
/// the memory:
/// ```
/// #![feature(box_vec_non_null)]
///
/// use std::alloc::{dealloc, Layout};
///
/// let x = Box::new(String::from("Hello"));
/// let non_null = Box::into_non_null(x);
/// unsafe {
/// non_null.drop_in_place();
/// dealloc(non_null.as_ptr().cast::<u8>(), Layout::new::<String>());
/// }
/// ```
/// Note: This is equivalent to the following:
/// ```
/// #![feature(box_vec_non_null)]
///
/// let x = Box::new(String::from("Hello"));
/// let non_null = Box::into_non_null(x);
/// unsafe {
/// drop(Box::from_non_null(non_null));
/// }
/// ```
///
/// [memory layout]: self#memory-layout
#[must_use = "losing the pointer will leak memory"]
#[unstable(feature = "box_vec_non_null", reason = "new API", issue = "130364")]
#[inline]
pub fn into_non_null(b: Self) -> NonNull<T> {
// SAFETY: `Box` is guaranteed to be non-null.
unsafe { NonNull::new_unchecked(Self::into_raw(b)) }
}
}
impl<T: ?Sized, A: Allocator> Box<T, A> {
@ -1208,121 +1326,6 @@ impl<T: ?Sized, A: Allocator> Box<T, A> {
unsafe { Box::from_raw_in(raw.as_ptr(), alloc) }
}
/// Consumes the `Box`, returning a wrapped raw pointer.
///
/// The pointer will be properly aligned and non-null.
///
/// After calling this function, the caller is responsible for the
/// memory previously managed by the `Box`. In particular, the
/// caller should properly destroy `T` and release the memory, taking
/// into account the [memory layout] used by `Box`. The easiest way to
/// do this is to convert the raw pointer back into a `Box` with the
/// [`Box::from_raw`] function, allowing the `Box` destructor to perform
/// the cleanup.
///
/// Note: this is an associated function, which means that you have
/// to call it as `Box::into_raw(b)` instead of `b.into_raw()`. This
/// is so that there is no conflict with a method on the inner type.
///
/// # Examples
/// Converting the raw pointer back into a `Box` with [`Box::from_raw`]
/// for automatic cleanup:
/// ```
/// let x = Box::new(String::from("Hello"));
/// let ptr = Box::into_raw(x);
/// let x = unsafe { Box::from_raw(ptr) };
/// ```
/// Manual cleanup by explicitly running the destructor and deallocating
/// the memory:
/// ```
/// use std::alloc::{dealloc, Layout};
/// use std::ptr;
///
/// let x = Box::new(String::from("Hello"));
/// let ptr = Box::into_raw(x);
/// unsafe {
/// ptr::drop_in_place(ptr);
/// dealloc(ptr as *mut u8, Layout::new::<String>());
/// }
/// ```
/// Note: This is equivalent to the following:
/// ```
/// let x = Box::new(String::from("Hello"));
/// let ptr = Box::into_raw(x);
/// unsafe {
/// drop(Box::from_raw(ptr));
/// }
/// ```
///
/// [memory layout]: self#memory-layout
#[must_use = "losing the pointer will leak memory"]
#[stable(feature = "box_raw", since = "1.4.0")]
#[inline]
pub fn into_raw(b: Self) -> *mut T {
// Make sure Miri realizes that we transition from a noalias pointer to a raw pointer here.
unsafe { &raw mut *&mut *Self::into_raw_with_allocator(b).0 }
}
/// Consumes the `Box`, returning a wrapped `NonNull` pointer.
///
/// The pointer will be properly aligned.
///
/// After calling this function, the caller is responsible for the
/// memory previously managed by the `Box`. In particular, the
/// caller should properly destroy `T` and release the memory, taking
/// into account the [memory layout] used by `Box`. The easiest way to
/// do this is to convert the `NonNull` pointer back into a `Box` with the
/// [`Box::from_non_null`] function, allowing the `Box` destructor to
/// perform the cleanup.
///
/// Note: this is an associated function, which means that you have
/// to call it as `Box::into_non_null(b)` instead of `b.into_non_null()`.
/// This is so that there is no conflict with a method on the inner type.
///
/// # Examples
/// Converting the `NonNull` pointer back into a `Box` with [`Box::from_non_null`]
/// for automatic cleanup:
/// ```
/// #![feature(box_vec_non_null)]
///
/// let x = Box::new(String::from("Hello"));
/// let non_null = Box::into_non_null(x);
/// let x = unsafe { Box::from_non_null(non_null) };
/// ```
/// Manual cleanup by explicitly running the destructor and deallocating
/// the memory:
/// ```
/// #![feature(box_vec_non_null)]
///
/// use std::alloc::{dealloc, Layout};
///
/// let x = Box::new(String::from("Hello"));
/// let non_null = Box::into_non_null(x);
/// unsafe {
/// non_null.drop_in_place();
/// dealloc(non_null.as_ptr().cast::<u8>(), Layout::new::<String>());
/// }
/// ```
/// Note: This is equivalent to the following:
/// ```
/// #![feature(box_vec_non_null)]
///
/// let x = Box::new(String::from("Hello"));
/// let non_null = Box::into_non_null(x);
/// unsafe {
/// drop(Box::from_non_null(non_null));
/// }
/// ```
///
/// [memory layout]: self#memory-layout
#[must_use = "losing the pointer will leak memory"]
#[unstable(feature = "box_vec_non_null", reason = "new API", issue = "130364")]
#[inline]
pub fn into_non_null(b: Self) -> NonNull<T> {
// SAFETY: `Box` is guaranteed to be non-null.
unsafe { NonNull::new_unchecked(Self::into_raw(b)) }
}
/// Consumes the `Box`, returning a wrapped raw pointer and the allocator.
///
/// The pointer will be properly aligned and non-null.
@ -1602,7 +1605,9 @@ impl<T: ?Sized, A: Allocator> Box<T, A> {
where
A: 'a,
{
unsafe { &mut *Box::into_raw(b) }
let (ptr, alloc) = Box::into_raw_with_allocator(b);
mem::forget(alloc);
unsafe { &mut *ptr }
}
/// Converts a `Box<T>` into a `Pin<Box<T>>`. If `T` does not implement [`Unpin`], then

View file

@ -1322,6 +1322,30 @@ impl<T: ?Sized> Rc<T> {
unsafe { Self::from_raw_in(ptr, Global) }
}
/// Consumes the `Rc`, returning the wrapped pointer.
///
/// To avoid a memory leak the pointer must be converted back to an `Rc` using
/// [`Rc::from_raw`].
///
/// # Examples
///
/// ```
/// use std::rc::Rc;
///
/// let x = Rc::new("hello".to_owned());
/// let x_ptr = Rc::into_raw(x);
/// assert_eq!(unsafe { &*x_ptr }, "hello");
/// # // Prevent leaks for Miri.
/// # drop(unsafe { Rc::from_raw(x_ptr) });
/// ```
#[must_use = "losing the pointer will leak memory"]
#[stable(feature = "rc_raw", since = "1.17.0")]
#[rustc_never_returns_null_ptr]
pub fn into_raw(this: Self) -> *const T {
let this = ManuallyDrop::new(this);
Self::as_ptr(&*this)
}
/// Increments the strong reference count on the `Rc<T>` associated with the
/// provided pointer by one.
///
@ -1408,30 +1432,6 @@ impl<T: ?Sized, A: Allocator> Rc<T, A> {
&this.alloc
}
/// Consumes the `Rc`, returning the wrapped pointer.
///
/// To avoid a memory leak the pointer must be converted back to an `Rc` using
/// [`Rc::from_raw`].
///
/// # Examples
///
/// ```
/// use std::rc::Rc;
///
/// let x = Rc::new("hello".to_owned());
/// let x_ptr = Rc::into_raw(x);
/// assert_eq!(unsafe { &*x_ptr }, "hello");
/// # // Prevent leaks for Miri.
/// # drop(unsafe { Rc::from_raw(x_ptr) });
/// ```
#[must_use = "losing the pointer will leak memory"]
#[stable(feature = "rc_raw", since = "1.17.0")]
#[rustc_never_returns_null_ptr]
pub fn into_raw(this: Self) -> *const T {
let this = ManuallyDrop::new(this);
Self::as_ptr(&*this)
}
/// Consumes the `Rc`, returning the wrapped pointer and allocator.
///
/// To avoid a memory leak the pointer must be converted back to an `Rc` using
@ -1525,7 +1525,7 @@ impl<T: ?Sized, A: Allocator> Rc<T, A> {
/// use std::alloc::System;
///
/// let x = Rc::new_in("hello".to_owned(), System);
/// let x_ptr = Rc::into_raw(x);
/// let (x_ptr, _alloc) = Rc::into_raw_with_allocator(x);
///
/// unsafe {
/// // Convert back to an `Rc` to prevent leak.
@ -1547,7 +1547,7 @@ impl<T: ?Sized, A: Allocator> Rc<T, A> {
/// use std::alloc::System;
///
/// let x: Rc<[u32], _> = Rc::new_in([1, 2, 3], System);
/// let x_ptr: *const [u32] = Rc::into_raw(x);
/// let x_ptr: *const [u32] = Rc::into_raw_with_allocator(x).0;
///
/// unsafe {
/// let x: Rc<[u32; 3], _> = Rc::from_raw_in(x_ptr.cast::<[u32; 3]>(), System);
@ -1648,7 +1648,7 @@ impl<T: ?Sized, A: Allocator> Rc<T, A> {
/// let five = Rc::new_in(5, System);
///
/// unsafe {
/// let ptr = Rc::into_raw(five);
/// let (ptr, _alloc) = Rc::into_raw_with_allocator(five);
/// Rc::increment_strong_count_in(ptr, System);
///
/// let five = Rc::from_raw_in(ptr, System);
@ -1694,7 +1694,7 @@ impl<T: ?Sized, A: Allocator> Rc<T, A> {
/// let five = Rc::new_in(5, System);
///
/// unsafe {
/// let ptr = Rc::into_raw(five);
/// let (ptr, _alloc) = Rc::into_raw_with_allocator(five);
/// Rc::increment_strong_count_in(ptr, System);
///
/// let five = Rc::from_raw_in(ptr, System);
@ -3123,6 +3123,39 @@ impl<T: ?Sized> Weak<T> {
pub unsafe fn from_raw(ptr: *const T) -> Self {
unsafe { Self::from_raw_in(ptr, Global) }
}
/// Consumes the `Weak<T>` and turns it into a raw pointer.
///
/// This converts the weak pointer into a raw pointer, while still preserving the ownership of
/// one weak reference (the weak count is not modified by this operation). It can be turned
/// back into the `Weak<T>` with [`from_raw`].
///
/// The same restrictions of accessing the target of the pointer as with
/// [`as_ptr`] apply.
///
/// # Examples
///
/// ```
/// use std::rc::{Rc, Weak};
///
/// let strong = Rc::new("hello".to_owned());
/// let weak = Rc::downgrade(&strong);
/// let raw = weak.into_raw();
///
/// assert_eq!(1, Rc::weak_count(&strong));
/// assert_eq!("hello", unsafe { &*raw });
///
/// drop(unsafe { Weak::from_raw(raw) });
/// assert_eq!(0, Rc::weak_count(&strong));
/// ```
///
/// [`from_raw`]: Weak::from_raw
/// [`as_ptr`]: Weak::as_ptr
#[must_use = "losing the pointer will leak memory"]
#[stable(feature = "weak_into_raw", since = "1.45.0")]
pub fn into_raw(self) -> *const T {
mem::ManuallyDrop::new(self).as_ptr()
}
}
impl<T: ?Sized, A: Allocator> Weak<T, A> {
@ -3175,39 +3208,6 @@ impl<T: ?Sized, A: Allocator> Weak<T, A> {
}
}
/// Consumes the `Weak<T>` and turns it into a raw pointer.
///
/// This converts the weak pointer into a raw pointer, while still preserving the ownership of
/// one weak reference (the weak count is not modified by this operation). It can be turned
/// back into the `Weak<T>` with [`from_raw`].
///
/// The same restrictions of accessing the target of the pointer as with
/// [`as_ptr`] apply.
///
/// # Examples
///
/// ```
/// use std::rc::{Rc, Weak};
///
/// let strong = Rc::new("hello".to_owned());
/// let weak = Rc::downgrade(&strong);
/// let raw = weak.into_raw();
///
/// assert_eq!(1, Rc::weak_count(&strong));
/// assert_eq!("hello", unsafe { &*raw });
///
/// drop(unsafe { Weak::from_raw(raw) });
/// assert_eq!(0, Rc::weak_count(&strong));
/// ```
///
/// [`from_raw`]: Weak::from_raw
/// [`as_ptr`]: Weak::as_ptr
#[must_use = "losing the pointer will leak memory"]
#[stable(feature = "weak_into_raw", since = "1.45.0")]
pub fn into_raw(self) -> *const T {
mem::ManuallyDrop::new(self).as_ptr()
}
/// Consumes the `Weak<T>`, returning the wrapped pointer and allocator.
///
/// This converts the weak pointer into a raw pointer, while still preserving the ownership of

View file

@ -1467,6 +1467,30 @@ impl<T: ?Sized> Arc<T> {
unsafe { Arc::from_raw_in(ptr, Global) }
}
/// Consumes the `Arc`, returning the wrapped pointer.
///
/// To avoid a memory leak the pointer must be converted back to an `Arc` using
/// [`Arc::from_raw`].
///
/// # Examples
///
/// ```
/// use std::sync::Arc;
///
/// let x = Arc::new("hello".to_owned());
/// let x_ptr = Arc::into_raw(x);
/// assert_eq!(unsafe { &*x_ptr }, "hello");
/// # // Prevent leaks for Miri.
/// # drop(unsafe { Arc::from_raw(x_ptr) });
/// ```
#[must_use = "losing the pointer will leak memory"]
#[stable(feature = "rc_raw", since = "1.17.0")]
#[rustc_never_returns_null_ptr]
pub fn into_raw(this: Self) -> *const T {
let this = ManuallyDrop::new(this);
Self::as_ptr(&*this)
}
/// Increments the strong reference count on the `Arc<T>` associated with the
/// provided pointer by one.
///
@ -1558,30 +1582,6 @@ impl<T: ?Sized, A: Allocator> Arc<T, A> {
&this.alloc
}
/// Consumes the `Arc`, returning the wrapped pointer.
///
/// To avoid a memory leak the pointer must be converted back to an `Arc` using
/// [`Arc::from_raw`].
///
/// # Examples
///
/// ```
/// use std::sync::Arc;
///
/// let x = Arc::new("hello".to_owned());
/// let x_ptr = Arc::into_raw(x);
/// assert_eq!(unsafe { &*x_ptr }, "hello");
/// # // Prevent leaks for Miri.
/// # drop(unsafe { Arc::from_raw(x_ptr) });
/// ```
#[must_use = "losing the pointer will leak memory"]
#[stable(feature = "rc_raw", since = "1.17.0")]
#[rustc_never_returns_null_ptr]
pub fn into_raw(this: Self) -> *const T {
let this = ManuallyDrop::new(this);
Self::as_ptr(&*this)
}
/// Consumes the `Arc`, returning the wrapped pointer and allocator.
///
/// To avoid a memory leak the pointer must be converted back to an `Arc` using
@ -1676,7 +1676,7 @@ impl<T: ?Sized, A: Allocator> Arc<T, A> {
/// use std::alloc::System;
///
/// let x = Arc::new_in("hello".to_owned(), System);
/// let x_ptr = Arc::into_raw(x);
/// let (x_ptr, alloc) = Arc::into_raw_with_allocator(x);
///
/// unsafe {
/// // Convert back to an `Arc` to prevent leak.
@ -1698,7 +1698,7 @@ impl<T: ?Sized, A: Allocator> Arc<T, A> {
/// use std::alloc::System;
///
/// let x: Arc<[u32], _> = Arc::new_in([1, 2, 3], System);
/// let x_ptr: *const [u32] = Arc::into_raw(x);
/// let x_ptr: *const [u32] = Arc::into_raw_with_allocator(x).0;
///
/// unsafe {
/// let x: Arc<[u32; 3], _> = Arc::from_raw_in(x_ptr.cast::<[u32; 3]>(), System);
@ -1850,7 +1850,7 @@ impl<T: ?Sized, A: Allocator> Arc<T, A> {
/// let five = Arc::new_in(5, System);
///
/// unsafe {
/// let ptr = Arc::into_raw(five);
/// let (ptr, _alloc) = Arc::into_raw_with_allocator(five);
/// Arc::increment_strong_count_in(ptr, System);
///
/// // This assertion is deterministic because we haven't shared
@ -1899,7 +1899,7 @@ impl<T: ?Sized, A: Allocator> Arc<T, A> {
/// let five = Arc::new_in(5, System);
///
/// unsafe {
/// let ptr = Arc::into_raw(five);
/// let (ptr, _alloc) = Arc::into_raw_with_allocator(five);
/// Arc::increment_strong_count_in(ptr, System);
///
/// // Those assertions are deterministic because we haven't shared
@ -2863,6 +2863,39 @@ impl<T: ?Sized> Weak<T> {
pub unsafe fn from_raw(ptr: *const T) -> Self {
unsafe { Weak::from_raw_in(ptr, Global) }
}
/// Consumes the `Weak<T>` and turns it into a raw pointer.
///
/// This converts the weak pointer into a raw pointer, while still preserving the ownership of
/// one weak reference (the weak count is not modified by this operation). It can be turned
/// back into the `Weak<T>` with [`from_raw`].
///
/// The same restrictions of accessing the target of the pointer as with
/// [`as_ptr`] apply.
///
/// # Examples
///
/// ```
/// use std::sync::{Arc, Weak};
///
/// let strong = Arc::new("hello".to_owned());
/// let weak = Arc::downgrade(&strong);
/// let raw = weak.into_raw();
///
/// assert_eq!(1, Arc::weak_count(&strong));
/// assert_eq!("hello", unsafe { &*raw });
///
/// drop(unsafe { Weak::from_raw(raw) });
/// assert_eq!(0, Arc::weak_count(&strong));
/// ```
///
/// [`from_raw`]: Weak::from_raw
/// [`as_ptr`]: Weak::as_ptr
#[must_use = "losing the pointer will leak memory"]
#[stable(feature = "weak_into_raw", since = "1.45.0")]
pub fn into_raw(self) -> *const T {
ManuallyDrop::new(self).as_ptr()
}
}
impl<T: ?Sized, A: Allocator> Weak<T, A> {
@ -2915,39 +2948,6 @@ impl<T: ?Sized, A: Allocator> Weak<T, A> {
}
}
/// Consumes the `Weak<T>` and turns it into a raw pointer.
///
/// This converts the weak pointer into a raw pointer, while still preserving the ownership of
/// one weak reference (the weak count is not modified by this operation). It can be turned
/// back into the `Weak<T>` with [`from_raw`].
///
/// The same restrictions of accessing the target of the pointer as with
/// [`as_ptr`] apply.
///
/// # Examples
///
/// ```
/// use std::sync::{Arc, Weak};
///
/// let strong = Arc::new("hello".to_owned());
/// let weak = Arc::downgrade(&strong);
/// let raw = weak.into_raw();
///
/// assert_eq!(1, Arc::weak_count(&strong));
/// assert_eq!("hello", unsafe { &*raw });
///
/// drop(unsafe { Weak::from_raw(raw) });
/// assert_eq!(0, Arc::weak_count(&strong));
/// ```
///
/// [`from_raw`]: Weak::from_raw
/// [`as_ptr`]: Weak::as_ptr
#[must_use = "losing the pointer will leak memory"]
#[stable(feature = "weak_into_raw", since = "1.45.0")]
pub fn into_raw(self) -> *const T {
ManuallyDrop::new(self).as_ptr()
}
/// Consumes the `Weak<T>`, returning the wrapped pointer and allocator.
///
/// This converts the weak pointer into a raw pointer, while still preserving the ownership of

View file

@ -761,6 +761,88 @@ impl<T> Vec<T> {
pub fn peek_mut(&mut self) -> Option<PeekMut<'_, T>> {
PeekMut::new(self)
}
/// Decomposes a `Vec<T>` into its raw components: `(pointer, length, capacity)`.
///
/// Returns the raw pointer to the underlying data, the length of
/// the vector (in elements), and the allocated capacity of the
/// data (in elements). These are the same arguments in the same
/// order as the arguments to [`from_raw_parts`].
///
/// After calling this function, the caller is responsible for the
/// memory previously managed by the `Vec`. The only way to do
/// this is to convert the raw pointer, length, and capacity back
/// into a `Vec` with the [`from_raw_parts`] function, allowing
/// the destructor to perform the cleanup.
///
/// [`from_raw_parts`]: Vec::from_raw_parts
///
/// # Examples
///
/// ```
/// #![feature(vec_into_raw_parts)]
/// let v: Vec<i32> = vec![-1, 0, 1];
///
/// let (ptr, len, cap) = v.into_raw_parts();
///
/// let rebuilt = unsafe {
/// // We can now make changes to the components, such as
/// // transmuting the raw pointer to a compatible type.
/// let ptr = ptr as *mut u32;
///
/// Vec::from_raw_parts(ptr, len, cap)
/// };
/// assert_eq!(rebuilt, [4294967295, 0, 1]);
/// ```
#[must_use = "losing the pointer will leak memory"]
#[unstable(feature = "vec_into_raw_parts", reason = "new API", issue = "65816")]
pub fn into_raw_parts(self) -> (*mut T, usize, usize) {
let mut me = ManuallyDrop::new(self);
(me.as_mut_ptr(), me.len(), me.capacity())
}
#[doc(alias = "into_non_null_parts")]
/// Decomposes a `Vec<T>` into its raw components: `(NonNull pointer, length, capacity)`.
///
/// Returns the `NonNull` pointer to the underlying data, the length of
/// the vector (in elements), and the allocated capacity of the
/// data (in elements). These are the same arguments in the same
/// order as the arguments to [`from_parts`].
///
/// After calling this function, the caller is responsible for the
/// memory previously managed by the `Vec`. The only way to do
/// this is to convert the `NonNull` pointer, length, and capacity back
/// into a `Vec` with the [`from_parts`] function, allowing
/// the destructor to perform the cleanup.
///
/// [`from_parts`]: Vec::from_parts
///
/// # Examples
///
/// ```
/// #![feature(vec_into_raw_parts, box_vec_non_null)]
///
/// let v: Vec<i32> = vec![-1, 0, 1];
///
/// let (ptr, len, cap) = v.into_parts();
///
/// let rebuilt = unsafe {
/// // We can now make changes to the components, such as
/// // transmuting the raw pointer to a compatible type.
/// let ptr = ptr.cast::<u32>();
///
/// Vec::from_parts(ptr, len, cap)
/// };
/// assert_eq!(rebuilt, [4294967295, 0, 1]);
/// ```
#[must_use = "losing the pointer will leak memory"]
#[unstable(feature = "box_vec_non_null", reason = "new API", issue = "130364")]
// #[unstable(feature = "vec_into_raw_parts", reason = "new API", issue = "65816")]
pub fn into_parts(self) -> (NonNull<T>, usize, usize) {
let (ptr, len, capacity) = self.into_raw_parts();
// SAFETY: A `Vec` always has a non-null pointer.
(unsafe { NonNull::new_unchecked(ptr) }, len, capacity)
}
}
impl<T, A: Allocator> Vec<T, A> {
@ -1095,88 +1177,6 @@ impl<T, A: Allocator> Vec<T, A> {
unsafe { Vec { buf: RawVec::from_nonnull_in(ptr, capacity, alloc), len: length } }
}
/// Decomposes a `Vec<T>` into its raw components: `(pointer, length, capacity)`.
///
/// Returns the raw pointer to the underlying data, the length of
/// the vector (in elements), and the allocated capacity of the
/// data (in elements). These are the same arguments in the same
/// order as the arguments to [`from_raw_parts`].
///
/// After calling this function, the caller is responsible for the
/// memory previously managed by the `Vec`. The only way to do
/// this is to convert the raw pointer, length, and capacity back
/// into a `Vec` with the [`from_raw_parts`] function, allowing
/// the destructor to perform the cleanup.
///
/// [`from_raw_parts`]: Vec::from_raw_parts
///
/// # Examples
///
/// ```
/// #![feature(vec_into_raw_parts)]
/// let v: Vec<i32> = vec![-1, 0, 1];
///
/// let (ptr, len, cap) = v.into_raw_parts();
///
/// let rebuilt = unsafe {
/// // We can now make changes to the components, such as
/// // transmuting the raw pointer to a compatible type.
/// let ptr = ptr as *mut u32;
///
/// Vec::from_raw_parts(ptr, len, cap)
/// };
/// assert_eq!(rebuilt, [4294967295, 0, 1]);
/// ```
#[must_use = "losing the pointer will leak memory"]
#[unstable(feature = "vec_into_raw_parts", reason = "new API", issue = "65816")]
pub fn into_raw_parts(self) -> (*mut T, usize, usize) {
let mut me = ManuallyDrop::new(self);
(me.as_mut_ptr(), me.len(), me.capacity())
}
#[doc(alias = "into_non_null_parts")]
/// Decomposes a `Vec<T>` into its raw components: `(NonNull pointer, length, capacity)`.
///
/// Returns the `NonNull` pointer to the underlying data, the length of
/// the vector (in elements), and the allocated capacity of the
/// data (in elements). These are the same arguments in the same
/// order as the arguments to [`from_parts`].
///
/// After calling this function, the caller is responsible for the
/// memory previously managed by the `Vec`. The only way to do
/// this is to convert the `NonNull` pointer, length, and capacity back
/// into a `Vec` with the [`from_parts`] function, allowing
/// the destructor to perform the cleanup.
///
/// [`from_parts`]: Vec::from_parts
///
/// # Examples
///
/// ```
/// #![feature(vec_into_raw_parts, box_vec_non_null)]
///
/// let v: Vec<i32> = vec![-1, 0, 1];
///
/// let (ptr, len, cap) = v.into_parts();
///
/// let rebuilt = unsafe {
/// // We can now make changes to the components, such as
/// // transmuting the raw pointer to a compatible type.
/// let ptr = ptr.cast::<u32>();
///
/// Vec::from_parts(ptr, len, cap)
/// };
/// assert_eq!(rebuilt, [4294967295, 0, 1]);
/// ```
#[must_use = "losing the pointer will leak memory"]
#[unstable(feature = "box_vec_non_null", reason = "new API", issue = "130364")]
// #[unstable(feature = "vec_into_raw_parts", reason = "new API", issue = "65816")]
pub fn into_parts(self) -> (NonNull<T>, usize, usize) {
let (ptr, len, capacity) = self.into_raw_parts();
// SAFETY: A `Vec` always has a non-null pointer.
(unsafe { NonNull::new_unchecked(ptr) }, len, capacity)
}
/// Decomposes a `Vec<T>` into its raw components: `(pointer, length, capacity, allocator)`.
///
/// Returns the raw pointer to the underlying data, the length of the vector (in elements),
@ -3031,6 +3031,61 @@ impl<T, A: Allocator> Vec<T, A> {
(initialized, spare, &mut self.len)
}
}
/// Groups every `N` elements in the `Vec<T>` into chunks to produce a `Vec<[T; N]>`, dropping
/// elements in the remainder. `N` must be greater than zero.
///
/// If the capacity is not a multiple of the chunk size, the buffer will shrink down to the
/// nearest multiple with a reallocation or deallocation.
///
/// This function can be used to reverse [`Vec::into_flattened`].
///
/// # Examples
///
/// ```
/// #![feature(vec_into_chunks)]
///
/// let vec = vec![0, 1, 2, 3, 4, 5, 6, 7];
/// assert_eq!(vec.into_chunks::<3>(), [[0, 1, 2], [3, 4, 5]]);
///
/// let vec = vec![0, 1, 2, 3];
/// let chunks: Vec<[u8; 10]> = vec.into_chunks();
/// assert!(chunks.is_empty());
///
/// let flat = vec![0; 8 * 8 * 8];
/// let reshaped: Vec<[[[u8; 8]; 8]; 8]> = flat.into_chunks().into_chunks().into_chunks();
/// assert_eq!(reshaped.len(), 1);
/// ```
#[cfg(not(no_global_oom_handling))]
#[unstable(feature = "vec_into_chunks", issue = "142137")]
pub fn into_chunks<const N: usize>(mut self) -> Vec<[T; N], A> {
const {
assert!(N != 0, "chunk size must be greater than zero");
}
let (len, cap) = (self.len(), self.capacity());
let len_remainder = len % N;
if len_remainder != 0 {
self.truncate(len - len_remainder);
}
let cap_remainder = cap % N;
if !T::IS_ZST && cap_remainder != 0 {
self.buf.shrink_to_fit(cap - cap_remainder);
}
let (ptr, _, _, alloc) = self.into_raw_parts_with_alloc();
// SAFETY:
// - `ptr` and `alloc` were just returned from `self.into_raw_parts_with_alloc()`
// - `[T; N]` has the same alignment as `T`
// - `size_of::<[T; N]>() * cap / N == size_of::<T>() * cap`
// - `len / N <= cap / N` because `len <= cap`
// - the allocated memory consists of `len / N` valid values of type `[T; N]`
// - `cap / N` fits the size of the allocated memory after shrinking
unsafe { Vec::from_raw_parts_in(ptr.cast(), len / N, cap / N, alloc) }
}
}
impl<T: Clone, A: Allocator> Vec<T, A> {

View file

@ -347,7 +347,7 @@ impl dyn Error {
/// let b = B(Some(Box::new(A)));
///
/// // let err : Box<Error> = b.into(); // or
/// let err = &b as &(dyn Error);
/// let err = &b as &dyn Error;
///
/// let mut iter = err.sources();
///

View file

@ -1,6 +1,6 @@
Equivalent to C's `char` type.
[C's `char` type] is completely unlike [Rust's `char` type]; while Rust's type represents a unicode scalar value, C's `char` type is just an ordinary integer. On modern architectures this type will always be either [`i8`] or [`u8`], as they use byte-addresses memory with 8-bit bytes.
[C's `char` type] is completely unlike [Rust's `char` type]; while Rust's type represents a unicode scalar value, C's `char` type is just an ordinary integer. On modern architectures this type will always be either [`i8`] or [`u8`], as they use byte-addressed memory with 8-bit bytes.
C chars are most commonly used to make C strings. Unlike Rust, where the length of a string is included alongside the string, C strings mark the end of a string with the character `'\0'`. See `CStr` for more information.

View file

@ -2867,7 +2867,7 @@ macro_rules! tuple {
maybe_tuple_doc! {
$($name)+ @
#[stable(feature = "rust1", since = "1.0.0")]
impl<$($name:Debug),+> Debug for ($($name,)+) where last_type!($($name,)+): ?Sized {
impl<$($name:Debug),+> Debug for ($($name,)+) {
#[allow(non_snake_case, unused_assignments)]
fn fmt(&self, f: &mut Formatter<'_>) -> Result {
let mut builder = f.debug_tuple("");
@ -2898,11 +2898,6 @@ macro_rules! maybe_tuple_doc {
};
}
macro_rules! last_type {
($a:ident,) => { $a };
($a:ident, $($rest_a:ident,)+) => { last_type!($($rest_a,)+) };
}
tuple! { E, D, C, B, A, Z, Y, X, W, V, U, T, }
#[stable(feature = "rust1", since = "1.0.0")]

View file

@ -886,7 +886,7 @@ mod impls {
maybe_tuple_doc! {
$($name)+ @
#[stable(feature = "rust1", since = "1.0.0")]
impl<$($name: Hash),+> Hash for ($($name,)+) where last_type!($($name,)+): ?Sized {
impl<$($name: Hash),+> Hash for ($($name,)+) {
#[allow(non_snake_case)]
#[inline]
fn hash<S: Hasher>(&self, state: &mut S) {
@ -912,11 +912,6 @@ mod impls {
};
}
macro_rules! last_type {
($a:ident,) => { $a };
($a:ident, $($rest_a:ident,)+) => { last_type!($($rest_a,)+) };
}
impl_hash_tuple! {}
impl_hash_tuple! { T }
impl_hash_tuple! { T B }

View file

@ -472,7 +472,8 @@ pub fn select_unpredictable<T>(b: bool, true_val: T, false_val: T) -> T {
}
/// A guard for unsafe functions that cannot ever be executed if `T` is uninhabited:
/// This will statically either panic, or do nothing.
/// This will statically either panic, or do nothing. It does not *guarantee* to ever panic,
/// and should only be called if an assertion failure will imply language UB in the following code.
///
/// This intrinsic does not have a stable counterpart.
#[rustc_intrinsic_const_stable_indirect]
@ -481,7 +482,9 @@ pub fn select_unpredictable<T>(b: bool, true_val: T, false_val: T) -> T {
pub const fn assert_inhabited<T>();
/// A guard for unsafe functions that cannot ever be executed if `T` does not permit
/// zero-initialization: This will statically either panic, or do nothing.
/// zero-initialization: This will statically either panic, or do nothing. It does not *guarantee*
/// to ever panic, and should only be called if an assertion failure will imply language UB in the
/// following code.
///
/// This intrinsic does not have a stable counterpart.
#[rustc_intrinsic_const_stable_indirect]
@ -489,7 +492,9 @@ pub const fn assert_inhabited<T>();
#[rustc_intrinsic]
pub const fn assert_zero_valid<T>();
/// A guard for `std::mem::uninitialized`. This will statically either panic, or do nothing.
/// A guard for `std::mem::uninitialized`. This will statically either panic, or do nothing. It does
/// not *guarantee* to ever panic, and should only be called if an assertion failure will imply
/// language UB in the following code.
///
/// This intrinsic does not have a stable counterpart.
#[rustc_intrinsic_const_stable_indirect]

View file

@ -616,7 +616,9 @@ impl<T> MaybeUninit<T> {
// This also means that `self` must be a `value` variant.
unsafe {
intrinsics::assert_inhabited::<T>();
ManuallyDrop::into_inner(self.value)
// We do this via a raw ptr read instead of `ManuallyDrop::into_inner` so that there's
// no trace of `ManuallyDrop` in Miri's error messages here.
(&raw const self.value).cast::<T>().read()
}
}

View file

@ -79,7 +79,7 @@ pub struct ParseIntError {
/// # }
/// ```
#[stable(feature = "int_error_matching", since = "1.55.0")]
#[derive(Debug, Clone, PartialEq, Eq)]
#[derive(Debug, Clone, PartialEq, Eq, Copy, Hash)]
#[non_exhaustive]
pub enum IntErrorKind {
/// Value being parsed is empty.

View file

@ -1211,7 +1211,7 @@ pub enum OneSidedRangeBound {
/// Types that implement `OneSidedRange<T>` must return `Bound::Unbounded`
/// from one of `RangeBounds::start_bound` or `RangeBounds::end_bound`.
#[unstable(feature = "one_sided_range", issue = "69780")]
pub trait OneSidedRange<T: ?Sized>: RangeBounds<T> {
pub trait OneSidedRange<T>: RangeBounds<T> {
/// An internal-only helper function for `split_off` and
/// `split_off_mut` that returns the bound of the one-sided range.
fn bound(self) -> (OneSidedRangeBound, T);

View file

@ -1,7 +1,7 @@
// See core/src/primitive_docs.rs for documentation.
use crate::cmp::Ordering::{self, *};
use crate::marker::{ConstParamTy_, PointeeSized, StructuralPartialEq, UnsizedConstParamTy};
use crate::marker::{ConstParamTy_, StructuralPartialEq, UnsizedConstParamTy};
use crate::ops::ControlFlow::{self, Break, Continue};
use crate::random::{Random, RandomSource};
@ -24,10 +24,7 @@ macro_rules! tuple_impls {
maybe_tuple_doc! {
$($T)+ @
#[stable(feature = "rust1", since = "1.0.0")]
impl<$($T: PartialEq),+> PartialEq for ($($T,)+)
where
last_type!($($T,)+): PointeeSized
{
impl<$($T: PartialEq),+> PartialEq for ($($T,)+) {
#[inline]
fn eq(&self, other: &($($T,)+)) -> bool {
$( ${ignore($T)} self.${index()} == other.${index()} )&&+
@ -43,8 +40,6 @@ macro_rules! tuple_impls {
$($T)+ @
#[stable(feature = "rust1", since = "1.0.0")]
impl<$($T: Eq),+> Eq for ($($T,)+)
where
last_type!($($T,)+): PointeeSized
{}
}
@ -73,8 +68,6 @@ macro_rules! tuple_impls {
$($T)+ @
#[stable(feature = "rust1", since = "1.0.0")]
impl<$($T: PartialOrd),+> PartialOrd for ($($T,)+)
where
last_type!($($T,)+): PointeeSized
{
#[inline]
fn partial_cmp(&self, other: &($($T,)+)) -> Option<Ordering> {
@ -119,8 +112,6 @@ macro_rules! tuple_impls {
$($T)+ @
#[stable(feature = "rust1", since = "1.0.0")]
impl<$($T: Ord),+> Ord for ($($T,)+)
where
last_type!($($T,)+): PointeeSized
{
#[inline]
fn cmp(&self, other: &($($T,)+)) -> Ordering {
@ -245,9 +236,4 @@ macro_rules! lexical_cmp {
($a:expr, $b:expr) => { ($a).cmp(&$b) };
}
macro_rules! last_type {
($a:ident,) => { $a };
($a:ident, $($rest_a:ident,)+) => { last_type!($($rest_a,)+) };
}
tuple_impls!(E D C B A Z Y X W V U T);

View file

@ -4,6 +4,27 @@
//! filesystem. All methods in this module represent cross-platform filesystem
//! operations. Extra platform-specific functionality can be found in the
//! extension traits of `std::os::$platform`.
//!
//! # Time of Check to Time of Use (TOCTOU)
//!
//! Many filesystem operations are subject to a race condition known as "Time of Check to Time of Use"
//! (TOCTOU). This occurs when a program checks a condition (like file existence or permissions)
//! and then uses the result of that check to make a decision, but the condition may have changed
//! between the check and the use.
//!
//! For example, checking if a file exists and then creating it if it doesn't is vulnerable to
//! TOCTOU - another process could create the file between your check and creation attempt.
//!
//! Another example is with symbolic links: when removing a directory, if another process replaces
//! the directory with a symbolic link between the check and the removal operation, the removal
//! might affect the wrong location. This is why operations like [`remove_dir_all`] need to use
//! atomic operations to prevent such race conditions.
//!
//! To avoid TOCTOU issues:
//! - Be aware that metadata operations (like [`metadata`] or [`symlink_metadata`]) may be affected by
//! changes made by other processes.
//! - Use atomic operations when possible (like [`File::create_new`] instead of checking existence then creating).
//! - Keep file open for the duration of operations.
#![stable(feature = "rust1", since = "1.0.0")]
#![deny(unsafe_op_in_unsafe_fn)]
@ -548,13 +569,14 @@ impl File {
/// non-exhaustive list of likely errors.
///
/// This option is useful because it is atomic. Otherwise between checking whether a file
/// exists and creating a new one, the file may have been created by another process (a TOCTOU
/// exists and creating a new one, the file may have been created by another process (a [TOCTOU]
/// race condition / attack).
///
/// This can also be written using
/// `File::options().read(true).write(true).create_new(true).open(...)`.
///
/// [`AlreadyExists`]: crate::io::ErrorKind::AlreadyExists
/// [TOCTOU]: self#time-of-check-to-time-of-use-toctou
///
/// # Examples
///
@ -1610,7 +1632,7 @@ impl OpenOptions {
///
/// This option is useful because it is atomic. Otherwise between checking
/// whether a file exists and creating a new one, the file may have been
/// created by another process (a TOCTOU race condition / attack).
/// created by another process (a [TOCTOU] race condition / attack).
///
/// If `.create_new(true)` is set, [`.create()`] and [`.truncate()`] are
/// ignored.
@ -1621,6 +1643,7 @@ impl OpenOptions {
/// [`.create()`]: OpenOptions::create
/// [`.truncate()`]: OpenOptions::truncate
/// [`AlreadyExists`]: io::ErrorKind::AlreadyExists
/// [TOCTOU]: self#time-of-check-to-time-of-use-toctou
///
/// # Examples
///
@ -2954,17 +2977,17 @@ pub fn remove_dir<P: AsRef<Path>>(path: P) -> io::Result<()> {
/// `GetFileInformationByHandleEx`, `SetFileInformationByHandle`, and `NtCreateFile`.
///
/// ## Time-of-check to time-of-use (TOCTOU) race conditions
/// On a few platforms there is no way to remove a directory's contents without following symlinks
/// unless you perform a check and then operate on paths based on that directory.
/// This allows concurrently-running code to replace the directory with a symlink after the check,
/// causing a removal to instead operate on a path based on the symlink. This is a TOCTOU race.
/// By default, `fs::remove_dir_all` protects against a symlink TOCTOU race on all platforms
/// except the following. It should not be used in security-sensitive contexts on these platforms:
/// - Miri: Even when emulating targets where the underlying implementation will protect against
/// TOCTOU races, Miri will not do so.
/// - Redox OS: This function does not protect against TOCTOU races, as Redox does not implement
/// the required platform support to do so.
/// See the [module-level TOCTOU explanation](self#time-of-check-to-time-of-use-toctou).
///
/// On most platforms, `fs::remove_dir_all` protects against symlink TOCTOU races by default.
/// However, on the following platforms, this protection is not provided and the function should
/// not be used in security-sensitive contexts:
/// - **Miri**: Even when emulating targets where the underlying implementation will protect against
/// TOCTOU races, Miri will not do so.
/// - **Redox OS**: This function does not protect against TOCTOU races, as Redox does not implement
/// the required platform support to do so.
///
/// [TOCTOU]: self#time-of-check-to-time-of-use-toctou
/// [changes]: io#platform-specific-behavior
///
/// # Errors
@ -3238,7 +3261,7 @@ impl AsInnerMut<fs_imp::DirBuilder> for DirBuilder {
/// permission is denied on one of the parent directories.
///
/// Note that while this avoids some pitfalls of the `exists()` method, it still can not
/// prevent time-of-check to time-of-use (TOCTOU) bugs. You should only use it in scenarios
/// prevent time-of-check to time-of-use ([TOCTOU]) bugs. You should only use it in scenarios
/// where those bugs are not an issue.
///
/// # Examples
@ -3251,6 +3274,7 @@ impl AsInnerMut<fs_imp::DirBuilder> for DirBuilder {
/// ```
///
/// [`Path::exists`]: crate::path::Path::exists
/// [TOCTOU]: self#time-of-check-to-time-of-use-toctou
#[stable(feature = "fs_try_exists", since = "1.81.0")]
#[inline]
pub fn exists<P: AsRef<Path>>(path: P) -> io::Result<bool> {

View file

@ -3127,7 +3127,7 @@ impl Path {
/// Returns `true` if the path points at an existing entity.
///
/// Warning: this method may be error-prone, consider using [`try_exists()`] instead!
/// It also has a risk of introducing time-of-check to time-of-use (TOCTOU) bugs.
/// It also has a risk of introducing time-of-check to time-of-use ([TOCTOU]) bugs.
///
/// This function will traverse symbolic links to query information about the
/// destination file.
@ -3148,6 +3148,7 @@ impl Path {
/// check errors, call [`Path::try_exists`].
///
/// [`try_exists()`]: Self::try_exists
/// [TOCTOU]: fs#time-of-check-to-time-of-use-toctou
#[stable(feature = "path_ext", since = "1.5.0")]
#[must_use]
#[inline]
@ -3167,7 +3168,7 @@ impl Path {
/// permission is denied on one of the parent directories.
///
/// Note that while this avoids some pitfalls of the `exists()` method, it still can not
/// prevent time-of-check to time-of-use (TOCTOU) bugs. You should only use it in scenarios
/// prevent time-of-check to time-of-use ([TOCTOU]) bugs. You should only use it in scenarios
/// where those bugs are not an issue.
///
/// This is an alias for [`std::fs::exists`](crate::fs::exists).
@ -3180,6 +3181,7 @@ impl Path {
/// assert!(Path::new("/root/secret_file.txt").try_exists().is_err());
/// ```
///
/// [TOCTOU]: fs#time-of-check-to-time-of-use-toctou
/// [`exists()`]: Self::exists
#[stable(feature = "path_try_exists", since = "1.63.0")]
#[inline]

View file

@ -5,7 +5,7 @@ use crate::core::build_steps::compile::{
};
use crate::core::build_steps::tool::{COMPILETEST_ALLOW_FEATURES, SourceType, prepare_tool_cargo};
use crate::core::builder::{
self, Alias, Builder, Kind, RunConfig, ShouldRun, Step, crate_description,
self, Alias, Builder, Kind, RunConfig, ShouldRun, Step, StepMetadata, crate_description,
};
use crate::core::config::TargetSelection;
use crate::utils::build_stamp::{self, BuildStamp};
@ -167,6 +167,10 @@ impl Step for Std {
let _guard = builder.msg_check("library test/bench/example targets", target, Some(stage));
run_cargo(builder, cargo, builder.config.free_args.clone(), &stamp, vec![], true, false);
}
fn metadata(&self) -> Option<StepMetadata> {
    // Describe this step as a `check` of "std" for the configured target.
    let meta = StepMetadata::check("std", self.target);
    Some(meta)
}
}
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
@ -258,6 +262,10 @@ impl Step for Rustc {
let hostdir = builder.sysroot_target_libdir(compiler, compiler.host);
add_to_sysroot(builder, &libdir, &hostdir, &stamp);
}
fn metadata(&self) -> Option<StepMetadata> {
    // Describe this step as a `check` of "rustc" for the configured target.
    let meta = StepMetadata::check("rustc", self.target);
    Some(meta)
}
}
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
@ -315,6 +323,10 @@ impl Step for CodegenBackend {
run_cargo(builder, cargo, builder.config.free_args.clone(), &stamp, vec![], true, false);
}
fn metadata(&self) -> Option<StepMetadata> {
    // The metadata name is the codegen backend being checked.
    let meta = StepMetadata::check(self.backend, self.target);
    Some(meta)
}
}
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
@ -373,6 +385,10 @@ impl Step for RustAnalyzer {
let _guard = builder.msg_check("rust-analyzer artifacts", target, None);
run_cargo(builder, cargo, builder.config.free_args.clone(), &stamp, vec![], true, false);
}
fn metadata(&self) -> Option<StepMetadata> {
    // Describe this step as a `check` of "rust-analyzer" for the configured target.
    let meta = StepMetadata::check("rust-analyzer", self.target);
    Some(meta)
}
}
/// Compiletest is implicitly "checked" when it gets built in order to run tests,
@ -432,6 +448,10 @@ impl Step for Compiletest {
let _guard = builder.msg_check("compiletest artifacts", self.target, None);
run_cargo(builder, cargo, builder.config.free_args.clone(), &stamp, vec![], true, false);
}
fn metadata(&self) -> Option<StepMetadata> {
    // Describe this step as a `check` of "compiletest" for the configured target.
    let meta = StepMetadata::check("compiletest", self.target);
    Some(meta)
}
}
macro_rules! tool_check_step {
@ -467,6 +487,10 @@ macro_rules! tool_check_step {
let Self { target } = self;
run_tool_check_step(builder, target, stringify!($name), $path);
}
fn metadata(&self) -> Option<StepMetadata> {
    // The step name is derived from the tool's type identifier at macro-expansion time.
    let meta = StepMetadata::check(stringify!($name), self.target);
    Some(meta)
}
}
}
}

View file

@ -306,11 +306,7 @@ impl Step for Std {
}
fn metadata(&self) -> Option<StepMetadata> {
Some(
StepMetadata::build("std", self.target)
.built_by(self.compiler)
.stage(self.compiler.stage),
)
Some(StepMetadata::build("std", self.target).built_by(self.compiler))
}
}
@ -1186,11 +1182,7 @@ impl Step for Rustc {
}
fn metadata(&self) -> Option<StepMetadata> {
Some(
StepMetadata::build("rustc", self.target)
.built_by(self.build_compiler)
.stage(self.build_compiler.stage + 1),
)
Some(StepMetadata::build("rustc", self.target).built_by(self.build_compiler))
}
}

View file

@ -1108,7 +1108,9 @@ impl Step for Tidy {
if builder.config.cmd.bless() {
cmd.arg("--bless");
}
if let Some(s) = builder.config.cmd.extra_checks() {
if let Some(s) =
builder.config.cmd.extra_checks().or(builder.config.tidy_extra_checks.as_deref())
{
cmd.arg(format!("--extra-checks={s}"));
}
let mut args = std::env::args_os();

View file

@ -1195,7 +1195,6 @@ macro_rules! tool_extended {
Some(
StepMetadata::build($tool_name, self.target)
.built_by(self.compiler.with_stage(self.compiler.stage.saturating_sub(1)))
.stage(self.compiler.stage)
)
}
}

View file

@ -153,6 +153,10 @@ impl StepMetadata {
Self::new(name, target, Kind::Build)
}
pub fn check(name: &'static str, target: TargetSelection) -> Self {
Self::new(name, target, Kind::Check)
}
pub fn doc(name: &'static str, target: TargetSelection) -> Self {
Self::new(name, target, Kind::Doc)
}
@ -178,6 +182,14 @@ impl StepMetadata {
self.stage = Some(stage);
self
}
/// Returns the stage of this step, preferring an explicitly recorded stage and
/// otherwise deriving it from the compiler that builds the step's artifact.
pub fn get_stage(&self) -> Option<u32> {
    // `or_else` keeps the fallback lazy: it is only computed when `stage` is `None`,
    // unlike `or(...)`, which would evaluate the `map` unconditionally.
    self.stage.or_else(|| {
        self.built_by.map(|compiler| {
            // For std, its stage corresponds to the stage of the compiler that builds it.
            // For everything else, a stage N thing gets built by a stage N-1 compiler.
            if self.name == "std" { compiler.stage } else { compiler.stage + 1 }
        })
    })
}
}
pub struct RunConfig<'a> {

View file

@ -863,7 +863,7 @@ mod snapshot {
insta::assert_snapshot!(
ctx.config("build")
.path("opt-dist")
.render_steps(), @"[build] rustc 0 <host> -> OptimizedDist <host>");
.render_steps(), @"[build] rustc 0 <host> -> OptimizedDist 1 <host>");
}
#[test]
@ -880,7 +880,7 @@ mod snapshot {
ctx.config("build")
.path("opt-dist")
.stage(1)
.render_steps(), @"[build] rustc 0 <host> -> OptimizedDist <host>");
.render_steps(), @"[build] rustc 0 <host> -> OptimizedDist 1 <host>");
}
#[test]
@ -890,7 +890,7 @@ mod snapshot {
ctx.config("build")
.path("opt-dist")
.stage(2)
.render_steps(), @"[build] rustc 0 <host> -> OptimizedDist <host>");
.render_steps(), @"[build] rustc 0 <host> -> OptimizedDist 1 <host>");
}
#[test]
@ -984,8 +984,8 @@ mod snapshot {
ctx
.config("dist")
.render_steps(), @r"
[build] rustc 0 <host> -> UnstableBookGen <host>
[build] rustc 0 <host> -> Rustbook <host>
[build] rustc 0 <host> -> UnstableBookGen 1 <host>
[build] rustc 0 <host> -> Rustbook 1 <host>
[build] llvm <host>
[build] rustc 0 <host> -> rustc 1 <host>
[build] rustc 1 <host> -> std 1 <host>
@ -993,14 +993,14 @@ mod snapshot {
[build] rustdoc 1 <host>
[doc] std 2 <host>
[build] rustc 2 <host> -> std 2 <host>
[build] rustc 0 <host> -> LintDocs <host>
[build] rustc 0 <host> -> RustInstaller <host>
[build] rustc 0 <host> -> LintDocs 1 <host>
[build] rustc 0 <host> -> RustInstaller 1 <host>
[dist] docs <host>
[doc] std 2 <host>
[dist] mingw <host>
[build] rustc 0 <host> -> GenerateCopyright <host>
[build] rustc 0 <host> -> GenerateCopyright 1 <host>
[dist] rustc <host>
[dist] rustc 1 <host> -> std <host>
[dist] rustc 1 <host> -> std 1 <host>
[dist] src <>
"
);
@ -1014,25 +1014,25 @@ mod snapshot {
.config("dist")
.args(&["--set", "build.extended=true"])
.render_steps(), @r"
[build] rustc 0 <host> -> UnstableBookGen <host>
[build] rustc 0 <host> -> Rustbook <host>
[build] rustc 0 <host> -> UnstableBookGen 1 <host>
[build] rustc 0 <host> -> Rustbook 1 <host>
[build] llvm <host>
[build] rustc 0 <host> -> rustc 1 <host>
[build] rustc 0 <host> -> WasmComponentLd <host>
[build] rustc 0 <host> -> WasmComponentLd 1 <host>
[build] rustc 1 <host> -> std 1 <host>
[build] rustc 1 <host> -> rustc 2 <host>
[build] rustc 1 <host> -> WasmComponentLd <host>
[build] rustc 1 <host> -> WasmComponentLd 2 <host>
[build] rustdoc 1 <host>
[doc] std 2 <host>
[build] rustc 2 <host> -> std 2 <host>
[build] rustc 0 <host> -> LintDocs <host>
[build] rustc 0 <host> -> RustInstaller <host>
[build] rustc 0 <host> -> LintDocs 1 <host>
[build] rustc 0 <host> -> RustInstaller 1 <host>
[dist] docs <host>
[doc] std 2 <host>
[dist] mingw <host>
[build] rustc 0 <host> -> GenerateCopyright <host>
[build] rustc 0 <host> -> GenerateCopyright 1 <host>
[dist] rustc <host>
[dist] rustc 1 <host> -> std <host>
[dist] rustc 1 <host> -> std 1 <host>
[dist] src <>
[build] rustc 0 <host> -> rustfmt 1 <host>
[build] rustc 0 <host> -> cargo-fmt 1 <host>
@ -1052,8 +1052,8 @@ mod snapshot {
.hosts(&[&host_target()])
.targets(&[&host_target(), TEST_TRIPLE_1])
.render_steps(), @r"
[build] rustc 0 <host> -> UnstableBookGen <host>
[build] rustc 0 <host> -> Rustbook <host>
[build] rustc 0 <host> -> UnstableBookGen 1 <host>
[build] rustc 0 <host> -> Rustbook 1 <host>
[build] llvm <host>
[build] rustc 0 <host> -> rustc 1 <host>
[build] rustc 1 <host> -> std 1 <host>
@ -1062,19 +1062,19 @@ mod snapshot {
[doc] std 2 <host>
[doc] std 2 <target1>
[build] rustc 2 <host> -> std 2 <host>
[build] rustc 0 <host> -> LintDocs <host>
[build] rustc 0 <host> -> RustInstaller <host>
[build] rustc 0 <host> -> LintDocs 1 <host>
[build] rustc 0 <host> -> RustInstaller 1 <host>
[dist] docs <host>
[dist] docs <target1>
[doc] std 2 <host>
[doc] std 2 <target1>
[dist] mingw <host>
[dist] mingw <target1>
[build] rustc 0 <host> -> GenerateCopyright <host>
[build] rustc 0 <host> -> GenerateCopyright 1 <host>
[dist] rustc <host>
[dist] rustc 1 <host> -> std <host>
[dist] rustc 1 <host> -> std 1 <host>
[build] rustc 2 <host> -> std 2 <target1>
[dist] rustc 2 <host> -> std <target1>
[dist] rustc 2 <host> -> std 2 <target1>
[dist] src <>
"
);
@ -1089,8 +1089,8 @@ mod snapshot {
.hosts(&[&host_target(), TEST_TRIPLE_1])
.targets(&[&host_target()])
.render_steps(), @r"
[build] rustc 0 <host> -> UnstableBookGen <host>
[build] rustc 0 <host> -> Rustbook <host>
[build] rustc 0 <host> -> UnstableBookGen 1 <host>
[build] rustc 0 <host> -> Rustbook 1 <host>
[build] llvm <host>
[build] rustc 0 <host> -> rustc 1 <host>
[build] rustc 1 <host> -> std 1 <host>
@ -1098,20 +1098,20 @@ mod snapshot {
[build] rustdoc 1 <host>
[doc] std 2 <host>
[build] rustc 2 <host> -> std 2 <host>
[build] rustc 0 <host> -> LintDocs <host>
[build] rustc 0 <host> -> LintDocs 1 <host>
[build] rustc 1 <host> -> std 1 <target1>
[build] rustc 2 <host> -> std 2 <target1>
[build] rustc 0 <host> -> RustInstaller <host>
[build] rustc 0 <host> -> RustInstaller 1 <host>
[dist] docs <host>
[doc] std 2 <host>
[dist] mingw <host>
[build] rustc 0 <host> -> GenerateCopyright <host>
[build] rustc 0 <host> -> GenerateCopyright 1 <host>
[dist] rustc <host>
[build] llvm <target1>
[build] rustc 1 <host> -> rustc 2 <target1>
[build] rustdoc 1 <target1>
[dist] rustc <target1>
[dist] rustc 1 <host> -> std <host>
[dist] rustc 1 <host> -> std 1 <host>
[dist] src <>
"
);
@ -1126,8 +1126,8 @@ mod snapshot {
.hosts(&[&host_target(), TEST_TRIPLE_1])
.targets(&[&host_target(), TEST_TRIPLE_1])
.render_steps(), @r"
[build] rustc 0 <host> -> UnstableBookGen <host>
[build] rustc 0 <host> -> Rustbook <host>
[build] rustc 0 <host> -> UnstableBookGen 1 <host>
[build] rustc 0 <host> -> Rustbook 1 <host>
[build] llvm <host>
[build] rustc 0 <host> -> rustc 1 <host>
[build] rustc 1 <host> -> std 1 <host>
@ -1136,24 +1136,24 @@ mod snapshot {
[doc] std 2 <host>
[doc] std 2 <target1>
[build] rustc 2 <host> -> std 2 <host>
[build] rustc 0 <host> -> LintDocs <host>
[build] rustc 0 <host> -> LintDocs 1 <host>
[build] rustc 1 <host> -> std 1 <target1>
[build] rustc 2 <host> -> std 2 <target1>
[build] rustc 0 <host> -> RustInstaller <host>
[build] rustc 0 <host> -> RustInstaller 1 <host>
[dist] docs <host>
[dist] docs <target1>
[doc] std 2 <host>
[doc] std 2 <target1>
[dist] mingw <host>
[dist] mingw <target1>
[build] rustc 0 <host> -> GenerateCopyright <host>
[build] rustc 0 <host> -> GenerateCopyright 1 <host>
[dist] rustc <host>
[build] llvm <target1>
[build] rustc 1 <host> -> rustc 2 <target1>
[build] rustdoc 1 <target1>
[dist] rustc <target1>
[dist] rustc 1 <host> -> std <host>
[dist] rustc 1 <host> -> std <target1>
[dist] rustc 1 <host> -> std 1 <host>
[dist] rustc 1 <host> -> std 1 <target1>
[dist] src <>
"
);
@ -1168,8 +1168,8 @@ mod snapshot {
.hosts(&[])
.targets(&[TEST_TRIPLE_1])
.render_steps(), @r"
[build] rustc 0 <host> -> UnstableBookGen <host>
[build] rustc 0 <host> -> Rustbook <host>
[build] rustc 0 <host> -> UnstableBookGen 1 <host>
[build] rustc 0 <host> -> Rustbook 1 <host>
[build] llvm <host>
[build] rustc 0 <host> -> rustc 1 <host>
[build] rustc 1 <host> -> std 1 <host>
@ -1177,12 +1177,12 @@ mod snapshot {
[build] rustdoc 1 <host>
[doc] std 2 <target1>
[build] rustc 2 <host> -> std 2 <host>
[build] rustc 0 <host> -> RustInstaller <host>
[build] rustc 0 <host> -> RustInstaller 1 <host>
[dist] docs <target1>
[doc] std 2 <target1>
[dist] mingw <target1>
[build] rustc 2 <host> -> std 2 <target1>
[dist] rustc 2 <host> -> std <target1>
[dist] rustc 2 <host> -> std 2 <target1>
");
}
@ -1198,31 +1198,31 @@ mod snapshot {
.targets(&[TEST_TRIPLE_1])
.args(&["--set", "rust.channel=nightly", "--set", "build.extended=true"])
.render_steps(), @r"
[build] rustc 0 <host> -> UnstableBookGen <host>
[build] rustc 0 <host> -> Rustbook <host>
[build] rustc 0 <host> -> UnstableBookGen 1 <host>
[build] rustc 0 <host> -> Rustbook 1 <host>
[build] llvm <host>
[build] rustc 0 <host> -> rustc 1 <host>
[build] rustc 0 <host> -> WasmComponentLd <host>
[build] rustc 0 <host> -> WasmComponentLd 1 <host>
[build] rustc 1 <host> -> std 1 <host>
[build] rustc 1 <host> -> rustc 2 <host>
[build] rustc 1 <host> -> WasmComponentLd <host>
[build] rustc 1 <host> -> WasmComponentLd 2 <host>
[build] rustdoc 1 <host>
[doc] std 2 <target1>
[build] rustc 2 <host> -> std 2 <host>
[build] rustc 1 <host> -> std 1 <target1>
[build] rustc 2 <host> -> std 2 <target1>
[build] rustc 0 <host> -> LintDocs <host>
[build] rustc 0 <host> -> RustInstaller <host>
[build] rustc 0 <host> -> LintDocs 1 <host>
[build] rustc 0 <host> -> RustInstaller 1 <host>
[dist] docs <target1>
[doc] std 2 <target1>
[dist] mingw <target1>
[build] llvm <target1>
[build] rustc 1 <host> -> rustc 2 <target1>
[build] rustc 1 <host> -> WasmComponentLd <target1>
[build] rustc 1 <host> -> WasmComponentLd 2 <target1>
[build] rustdoc 1 <target1>
[build] rustc 0 <host> -> GenerateCopyright <host>
[build] rustc 0 <host> -> GenerateCopyright 1 <host>
[dist] rustc <target1>
[dist] rustc 1 <host> -> std <target1>
[dist] rustc 1 <host> -> std 1 <target1>
[dist] src <>
[build] rustc 0 <host> -> rustfmt 1 <target1>
[build] rustc 0 <host> -> cargo-fmt 1 <target1>
@ -1233,6 +1233,289 @@ mod snapshot {
");
}
// Snapshot of the step graph for `x check compiler` and `x check rustc`
// when no explicit `--stage` is passed.
#[test]
fn check_compiler_no_explicit_stage() {
let ctx = TestCtx::new();
insta::assert_snapshot!(
ctx.config("check")
.path("compiler")
.render_steps(), @r"
[check] std <host>
[build] llvm <host>
[check] rustc <host>
[check] cranelift <host>
[check] gcc <host>
");
insta::assert_snapshot!(
ctx.config("check")
.path("rustc")
.render_steps(), @r"
[check] std <host>
[build] llvm <host>
[check] rustc <host>
");
}
// `x check compiler --stage 0`: only exercises `run()`; no step snapshot is recorded.
#[test]
fn check_compiler_stage_0() {
let ctx = TestCtx::new();
ctx.config("check").path("compiler").stage(0).run();
}
// Snapshot of the step graph for `x check compiler --stage 1`.
#[test]
fn check_compiler_stage_1() {
let ctx = TestCtx::new();
insta::assert_snapshot!(
ctx.config("check")
.path("compiler")
.stage(1)
.render_steps(), @r"
[build] llvm <host>
[build] rustc 0 <host> -> rustc 1 <host>
[build] rustc 1 <host> -> std 1 <host>
[check] rustc <host>
[check] cranelift <host>
[check] gcc <host>
");
}
// Snapshot of the step graph for `x check compiler --stage 2`.
#[test]
fn check_compiler_stage_2() {
let ctx = TestCtx::new();
insta::assert_snapshot!(
ctx.config("check")
.path("compiler")
.stage(2)
.render_steps(), @r"
[build] llvm <host>
[build] rustc 0 <host> -> rustc 1 <host>
[build] rustc 1 <host> -> std 1 <host>
[build] rustc 1 <host> -> rustc 2 <host>
[build] rustc 2 <host> -> std 2 <host>
[check] rustc <host>
[check] cranelift <host>
[check] gcc <host>
");
}
// Snapshot of a stage-2 check cross-compiled for a non-host target
// (host and target both set to TEST_TRIPLE_1).
#[test]
fn check_cross_compile() {
let ctx = TestCtx::new();
insta::assert_snapshot!(
ctx.config("check")
.stage(2)
.targets(&[TEST_TRIPLE_1])
.hosts(&[TEST_TRIPLE_1])
.render_steps(), @r"
[build] llvm <host>
[build] rustc 0 <host> -> rustc 1 <host>
[build] rustc 1 <host> -> std 1 <host>
[build] rustc 1 <host> -> rustc 2 <host>
[build] rustc 2 <host> -> std 2 <host>
[build] rustc 1 <host> -> std 1 <target1>
[build] rustc 2 <host> -> std 2 <target1>
[check] rustc <target1>
[check] Rustdoc <target1>
[check] cranelift <target1>
[check] gcc <target1>
[check] Clippy <target1>
[check] Miri <target1>
[check] CargoMiri <target1>
[check] MiroptTestTools <target1>
[check] Rustfmt <target1>
[check] rust-analyzer <target1>
[check] TestFloatParse <target1>
[check] FeaturesStatusDump <target1>
[check] std <target1>
");
}
// Snapshot of the step graph for `x check library` with no explicit `--stage`.
#[test]
fn check_library_no_explicit_stage() {
let ctx = TestCtx::new();
insta::assert_snapshot!(
ctx.config("check")
.path("library")
.render_steps(), @r"
[build] llvm <host>
[build] rustc 0 <host> -> rustc 1 <host>
[check] std <host>
");
}
// `x check library --stage 0`: only exercises `run()`; no step snapshot is recorded.
#[test]
fn check_library_stage_0() {
let ctx = TestCtx::new();
ctx.config("check").path("library").stage(0).run();
}
// Snapshot of the step graph for `x check library --stage 1`.
#[test]
fn check_library_stage_1() {
let ctx = TestCtx::new();
insta::assert_snapshot!(
ctx.config("check")
.path("library")
.stage(1)
.render_steps(), @r"
[build] llvm <host>
[build] rustc 0 <host> -> rustc 1 <host>
[check] std <host>
");
}
// Snapshot of the step graph for `x check library --stage 2`.
#[test]
fn check_library_stage_2() {
let ctx = TestCtx::new();
insta::assert_snapshot!(
ctx.config("check")
.path("library")
.stage(2)
.render_steps(), @r"
[build] llvm <host>
[build] rustc 0 <host> -> rustc 1 <host>
[build] rustc 1 <host> -> std 1 <host>
[build] rustc 1 <host> -> rustc 2 <host>
[check] std <host>
");
}
// Snapshot of checking core/alloc/std for two non-host targets at once.
#[test]
fn check_library_cross_compile() {
let ctx = TestCtx::new();
insta::assert_snapshot!(
ctx.config("check")
.paths(&["core", "alloc", "std"])
.targets(&[TEST_TRIPLE_1, TEST_TRIPLE_2])
.render_steps(), @r"
[build] llvm <host>
[build] rustc 0 <host> -> rustc 1 <host>
[check] std <target1>
[check] std <target2>
");
}
// Snapshot of the step graph for `x check miri` with no explicit `--stage`.
#[test]
fn check_miri_no_explicit_stage() {
let ctx = TestCtx::new();
insta::assert_snapshot!(
ctx.config("check")
.path("miri")
.render_steps(), @r"
[check] std <host>
[build] llvm <host>
[check] rustc <host>
[check] Miri <host>
");
}
// `x check miri --stage 0`: only exercises `run()`; no step snapshot is recorded.
#[test]
fn check_miri_stage_0() {
let ctx = TestCtx::new();
ctx.config("check").path("miri").stage(0).run();
}
// Snapshot of the step graph for `x check miri --stage 1`.
#[test]
fn check_miri_stage_1() {
let ctx = TestCtx::new();
insta::assert_snapshot!(
ctx.config("check")
.path("miri")
.stage(1)
.render_steps(), @r"
[build] llvm <host>
[build] rustc 0 <host> -> rustc 1 <host>
[build] rustc 1 <host> -> std 1 <host>
[check] rustc <host>
[check] Miri <host>
");
}
// Snapshot of the step graph for `x check miri --stage 2`.
#[test]
fn check_miri_stage_2() {
let ctx = TestCtx::new();
insta::assert_snapshot!(
ctx.config("check")
.path("miri")
.stage(2)
.render_steps(), @r"
[build] llvm <host>
[build] rustc 0 <host> -> rustc 1 <host>
[build] rustc 1 <host> -> std 1 <host>
[build] rustc 1 <host> -> rustc 2 <host>
[build] rustc 2 <host> -> std 2 <host>
[check] rustc <host>
[check] Miri <host>
");
}
// Default `x check compiletest`: a single check step, no rustc/std prerequisites.
#[test]
fn check_compiletest() {
let ctx = TestCtx::new();
insta::assert_snapshot!(
ctx.config("check")
.path("compiletest")
.render_steps(), @"[check] compiletest <host>");
}
// With `build.compiletest-use-stage0-libtest=false`, checking compiletest
// additionally requires checking std and rustc first.
#[test]
fn check_compiletest_stage1_libtest() {
let ctx = TestCtx::new();
insta::assert_snapshot!(
ctx.config("check")
.path("compiletest")
.args(&["--set", "build.compiletest-use-stage0-libtest=false"])
.render_steps(), @r"
[check] std <host>
[build] llvm <host>
[check] rustc <host>
[check] compiletest <host>
");
}
// Snapshot of the step graph for `x check rustc_codegen_cranelift`.
#[test]
fn check_codegen() {
let ctx = TestCtx::new();
insta::assert_snapshot!(
ctx.config("check")
.path("rustc_codegen_cranelift")
.render_steps(), @r"
[check] std <host>
[build] llvm <host>
[check] rustc <host>
[check] cranelift <host>
[check] gcc <host>
");
}
// Snapshot of the step graph for `x check rust-analyzer`.
#[test]
fn check_rust_analyzer() {
let ctx = TestCtx::new();
insta::assert_snapshot!(
ctx.config("check")
.path("rust-analyzer")
.render_steps(), @r"
[check] std <host>
[build] llvm <host>
[check] rustc <host>
[check] rust-analyzer <host>
");
}
// Snapshot of the step graph for checking a bootstrap tool (run-make-support).
#[test]
fn check_bootstrap_tool() {
let ctx = TestCtx::new();
insta::assert_snapshot!(
ctx.config("check")
.path("run-make-support")
.render_steps(), @r"
[check] std <host>
[build] llvm <host>
[check] rustc <host>
[check] RunMakeSupport <host>
");
}
#[test]
fn test_exclude() {
let ctx = TestCtx::new();
@ -1384,7 +1667,7 @@ fn render_metadata(metadata: &StepMetadata) -> String {
if let Some(compiler) = metadata.built_by {
write!(record, "{} -> ", render_compiler(compiler));
}
let stage = if let Some(stage) = metadata.stage { format!("{stage} ") } else { "".to_string() };
let stage = metadata.get_stage().map(|stage| format!("{stage} ")).unwrap_or_default();
write!(record, "{} {stage}<{}>", metadata.name, normalize_target(metadata.target));
record
}

View file

@ -297,7 +297,8 @@ pub struct Config {
/// Whether to use the precompiled stage0 libtest with compiletest.
pub compiletest_use_stage0_libtest: bool,
/// Default value for `--extra-checks`
pub tidy_extra_checks: Option<String>,
pub is_running_on_ci: bool,
/// Cache for determining path modifications
@ -744,6 +745,7 @@ impl Config {
jobs,
compiletest_diff_tool,
compiletest_use_stage0_libtest,
tidy_extra_checks,
mut ccache,
exclude,
} = toml.build.unwrap_or_default();
@ -1010,6 +1012,7 @@ impl Config {
optimized_compiler_builtins.unwrap_or(config.channel != "dev");
config.compiletest_diff_tool = compiletest_diff_tool;
config.compiletest_use_stage0_libtest = compiletest_use_stage0_libtest.unwrap_or(true);
config.tidy_extra_checks = tidy_extra_checks;
let download_rustc = config.download_rustc_commit.is_some();
config.explicit_stage_from_cli = flags_stage.is_some();

View file

@ -69,6 +69,7 @@ define_config! {
jobs: Option<u32> = "jobs",
compiletest_diff_tool: Option<String> = "compiletest-diff-tool",
compiletest_use_stage0_libtest: Option<bool> = "compiletest-use-stage0-libtest",
tidy_extra_checks: Option<String> = "tidy-extra-checks",
ccache: Option<StringOrBool> = "ccache",
exclude: Option<Vec<PathBuf>> = "exclude",
}

View file

@ -393,6 +393,27 @@ pub fn check_incompatible_options_for_ci_rustc(
Ok(())
}
pub(crate) const VALID_CODEGEN_BACKENDS: &[&str] = &["llvm", "cranelift", "gcc"];
/// Validates user-supplied codegen backend names for the given config `section`.
///
/// A name carrying the internal `rustc_codegen_` prefix is rejected with a panic;
/// a name outside the known-good list only prints a HELP hint. The input list is
/// returned unchanged.
pub(crate) fn validate_codegen_backends(backends: Vec<String>, section: &str) -> Vec<String> {
    for backend in backends.iter() {
        // Hard error: backends must be named without the crate prefix.
        if let Some(stripped) = backend.strip_prefix(CODEGEN_BACKEND_PREFIX) {
            panic!(
                "Invalid value '{backend}' for '{section}.codegen-backends'. \
                 Codegen backends are defined without the '{CODEGEN_BACKEND_PREFIX}' prefix. \
                 Please, use '{stripped}' instead."
            )
        }
        // Soft warning: unknown names are allowed but flagged.
        let is_known = VALID_CODEGEN_BACKENDS.iter().any(|known| *known == backend.as_str());
        if !is_known {
            println!(
                "HELP: '{backend}' for '{section}.codegen-backends' might fail. \
                 List of known good values: {VALID_CODEGEN_BACKENDS:?}"
            );
        }
    }
    backends
}
impl Config {
pub fn apply_rust_config(
&mut self,
@ -571,24 +592,10 @@ impl Config {
set(&mut self.ehcont_guard, ehcont_guard);
self.llvm_libunwind_default =
llvm_libunwind.map(|v| v.parse().expect("failed to parse rust.llvm-libunwind"));
if let Some(ref backends) = codegen_backends {
let available_backends = ["llvm", "cranelift", "gcc"];
self.rust_codegen_backends = backends.iter().map(|s| {
if let Some(backend) = s.strip_prefix(CODEGEN_BACKEND_PREFIX) {
if available_backends.contains(&backend) {
panic!("Invalid value '{s}' for 'rust.codegen-backends'. Instead, please use '{backend}'.");
} else {
println!("HELP: '{s}' for 'rust.codegen-backends' might fail. \
Codegen backends are mostly defined without the '{CODEGEN_BACKEND_PREFIX}' prefix. \
In this case, it would be referred to as '{backend}'.");
}
}
s.clone()
}).collect();
}
set(
&mut self.rust_codegen_backends,
codegen_backends.map(|backends| validate_codegen_backends(backends, "rust")),
);
self.rust_codegen_units = codegen_units.map(threads_from_config);
self.rust_codegen_units_std = codegen_units_std.map(threads_from_config);

View file

@ -16,7 +16,7 @@ use std::collections::HashMap;
use serde::{Deserialize, Deserializer};
use crate::core::build_steps::compile::CODEGEN_BACKEND_PREFIX;
use crate::core::config::toml::rust::validate_codegen_backends;
use crate::core::config::{LlvmLibunwind, Merge, ReplaceOpt, SplitDebuginfo, StringOrBool};
use crate::{Config, HashSet, PathBuf, TargetSelection, define_config, exit};
@ -142,23 +142,9 @@ impl Config {
target.rpath = cfg.rpath;
target.optimized_compiler_builtins = cfg.optimized_compiler_builtins;
target.jemalloc = cfg.jemalloc;
if let Some(ref backends) = cfg.codegen_backends {
let available_backends = ["llvm", "cranelift", "gcc"];
target.codegen_backends = Some(backends.iter().map(|s| {
if let Some(backend) = s.strip_prefix(CODEGEN_BACKEND_PREFIX) {
if available_backends.contains(&backend) {
panic!("Invalid value '{s}' for 'target.{triple}.codegen-backends'. Instead, please use '{backend}'.");
} else {
println!("HELP: '{s}' for 'target.{triple}.codegen-backends' might fail. \
Codegen backends are mostly defined without the '{CODEGEN_BACKEND_PREFIX}' prefix. \
In this case, it would be referred to as '{backend}'.");
}
}
s.clone()
}).collect());
if let Some(backends) = cfg.codegen_backends {
target.codegen_backends =
Some(validate_codegen_backends(backends, &format!("target.{triple}")))
}
target.split_debuginfo = cfg.split_debuginfo.as_ref().map(|v| {

View file

@ -1,12 +1,13 @@
use std::path::PathBuf;
use crate::utils::cache::{INTERNER, Internable, TyIntern};
use crate::utils::cache::{INTERNER, Internable, Interner, TyIntern};
#[test]
fn test_string_interning() {
let s1 = INTERNER.intern_str("Hello");
let s2 = INTERNER.intern_str("Hello");
let s3 = INTERNER.intern_str("world");
let interner = Interner::default();
let s1 = interner.intern_str("Hello");
let s2 = interner.intern_str("Hello");
let s3 = interner.intern_str("world");
assert_eq!(s1, s2, "Same strings should be interned to the same instance");
assert_ne!(s1, s3, "Different strings should have different interned values");
@ -14,6 +15,8 @@ fn test_string_interning() {
#[test]
fn test_interned_equality() {
// Because we compare with &str, and the Deref impl accesses the global
// INTERNER variable, we cannot use a local Interner variable here.
let s1 = INTERNER.intern_str("test");
let s2 = INTERNER.intern_str("test");

View file

@ -441,4 +441,9 @@ pub const CONFIG_CHANGE_HISTORY: &[ChangeInfo] = &[
severity: ChangeSeverity::Warning,
summary: "`llvm.lld` is no longer enabled by default for the dist profile.",
},
ChangeInfo {
change_id: 143251,
severity: ChangeSeverity::Info,
summary: "Added new option `build.tidy-extra-checks` to specify a default value for the --extra-checks cli flag.",
},
];

View file

@ -66,6 +66,8 @@ pub struct JobDatabase {
pub try_jobs: Vec<Job>,
#[serde(rename = "auto")]
pub auto_jobs: Vec<Job>,
#[serde(rename = "optional")]
pub optional_jobs: Vec<Job>,
/// Shared environments for the individual run types.
envs: JobEnvironments,
@ -75,9 +77,10 @@ impl JobDatabase {
/// Find `auto` jobs that correspond to the passed `pattern`.
/// Patterns are matched using the glob syntax.
/// For example `dist-*` matches all jobs starting with `dist-`.
fn find_auto_jobs_by_pattern(&self, pattern: &str) -> Vec<Job> {
fn find_auto_or_optional_jobs_by_pattern(&self, pattern: &str) -> Vec<Job> {
self.auto_jobs
.iter()
.chain(self.optional_jobs.iter())
.filter(|j| glob_match::glob_match(pattern, &j.name))
.cloned()
.collect()
@ -181,7 +184,7 @@ fn calculate_jobs(
let mut jobs: Vec<Job> = vec![];
let mut unknown_patterns = vec![];
for pattern in patterns {
let matched_jobs = db.find_auto_jobs_by_pattern(pattern);
let matched_jobs = db.find_auto_or_optional_jobs_by_pattern(pattern);
if matched_jobs.is_empty() {
unknown_patterns.push(pattern.clone());
} else {

View file

@ -46,6 +46,13 @@ auto:
- name: test-msvc-i686-2
os: ubuntu
env: {}
optional:
- name: optional-job-1
os: ubuntu
env: {}
- name: optional-dist-x86_64
os: ubuntu
env: {}
"#,
)
.unwrap();
@ -57,12 +64,18 @@ auto:
"*i686*",
&["test-i686", "dist-i686", "test-msvc-i686-1", "test-msvc-i686-2"],
);
// Test that optional jobs are found
check_pattern(&db, "optional-*", &["optional-job-1", "optional-dist-x86_64"]);
check_pattern(&db, "*optional*", &["optional-job-1", "optional-dist-x86_64"]);
}
#[track_caller]
fn check_pattern(db: &JobDatabase, pattern: &str, expected: &[&str]) {
let jobs =
db.find_auto_jobs_by_pattern(pattern).into_iter().map(|j| j.name).collect::<Vec<_>>();
let jobs = db
.find_auto_or_optional_jobs_by_pattern(pattern)
.into_iter()
.map(|j| j.name)
.collect::<Vec<_>>();
assert_eq!(jobs, expected);
}
@ -116,8 +129,13 @@ fn validate_jobs() {
load_job_db(&db_str).expect("Failed to load job database")
};
let all_jobs =
db.pr_jobs.iter().chain(db.try_jobs.iter()).chain(db.auto_jobs.iter()).collect::<Vec<_>>();
let all_jobs = db
.pr_jobs
.iter()
.chain(db.try_jobs.iter())
.chain(db.auto_jobs.iter())
.chain(db.optional_jobs.iter())
.collect::<Vec<_>>();
let errors: Vec<anyhow::Error> =
all_jobs.into_iter().filter_map(|job| validate_codebuild_image(job).err()).collect();

View file

@ -139,3 +139,8 @@ auto:
DIST_REQUIRE_ALL_TOOLS: 1
CODEGEN_BACKENDS: llvm,cranelift
<<: *job-windows
# Jobs that only run when explicitly invoked via `@bors try`.
optional:
- name: test-optional-job
<<: *job-linux-4c

View file

@ -26,6 +26,5 @@ ENV RUST_CONFIGURE_ARGS \
--enable-sanitizers \
--enable-profiler \
--enable-compiler-docs
# FIXME: Skipping cargo panic_abort_doc_tests due to https://github.com/rust-lang/rust/issues/123733
ENV SCRIPT python3 ../x.py --stage 2 test && \
python3 ../x.py --stage 2 test src/tools/cargo --test-args \"--skip panic_abort_doc_tests\"
python3 ../x.py --stage 2 test src/tools/cargo

View file

@ -160,6 +160,17 @@ pr:
try:
- <<: *job-dist-x86_64-linux
# Jobs that only run when explicitly invoked in one of the following ways:
# - comment `@bors2 try jobs=<job-name>`
# - `try-job: <job-name>` in the PR description and comment `@bors try` or `@bors2 try`.
optional:
# This job is used just to test optional jobs.
# It will be replaced by tier 2 and tier 3 jobs in the future.
- name: optional-mingw-check-1
env:
IMAGE: mingw-check-1
<<: *job-linux-4c
# Main CI jobs that have to be green to merge a commit into master
# These jobs automatically inherit envs.auto, to avoid repeating
# it in each job definition.

View file

@ -8,6 +8,8 @@
3. Copy the filenames with updated suffixes from the directory.
*/
/* ignore-tidy-filelength */
:root {
--nav-sub-mobile-padding: 8px;
--search-typename-width: 6.75rem;
@ -915,32 +917,30 @@ ul.block, .block li, .block ul {
overflow: auto;
}
.example-wrap.digits-1:not(.hide-lines) [data-nosnippet] {
width: calc(1ch + var(--line-number-padding) * 2);
.example-wrap code {
position: relative;
}
.example-wrap.digits-2:not(.hide-lines) [data-nosnippet] {
width: calc(2ch + var(--line-number-padding) * 2);
.example-wrap pre code span {
display: inline;
}
.example-wrap.digits-3:not(.hide-lines) [data-nosnippet] {
width: calc(3ch + var(--line-number-padding) * 2);
.example-wrap.digits-1 { --example-wrap-digits-count: 1ch; }
.example-wrap.digits-2 { --example-wrap-digits-count: 2ch; }
.example-wrap.digits-3 { --example-wrap-digits-count: 3ch; }
.example-wrap.digits-4 { --example-wrap-digits-count: 4ch; }
.example-wrap.digits-5 { --example-wrap-digits-count: 5ch; }
.example-wrap.digits-6 { --example-wrap-digits-count: 6ch; }
.example-wrap.digits-7 { --example-wrap-digits-count: 7ch; }
.example-wrap.digits-8 { --example-wrap-digits-count: 8ch; }
.example-wrap.digits-9 { --example-wrap-digits-count: 9ch; }
.example-wrap [data-nosnippet] {
width: calc(var(--example-wrap-digits-count) + var(--line-number-padding) * 2);
}
.example-wrap.digits-4:not(.hide-lines) [data-nosnippet] {
width: calc(4ch + var(--line-number-padding) * 2);
}
.example-wrap.digits-5:not(.hide-lines) [data-nosnippet] {
width: calc(5ch + var(--line-number-padding) * 2);
}
.example-wrap.digits-6:not(.hide-lines) [data-nosnippet] {
width: calc(6ch + var(--line-number-padding) * 2);
}
.example-wrap.digits-7:not(.hide-lines) [data-nosnippet] {
width: calc(7ch + var(--line-number-padding) * 2);
}
.example-wrap.digits-8:not(.hide-lines) [data-nosnippet] {
width: calc(8ch + var(--line-number-padding) * 2);
}
.example-wrap.digits-9:not(.hide-lines) [data-nosnippet] {
width: calc(9ch + var(--line-number-padding) * 2);
.example-wrap pre > code {
padding-left: calc(
var(--example-wrap-digits-count) + var(--line-number-padding) * 2
+ var(--line-number-right-margin));
}
.example-wrap [data-nosnippet] {
@ -953,63 +953,25 @@ ul.block, .block li, .block ul {
-ms-user-select: none;
user-select: none;
padding: 0 var(--line-number-padding);
}
.example-wrap [data-nosnippet]:target {
border-right: none;
position: absolute;
left: 0;
}
.example-wrap .line-highlighted[data-nosnippet] {
background-color: var(--src-line-number-highlighted-background-color);
}
:root.word-wrap-source-code .example-wrap [data-nosnippet] {
position: absolute;
left: 0;
}
.word-wrap-source-code .example-wrap pre > code {
.example-wrap pre > code {
position: relative;
word-break: break-all;
display: block;
}
:root.word-wrap-source-code .example-wrap pre > code {
display: block;
word-break: break-all;
white-space: pre-wrap;
}
:root.word-wrap-source-code .example-wrap pre > code * {
word-break: break-all;
}
:root.word-wrap-source-code .example-wrap.digits-1 pre > code {
padding-left: calc(
1ch + var(--line-number-padding) * 2 + var(--line-number-right-margin));
}
:root.word-wrap-source-code .example-wrap.digits-2 pre > code {
padding-left: calc(
2ch + var(--line-number-padding) * 2 + var(--line-number-right-margin));
}
:root.word-wrap-source-code .example-wrap.digits-3 pre > code {
padding-left: calc(
3ch + var(--line-number-padding) * 2 + var(--line-number-right-margin));
}
:root.word-wrap-source-code .example-wrap.digits-4 pre > code {
padding-left: calc(
4ch + var(--line-number-padding) * 2 + var(--line-number-right-margin));
}
:root.word-wrap-source-code .example-wrap.digits-5 pre > code {
padding-left: calc(
5ch + var(--line-number-padding) * 2 + var(--line-number-right-margin));
}
:root.word-wrap-source-code .example-wrap.digits-6 pre > code {
padding-left: calc(
6ch + var(--line-number-padding) * 2 + var(--line-number-right-margin));
}
:root.word-wrap-source-code .example-wrap.digits-7 pre > code {
padding-left: calc(
7ch + var(--line-number-padding) * 2 + var(--line-number-right-margin));
}
:root.word-wrap-source-code .example-wrap.digits-8 pre > code {
padding-left: calc(
8ch + var(--line-number-padding) * 2 + var(--line-number-right-margin));
}
:root.word-wrap-source-code .example-wrap.digits-9 pre > code {
padding-left: calc(
9ch + var(--line-number-padding) * 2 + var(--line-number-right-margin));
.example-wrap [data-nosnippet]:target {
border-right: none;
}
.example-wrap.hide-lines [data-nosnippet] {
display: none;

View file

@ -886,13 +886,13 @@ pub fn eq_generic_param(l: &GenericParam, r: &GenericParam) -> bool {
(
Const {
ty: lt,
kw_span: _,
default: ld,
span: _,
},
Const {
ty: rt,
kw_span: _,
default: rd,
span: _,
},
) => eq_ty(lt, rt) && both(ld.as_ref(), rd.as_ref(), eq_anon_const),
_ => false,

View file

@ -98,6 +98,7 @@ fn validate_diag(diag: &Diag<'_, impl EmissionGuarantee>) {
/// 17 | std::mem::forget(seven);
/// | ^^^^^^^^^^^^^^^^^^^^^^^
/// ```
#[track_caller]
pub fn span_lint<T: LintContext>(cx: &T, lint: &'static Lint, sp: impl Into<MultiSpan>, msg: impl Into<DiagMessage>) {
#[expect(clippy::disallowed_methods)]
cx.span_lint(lint, sp, |diag| {
@ -143,6 +144,7 @@ pub fn span_lint<T: LintContext>(cx: &T, lint: &'static Lint, sp: impl Into<Mult
/// |
/// = help: consider using `f64::NAN` if you would like a constant representing NaN
/// ```
#[track_caller]
pub fn span_lint_and_help<T: LintContext>(
cx: &T,
lint: &'static Lint,
@ -203,6 +205,7 @@ pub fn span_lint_and_help<T: LintContext>(
/// 10 | forget(&SomeStruct);
/// | ^^^^^^^^^^^
/// ```
#[track_caller]
pub fn span_lint_and_note<T: LintContext>(
cx: &T,
lint: &'static Lint,
@ -244,6 +247,7 @@ pub fn span_lint_and_note<T: LintContext>(
/// If you're unsure which function you should use, you can test if the `#[expect]` attribute works
/// where you would expect it to.
/// If it doesn't, you likely need to use [`span_lint_hir_and_then`] instead.
#[track_caller]
pub fn span_lint_and_then<C, S, M, F>(cx: &C, lint: &'static Lint, sp: S, msg: M, f: F)
where
C: LintContext,
@ -286,6 +290,7 @@ where
/// Instead, use this function and also pass the `HirId` of `<expr_1>`, which will let
/// the compiler check lint level attributes at the place of the expression and
/// the `#[allow]` will work.
#[track_caller]
pub fn span_lint_hir(cx: &LateContext<'_>, lint: &'static Lint, hir_id: HirId, sp: Span, msg: impl Into<DiagMessage>) {
#[expect(clippy::disallowed_methods)]
cx.tcx.node_span_lint(lint, hir_id, sp, |diag| {
@ -321,6 +326,7 @@ pub fn span_lint_hir(cx: &LateContext<'_>, lint: &'static Lint, hir_id: HirId, s
/// Instead, use this function and also pass the `HirId` of `<expr_1>`, which will let
/// the compiler check lint level attributes at the place of the expression and
/// the `#[allow]` will work.
#[track_caller]
pub fn span_lint_hir_and_then(
cx: &LateContext<'_>,
lint: &'static Lint,
@ -374,6 +380,7 @@ pub fn span_lint_hir_and_then(
/// = note: `-D fold-any` implied by `-D warnings`
/// ```
#[cfg_attr(not(debug_assertions), expect(clippy::collapsible_span_lint_calls))]
#[track_caller]
pub fn span_lint_and_sugg<T: LintContext>(
cx: &T,
lint: &'static Lint,

View file

@ -0,0 +1,22 @@
//@compile-flags: -Z track-diagnostics
//@no-rustfix
// Normalize the emitted location so this doesn't need
// updating everytime someone adds or removes a line.
//@normalize-stderr-test: ".rs:\d+:\d+" -> ".rs:LL:CC"
#![warn(clippy::let_and_return, clippy::unnecessary_cast)]
fn main() {
// Check the provenance of a lint sent through `LintContext::span_lint()`
let a = 3u32;
let b = a as u32;
//~^ unnecessary_cast
// Check the provenance of a lint sent through `TyCtxt::node_span_lint()`
let c = {
let d = 42;
d
//~^ let_and_return
};
}

View file

@ -0,0 +1,29 @@
error: casting to the same type is unnecessary (`u32` -> `u32`)
--> tests/ui/track-diagnostics-clippy.rs:LL:CC
|
LL | let b = a as u32;
| ^^^^^^^^ help: try: `a`
-Ztrack-diagnostics: created at src/tools/clippy/clippy_lints/src/casts/unnecessary_cast.rs:LL:CC
|
= note: `-D clippy::unnecessary-cast` implied by `-D warnings`
= help: to override `-D warnings` add `#[allow(clippy::unnecessary_cast)]`
error: returning the result of a `let` binding from a block
--> tests/ui/track-diagnostics-clippy.rs:LL:CC
|
LL | let d = 42;
| ----------- unnecessary `let` binding
LL | d
| ^
-Ztrack-diagnostics: created at src/tools/clippy/clippy_lints/src/returns.rs:LL:CC
|
= note: `-D clippy::let-and-return` implied by `-D warnings`
= help: to override `-D warnings` add `#[allow(clippy::let_and_return)]`
help: return the expression directly
|
LL ~
LL ~ 42
|
error: aborting due to 2 previous errors

View file

@ -1,6 +1,6 @@
/// This was originally generated by collecting directives from ui tests and then extracting their
/// directive names. This is **not** an exhaustive list of all possible directives. Instead, this is
/// a best-effort approximation for diagnostics. Add new headers to this list when needed.
/// a best-effort approximation for diagnostics. Add new directives to this list when needed.
const KNOWN_DIRECTIVE_NAMES: &[&str] = &[
// tidy-alphabetical-start
"add-core-stubs",

View file

@ -11,10 +11,10 @@ use tracing::*;
use crate::common::{Config, Debugger, FailMode, Mode, PassMode};
use crate::debuggers::{extract_cdb_version, extract_gdb_version};
use crate::directives::auxiliary::{AuxProps, parse_and_update_aux};
use crate::directives::needs::CachedNeedsConditions;
use crate::errors::ErrorKind;
use crate::executor::{CollectedTestDesc, ShouldPanic};
use crate::header::auxiliary::{AuxProps, parse_and_update_aux};
use crate::header::needs::CachedNeedsConditions;
use crate::help;
use crate::util::static_regex;
@ -24,11 +24,11 @@ mod needs;
#[cfg(test)]
mod tests;
pub struct HeadersCache {
pub struct DirectivesCache {
needs: CachedNeedsConditions,
}
impl HeadersCache {
impl DirectivesCache {
pub fn load(config: &Config) -> Self {
Self { needs: CachedNeedsConditions::load(config) }
}
@ -54,7 +54,7 @@ impl EarlyProps {
pub fn from_reader<R: Read>(config: &Config, testfile: &Utf8Path, rdr: R) -> Self {
let mut props = EarlyProps::default();
let mut poisoned = false;
iter_header(
iter_directives(
config.mode,
&config.suite,
&mut poisoned,
@ -138,12 +138,12 @@ pub struct TestProps {
pub incremental_dir: Option<Utf8PathBuf>,
// If `true`, this test will use incremental compilation.
//
// This can be set manually with the `incremental` header, or implicitly
// This can be set manually with the `incremental` directive, or implicitly
// by being a part of an incremental mode test. Using the `incremental`
// header should be avoided if possible; using an incremental mode test is
// directive should be avoided if possible; using an incremental mode test is
// preferred. Incremental mode tests support multiple passes, which can
// verify that the incremental cache can be loaded properly after being
// created. Just setting the header will only verify the behavior with
// created. Just setting the directive will only verify the behavior with
// creating an incremental cache, but doesn't check that it is created
// correctly.
//
@ -347,7 +347,7 @@ impl TestProps {
let mut poisoned = false;
iter_header(
iter_directives(
config.mode,
&config.suite,
&mut poisoned,
@ -642,11 +642,11 @@ impl TestProps {
let check_ui = |mode: &str| {
// Mode::Crashes may need build-fail in order to trigger llvm errors or stack overflows
if config.mode != Mode::Ui && config.mode != Mode::Crashes {
panic!("`{}-fail` header is only supported in UI tests", mode);
panic!("`{}-fail` directive is only supported in UI tests", mode);
}
};
if config.mode == Mode::Ui && config.parse_name_directive(ln, "compile-fail") {
panic!("`compile-fail` header is useless in UI tests");
panic!("`compile-fail` directive is useless in UI tests");
}
let fail_mode = if config.parse_name_directive(ln, "check-fail") {
check_ui("check");
@ -662,7 +662,7 @@ impl TestProps {
};
match (self.fail_mode, fail_mode) {
(None, Some(_)) => self.fail_mode = fail_mode,
(Some(_), Some(_)) => panic!("multiple `*-fail` headers in a single test"),
(Some(_), Some(_)) => panic!("multiple `*-fail` directives in a single test"),
(_, None) => {}
}
}
@ -674,10 +674,10 @@ impl TestProps {
(Mode::Codegen, "build-pass") => (),
(Mode::Incremental, _) => {
if revision.is_some() && !self.revisions.iter().all(|r| r.starts_with("cfail")) {
panic!("`{s}` header is only supported in `cfail` incremental tests")
panic!("`{s}` directive is only supported in `cfail` incremental tests")
}
}
(mode, _) => panic!("`{s}` header is not supported in `{mode}` tests"),
(mode, _) => panic!("`{s}` directive is not supported in `{mode}` tests"),
};
let pass_mode = if config.parse_name_directive(ln, "check-pass") {
check_no_run("check-pass");
@ -693,7 +693,7 @@ impl TestProps {
};
match (self.pass_mode, pass_mode) {
(None, Some(_)) => self.pass_mode = pass_mode,
(Some(_), Some(_)) => panic!("multiple `*-pass` headers in a single test"),
(Some(_), Some(_)) => panic!("multiple `*-pass` directives in a single test"),
(_, None) => {}
}
}
@ -794,7 +794,7 @@ const KNOWN_JSONDOCCK_DIRECTIVE_NAMES: &[&str] =
&["count", "!count", "has", "!has", "is", "!is", "ismany", "!ismany", "set", "!set"];
/// The (partly) broken-down contents of a line containing a test directive,
/// which [`iter_header`] passes to its callback function.
/// which [`iter_directives`] passes to its callback function.
///
/// For example:
///
@ -867,7 +867,7 @@ pub(crate) fn check_directive<'a>(
const COMPILETEST_DIRECTIVE_PREFIX: &str = "//@";
fn iter_header(
fn iter_directives(
mode: Mode,
_suite: &str,
poisoned: &mut bool,
@ -1163,8 +1163,7 @@ enum NormalizeKind {
Stderr64bit,
}
/// Parses the regex and replacement values of a `//@ normalize-*` header,
/// in the format:
/// Parses the regex and replacement values of a `//@ normalize-*` directive, in the format:
/// ```text
/// "REGEX" -> "REPLACEMENT"
/// ```
@ -1373,7 +1372,7 @@ where
pub(crate) fn make_test_description<R: Read>(
config: &Config,
cache: &HeadersCache,
cache: &DirectivesCache,
name: String,
path: &Utf8Path,
src: R,
@ -1387,7 +1386,7 @@ pub(crate) fn make_test_description<R: Read>(
let mut local_poisoned = false;
// Scan through the test file to handle `ignore-*`, `only-*`, and `needs-*` directives.
iter_header(
iter_directives(
config.mode,
&config.suite,
&mut local_poisoned,

View file

@ -3,8 +3,8 @@
use std::iter;
use super::directives::{AUX_BIN, AUX_BUILD, AUX_CODEGEN_BACKEND, AUX_CRATE, PROC_MACRO};
use crate::common::Config;
use crate::header::directives::{AUX_BIN, AUX_BUILD, AUX_CODEGEN_BACKEND, AUX_CRATE, PROC_MACRO};
/// Properties parsed from `aux-*` test directives.
#[derive(Clone, Debug, Default)]

Some files were not shown because too many files have changed in this diff Show more