Auto merge of #142574 - Kobzol:rollup-ldj386u, r=Kobzol

Rollup of 12 pull requests

Successful merges:

 - rust-lang/rust#141639 (Expose discriminant values in stable_mir)
 - rust-lang/rust#142082 (Refactor `rustc_attr_data_structures` documentation)
 - rust-lang/rust#142125 (Stabilize "file_lock" feature)
 - rust-lang/rust#142236 (Add documentation for `PathBuf`'s `FromIterator` and `Extend` impls)
 - rust-lang/rust#142373 (Fix Debug for Location)
 - rust-lang/rust#142416 (Assorted bootstrap cleanups (step 2))
 - rust-lang/rust#142431 (Add initial version of snapshot tests to bootstrap)
 - rust-lang/rust#142450 (Add documentation on top of `rustc_middle/src/query/mod.rs`)
 - rust-lang/rust#142528 (clarify `rustc_do_not_const_check` comment)
 - rust-lang/rust#142530 (use `if let` guards where possible)
 - rust-lang/rust#142561 (Remove an `njn:` comment accidentaly left behind.)
 - rust-lang/rust#142566 (Fix `-nopt` CI jobs)

r? `@ghost`
`@rustbot` modify labels: rollup
This commit is contained in:
bors 2025-06-16 14:25:08 +00:00
commit 3bc767e1a2
40 changed files with 693 additions and 203 deletions

View file

@ -130,24 +130,54 @@ impl Deprecation {
}
}
/// Represent parsed, *built in*, inert attributes.
/// Represents parsed *built-in* inert attributes.
///
/// That means attributes that are not actually ever expanded.
/// For more information on this, see the module docs on the [`rustc_attr_parsing`] crate.
/// They're instead used as markers, to guide the compilation process in various way in most every stage of the compiler.
/// These are kept around after the AST, into the HIR and further on.
/// ## Overview
/// These attributes are markers that guide the compilation process and are never expanded into other code.
/// They persist throughout the compilation phases, from AST to HIR and beyond.
///
/// The word "parsed" could be a little misleading here, because the parser already parses
/// attributes early on. However, the result, an [`ast::Attribute`]
/// is only parsed at a high level, still containing a token stream in many cases. That is
/// because the structure of the contents varies from attribute to attribute.
/// With a parsed attribute I mean that each attribute is processed individually into a
/// final structure, which on-site (the place where the attribute is useful for, think the
/// the place where `must_use` is checked) little to no extra parsing or validating needs to
/// happen.
/// ## Attribute Processing
/// While attributes are initially parsed by [`rustc_parse`] into [`ast::Attribute`], they still contain raw token streams
/// because different attributes have different internal structures. This enum represents the final,
/// fully parsed form of these attributes, where each variant contains all the information and
/// structure relevant for the specific attribute.
///
/// For more docs, look in [`rustc_attr_parsing`].
/// Some attributes can be applied multiple times to the same item, and they are "collapsed" into a single
/// semantic attribute. For example:
/// ```rust
/// #[repr(C)]
/// #[repr(packed)]
/// struct S { }
/// ```
/// This is equivalent to `#[repr(C, packed)]` and results in a single [`AttributeKind::Repr`] containing
/// both `C` and `packed` annotations. This collapsing happens during parsing and is reflected in the
/// data structures defined in this enum.
///
/// ## Usage
/// These parsed attributes are used throughout the compiler to:
/// - Control code generation (e.g., `#[repr]`)
/// - Mark API stability (`#[stable]`, `#[unstable]`)
/// - Provide documentation (`#[doc]`)
/// - Guide compiler behavior (e.g., `#[allow_internal_unstable]`)
///
/// ## Note on Attribute Organization
/// Some attributes like `InlineAttr`, `OptimizeAttr`, and `InstructionSetAttr` are defined separately
/// from this enum because they are used in specific compiler phases (like code generation) and don't
/// need to persist throughout the entire compilation process. They are typically processed and
/// converted into their final form earlier in the compilation pipeline.
///
/// For example:
/// - `InlineAttr` is used during code generation to control function inlining
/// - `OptimizeAttr` is used to control optimization levels
/// - `InstructionSetAttr` is used for target-specific code generation
///
/// These attributes are handled by their respective compiler passes in the [`rustc_codegen_ssa`] crate
/// and don't need to be preserved in the same way as the attributes in this enum.
///
/// For more details on attribute parsing, see the [`rustc_attr_parsing`] crate.
///
/// [`rustc_parse`]: https://doc.rust-lang.org/nightly/nightly-rustc/rustc_parse/index.html
/// [`rustc_codegen_ssa`]: https://doc.rust-lang.org/nightly/nightly-rustc/rustc_codegen_ssa/index.html
/// [`rustc_attr_parsing`]: https://doc.rust-lang.org/nightly/nightly-rustc/rustc_attr_parsing/index.html
#[derive(Clone, Debug, HashStable_Generic, Encodable, Decodable, PrintAttribute)]
pub enum AttributeKind {

View file

@ -1,3 +1,7 @@
//! Data structures for representing parsed attributes in the Rust compiler.
//! For detailed documentation about attribute processing,
//! see [rustc_attr_parsing](https://doc.rust-lang.org/nightly/nightly-rustc/rustc_attr_parsing/index.html).
// tidy-alphabetical-start
#![allow(internal_features)]
#![doc(rust_logo)]

View file

@ -118,10 +118,7 @@ pub(crate) fn expand_test_or_bench(
let (item, is_stmt) = match item {
Annotatable::Item(i) => (i, false),
Annotatable::Stmt(stmt) if matches!(stmt.kind, ast::StmtKind::Item(_)) => {
// FIXME: Use an 'if let' guard once they are implemented
if let ast::StmtKind::Item(i) = stmt.kind { (i, true) } else { unreachable!() }
}
Annotatable::Stmt(box ast::Stmt { kind: ast::StmtKind::Item(i), .. }) => (i, true),
other => {
not_testable_error(cx, attr_sp, None);
return vec![other];

View file

@ -35,6 +35,7 @@ use crate::base::ast::MetaItemInner;
use crate::errors;
use crate::expand::{self, AstFragment, Invocation};
use crate::module::DirOwnership;
use crate::stats::MacroStat;
// When adding new variants, make sure to
// adjust the `visit_*` / `flat_map_*` calls in `InvocationCollector`
@ -1191,7 +1192,7 @@ pub struct ExtCtxt<'a> {
/// not to expand it again.
pub(super) expanded_inert_attrs: MarkedAttrs,
/// `-Zmacro-stats` data.
pub macro_stats: FxHashMap<(Symbol, MacroKind), crate::stats::MacroStat>, // njn: quals
pub macro_stats: FxHashMap<(Symbol, MacroKind), MacroStat>,
}
impl<'a> ExtCtxt<'a> {

View file

@ -887,7 +887,7 @@ pub static BUILTIN_ATTRIBUTES: &[BuiltinAttribute] = &[
rustc_legacy_const_generics, Normal, template!(List: "N"), ErrorFollowing,
EncodeCrossCrate::Yes,
),
// Do not const-check this function's body. It will always get replaced during CTFE.
// Do not const-check this function's body. It will always get replaced during CTFE via `hook_special_const_fn`.
rustc_attr!(
rustc_do_not_const_check, Normal, template!(Word), WarnFollowing,
EncodeCrossCrate::Yes, "`#[rustc_do_not_const_check]` skips const-check for this function's body",

View file

@ -1,8 +1,64 @@
//! Defines the various compiler queries.
//!
//! For more information on the query system, see
//! ["Queries: demand-driven compilation"](https://rustc-dev-guide.rust-lang.org/query.html).
//! This chapter includes instructions for adding new queries.
//! # The rustc Query System: Query Definitions and Modifiers
//!
//! The core processes in rustc are shipped as queries. Each query is a demand-driven function from some key to a value.
//! The execution result of the function is cached and directly read during the next request, thereby improving compilation efficiency.
//! Some results are saved locally and directly read during the next compilation; these form the core of incremental compilation.
//!
//! ## How to Read This Module
//!
//! Each `query` block in this file defines a single query, specifying its key and value types, along with various modifiers.
//! These query definitions are processed by the [`rustc_macros`], which expands them into the necessary boilerplate code
//! for the query system—including the [`Providers`] struct (a function table for all query implementations, where each field is
//! a function pointer to the actual provider), caching, and dependency graph integration.
//! **Note:** The `Providers` struct is not a Rust trait, but a struct generated by the `rustc_macros` to hold all provider functions.
//! The `rustc_macros` also supports a set of **query modifiers** (see below) that control the behavior of each query.
//!
//! The actual provider functions are implemented in various modules and registered into the `Providers` struct
//! during compiler initialization (see [`rustc_interface::passes::DEFAULT_QUERY_PROVIDERS`]).
//!
//! [`rustc_macros`]: https://doc.rust-lang.org/nightly/nightly-rustc/rustc_macros/index.html
//! [`rustc_interface::passes::DEFAULT_QUERY_PROVIDERS`]: ../../rustc_interface/passes/static.DEFAULT_QUERY_PROVIDERS.html
//!
//! ## Query Modifiers
//!
//! Query modifiers are special flags that alter the behavior of a query. They are parsed and processed by the `rustc_macros`.
//! The main modifiers are:
//!
//! - `desc { ... }`: Sets the human-readable description for diagnostics and profiling. Required for every query.
//! - `arena_cache`: Use an arena for in-memory caching of the query result.
//! - `cache_on_disk_if { ... }`: Cache the query result to disk if the provided block evaluates to true.
//! - `fatal_cycle`: If a dependency cycle is detected, abort compilation with a fatal error.
//! - `cycle_delay_bug`: If a dependency cycle is detected, emit a delayed bug instead of aborting immediately.
//! - `cycle_stash`: If a dependency cycle is detected, stash the error for later handling.
//! - `no_hash`: Do not hash the query result for incremental compilation; just mark as dirty if recomputed.
//! - `anon`: Make the query anonymous in the dependency graph (no dep node is created).
//! - `eval_always`: Always evaluate the query, ignoring its dependencies and cached results.
//! - `depth_limit`: Impose a recursion depth limit on the query to prevent stack overflows.
//! - `separate_provide_extern`: Use separate provider functions for local and external crates.
//! - `feedable`: Allow the query result to be set from another query ("fed" externally).
//! - `return_result_from_ensure_ok`: When called via `tcx.ensure_ok()`, return `Result<(), ErrorGuaranteed>` instead of `()`.
//! If the query needs to be executed and returns an error, the error is returned to the caller.
//! Only valid for queries returning `Result<_, ErrorGuaranteed>`.
//!
//! For the up-to-date list, see the `QueryModifiers` struct in
//! [`rustc_macros/src/query.rs`](https://github.com/rust-lang/rust/blob/master/compiler/rustc_macros/src/query.rs)
//! and for more details on incremental compilation, see the
//! [Query modifiers in incremental compilation](https://rustc-dev-guide.rust-lang.org/queries/incremental-compilation-in-detail.html#query-modifiers) section of the rustc-dev-guide.
//!
//! ## Query Expansion and Code Generation
//!
//! The [`rustc_macros::rustc_queries`] macro expands each query definition into:
//! - A method on [`TyCtxt`] (and [`TyCtxtAt`]) for invoking the query.
//! - Provider traits and structs for supplying the query's value.
//! - Caching and dependency graph integration.
//! - Support for incremental compilation, disk caching, and arena allocation as controlled by the modifiers.
//!
//! [`rustc_macros::rustc_queries`]: ../../rustc_macros/macro.rustc_queries.html
//!
//! The macro-based approach allows the query system to be highly flexible and maintainable, while minimizing boilerplate.
//!
//! For more details, see the [rustc-dev-guide](https://rustc-dev-guide.rust-lang.org/query.html).
#![allow(unused_parens)]

View file

@ -2273,23 +2273,18 @@ impl<'a> Parser<'a> {
),
// Also catches `fn foo(&a)`.
PatKind::Ref(ref inner_pat, mutab)
if matches!(inner_pat.clone().kind, PatKind::Ident(..)) =>
if let PatKind::Ident(_, ident, _) = inner_pat.clone().kind =>
{
match inner_pat.clone().kind {
PatKind::Ident(_, ident, _) => {
let mutab = mutab.prefix_str();
(
ident,
"self: ",
format!("{ident}: &{mutab}TypeName"),
"_: ",
pat.span.shrink_to_lo(),
pat.span,
pat.span.shrink_to_lo(),
)
}
_ => unreachable!(),
}
let mutab = mutab.prefix_str();
(
ident,
"self: ",
format!("{ident}: &{mutab}TypeName"),
"_: ",
pat.span.shrink_to_lo(),
pat.span,
pat.span.shrink_to_lo(),
)
}
_ => {
// Otherwise, try to get a type and emit a suggestion.

View file

@ -12,7 +12,8 @@ use rustc_middle::ty::layout::{
};
use rustc_middle::ty::print::{with_forced_trimmed_paths, with_no_trimmed_paths};
use rustc_middle::ty::{
GenericPredicates, Instance, List, ScalarInt, TyCtxt, TypeVisitableExt, ValTree,
CoroutineArgsExt, GenericPredicates, Instance, List, ScalarInt, TyCtxt, TypeVisitableExt,
ValTree,
};
use rustc_middle::{mir, ty};
use rustc_span::def_id::LOCAL_CRATE;
@ -22,9 +23,9 @@ use stable_mir::mir::mono::{InstanceDef, StaticDef};
use stable_mir::mir::{BinOp, Body, Place, UnOp};
use stable_mir::target::{MachineInfo, MachineSize};
use stable_mir::ty::{
AdtDef, AdtKind, Allocation, ClosureDef, ClosureKind, FieldDef, FnDef, ForeignDef,
ForeignItemKind, GenericArgs, IntrinsicDef, LineInfo, MirConst, PolyFnSig, RigidTy, Span, Ty,
TyConst, TyKind, UintTy, VariantDef,
AdtDef, AdtKind, Allocation, ClosureDef, ClosureKind, CoroutineDef, Discr, FieldDef, FnDef,
ForeignDef, ForeignItemKind, GenericArgs, IntrinsicDef, LineInfo, MirConst, PolyFnSig, RigidTy,
Span, Ty, TyConst, TyKind, UintTy, VariantDef, VariantIdx,
};
use stable_mir::{Crate, CrateDef, CrateItem, CrateNum, DefId, Error, Filename, ItemKind, Symbol};
@ -447,6 +448,30 @@ impl<'tcx> SmirCtxt<'tcx> {
def.internal(&mut *tables, tcx).variants().len()
}
/// Discriminant for a given variant index of AdtDef.
pub fn adt_discr_for_variant(&self, adt: AdtDef, variant: VariantIdx) -> Discr {
    let mut tables = self.0.borrow_mut();
    let tcx = tables.tcx;
    // Map the stable handles to their rustc-internal counterparts, ask rustc
    // for the discriminant, then convert the answer back into stable form.
    let internal_adt = adt.internal(&mut *tables, tcx);
    let internal_variant = variant.internal(&mut *tables, tcx);
    internal_adt.discriminant_for_variant(tcx, internal_variant).stable(&mut *tables)
}
/// Discriminant for a given variant index and args of a coroutine.
pub fn coroutine_discr_for_variant(
    &self,
    coroutine: CoroutineDef,
    args: &GenericArgs,
    variant: VariantIdx,
) -> Discr {
    let mut tables = self.0.borrow_mut();
    let tcx = tables.tcx;
    // Convert the stable_mir values into their rustc-internal counterparts
    // before querying the discriminant, then convert the result back.
    let coroutine = coroutine.def_id().internal(&mut *tables, tcx);
    let args = args.internal(&mut *tables, tcx);
    let variant = variant.internal(&mut *tables, tcx);
    args.as_coroutine().discriminant_for_variant(coroutine, tcx, variant).stable(&mut *tables)
}
/// The name of a variant.
pub fn variant_name(&self, def: VariantDef) -> Symbol {
let mut tables = self.0.borrow_mut();

View file

@ -960,3 +960,11 @@ impl<'tcx> Stable<'tcx> for ty::ImplTraitInTraitData {
}
}
}
impl<'tcx> Stable<'tcx> for rustc_middle::ty::util::Discr<'tcx> {
    type T = stable_mir::ty::Discr;

    fn stable(&self, tables: &mut Tables<'_>) -> Self::T {
        // Only the type needs translation into the stable representation;
        // the raw discriminant value carries over unchanged.
        let ty = self.ty.stable(tables);
        stable_mir::ty::Discr { val: self.val, ty }
    }
}

View file

@ -13,10 +13,10 @@ use stable_mir::mir::mono::{Instance, InstanceDef, StaticDef};
use stable_mir::mir::{BinOp, Body, Place, UnOp};
use stable_mir::target::MachineInfo;
use stable_mir::ty::{
AdtDef, AdtKind, Allocation, ClosureDef, ClosureKind, FieldDef, FnDef, ForeignDef,
ForeignItemKind, ForeignModule, ForeignModuleDef, GenericArgs, GenericPredicates, Generics,
ImplDef, ImplTrait, IntrinsicDef, LineInfo, MirConst, PolyFnSig, RigidTy, Span, TraitDecl,
TraitDef, Ty, TyConst, TyConstId, TyKind, UintTy, VariantDef,
AdtDef, AdtKind, Allocation, ClosureDef, ClosureKind, CoroutineDef, Discr, FieldDef, FnDef,
ForeignDef, ForeignItemKind, ForeignModule, ForeignModuleDef, GenericArgs, GenericPredicates,
Generics, ImplDef, ImplTrait, IntrinsicDef, LineInfo, MirConst, PolyFnSig, RigidTy, Span,
TraitDecl, TraitDef, Ty, TyConst, TyConstId, TyKind, UintTy, VariantDef, VariantIdx,
};
use stable_mir::{
AssocItems, Crate, CrateItem, CrateItems, CrateNum, DefId, Error, Filename, ImplTraitDecls,
@ -230,6 +230,21 @@ impl<'tcx> SmirInterface<'tcx> {
self.cx.adt_variants_len(def)
}
/// Discriminant for a given variant index of AdtDef.
///
/// Thin wrapper that forwards to the underlying context's
/// `adt_discr_for_variant`.
pub(crate) fn adt_discr_for_variant(&self, adt: AdtDef, variant: VariantIdx) -> Discr {
    self.cx.adt_discr_for_variant(adt, variant)
}
/// Discriminant for a given variant index and args of a coroutine.
///
/// Thin wrapper that forwards to the underlying context's
/// `coroutine_discr_for_variant`.
pub(crate) fn coroutine_discr_for_variant(
    &self,
    coroutine: CoroutineDef,
    args: &GenericArgs,
    variant: VariantIdx,
) -> Discr {
    self.cx.coroutine_discr_for_variant(coroutine, args, variant)
}
/// The name of a variant.
pub(crate) fn variant_name(&self, def: VariantDef) -> Symbol {
self.cx.variant_name(def)

View file

@ -756,6 +756,12 @@ crate_def! {
pub CoroutineDef;
}
impl CoroutineDef {
    /// Returns the discriminant for the variant at `idx` of this coroutine,
    /// instantiated with the given generic `args`.
    pub fn discriminant_for_variant(&self, args: &GenericArgs, idx: VariantIdx) -> Discr {
        with(|cx| cx.coroutine_discr_for_variant(*self, args, idx))
    }
}
crate_def! {
#[derive(Serialize)]
pub CoroutineClosureDef;
@ -831,6 +837,15 @@ impl AdtDef {
pub fn repr(&self) -> ReprOptions {
with(|cx| cx.adt_repr(*self))
}
/// Returns the discriminant for the variant at `idx` of this ADT.
pub fn discriminant_for_variant(&self, idx: VariantIdx) -> Discr {
    with(|cx| cx.adt_discr_for_variant(*self, idx))
}
}
/// A discriminant value, paired with the type it is read as.
pub struct Discr {
    /// The raw bits of the discriminant value.
    pub val: u128,
    /// The type of the discriminant.
    pub ty: Ty,
}
/// Definition of a variant, which can be either a struct / union field or an enum variant.

View file

@ -30,7 +30,7 @@ use crate::fmt;
/// Files are compared as strings, not `Path`, which could be unexpected.
/// See [`Location::file`]'s documentation for more discussion.
#[lang = "panic_location"]
#[derive(Copy, Clone, Debug, Eq, Hash, Ord, PartialEq, PartialOrd)]
#[derive(Copy, Clone, Eq, Hash, Ord, PartialEq, PartialOrd)]
#[stable(feature = "panic_hooks", since = "1.10.0")]
pub struct Location<'a> {
// Note: this filename will have exactly one nul byte at its end, but otherwise
@ -43,6 +43,17 @@ pub struct Location<'a> {
col: u32,
}
#[stable(feature = "panic_hooks", since = "1.10.0")]
impl fmt::Debug for Location<'_> {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        // Go through the `file()` accessor rather than the raw field: per the
        // field's comment, the stored filename carries a trailing nul byte.
        let mut builder = f.debug_struct("Location");
        builder.field("file", &self.file());
        builder.field("line", &self.line);
        builder.field("column", &self.col);
        builder.finish()
    }
}
impl<'a> Location<'a> {
/// Returns the source location of the caller of this function. If that function's caller is
/// annotated then its call location will be returned, and so on up the stack to the first call

View file

@ -29,3 +29,11 @@ fn location_const_column() {
const COLUMN: u32 = CALLER.column();
assert_eq!(COLUMN, 40);
}
#[test]
fn location_debug() {
    let f = format!("{:?}", Location::caller());
    assert!(f.contains(&format!("{:?}", file!())));
    // The literals below encode the exact line/column of the
    // `Location::caller()` call above within this source file; they must be
    // updated whenever this test moves or its formatting changes.
    assert!(f.contains("35"));
    assert!(f.contains("29"));
}

View file

@ -121,7 +121,7 @@ pub struct File {
///
/// [`try_lock`]: File::try_lock
/// [`try_lock_shared`]: File::try_lock_shared
#[unstable(feature = "file_lock", issue = "130994")]
#[stable(feature = "file_lock", since = "CURRENT_RUSTC_VERSION")]
pub enum TryLockError {
/// The lock could not be acquired due to an I/O error on the file. The standard library will
/// not return an [`ErrorKind::WouldBlock`] error inside [`TryLockError::Error`]
@ -366,10 +366,10 @@ pub fn write<P: AsRef<Path>, C: AsRef<[u8]>>(path: P, contents: C) -> io::Result
inner(path.as_ref(), contents.as_ref())
}
#[unstable(feature = "file_lock", issue = "130994")]
#[stable(feature = "file_lock", since = "CURRENT_RUSTC_VERSION")]
impl error::Error for TryLockError {}
#[unstable(feature = "file_lock", issue = "130994")]
#[stable(feature = "file_lock", since = "CURRENT_RUSTC_VERSION")]
impl fmt::Debug for TryLockError {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
match self {
@ -379,7 +379,7 @@ impl fmt::Debug for TryLockError {
}
}
#[unstable(feature = "file_lock", issue = "130994")]
#[stable(feature = "file_lock", since = "CURRENT_RUSTC_VERSION")]
impl fmt::Display for TryLockError {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
match self {
@ -390,7 +390,7 @@ impl fmt::Display for TryLockError {
}
}
#[unstable(feature = "file_lock", issue = "130994")]
#[stable(feature = "file_lock", since = "CURRENT_RUSTC_VERSION")]
impl From<TryLockError> for io::Error {
fn from(err: TryLockError) -> io::Error {
match err {
@ -713,7 +713,6 @@ impl File {
/// # Examples
///
/// ```no_run
/// #![feature(file_lock)]
/// use std::fs::File;
///
/// fn main() -> std::io::Result<()> {
@ -722,7 +721,7 @@ impl File {
/// Ok(())
/// }
/// ```
#[unstable(feature = "file_lock", issue = "130994")]
#[stable(feature = "file_lock", since = "CURRENT_RUSTC_VERSION")]
pub fn lock(&self) -> io::Result<()> {
self.inner.lock()
}
@ -766,7 +765,6 @@ impl File {
/// # Examples
///
/// ```no_run
/// #![feature(file_lock)]
/// use std::fs::File;
///
/// fn main() -> std::io::Result<()> {
@ -775,7 +773,7 @@ impl File {
/// Ok(())
/// }
/// ```
#[unstable(feature = "file_lock", issue = "130994")]
#[stable(feature = "file_lock", since = "CURRENT_RUSTC_VERSION")]
pub fn lock_shared(&self) -> io::Result<()> {
self.inner.lock_shared()
}
@ -824,7 +822,6 @@ impl File {
/// # Examples
///
/// ```no_run
/// #![feature(file_lock)]
/// use std::fs::{File, TryLockError};
///
/// fn main() -> std::io::Result<()> {
@ -840,7 +837,7 @@ impl File {
/// Ok(())
/// }
/// ```
#[unstable(feature = "file_lock", issue = "130994")]
#[stable(feature = "file_lock", since = "CURRENT_RUSTC_VERSION")]
pub fn try_lock(&self) -> Result<(), TryLockError> {
self.inner.try_lock()
}
@ -888,7 +885,6 @@ impl File {
/// # Examples
///
/// ```no_run
/// #![feature(file_lock)]
/// use std::fs::{File, TryLockError};
///
/// fn main() -> std::io::Result<()> {
@ -905,7 +901,7 @@ impl File {
/// Ok(())
/// }
/// ```
#[unstable(feature = "file_lock", issue = "130994")]
#[stable(feature = "file_lock", since = "CURRENT_RUSTC_VERSION")]
pub fn try_lock_shared(&self) -> Result<(), TryLockError> {
self.inner.try_lock_shared()
}
@ -933,7 +929,6 @@ impl File {
/// # Examples
///
/// ```no_run
/// #![feature(file_lock)]
/// use std::fs::File;
///
/// fn main() -> std::io::Result<()> {
@ -943,7 +938,7 @@ impl File {
/// Ok(())
/// }
/// ```
#[unstable(feature = "file_lock", issue = "130994")]
#[stable(feature = "file_lock", since = "CURRENT_RUSTC_VERSION")]
pub fn unlock(&self) -> io::Result<()> {
self.inner.unlock()
}

View file

@ -1882,6 +1882,19 @@ impl FromStr for PathBuf {
#[stable(feature = "rust1", since = "1.0.0")]
impl<P: AsRef<Path>> FromIterator<P> for PathBuf {
/// Creates a new `PathBuf` from the [`Path`] elements of an iterator.
///
/// This uses [`push`](Self::push) to add each element, so can be used to adjoin multiple path
/// [components](Components).
///
/// # Examples
/// ```
/// # use std::path::PathBuf;
/// let path = PathBuf::from_iter(["/tmp", "foo", "bar"]);
/// assert_eq!(path, PathBuf::from("/tmp/foo/bar"));
/// ```
///
/// See documentation for [`push`](Self::push) for more details on how the path is constructed.
fn from_iter<I: IntoIterator<Item = P>>(iter: I) -> PathBuf {
let mut buf = PathBuf::new();
buf.extend(iter);
@ -1891,6 +1904,20 @@ impl<P: AsRef<Path>> FromIterator<P> for PathBuf {
#[stable(feature = "rust1", since = "1.0.0")]
impl<P: AsRef<Path>> Extend<P> for PathBuf {
/// Extends `self` with [`Path`] elements from `iter`.
///
/// This uses [`push`](Self::push) to add each element, so can be used to adjoin multiple path
/// [components](Components).
///
/// # Examples
/// ```
/// # use std::path::PathBuf;
/// let mut path = PathBuf::from("/tmp");
/// path.extend(["foo", "bar", "file.txt"]);
/// assert_eq!(path, PathBuf::from("/tmp/foo/bar/file.txt"));
/// ```
///
/// See documentation for [`push`](Self::push) for more details on how the path is constructed.
fn extend<I: IntoIterator<Item = P>>(&mut self, iter: I) {
    // Append each element via `push`, which applies the usual
    // path-joining rules for separators and absolute paths.
    for component in iter {
        self.push(component.as_ref());
    }
}

View file

@ -44,6 +44,7 @@ dependencies = [
"fd-lock",
"home",
"ignore",
"insta",
"junction",
"libc",
"object",
@ -158,6 +159,18 @@ dependencies = [
"cc",
]
[[package]]
name = "console"
version = "0.15.11"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "054ccb5b10f9f2cbf51eb355ca1d05c2d279ce1804688d0db74b4733a5aeafd8"
dependencies = [
"encode_unicode",
"libc",
"once_cell",
"windows-sys 0.59.0",
]
[[package]]
name = "cpufeatures"
version = "0.2.15"
@ -218,6 +231,12 @@ dependencies = [
"crypto-common",
]
[[package]]
name = "encode_unicode"
version = "1.0.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "34aa73646ffb006b8f5147f3dc182bd4bcb190227ce861fc4a4844bf8e3cb2c0"
[[package]]
name = "errno"
version = "0.3.11"
@ -323,6 +342,17 @@ dependencies = [
"winapi-util",
]
[[package]]
name = "insta"
version = "1.43.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "154934ea70c58054b556dd430b99a98c2a7ff5309ac9891597e339b5c28f4371"
dependencies = [
"console",
"once_cell",
"similar",
]
[[package]]
name = "itoa"
version = "1.0.11"
@ -675,6 +705,12 @@ version = "1.3.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0fda2ff0d084019ba4d7c6f371c95d8fd75ce3524c3cb8fb653a3023f6323e64"
[[package]]
name = "similar"
version = "2.7.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "bbbb5d9659141646ae647b42fe094daf6c6192d1620870b449d9557f748b2daa"
[[package]]
name = "smallvec"
version = "1.13.2"

View file

@ -84,6 +84,7 @@ features = [
[dev-dependencies]
pretty_assertions = "1.4"
tempfile = "3.15.0"
insta = "1.43"
# We care a lot about bootstrap's compile times, so don't include debuginfo for
# dependencies, only bootstrap itself.

View file

@ -200,6 +200,10 @@ please file issues on the [Rust issue tracker][rust-issue-tracker].
[rust-bootstrap-zulip]: https://rust-lang.zulipchat.com/#narrow/stream/t-infra.2Fbootstrap
[rust-issue-tracker]: https://github.com/rust-lang/rust/issues
## Testing
To run bootstrap tests, execute `x test bootstrap`. If you want to bless snapshot tests, then install `cargo-insta` (`cargo install cargo-insta`) and then run `cargo insta review --manifest-path src/bootstrap/Cargo.toml`.
## Changelog
Because we do not release bootstrap with versions, we also do not maintain CHANGELOG files. To

View file

@ -2009,7 +2009,7 @@ NOTE: if you're sure you want to do this, please open an issue as to why. In the
// Note that if we encounter `PATH` we make sure to append to our own `PATH`
// rather than stomp over it.
if !builder.config.dry_run() && target.is_msvc() {
for (k, v) in builder.cc.borrow()[&target].env() {
for (k, v) in builder.cc[&target].env() {
if k != "PATH" {
cmd.env(k, v);
}
@ -2026,8 +2026,7 @@ NOTE: if you're sure you want to do this, please open an issue as to why. In the
// address sanitizer enabled (e.g., ntdll.dll).
cmd.env("ASAN_WIN_CONTINUE_ON_INTERCEPTION_FAILURE", "1");
// Add the address sanitizer runtime to the PATH - it is located next to cl.exe.
let asan_runtime_path =
builder.cc.borrow()[&target].path().parent().unwrap().to_path_buf();
let asan_runtime_path = builder.cc[&target].path().parent().unwrap().to_path_buf();
let old_path = cmd
.get_envs()
.find_map(|(k, v)| (k == "PATH").then_some(v))
@ -3059,6 +3058,8 @@ impl Step for Bootstrap {
cargo
.rustflag("-Cdebuginfo=2")
.env("CARGO_TARGET_DIR", builder.out.join("bootstrap"))
// Needed for insta to correctly write pending snapshots to the right directories.
.env("INSTA_WORKSPACE_ROOT", &builder.src)
.env("RUSTC_BOOTSTRAP", "1");
// bootstrap tests are racy on directory creation so just run them one at a time.

View file

@ -1334,7 +1334,7 @@ impl Builder<'_> {
if compiler.host.is_msvc() {
let curpaths = env::var_os("PATH").unwrap_or_default();
let curpaths = env::split_paths(&curpaths).collect::<Vec<_>>();
for (k, v) in self.cc.borrow()[&compiler.host].env() {
for (k, v) in self.cc[&compiler.host].env() {
if k != "PATH" {
continue;
}

View file

@ -278,9 +278,7 @@ impl Cargo {
self.rustdocflags.arg(&arg);
}
if !builder.config.dry_run()
&& builder.cc.borrow()[&target].args().iter().any(|arg| arg == "-gz")
{
if !builder.config.dry_run() && builder.cc[&target].args().iter().any(|arg| arg == "-gz") {
self.rustflags.arg("-Clink-arg=-gz");
}

View file

@ -5,7 +5,7 @@ use std::fmt::{self, Debug, Write};
use std::hash::Hash;
use std::ops::Deref;
use std::path::{Path, PathBuf};
use std::sync::LazyLock;
use std::sync::{LazyLock, OnceLock};
use std::time::{Duration, Instant};
use std::{env, fs};
@ -60,6 +60,9 @@ pub struct Builder<'a> {
/// to do. For example: with `./x check foo bar` we get `paths=["foo",
/// "bar"]`.
pub paths: Vec<PathBuf>,
/// Cached list of submodules from self.build.src.
submodule_paths_cache: OnceLock<Vec<String>>,
}
impl Deref for Builder<'_> {
@ -687,7 +690,7 @@ impl<'a> ShouldRun<'a> {
///
/// [`path`]: ShouldRun::path
pub fn paths(mut self, paths: &[&str]) -> Self {
let submodules_paths = build_helper::util::parse_gitmodules(&self.builder.src);
let submodules_paths = self.builder.submodule_paths();
self.paths.insert(PathSet::Set(
paths
@ -1180,6 +1183,7 @@ impl<'a> Builder<'a> {
stack: RefCell::new(Vec::new()),
time_spent_on_dependencies: Cell::new(Duration::new(0, 0)),
paths,
submodule_paths_cache: Default::default(),
}
}
@ -1510,6 +1514,19 @@ impl<'a> Builder<'a> {
None
}
/// Updates every submodule in the source tree, exiting with an error if
/// submodule management is disabled and a submodule does not exist.
pub fn require_and_update_all_submodules(&self) {
    self.submodule_paths().iter().for_each(|path| self.require_submodule(path, None));
}
/// Get all submodules from the src directory.
///
/// The list is computed at most once per `Builder` (via
/// `OnceLock::get_or_init` on `submodule_paths_cache`) and reused on
/// subsequent calls.
pub fn submodule_paths(&self) -> &[String] {
    self.submodule_paths_cache.get_or_init(|| build_helper::util::parse_gitmodules(&self.src))
}
/// Ensure that a given step is built, returning its output. This will
/// cache the step, so it is safe (and good!) to call this as often as
/// needed to ensure that all dependencies are built.

View file

@ -15,11 +15,12 @@ static TEST_TRIPLE_2: &str = "i686-unknown-hurd-gnu";
static TEST_TRIPLE_3: &str = "i686-unknown-netbsd";
fn configure(cmd: &str, host: &[&str], target: &[&str]) -> Config {
configure_with_args(&[cmd.to_owned()], host, target)
configure_with_args(&[cmd], host, target)
}
fn configure_with_args(cmd: &[String], host: &[&str], target: &[&str]) -> Config {
let mut config = Config::parse(Flags::parse(cmd));
fn configure_with_args(cmd: &[&str], host: &[&str], target: &[&str]) -> Config {
let cmd = cmd.iter().copied().map(String::from).collect::<Vec<_>>();
let mut config = Config::parse(Flags::parse(&cmd));
// don't save toolstates
config.save_toolstates = None;
config.set_dry_run(DryRun::SelfCheck);
@ -67,7 +68,7 @@ fn run_build(paths: &[PathBuf], config: Config) -> Cache {
fn check_cli<const N: usize>(paths: [&str; N]) {
run_build(
&paths.map(PathBuf::from),
configure_with_args(&paths.map(String::from), &[TEST_TRIPLE_1], &[TEST_TRIPLE_1]),
configure_with_args(&paths, &[TEST_TRIPLE_1], &[TEST_TRIPLE_1]),
);
}
@ -1000,8 +1001,7 @@ mod sysroot_target_dirs {
/// cg_gcc tests instead.
#[test]
fn test_test_compiler() {
let cmd = &["test", "compiler"].map(str::to_owned);
let config = configure_with_args(cmd, &[TEST_TRIPLE_1], &[TEST_TRIPLE_1]);
let config = configure_with_args(&["test", "compiler"], &[TEST_TRIPLE_1], &[TEST_TRIPLE_1]);
let cache = run_build(&config.paths.clone(), config);
let compiler = cache.contains::<test::CrateLibrustc>();
@ -1034,8 +1034,7 @@ fn test_test_coverage() {
// Print each test case so that if one fails, the most recently printed
// case is the one that failed.
println!("Testing case: {cmd:?}");
let cmd = cmd.iter().copied().map(str::to_owned).collect::<Vec<_>>();
let config = configure_with_args(&cmd, &[TEST_TRIPLE_1], &[TEST_TRIPLE_1]);
let config = configure_with_args(cmd, &[TEST_TRIPLE_1], &[TEST_TRIPLE_1]);
let mut cache = run_build(&config.paths.clone(), config);
let modes =
@ -1207,8 +1206,7 @@ fn test_get_tool_rustc_compiler() {
/// of `Any { .. }`.
#[test]
fn step_cycle_debug() {
let cmd = ["run", "cyclic-step"].map(str::to_owned);
let config = configure_with_args(&cmd, &[TEST_TRIPLE_1], &[TEST_TRIPLE_1]);
let config = configure_with_args(&["run", "cyclic-step"], &[TEST_TRIPLE_1], &[TEST_TRIPLE_1]);
let err = panic::catch_unwind(|| run_build(&config.paths.clone(), config)).unwrap_err();
let err = err.downcast_ref::<String>().unwrap().as_str();
@ -1233,3 +1231,81 @@ fn any_debug() {
// Downcasting to the underlying type should succeed.
assert_eq!(x.downcast_ref::<MyStruct>(), Some(&MyStruct { x: 7 }));
}
/// The staging tests use insta for snapshot testing.
/// See bootstrap's README on how to bless the snapshots.
mod staging {
    use crate::core::builder::tests::{
        TEST_TRIPLE_1, configure, configure_with_args, render_steps, run_build,
    };

    /// Snapshot of the steps executed by `x build --stage 1 compiler`.
    #[test]
    fn build_compiler_stage_1() {
        // Run the build in dry-run/self-check mode and record every executed step.
        let mut cache = run_build(
            &["compiler".into()],
            configure_with_args(&["build", "--stage", "1"], &[TEST_TRIPLE_1], &[TEST_TRIPLE_1]),
        );
        let steps = cache.into_executed_steps();
        // NOTE(review): `rustc 1` appears twice below; this matches the FIXME in
        // `render_steps` about the `Rustc` step reporting a misleading stage.
        insta::assert_snapshot!(render_steps(&steps), @r"
[build] rustc 0 <target1> -> std 0 <target1>
[build] llvm <target1>
[build] rustc 0 <target1> -> rustc 1 <target1>
[build] rustc 0 <target1> -> rustc 1 <target1>
");
    }
}
/// Renders the executed bootstrap steps for usage in snapshot tests with insta.
/// Only renders certain important steps.
/// Each value in `steps` should be a tuple of (Step, step output).
/// Renders the executed bootstrap steps for usage in snapshot tests with insta.
/// Only renders certain important steps (`Rustc`, `Std`, `Llvm`); everything
/// else is silently dropped. Each value in `steps` should be a tuple of
/// (Step, step output).
fn render_steps(steps: &[(Box<dyn Any>, Box<dyn Any>)]) -> String {
    let mut rendered = Vec::new();
    for (step, output) in steps {
        // FIXME: implement an optional method on Step to produce metadata for test, instead
        // of this downcasting
        let line = if let Some((rustc, output)) = downcast_step::<compile::Rustc>(step, output) {
            format!(
                "[build] {} -> {}",
                render_compiler(rustc.build_compiler),
                // FIXME: return the correct stage from the `Rustc` step, now it behaves weirdly
                render_compiler(Compiler::new(rustc.build_compiler.stage + 1, rustc.target)),
            )
        } else if let Some((std, output)) = downcast_step::<compile::Std>(step, output) {
            format!(
                "[build] {} -> std {} <{}>",
                render_compiler(std.compiler),
                std.compiler.stage,
                std.target
            )
        } else if let Some((llvm, output)) = downcast_step::<llvm::Llvm>(step, output) {
            format!("[build] llvm <{}>", llvm.target)
        } else {
            // Step type we do not render in snapshots.
            continue;
        };
        // Normalize the concrete test triples so snapshots are host-independent.
        rendered.push(
            line.replace(TEST_TRIPLE_1, "target1")
                .replace(TEST_TRIPLE_2, "target2")
                .replace(TEST_TRIPLE_3, "target3"),
        );
    }
    rendered.join("\n")
}
/// Downcasts a type-erased (step, output) pair to the concrete step type `S`
/// and its associated `Output` type.
///
/// Returns `None` if either the step or the output is not of the expected
/// type; both downcasts must succeed for the pair to be returned.
fn downcast_step<'a, S: Step>(
    step: &'a Box<dyn Any>,
    output: &'a Box<dyn Any>,
) -> Option<(&'a S, &'a S::Output)> {
    // `?` replaces the two manual `let ... else { return None }` blocks.
    Some((step.downcast_ref::<S>()?, output.downcast_ref::<S::Output>()?))
}
fn render_compiler(compiler: Compiler) -> String {
format!("rustc {} <{}>", compiler.stage, compiler.host)
}

View file

@ -17,7 +17,7 @@
//! also check out the `src/bootstrap/README.md` file for more information.
#![cfg_attr(test, allow(unused))]
use std::cell::{Cell, RefCell};
use std::cell::Cell;
use std::collections::{BTreeSet, HashMap, HashSet};
use std::fmt::Display;
use std::path::{Path, PathBuf};
@ -189,10 +189,12 @@ pub struct Build {
// Runtime state filled in later on
// C/C++ compilers and archiver for all targets
cc: RefCell<HashMap<TargetSelection, cc::Tool>>,
cxx: RefCell<HashMap<TargetSelection, cc::Tool>>,
ar: RefCell<HashMap<TargetSelection, PathBuf>>,
ranlib: RefCell<HashMap<TargetSelection, PathBuf>>,
cc: HashMap<TargetSelection, cc::Tool>,
cxx: HashMap<TargetSelection, cc::Tool>,
ar: HashMap<TargetSelection, PathBuf>,
ranlib: HashMap<TargetSelection, PathBuf>,
wasi_sdk_path: Option<PathBuf>,
// Miscellaneous
// allow bidirectional lookups: both name -> path and path -> name
crates: HashMap<String, Crate>,
@ -466,10 +468,11 @@ impl Build {
enzyme_info,
in_tree_llvm_info,
in_tree_gcc_info,
cc: RefCell::new(HashMap::new()),
cxx: RefCell::new(HashMap::new()),
ar: RefCell::new(HashMap::new()),
ranlib: RefCell::new(HashMap::new()),
cc: HashMap::new(),
cxx: HashMap::new(),
ar: HashMap::new(),
ranlib: HashMap::new(),
wasi_sdk_path: env::var_os("WASI_SDK_PATH").map(PathBuf::from),
crates: HashMap::new(),
crate_paths: HashMap::new(),
is_sudo,
@ -498,7 +501,7 @@ impl Build {
}
build.verbose(|| println!("finding compilers"));
utils::cc_detect::find(&build);
utils::cc_detect::fill_compilers(&mut build);
// When running `setup`, the profile is about to change, so any requirements we have now may
// be different on the next invocation. Don't check for them until the next time x.py is
// run. This is ok because `setup` never runs any build commands, so it won't fail if commands are missing.
@ -593,14 +596,6 @@ impl Build {
}
}
/// Updates all submodules, and exits with an error if submodule
/// management is disabled and the submodule does not exist.
pub fn require_and_update_all_submodules(&self) {
for submodule in build_helper::util::parse_gitmodules(&self.src) {
self.require_submodule(submodule, None);
}
}
/// If any submodule has been initialized already, sync it unconditionally.
/// This avoids contributors checking in a submodule change by accident.
fn update_existing_submodules(&self) {
@ -1143,17 +1138,17 @@ impl Build {
if self.config.dry_run() {
return PathBuf::new();
}
self.cc.borrow()[&target].path().into()
self.cc[&target].path().into()
}
/// Returns the internal `cc::Tool` for the C compiler.
fn cc_tool(&self, target: TargetSelection) -> Tool {
self.cc.borrow()[&target].clone()
self.cc[&target].clone()
}
/// Returns the internal `cc::Tool` for the C++ compiler.
fn cxx_tool(&self, target: TargetSelection) -> Tool {
self.cxx.borrow()[&target].clone()
self.cxx[&target].clone()
}
/// Returns C flags that `cc-rs` thinks should be enabled for the
@ -1163,8 +1158,8 @@ impl Build {
return Vec::new();
}
let base = match c {
CLang::C => self.cc.borrow()[&target].clone(),
CLang::Cxx => self.cxx.borrow()[&target].clone(),
CLang::C => self.cc[&target].clone(),
CLang::Cxx => self.cxx[&target].clone(),
};
// Filter out -O and /O (the optimization flags) that we picked up
@ -1217,7 +1212,7 @@ impl Build {
if self.config.dry_run() {
return None;
}
self.ar.borrow().get(&target).cloned()
self.ar.get(&target).cloned()
}
/// Returns the path to the `ranlib` utility for the target specified.
@ -1225,7 +1220,7 @@ impl Build {
if self.config.dry_run() {
return None;
}
self.ranlib.borrow().get(&target).cloned()
self.ranlib.get(&target).cloned()
}
/// Returns the path to the C++ compiler for the target specified.
@ -1233,7 +1228,7 @@ impl Build {
if self.config.dry_run() {
return Ok(PathBuf::new());
}
match self.cxx.borrow().get(&target) {
match self.cxx.get(&target) {
Some(p) => Ok(p.path().into()),
None => Err(format!("target `{target}` is not configured as a host, only as a target")),
}
@ -1250,7 +1245,7 @@ impl Build {
} else if target.contains("vxworks") {
// need to use CXX compiler as linker to resolve the exception functions
// that are only existed in CXX libraries
Some(self.cxx.borrow()[&target].path().into())
Some(self.cxx[&target].path().into())
} else if !self.config.is_host_target(target)
&& helpers::use_host_linker(target)
&& !target.is_msvc()

View file

@ -1,32 +1,28 @@
use std::path::PathBuf;
use crate::{BuildStamp, Config, Flags};
use tempfile::TempDir;
fn temp_dir() -> PathBuf {
let config =
Config::parse(Flags::parse(&["check".to_owned(), "--config=/does/not/exist".to_owned()]));
config.tempdir()
}
use crate::{BuildStamp, Config, Flags};
/// A stamp prefix may not begin with `.` — `with_prefix` must panic.
#[test]
#[should_panic(expected = "prefix can not start or end with '.'")]
fn test_with_invalid_prefix() {
    let dir = TempDir::new().unwrap();
    BuildStamp::new(dir.path()).with_prefix(".invalid");
}
/// A stamp prefix may not end with `.` — `with_prefix` must panic.
#[test]
#[should_panic(expected = "prefix can not start or end with '.'")]
fn test_with_invalid_prefix2() {
    let dir = TempDir::new().unwrap();
    BuildStamp::new(dir.path()).with_prefix("invalid.");
}
#[test]
fn test_is_up_to_date() {
let dir = temp_dir();
let dir = TempDir::new().unwrap();
let mut build_stamp = BuildStamp::new(&dir).add_stamp("v1.0.0");
let mut build_stamp = BuildStamp::new(dir.path()).add_stamp("v1.0.0");
build_stamp.write().unwrap();
assert!(
@ -45,9 +41,9 @@ fn test_is_up_to_date() {
#[test]
fn test_with_prefix() {
let dir = temp_dir();
let dir = TempDir::new().unwrap();
let stamp = BuildStamp::new(&dir).add_stamp("v1.0.0");
let stamp = BuildStamp::new(dir.path()).add_stamp("v1.0.0");
assert_eq!(stamp.path.file_name().unwrap(), ".stamp");
let stamp = stamp.with_prefix("test");

View file

@ -17,6 +17,7 @@ use std::borrow::Borrow;
use std::cell::RefCell;
use std::cmp::Ordering;
use std::collections::HashMap;
use std::fmt::Debug;
use std::hash::{Hash, Hasher};
use std::marker::PhantomData;
use std::ops::Deref;
@ -208,25 +209,30 @@ pub static INTERNER: LazyLock<Interner> = LazyLock::new(Interner::default);
/// any type in its output. It is a write-once cache; values are never evicted,
/// which means that references to the value can safely be returned from the
/// `get()` method.
#[derive(Debug)]
pub struct Cache(
RefCell<
#[derive(Debug, Default)]
pub struct Cache {
cache: RefCell<
HashMap<
TypeId,
Box<dyn Any>, // actually a HashMap<Step, Interned<Step::Output>>
>,
>,
);
#[cfg(test)]
/// Contains steps in the same order in which they were executed
/// Useful for tests
/// Tuples (step, step output)
executed_steps: RefCell<Vec<(Box<dyn Any>, Box<dyn Any>)>>,
}
impl Cache {
/// Creates a new empty cache.
pub fn new() -> Cache {
Cache(RefCell::new(HashMap::new()))
Cache::default()
}
/// Stores the result of a computation step in the cache.
pub fn put<S: Step>(&self, step: S, value: S::Output) {
let mut cache = self.0.borrow_mut();
let mut cache = self.cache.borrow_mut();
let type_id = TypeId::of::<S>();
let stepcache = cache
.entry(type_id)
@ -234,12 +240,20 @@ impl Cache {
.downcast_mut::<HashMap<S, S::Output>>()
.expect("invalid type mapped");
assert!(!stepcache.contains_key(&step), "processing {step:?} a second time");
#[cfg(test)]
{
let step: Box<dyn Any> = Box::new(step.clone());
let output: Box<dyn Any> = Box::new(value.clone());
self.executed_steps.borrow_mut().push((step, output));
}
stepcache.insert(step, value);
}
/// Retrieves a cached result for the given step, if available.
pub fn get<S: Step>(&self, step: &S) -> Option<S::Output> {
let mut cache = self.0.borrow_mut();
let mut cache = self.cache.borrow_mut();
let type_id = TypeId::of::<S>();
let stepcache = cache
.entry(type_id)
@ -252,8 +266,8 @@ impl Cache {
#[cfg(test)]
impl Cache {
pub fn all<S: Ord + Clone + Step>(&mut self) -> Vec<(S, S::Output)> {
let cache = self.0.get_mut();
pub fn all<S: Ord + Step>(&mut self) -> Vec<(S, S::Output)> {
let cache = self.cache.get_mut();
let type_id = TypeId::of::<S>();
let mut v = cache
.remove(&type_id)
@ -265,7 +279,12 @@ impl Cache {
}
pub fn contains<S: Step>(&self) -> bool {
self.0.borrow().contains_key(&TypeId::of::<S>())
self.cache.borrow().contains_key(&TypeId::of::<S>())
}
#[cfg(test)]
pub fn into_executed_steps(mut self) -> Vec<(Box<dyn Any>, Box<dyn Any>)> {
mem::take(&mut self.executed_steps.borrow_mut())
}
}

View file

@ -61,8 +61,8 @@ fn new_cc_build(build: &Build, target: TargetSelection) -> cc::Build {
///
/// This function determines which targets need a C compiler (and, if needed, a C++ compiler)
/// by combining the primary build target, host targets, and any additional targets. For
/// each target, it calls [`find_target`] to configure the necessary compiler tools.
pub fn find(build: &Build) {
/// each target, it calls [`fill_target_compiler`] to configure the necessary compiler tools.
pub fn fill_compilers(build: &mut Build) {
let targets: HashSet<_> = match build.config.cmd {
// We don't need to check cross targets for these commands.
crate::Subcommand::Clean { .. }
@ -87,7 +87,7 @@ pub fn find(build: &Build) {
};
for target in targets.into_iter() {
find_target(build, target);
fill_target_compiler(build, target);
}
}
@ -96,7 +96,7 @@ pub fn find(build: &Build) {
/// This function uses both user-specified configuration (from `bootstrap.toml`) and auto-detection
/// logic to determine the correct C/C++ compilers for the target. It also determines the appropriate
/// archiver (`ar`) and sets up additional compilation flags (both handled and unhandled).
pub fn find_target(build: &Build, target: TargetSelection) {
pub fn fill_target_compiler(build: &mut Build, target: TargetSelection) {
let mut cfg = new_cc_build(build, target);
let config = build.config.target_config.get(&target);
if let Some(cc) = config
@ -113,7 +113,7 @@ pub fn find_target(build: &Build, target: TargetSelection) {
cfg.try_get_archiver().map(|c| PathBuf::from(c.get_program())).ok()
};
build.cc.borrow_mut().insert(target, compiler.clone());
build.cc.insert(target, compiler.clone());
let mut cflags = build.cc_handled_clags(target, CLang::C);
cflags.extend(build.cc_unhandled_cflags(target, GitRepo::Rustc, CLang::C));
@ -135,7 +135,7 @@ pub fn find_target(build: &Build, target: TargetSelection) {
// for VxWorks, record CXX compiler which will be used in lib.rs:linker()
if cxx_configured || target.contains("vxworks") {
let compiler = cfg.get_compiler();
build.cxx.borrow_mut().insert(target, compiler);
build.cxx.insert(target, compiler);
}
build.verbose(|| println!("CC_{} = {:?}", target.triple, build.cc(target)));
@ -148,11 +148,11 @@ pub fn find_target(build: &Build, target: TargetSelection) {
}
if let Some(ar) = ar {
build.verbose(|| println!("AR_{} = {ar:?}", target.triple));
build.ar.borrow_mut().insert(target, ar);
build.ar.insert(target, ar);
}
if let Some(ranlib) = config.and_then(|c| c.ranlib.clone()) {
build.ranlib.borrow_mut().insert(target, ranlib);
build.ranlib.insert(target, ranlib);
}
}
@ -221,7 +221,10 @@ fn default_compiler(
}
t if t.contains("-wasi") => {
let root = PathBuf::from(std::env::var_os("WASI_SDK_PATH")?);
let root = build
.wasi_sdk_path
.as_ref()
.expect("WASI_SDK_PATH mut be configured for a -wasi target");
let compiler = match compiler {
Language::C => format!("{t}-clang"),
Language::CPlusPlus => format!("{t}-clang++"),

View file

@ -77,11 +77,11 @@ fn test_new_cc_build() {
#[test]
fn test_default_compiler_wasi() {
let build = Build::new(Config { ..Config::parse(Flags::parse(&["build".to_owned()])) });
let mut build = Build::new(Config { ..Config::parse(Flags::parse(&["build".to_owned()])) });
let target = TargetSelection::from_user("wasm32-wasi");
let wasi_sdk = PathBuf::from("/wasi-sdk");
// SAFETY: bootstrap tests run on a single thread
unsafe { env::set_var("WASI_SDK_PATH", &wasi_sdk) };
build.wasi_sdk_path = Some(wasi_sdk.clone());
let mut cfg = cc::Build::new();
if let Some(result) = default_compiler(&mut cfg, Language::C, target.clone(), &build) {
let expected = {
@ -94,10 +94,6 @@ fn test_default_compiler_wasi() {
"default_compiler should return a compiler path for wasi target when WASI_SDK_PATH is set"
);
}
// SAFETY: bootstrap tests run on a single thread
unsafe {
env::remove_var("WASI_SDK_PATH");
}
}
#[test]
@ -119,18 +115,14 @@ fn test_find_target_with_config() {
target_config.ar = Some(PathBuf::from("dummy-ar"));
target_config.ranlib = Some(PathBuf::from("dummy-ranlib"));
build.config.target_config.insert(target.clone(), target_config);
find_target(&build, target.clone());
let binding = build.cc.borrow();
let cc_tool = binding.get(&target).unwrap();
fill_target_compiler(&mut build, target.clone());
let cc_tool = build.cc.get(&target).unwrap();
assert_eq!(cc_tool.path(), &PathBuf::from("dummy-cc"));
let binding = build.cxx.borrow();
let cxx_tool = binding.get(&target).unwrap();
let cxx_tool = build.cxx.get(&target).unwrap();
assert_eq!(cxx_tool.path(), &PathBuf::from("dummy-cxx"));
let binding = build.ar.borrow();
let ar = binding.get(&target).unwrap();
let ar = build.ar.get(&target).unwrap();
assert_eq!(ar, &PathBuf::from("dummy-ar"));
let binding = build.ranlib.borrow();
let ranlib = binding.get(&target).unwrap();
let ranlib = build.ranlib.get(&target).unwrap();
assert_eq!(ranlib, &PathBuf::from("dummy-ranlib"));
}
@ -139,12 +131,12 @@ fn test_find_target_without_config() {
let mut build = Build::new(Config { ..Config::parse(Flags::parse(&["build".to_owned()])) });
let target = TargetSelection::from_user("x86_64-unknown-linux-gnu");
build.config.target_config.clear();
find_target(&build, target.clone());
assert!(build.cc.borrow().contains_key(&target));
fill_target_compiler(&mut build, target.clone());
assert!(build.cc.contains_key(&target));
if !target.triple.contains("vxworks") {
assert!(build.cxx.borrow().contains_key(&target));
assert!(build.cxx.contains_key(&target));
}
assert!(build.ar.borrow().contains_key(&target));
assert!(build.ar.contains_key(&target));
}
#[test]
@ -154,8 +146,8 @@ fn test_find() {
let target2 = TargetSelection::from_user("x86_64-unknown-openbsd");
build.targets.push(target1.clone());
build.hosts.push(target2.clone());
find(&build);
fill_compilers(&mut build);
for t in build.hosts.iter().chain(build.targets.iter()).chain(iter::once(&build.host_target)) {
assert!(build.cc.borrow().contains_key(t), "CC not set for target {}", t.triple);
assert!(build.cc.contains_key(t), "CC not set for target {}", t.triple);
}
}

View file

@ -98,7 +98,7 @@ pub fn is_dylib(path: &Path) -> bool {
/// Return the path to the containing submodule if available.
pub fn submodule_path_of(builder: &Builder<'_>, path: &str) -> Option<String> {
let submodule_paths = build_helper::util::parse_gitmodules(&builder.src);
let submodule_paths = builder.submodule_paths();
submodule_paths.iter().find_map(|submodule_path| {
if path.starts_with(submodule_path) { Some(submodule_path.to_string()) } else { None }
})

View file

@ -6,6 +6,9 @@
#![allow(dead_code)]
#[cfg(test)]
mod tests;
use std::env;
use std::ffi::OsString;
use std::fs::OpenOptions;

View file

@ -1,10 +1,3 @@
//! The `shared_helpers` module can't have its own tests submodule, because
//! that would cause problems for the shim binaries that include it via
//! `#[path]`, so instead those unit tests live here.
//!
//! To prevent tidy from complaining about this file not being named `tests.rs`,
//! it lives inside a submodule directory named `tests`.
use crate::utils::shared_helpers::parse_value_from_args;
#[test]

View file

@ -1,2 +1,3 @@
//! This module contains shared utilities for bootstrap tests.
pub mod git;
mod shared_helpers_tests;

View file

@ -2,7 +2,6 @@ use std::fs::File;
use std::io::{BufRead, BufReader};
use std::path::Path;
use std::process::Command;
use std::sync::OnceLock;
/// Invokes `build_helper::util::detail_exit` with `cfg!(test)`
///
@ -51,25 +50,20 @@ pub fn try_run(cmd: &mut Command, print_cmd_on_fail: bool) -> Result<(), ()> {
}
/// Returns the submodule paths from the `.gitmodules` file in the given directory.
pub fn parse_gitmodules(target_dir: &Path) -> &[String] {
static SUBMODULES_PATHS: OnceLock<Vec<String>> = OnceLock::new();
pub fn parse_gitmodules(target_dir: &Path) -> Vec<String> {
let gitmodules = target_dir.join(".gitmodules");
assert!(gitmodules.exists(), "'{}' file is missing.", gitmodules.display());
let init_submodules_paths = || {
let file = File::open(gitmodules).unwrap();
let file = File::open(gitmodules).unwrap();
let mut submodules_paths = vec![];
for line in BufReader::new(file).lines().map_while(Result::ok) {
let line = line.trim();
if line.starts_with("path") {
let actual_path = line.split(' ').last().expect("Couldn't get value of path");
submodules_paths.push(actual_path.to_owned());
}
let mut submodules_paths = vec![];
for line in BufReader::new(file).lines().map_while(Result::ok) {
let line = line.trim();
if line.starts_with("path") {
let actual_path = line.split(' ').last().expect("Couldn't get value of path");
submodules_paths.push(actual_path.to_owned());
}
}
submodules_paths
};
SUBMODULES_PATHS.get_or_init(|| init_submodules_paths())
submodules_paths
}

View file

@ -22,10 +22,6 @@ RUN apt-get update && apt-get install -y --no-install-recommends \
COPY scripts/sccache.sh /scripts/
RUN sh /scripts/sccache.sh
RUN mkdir -p /config
RUN echo "[rust]" > /config/nopt-std-config.toml
RUN echo "optimize = false" >> /config/nopt-std-config.toml
ENV RUST_CONFIGURE_ARGS --build=i686-unknown-linux-gnu --disable-optimize-tests
ARG SCRIPT_ARG
COPY scripts/stage_2_test_set1.sh /scripts/

View file

@ -22,12 +22,8 @@ RUN apt-get update && apt-get install -y --no-install-recommends \
COPY scripts/sccache.sh /scripts/
RUN sh /scripts/sccache.sh
RUN mkdir -p /config
RUN echo "[rust]" > /config/nopt-std-config.toml
RUN echo "optimize = false" >> /config/nopt-std-config.toml
ENV RUST_CONFIGURE_ARGS --build=x86_64-unknown-linux-gnu \
--disable-optimize-tests \
--set rust.test-compare-mode
ENV SCRIPT python3 ../x.py test --stage 1 --config /config/nopt-std-config.toml library/std \
ENV SCRIPT python3 ../x.py test --stage 1 --set rust.optimize=false library/std \
&& python3 ../x.py --stage 2 test

View file

@ -300,7 +300,7 @@ auto:
env:
IMAGE: i686-gnu-nopt
DOCKER_SCRIPT: >-
python3 ../x.py test --stage 1 --config /config/nopt-std-config.toml library/std &&
python3 ../x.py test --stage 1 --set rust.optimize=false library/std &&
/scripts/stage_2_test_set2.sh
<<: *job-linux-4c

View file

@ -16,7 +16,7 @@
#![feature(unqualified_local_imports)]
#![feature(derive_coerce_pointee)]
#![feature(arbitrary_self_types)]
#![feature(file_lock)]
#![cfg_attr(bootstrap, feature(file_lock))]
// Configure clippy and other lints
#![allow(
clippy::collapsible_else_if,

View file

@ -2,7 +2,6 @@
#![feature(io_error_more)]
#![feature(io_error_uncategorized)]
#![feature(file_lock)]
use std::collections::BTreeMap;
use std::ffi::OsString;

View file

@ -486,7 +486,7 @@ pub const INERT_ATTRIBUTES: &[BuiltinAttribute] = &[
rustc_legacy_const_generics, Normal, template!(List: "N"), ErrorFollowing,
INTERNAL_UNSTABLE
),
// Do not const-check this function's body. It will always get replaced during CTFE.
// Do not const-check this function's body. It will always get replaced during CTFE via `hook_special_const_fn`.
rustc_attr!(
rustc_do_not_const_check, Normal, template!(Word), WarnFollowing, INTERNAL_UNSTABLE
),

View file

@ -0,0 +1,183 @@
//@ run-pass
//! Test that users are able to use stable mir APIs to retrieve
//! discriminant value and type for AdtDef and Coroutine variants
//@ ignore-stage1
//@ ignore-cross-compile
//@ ignore-remote
//@ edition: 2024
#![feature(rustc_private)]
#![feature(assert_matches)]
extern crate rustc_middle;
#[macro_use]
extern crate rustc_smir;
extern crate rustc_driver;
extern crate rustc_interface;
extern crate stable_mir;
use std::io::Write;
use std::ops::ControlFlow;
use stable_mir::CrateItem;
use stable_mir::crate_def::CrateDef;
use stable_mir::mir::{AggregateKind, Rvalue, Statement, StatementKind};
use stable_mir::ty::{IntTy, RigidTy, Ty};
const CRATE_NAME: &str = "crate_variant_ty";
/// Test if we can retrieve discriminant info for different types.
///
/// Each check panics (via `assert_eq!`/`unreachable!`) on a mismatch, so
/// returning `Continue` signals success to the `run!` harness.
fn test_def_tys() -> ControlFlow<()> {
    check_adt_mono();
    check_adt_poly();
    check_adt_poly2();

    ControlFlow::Continue(())
}
/// Checks the discriminants assigned when constructing each `Mono` variant.
fn check_adt_mono() {
    let mono = get_fn("mono").expect_body();

    // The first three blocks each construct one variant; the block index is
    // also the expected discriminant value (0, 1, 2).
    for variant in 0..3u128 {
        check_statement_is_aggregate_assign(
            &mono.blocks[variant as usize].statements[0],
            variant,
            RigidTy::Int(IntTy::Isize),
        );
    }
}
/// Checks the discriminants for `Poly<i32>` constructed in `poly`.
fn check_adt_poly() {
    let poly = get_fn("poly").expect_body();

    // Blocks 0..3 build variants A, B, C; discriminant equals the block index.
    for variant in 0..3u128 {
        check_statement_is_aggregate_assign(
            &poly.blocks[variant as usize].statements[0],
            variant,
            RigidTy::Int(IntTy::Isize),
        );
    }
}
/// Checks the discriminants for the generic `poly2::<T>` body.
fn check_adt_poly2() {
    let poly = get_fn("poly2").expect_body();

    // Blocks 0..3 build variants A, B, C; discriminant equals the block index.
    for variant in 0..3u128 {
        check_statement_is_aggregate_assign(
            &poly.blocks[variant as usize].statements[0],
            variant,
            RigidTy::Int(IntTy::Isize),
        );
    }
}
/// Looks up a local crate item by name; panics if it does not exist.
fn get_fn(name: &str) -> CrateItem {
    let items = stable_mir::all_local_items();
    items.into_iter().find(|item| item.name() == name).unwrap()
}
/// Asserts that `statement` assigns an ADT aggregate whose variant has the
/// expected discriminant value and discriminant type.
fn check_statement_is_aggregate_assign(
    statement: &Statement,
    expected_discr_val: u128,
    expected_discr_ty: RigidTy,
) {
    // Peel the statement down to the ADT aggregate being constructed in a
    // single nested pattern; anything else is a test bug.
    let Statement {
        kind:
            StatementKind::Assign(
                _,
                Rvalue::Aggregate(AggregateKind::Adt(adt_def, variant_idx, ..), _),
            ),
        ..
    } = statement
    else {
        unreachable!("Unexpected statement");
    };
    let discr = adt_def.discriminant_for_variant(*variant_idx);
    assert_eq!(discr.val, expected_discr_val);
    assert_eq!(discr.ty, Ty::from_rigid_kind(expected_discr_ty));
}
/// This test will generate and analyze a dummy crate using the stable mir.
/// For that, it will first write the dummy crate into a file.
/// Then it will create a `StableMir` using custom arguments and then
/// it will run the compiler.
fn main() {
    let path = "defs_ty_input.rs";
    // `path` is already a `&str`; the previous `&path` passed a needless
    // `&&str` (clippy::needless_borrow).
    generate_input(path).unwrap();
    let args = &[
        "rustc".to_string(),
        "-Cpanic=abort".to_string(),
        "--crate-name".to_string(),
        CRATE_NAME.to_string(),
        path.to_string(),
    ];
    run!(args, test_def_tys).unwrap();
}
/// Writes the dummy crate (enums `Mono` and `Poly<T>` plus functions that
/// construct every variant) to `path`, for the compiler under test to analyze.
///
/// Returns any I/O error from creating or writing the file.
fn generate_input(path: &str) -> std::io::Result<()> {
    let mut file = std::fs::File::create(path)?;
    // The raw string is the generated crate's source; `{{`/`}}` are escaped
    // braces for `write!`. Keep the contents byte-for-byte stable: the MIR
    // block layout of `mono`/`poly`/`poly2` is what the checks above index.
    write!(
        file,
        r#"
use std::hint::black_box;

enum Mono {{
A,
B(i32),
C {{ a: i32, b: u32 }},
}}

enum Poly<T> {{
A,
B(T),
C {{ t: T }},
}}

pub fn main() {{
mono();
poly();
poly2::<i32>(1);
}}

fn mono() {{
black_box(Mono::A);
black_box(Mono::B(6));
black_box(Mono::C {{a: 1, b: 10 }});
}}

fn poly() {{
black_box(Poly::<i32>::A);
black_box(Poly::B(1i32));
black_box(Poly::C {{ t: 1i32 }});
}}

fn poly2<T: Copy>(t: T) {{
black_box(Poly::<T>::A);
black_box(Poly::B(t));
black_box(Poly::C {{ t: t }});
}}
"#
    )?;
    Ok(())
}