Auto merge of #145450 - Kobzol:rollup-cqclix0, r=Kobzol

Rollup of 11 pull requests

Successful merges:

 - rust-lang/rust#144210 (std: thread: Return error if setting thread stack size fails)
 - rust-lang/rust#145310 (Reduce usage of `compiler_for` in bootstrap)
 - rust-lang/rust#145311 (ci: clean windows disk space in background)
 - rust-lang/rust#145340 (Split codegen backend check step into two and don't run it with `x check compiler`)
 - rust-lang/rust#145408 (Deduplicate -L search paths)
 - rust-lang/rust#145412 (Windows: Replace `GetThreadId`+`GetCurrentThread` with `GetCurrentThreadId`)
 - rust-lang/rust#145413 (bootstrap: Reduce dependencies)
 - rust-lang/rust#145426 (Fix typos in bootstrap.example.toml)
 - rust-lang/rust#145430 (Fix wrong spans with external macros in the `dropping_copy_types` lint)
 - rust-lang/rust#145431 (Enhance UI test output handling for runtime errors)
 - rust-lang/rust#145448 (Autolabel `src/tools/{rustfmt,rust-analyzer}` changes with `T-{rustfmt,rust-analyzer}`)

r? `@ghost`
`@rustbot` modify labels: rollup
This commit is contained in:
bors 2025-08-15 15:44:03 +00:00
commit 8b1889cc22
25 changed files with 477 additions and 338 deletions

View file

@ -223,6 +223,11 @@ jobs:
cd src/ci/citool
CARGO_INCREMENTAL=0 CARGO_TARGET_DIR=../../../build/citool cargo build
- name: wait for Windows disk cleanup to finish
if: ${{ matrix.free_disk && startsWith(matrix.os, 'windows-') }}
run: |
python3 src/ci/scripts/free-disk-space-windows-wait.py
- name: run the build
run: |
set +e

View file

@ -9,7 +9,7 @@
# a custom configuration file can also be specified with `--config` to the build
# system.
#
# Note that the following are equivelent, for more details see <https://toml.io/en/v1.0.0>.
# Note that the following are equivalent, for more details see <https://toml.io/en/v1.0.0>.
#
# build.verbose = 1
#
@ -345,9 +345,9 @@
# want to use vendoring. See https://forge.rust-lang.org/infra/other-installation-methods.html#source-code.
#build.vendor = if "is a tarball source" && "vendor" dir exists && ".cargo/config.toml" file exists { true } else { false }
# Typically the build system will build the Rust compiler twice. The second
# compiler, however, will simply use its own libraries to link against. If you
# would rather to perform a full bootstrap, compiling the compiler three times,
# If you build the compiler more than twice (stage3+) or the standard library more than once
# (stage 2+), the third compiler and second library will get uplifted from stage2 and stage1,
# respectively. If you would like to disable this uplifting, and rather perform a full bootstrap,
# then you can set this option to true.
#
# This is only useful for verifying that rustc generates reproducible builds.
@ -482,7 +482,7 @@
# Use `--extra-checks=''` to temporarily disable all extra checks.
#
# Automatically enabled in the "tools" profile.
# Set to the empty string to force disable (recommeded for hdd systems).
# Set to the empty string to force disable (recommended for hdd systems).
#build.tidy-extra-checks = ""
# Indicates whether ccache is used when building certain artifacts (e.g. LLVM).

View file

@ -151,7 +151,7 @@ impl<'tcx> LateLintPass<'tcx> for DropForgetUseless {
&& let Node::Stmt(stmt) = node
&& let StmtKind::Semi(e) = stmt.kind
&& e.hir_id == expr.hir_id
&& let Some(arg_span) = arg.span.find_ancestor_inside(expr.span)
&& let Some(arg_span) = arg.span.find_ancestor_inside_same_ctxt(expr.span)
{
UseLetUnderscoreIgnoreSuggestion::Suggestion {
start_span: expr.span.shrink_to_lo().until(arg_span),

View file

@ -2847,16 +2847,27 @@ pub fn build_session_options(early_dcx: &mut EarlyDiagCtxt, matches: &getopts::M
// This is the location used by the `rustc-dev` `rustup` component.
real_source_base_dir("lib/rustlib/rustc-src/rust", "compiler/rustc/src/main.rs");
let mut search_paths = vec![];
for s in &matches.opt_strs("L") {
search_paths.push(SearchPath::from_cli_opt(
sysroot.path(),
&target_triple,
early_dcx,
s,
unstable_opts.unstable_options,
));
}
// We eagerly scan all files in each passed -L path. If the same directory is passed multiple
// times, and the directory contains a lot of files, this can take a lot of time.
// So we remove -L paths that were passed multiple times, and keep only the first occurrence.
// We still have to keep the original order of the -L arguments.
let search_paths: Vec<SearchPath> = {
let mut seen_search_paths = FxHashSet::default();
let search_path_matches: Vec<String> = matches.opt_strs("L");
search_path_matches
.iter()
.filter(|p| seen_search_paths.insert(*p))
.map(|path| {
SearchPath::from_cli_opt(
sysroot.path(),
&target_triple,
early_dcx,
&path,
unstable_opts.unstable_options,
)
})
.collect()
};
let working_dir = std::env::current_dir().unwrap_or_else(|e| {
early_dcx.early_fatal(format!("Current directory is invalid: {e}"));

View file

@ -77,7 +77,18 @@ impl Thread {
let page_size = os::page_size();
let stack_size =
(stack_size + page_size - 1) & (-(page_size as isize - 1) as usize - 1);
assert_eq!(libc::pthread_attr_setstacksize(attr.as_mut_ptr(), stack_size), 0);
// Some libc implementations, e.g. musl, place an upper bound
// on the stack size, in which case we can only gracefully return
// an error here.
if libc::pthread_attr_setstacksize(attr.as_mut_ptr(), stack_size) != 0 {
assert_eq!(libc::pthread_attr_destroy(attr.as_mut_ptr()), 0);
drop(Box::from_raw(data));
return Err(io::const_error!(
io::ErrorKind::InvalidInput,
"invalid stack size"
));
}
}
};
}

View file

@ -2158,6 +2158,7 @@ GetCurrentDirectoryW
GetCurrentProcess
GetCurrentProcessId
GetCurrentThread
GetCurrentThreadId
GetEnvironmentStringsW
GetEnvironmentVariableW
GetExitCodeProcess
@ -2185,7 +2186,6 @@ GetSystemInfo
GetSystemTimeAsFileTime
GetSystemTimePreciseAsFileTime
GetTempPathW
GetThreadId
GetUserProfileDirectoryW
GetWindowsDirectoryW
HANDLE

View file

@ -38,6 +38,7 @@ windows_targets::link!("kernel32.dll" "system" fn GetCurrentDirectoryW(nbufferle
windows_targets::link!("kernel32.dll" "system" fn GetCurrentProcess() -> HANDLE);
windows_targets::link!("kernel32.dll" "system" fn GetCurrentProcessId() -> u32);
windows_targets::link!("kernel32.dll" "system" fn GetCurrentThread() -> HANDLE);
windows_targets::link!("kernel32.dll" "system" fn GetCurrentThreadId() -> u32);
windows_targets::link!("kernel32.dll" "system" fn GetEnvironmentStringsW() -> PWSTR);
windows_targets::link!("kernel32.dll" "system" fn GetEnvironmentVariableW(lpname : PCWSTR, lpbuffer : PWSTR, nsize : u32) -> u32);
windows_targets::link!("kernel32.dll" "system" fn GetExitCodeProcess(hprocess : HANDLE, lpexitcode : *mut u32) -> BOOL);
@ -61,7 +62,6 @@ windows_targets::link!("kernel32.dll" "system" fn GetSystemInfo(lpsysteminfo : *
windows_targets::link!("kernel32.dll" "system" fn GetSystemTimeAsFileTime(lpsystemtimeasfiletime : *mut FILETIME));
windows_targets::link!("kernel32.dll" "system" fn GetSystemTimePreciseAsFileTime(lpsystemtimeasfiletime : *mut FILETIME));
windows_targets::link!("kernel32.dll" "system" fn GetTempPathW(nbufferlength : u32, lpbuffer : PWSTR) -> u32);
windows_targets::link!("kernel32.dll" "system" fn GetThreadId(thread : HANDLE) -> u32);
windows_targets::link!("userenv.dll" "system" fn GetUserProfileDirectoryW(htoken : HANDLE, lpprofiledir : PWSTR, lpcchsize : *mut u32) -> BOOL);
windows_targets::link!("kernel32.dll" "system" fn GetWindowsDirectoryW(lpbuffer : PWSTR, usize : u32) -> u32);
windows_targets::link!("kernel32.dll" "system" fn InitOnceBeginInitialize(lpinitonce : *mut INIT_ONCE, dwflags : u32, fpending : *mut BOOL, lpcontext : *mut *mut core::ffi::c_void) -> BOOL);

View file

@ -129,7 +129,7 @@ impl Thread {
pub(crate) fn current_os_id() -> Option<u64> {
// SAFETY: FFI call with no preconditions.
let id: u32 = unsafe { c::GetThreadId(c::GetCurrentThread()) };
let id: u32 = unsafe { c::GetCurrentThreadId() };
// A return value of 0 indicates failed lookup.
if id == 0 { None } else { Some(id.into()) }

View file

@ -48,7 +48,6 @@ dependencies = [
"clap",
"clap_complete",
"cmake",
"fd-lock",
"home",
"ignore",
"insta",
@ -268,17 +267,6 @@ version = "2.3.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "37909eebbb50d72f9059c3b6d82c0463f2ff062c9e95845c43a6c9c0355411be"
[[package]]
name = "fd-lock"
version = "4.0.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0ce92ff622d6dadf7349484f42c93271a0d49b7cc4d466a936405bacbe10aa78"
dependencies = [
"cfg-if",
"rustix",
"windows-sys 0.59.0",
]
[[package]]
name = "filetime"
version = "0.2.25"
@ -759,13 +747,12 @@ dependencies = [
[[package]]
name = "tar"
version = "0.4.43"
version = "0.4.44"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c65998313f8e17d0d553d28f91a0df93e4dbbbf770279c7bc21ca0f09ea1a1f6"
checksum = "1d863878d212c87a19c1a610eb53bb01fe12951c0501cf5a0d65f724914a667a"
dependencies = [
"filetime",
"libc",
"xattr",
]
[[package]]
@ -1147,16 +1134,6 @@ dependencies = [
"bitflags",
]
[[package]]
name = "xattr"
version = "1.5.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0d65cbf2f12c15564212d48f4e3dfb87923d25d611f2aed18f4cb23f0413d89e"
dependencies = [
"libc",
"rustix",
]
[[package]]
name = "xz2"
version = "0.1.7"

View file

@ -38,7 +38,6 @@ cmake = "=0.1.54"
build_helper = { path = "../build_helper" }
clap = { version = "4.4", default-features = false, features = ["std", "usage", "help", "derive", "error-context"] }
clap_complete = "4.4"
fd-lock = "4.0"
home = "0.5"
ignore = "0.4"
libc = "0.2"
@ -51,7 +50,7 @@ serde = "1.0"
serde_derive = "1.0"
serde_json = "1.0"
sha2 = "0.10"
tar = "0.4"
tar = { version = "0.4.44", default-features = false }
termcolor = "1.4"
toml = "0.5"
walkdir = "2.4"

View file

@ -5,8 +5,8 @@
//! parent directory, and otherwise documentation can be found throughout the `build`
//! directory in each respective module.
use std::fs::{self, OpenOptions};
use std::io::{self, BufRead, BufReader, IsTerminal, Write};
use std::fs::{self, OpenOptions, TryLockError};
use std::io::{self, BufRead, BufReader, IsTerminal, Read, Write};
use std::path::Path;
use std::str::FromStr;
use std::time::Instant;
@ -39,38 +39,34 @@ fn main() {
let config = Config::parse(flags);
let mut build_lock;
let _build_lock_guard;
if !config.bypass_bootstrap_lock {
// Display PID of process holding the lock
// PID will be stored in a lock file
let lock_path = config.out.join("lock");
let pid = fs::read_to_string(&lock_path);
build_lock = fd_lock::RwLock::new(t!(fs::OpenOptions::new()
build_lock = t!(fs::OpenOptions::new()
.read(true)
.write(true)
.truncate(true)
.create(true)
.open(&lock_path)));
_build_lock_guard = match build_lock.try_write() {
Ok(mut lock) => {
t!(lock.write(process::id().to_string().as_ref()));
lock
.truncate(false)
.open(&lock_path));
t!(build_lock.try_lock().or_else(|e| {
if let TryLockError::Error(e) = e {
return Err(e);
}
err => {
drop(err);
// #135972: We can reach this point when the lock has been taken,
// but the locker has not yet written its PID to the file
if let Some(pid) = pid.ok().filter(|pid| !pid.is_empty()) {
println!("WARNING: build directory locked by process {pid}, waiting for lock");
} else {
println!("WARNING: build directory locked, waiting for lock");
}
let mut lock = t!(build_lock.write());
t!(lock.write(process::id().to_string().as_ref()));
lock
let mut pid = String::new();
t!(build_lock.read_to_string(&mut pid));
// #135972: We can reach this point when the lock has been taken,
// but the locker has not yet written its PID to the file
if !pid.is_empty() {
println!("WARNING: build directory locked by process {pid}, waiting for lock");
} else {
println!("WARNING: build directory locked, waiting for lock");
}
};
build_lock.lock()
}));
t!(build_lock.set_len(0));
t!(build_lock.write_all(process::id().to_string().as_bytes()));
}
// check_version warnings are not printed during setup, or during CI

View file

@ -314,41 +314,31 @@ pub fn prepare_compiler_for_check(
}
}
/// Checks a single codegen backend.
/// Check the Cranelift codegen backend.
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub struct CodegenBackend {
pub build_compiler: Compiler,
pub target: TargetSelection,
pub backend: CodegenBackendKind,
pub struct CraneliftCodegenBackend {
build_compiler: Compiler,
target: TargetSelection,
}
impl Step for CodegenBackend {
impl Step for CraneliftCodegenBackend {
type Output = ();
const IS_HOST: bool = true;
const DEFAULT: bool = true;
fn should_run(run: ShouldRun<'_>) -> ShouldRun<'_> {
run.paths(&["compiler/rustc_codegen_cranelift", "compiler/rustc_codegen_gcc"])
run.alias("rustc_codegen_cranelift").alias("cg_clif")
}
fn make_run(run: RunConfig<'_>) {
// FIXME: only check the backend(s) that were actually selected in run.paths
let build_compiler = prepare_compiler_for_check(run.builder, run.target, Mode::Codegen);
for backend in [CodegenBackendKind::Cranelift, CodegenBackendKind::Gcc] {
run.builder.ensure(CodegenBackend { build_compiler, target: run.target, backend });
}
run.builder.ensure(CraneliftCodegenBackend { build_compiler, target: run.target });
}
fn run(self, builder: &Builder<'_>) {
// FIXME: remove once https://github.com/rust-lang/rust/issues/112393 is resolved
if builder.build.config.vendor && self.backend.is_gcc() {
println!("Skipping checking of `rustc_codegen_gcc` with vendoring enabled.");
return;
}
let build_compiler = self.build_compiler;
let target = self.target;
let backend = self.backend;
let mut cargo = builder::Cargo::new(
builder,
@ -361,31 +351,104 @@ impl Step for CodegenBackend {
cargo
.arg("--manifest-path")
.arg(builder.src.join(format!("compiler/{}/Cargo.toml", backend.crate_name())));
.arg(builder.src.join("compiler/rustc_codegen_cranelift/Cargo.toml"));
rustc_cargo_env(builder, &mut cargo, target);
let _guard = builder.msg(
Kind::Check,
backend.crate_name(),
"rustc_codegen_cranelift",
Mode::Codegen,
self.build_compiler,
target,
);
let stamp = build_stamp::codegen_backend_stamp(builder, build_compiler, target, &backend)
.with_prefix("check");
let stamp = build_stamp::codegen_backend_stamp(
builder,
build_compiler,
target,
&CodegenBackendKind::Cranelift,
)
.with_prefix("check");
run_cargo(builder, cargo, builder.config.free_args.clone(), &stamp, vec![], true, false);
}
fn metadata(&self) -> Option<StepMetadata> {
Some(
StepMetadata::check(&self.backend.crate_name(), self.target)
StepMetadata::check("rustc_codegen_cranelift", self.target)
.built_by(self.build_compiler),
)
}
}
/// Check the GCC codegen backend.
///
/// Mirrors the Cranelift check step above: type-checks `rustc_codegen_gcc`
/// using a compiler prepared for `Mode::Codegen`, and records a
/// "check"-prefixed build stamp.
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub struct GccCodegenBackend {
/// Compiler that performs the check.
build_compiler: Compiler,
/// Target the backend crate is checked for.
target: TargetSelection,
}
impl Step for GccCodegenBackend {
type Output = ();
const IS_HOST: bool = true;
const DEFAULT: bool = true;
fn should_run(run: ShouldRun<'_>) -> ShouldRun<'_> {
// Selected via the crate name or its short `cg_gcc` alias.
run.alias("rustc_codegen_gcc").alias("cg_gcc")
}
fn make_run(run: RunConfig<'_>) {
let build_compiler = prepare_compiler_for_check(run.builder, run.target, Mode::Codegen);
run.builder.ensure(GccCodegenBackend { build_compiler, target: run.target });
}
fn run(self, builder: &Builder<'_>) {
// FIXME: remove once https://github.com/rust-lang/rust/issues/112393 is resolved
if builder.build.config.vendor {
println!("Skipping checking of `rustc_codegen_gcc` with vendoring enabled.");
return;
}
let build_compiler = self.build_compiler;
let target = self.target;
let mut cargo = builder::Cargo::new(
builder,
build_compiler,
Mode::Codegen,
SourceType::InTree,
target,
builder.kind,
);
// Point cargo at the backend's own manifest; the crate is not part of
// the main compiler workspace.
cargo.arg("--manifest-path").arg(builder.src.join("compiler/rustc_codegen_gcc/Cargo.toml"));
rustc_cargo_env(builder, &mut cargo, target);
let _guard = builder.msg(
Kind::Check,
"rustc_codegen_gcc",
Mode::Codegen,
self.build_compiler,
target,
);
// Reuse the regular codegen-backend stamp, namespaced with a "check"
// prefix so check artifacts do not clash with real backend builds.
let stamp = build_stamp::codegen_backend_stamp(
builder,
build_compiler,
target,
&CodegenBackendKind::Gcc,
)
.with_prefix("check");
run_cargo(builder, cargo, builder.config.free_args.clone(), &stamp, vec![], true, false);
}
fn metadata(&self) -> Option<StepMetadata> {
Some(StepMetadata::check("rustc_codegen_gcc", self.target).built_by(self.build_compiler))
}
}
macro_rules! tool_check_step {
(
$name:ident {

View file

@ -37,11 +37,12 @@ use crate::{
debug, trace,
};
/// Build a standard library for the given `target` using the given `compiler`.
/// Build a standard library for the given `target` using the given `build_compiler`.
#[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)]
pub struct Std {
pub target: TargetSelection,
pub compiler: Compiler,
/// Compiler that builds the standard library.
pub build_compiler: Compiler,
/// Whether to build only a subset of crates in the standard library.
///
/// This shouldn't be used from other steps; see the comment on [`Rustc`].
@ -54,10 +55,10 @@ pub struct Std {
}
impl Std {
pub fn new(compiler: Compiler, target: TargetSelection) -> Self {
pub fn new(build_compiler: Compiler, target: TargetSelection) -> Self {
Self {
target,
compiler,
build_compiler,
crates: Default::default(),
force_recompile: false,
extra_rust_args: &[],
@ -120,7 +121,7 @@ impl Step for Std {
trace!(force_recompile);
run.builder.ensure(Std {
compiler: run.builder.compiler(run.builder.top_stage, run.build_triple()),
build_compiler: run.builder.compiler(run.builder.top_stage, run.build_triple()),
target: run.target,
crates,
force_recompile,
@ -138,8 +139,8 @@ impl Step for Std {
let target = self.target;
// We already have std ready to be used for stage 0.
if self.compiler.stage == 0 {
let compiler = self.compiler;
if self.build_compiler.stage == 0 {
let compiler = self.build_compiler;
builder.ensure(StdLink::from_std(self, compiler));
return;
@ -148,9 +149,10 @@ impl Step for Std {
let build_compiler = if builder.download_rustc() && self.force_recompile {
// When there are changes in the library tree with CI-rustc, we want to build
// the stageN library and that requires using stageN-1 compiler.
builder.compiler(self.compiler.stage.saturating_sub(1), builder.config.host_target)
builder
.compiler(self.build_compiler.stage.saturating_sub(1), builder.config.host_target)
} else {
self.compiler
self.build_compiler
};
// When using `download-rustc`, we already have artifacts for the host available. Don't
@ -187,51 +189,50 @@ impl Step for Std {
let mut target_deps = builder.ensure(StartupObjects { compiler: build_compiler, target });
let compiler_to_use =
builder.compiler_for(build_compiler.stage, build_compiler.host, target);
trace!(?compiler_to_use);
// Stage of the stdlib that we're building
let stage = build_compiler.stage;
if compiler_to_use != build_compiler
// Never uplift std unless we have compiled stage 1; if stage 1 is compiled,
// uplift it from there.
//
// FIXME: improve `fn compiler_for` to avoid adding stage condition here.
&& build_compiler.stage > 1
// If we're building a stage2+ libstd, full bootstrap is
// disabled and we have a stage1 libstd already compiled for the given target,
// then simply uplift a previously built stage1 library.
if build_compiler.stage > 1
&& !builder.config.full_bootstrap
// This estimates if a stage1 libstd exists for the given target. If we're not
// cross-compiling, it should definitely exist by the time we're building a stage2
// libstd.
// Or if we are cross-compiling, and we are building a cross-compiled rustc, then that
// rustc needs to link to a cross-compiled libstd, so again we should have a stage1
// libstd for the given target prepared.
// Even if we guess wrong in the cross-compiled case, the worst that should happen is
// that we build a fresh stage1 libstd below, and then we immediately uplift it, so we
// don't pay the libstd build cost twice.
&& (target == builder.host_target || builder.config.hosts.contains(&target))
{
trace!(
?compiler_to_use,
?build_compiler,
"build_compiler != compiler_to_use, uplifting library"
);
let build_compiler_for_std_to_uplift = builder.compiler(1, builder.host_target);
builder.std(build_compiler_for_std_to_uplift, target);
builder.std(compiler_to_use, target);
let msg = if compiler_to_use.host == target {
let msg = if build_compiler_for_std_to_uplift.host == target {
format!(
"Uplifting library (stage{} -> stage{})",
compiler_to_use.stage, build_compiler.stage
"Uplifting library (stage{} -> stage{stage})",
build_compiler_for_std_to_uplift.stage
)
} else {
format!(
"Uplifting library (stage{}:{} -> stage{}:{})",
compiler_to_use.stage, compiler_to_use.host, build_compiler.stage, target
"Uplifting library (stage{}:{} -> stage{stage}:{target})",
build_compiler_for_std_to_uplift.stage, build_compiler_for_std_to_uplift.host,
)
};
builder.info(&msg);
// Even if we're not building std this stage, the new sysroot must
// still contain the third party objects needed by various targets.
self.copy_extra_objects(builder, &build_compiler, target);
builder.ensure(StdLink::from_std(self, compiler_to_use));
builder.ensure(StdLink::from_std(self, build_compiler_for_std_to_uplift));
return;
}
trace!(
?compiler_to_use,
?build_compiler,
"compiler == compiler_to_use, handling not-cross-compile scenario"
);
target_deps.extend(self.copy_extra_objects(builder, &build_compiler, target));
// We build a sysroot for mir-opt tests using the same trick that Miri does: A check build
@ -299,7 +300,7 @@ impl Step for Std {
}
fn metadata(&self) -> Option<StepMetadata> {
Some(StepMetadata::build("std", self.target).built_by(self.compiler))
Some(StepMetadata::build("std", self.target).built_by(self.build_compiler))
}
}
@ -665,6 +666,14 @@ pub fn std_cargo(builder: &Builder<'_>, target: TargetSelection, cargo: &mut Car
cargo.rustdocflag("-Zcrate-attr=warn(rust_2018_idioms)");
}
/// Link all libstd rlibs/dylibs into a sysroot of `target_compiler`.
///
/// Links those artifacts generated by `compiler` to the `stage` compiler's
/// sysroot for the specified `host` and `target`.
///
/// Note that this assumes that `compiler` has already generated the libstd
/// libraries for `target`, and this method will find them in the relevant
/// output directory.
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub struct StdLink {
pub compiler: Compiler,
@ -680,7 +689,7 @@ impl StdLink {
pub fn from_std(std: Std, host_compiler: Compiler) -> Self {
Self {
compiler: host_compiler,
target_compiler: std.compiler,
target_compiler: std.build_compiler,
target: std.target,
crates: std.crates,
force_recompile: std.force_recompile,
@ -951,14 +960,8 @@ impl Rustc {
}
impl Step for Rustc {
/// We return the stage of the "actual" compiler (not the uplifted one).
///
/// By "actual" we refer to the uplifting logic where we may not compile the requested stage;
/// instead, we uplift it from the previous stages. Which can lead to bootstrap failures in
/// specific situations where we request stage X from other steps. However we may end up
/// uplifting it from stage Y, causing the other stage to fail when attempting to link with
/// stage X which was never actually built.
type Output = u32;
type Output = ();
const IS_HOST: bool = true;
const DEFAULT: bool = false;
@ -997,7 +1000,7 @@ impl Step for Rustc {
/// This will build the compiler for a particular stage of the build using
/// the `build_compiler` targeting the `target` architecture. The artifacts
/// created will also be linked into the sysroot directory.
fn run(self, builder: &Builder<'_>) -> u32 {
fn run(self, builder: &Builder<'_>) {
let build_compiler = self.build_compiler;
let target = self.target;
@ -1013,7 +1016,7 @@ impl Step for Rustc {
&sysroot,
builder.config.ci_rustc_dev_contents(),
);
return build_compiler.stage;
return;
}
// Build a standard library for `target` using the `build_compiler`.
@ -1027,31 +1030,33 @@ impl Step for Rustc {
builder.info("WARNING: Use `--keep-stage-std` if you want to rebuild the compiler when it changes");
builder.ensure(RustcLink::from_rustc(self, build_compiler));
return build_compiler.stage;
return;
}
let compiler_to_use =
builder.compiler_for(build_compiler.stage, build_compiler.host, target);
if compiler_to_use != build_compiler {
builder.ensure(Rustc::new(compiler_to_use, target));
let msg = if compiler_to_use.host == target {
format!(
"Uplifting rustc (stage{} -> stage{})",
compiler_to_use.stage,
build_compiler.stage + 1
)
// The stage of the compiler that we're building
let stage = build_compiler.stage + 1;
// If we are building a stage3+ compiler, and full bootstrap is disabled, and we have a
// previous rustc available, we will uplift a compiler from a previous stage.
if build_compiler.stage >= 2
&& !builder.config.full_bootstrap
&& (target == builder.host_target || builder.hosts.contains(&target))
{
// If we're cross-compiling, the earliest rustc that we could have is stage 2.
// If we're not cross-compiling, then we should have rustc stage 1.
let stage_to_uplift = if target == builder.host_target { 1 } else { 2 };
let rustc_to_uplift = builder.compiler(stage_to_uplift, target);
let msg = if rustc_to_uplift.host == target {
format!("Uplifting rustc (stage{} -> stage{stage})", rustc_to_uplift.stage,)
} else {
format!(
"Uplifting rustc (stage{}:{} -> stage{}:{})",
compiler_to_use.stage,
compiler_to_use.host,
build_compiler.stage + 1,
target
"Uplifting rustc (stage{}:{} -> stage{stage}:{target})",
rustc_to_uplift.stage, rustc_to_uplift.host,
)
};
builder.info(&msg);
builder.ensure(RustcLink::from_rustc(self, compiler_to_use));
return compiler_to_use.stage;
builder.ensure(RustcLink::from_rustc(self, rustc_to_uplift));
return;
}
// Build a standard library for the current host target using the `build_compiler`.
@ -1128,8 +1133,6 @@ impl Step for Rustc {
self,
builder.compiler(build_compiler.stage, builder.config.host_target),
));
build_compiler.stage
}
fn metadata(&self) -> Option<StepMetadata> {
@ -1909,12 +1912,18 @@ impl Step for Sysroot {
}
}
/// Prepare a compiler sysroot.
///
/// The sysroot may contain various things useful for running the compiler, like linkers and
/// linker wrappers (LLD, LLVM bitcode linker, etc.).
///
/// This will assemble a compiler in `build/$target/stage$stage`.
#[derive(Debug, PartialOrd, Ord, Clone, PartialEq, Eq, Hash)]
pub struct Assemble {
/// The compiler which we will produce in this step. Assemble itself will
/// take care of ensuring that the necessary prerequisites to do so exist,
/// that is, this target can be a stage2 compiler and Assemble will build
/// previous stages for you.
/// that is, this can be e.g. a stage2 compiler and Assemble will build
/// the previous stages for you.
pub target_compiler: Compiler,
}
@ -1932,11 +1941,6 @@ impl Step for Assemble {
});
}
/// Prepare a new compiler from the artifacts in `stage`
///
/// This will assemble a compiler in `build/$host/stage$stage`. The compiler
/// must have been previously produced by the `stage - 1` builder.build
/// compiler.
fn run(self, builder: &Builder<'_>) -> Compiler {
let target_compiler = self.target_compiler;
@ -2065,7 +2069,7 @@ impl Step for Assemble {
target_compiler.stage - 1,
builder.config.host_target,
);
let mut build_compiler =
let build_compiler =
builder.compiler(target_compiler.stage - 1, builder.config.host_target);
// Build enzyme
@ -2089,24 +2093,13 @@ impl Step for Assemble {
}
// Build the libraries for this compiler to link to (i.e., the libraries
// it uses at runtime). NOTE: Crates the target compiler compiles don't
// link to these. (FIXME: Is that correct? It seems to be correct most
// of the time but I think we do link to these for stage2/bin compilers
// when not performing a full bootstrap).
// it uses at runtime).
debug!(
?build_compiler,
"target_compiler.host" = ?target_compiler.host,
"building compiler libraries to link to"
);
let actual_stage = builder.ensure(Rustc::new(build_compiler, target_compiler.host));
// Current build_compiler.stage might be uplifted instead of being built; so update it
// to not fail while linking the artifacts.
debug!(
"(old) build_compiler.stage" = build_compiler.stage,
"(adjusted) build_compiler.stage" = actual_stage,
"temporarily adjusting `build_compiler.stage` to account for uplifted libraries"
);
build_compiler.stage = actual_stage;
builder.ensure(Rustc::new(build_compiler, target_compiler.host));
let stage = target_compiler.stage;
let host = target_compiler.host;

View file

@ -1042,7 +1042,8 @@ impl<'a> Builder<'a> {
Kind::Check | Kind::Fix => describe!(
check::Rustc,
check::Rustdoc,
check::CodegenBackend,
check::CraneliftCodegenBackend,
check::GccCodegenBackend,
check::Clippy,
check::Miri,
check::CargoMiri,

View file

@ -1514,12 +1514,7 @@ mod snapshot {
insta::assert_snapshot!(
ctx.config("check")
.path("compiler")
.render_steps(), @r"
[check] rustc 0 <host> -> rustc 1 <host> (73 crates)
[check] rustc 0 <host> -> rustc 1 <host>
[check] rustc 0 <host> -> rustc_codegen_cranelift 1 <host>
[check] rustc 0 <host> -> rustc_codegen_gcc 1 <host>
");
.render_steps(), @"[check] rustc 0 <host> -> rustc 1 <host> (73 crates)");
}
#[test]
@ -1545,12 +1540,7 @@ mod snapshot {
ctx.config("check")
.path("compiler")
.stage(1)
.render_steps(), @r"
[check] rustc 0 <host> -> rustc 1 <host> (73 crates)
[check] rustc 0 <host> -> rustc 1 <host>
[check] rustc 0 <host> -> rustc_codegen_cranelift 1 <host>
[check] rustc 0 <host> -> rustc_codegen_gcc 1 <host>
");
.render_steps(), @"[check] rustc 0 <host> -> rustc 1 <host> (73 crates)");
}
#[test]
@ -1565,9 +1555,6 @@ mod snapshot {
[build] rustc 0 <host> -> rustc 1 <host>
[build] rustc 1 <host> -> std 1 <host>
[check] rustc 1 <host> -> rustc 2 <host> (73 crates)
[check] rustc 1 <host> -> rustc 2 <host>
[check] rustc 1 <host> -> rustc_codegen_cranelift 2 <host>
[check] rustc 1 <host> -> rustc_codegen_gcc 2 <host>
");
}
@ -1679,12 +1666,7 @@ mod snapshot {
ctx.config("check")
.paths(&["library", "compiler"])
.args(&args)
.render_steps(), @r"
[check] rustc 0 <host> -> rustc 1 <host> (73 crates)
[check] rustc 0 <host> -> rustc 1 <host>
[check] rustc 0 <host> -> rustc_codegen_cranelift 1 <host>
[check] rustc 0 <host> -> rustc_codegen_gcc 1 <host>
");
.render_steps(), @"[check] rustc 0 <host> -> rustc 1 <host> (73 crates)");
}
#[test]
@ -1768,7 +1750,6 @@ mod snapshot {
.render_steps(), @r"
[check] rustc 0 <host> -> rustc 1 <host>
[check] rustc 0 <host> -> rustc_codegen_cranelift 1 <host>
[check] rustc 0 <host> -> rustc_codegen_gcc 1 <host>
");
}
@ -2068,130 +2049,6 @@ mod snapshot {
[doc] rustc 1 <host> -> reference (book) 2 <host>
");
}
#[test]
fn clippy_ci() {
let ctx = TestCtx::new();
insta::assert_snapshot!(
ctx.config("clippy")
.path("ci")
.stage(2)
.render_steps(), @r"
[build] llvm <host>
[build] rustc 0 <host> -> rustc 1 <host>
[build] rustc 1 <host> -> std 1 <host>
[build] rustc 0 <host> -> clippy-driver 1 <host>
[build] rustc 0 <host> -> cargo-clippy 1 <host>
[clippy] rustc 1 <host> -> bootstrap 2 <host>
[clippy] rustc 1 <host> -> std 1 <host>
[clippy] rustc 1 <host> -> rustc 2 <host>
[check] rustc 1 <host> -> rustc 2 <host>
[clippy] rustc 1 <host> -> rustc_codegen_gcc 2 <host>
");
}
#[test]
fn clippy_compiler_stage1() {
let ctx = TestCtx::new();
insta::assert_snapshot!(
ctx.config("clippy")
.path("compiler")
.render_steps(), @r"
[build] llvm <host>
[clippy] rustc 0 <host> -> rustc 1 <host>
");
}
#[test]
fn clippy_compiler_stage2() {
let ctx = TestCtx::new();
insta::assert_snapshot!(
ctx.config("clippy")
.path("compiler")
.stage(2)
.render_steps(), @r"
[build] llvm <host>
[build] rustc 0 <host> -> rustc 1 <host>
[build] rustc 1 <host> -> std 1 <host>
[build] rustc 0 <host> -> clippy-driver 1 <host>
[build] rustc 0 <host> -> cargo-clippy 1 <host>
[clippy] rustc 1 <host> -> rustc 2 <host>
");
}
#[test]
fn clippy_std_stage1() {
let ctx = TestCtx::new();
insta::assert_snapshot!(
ctx.config("clippy")
.path("std")
.render_steps(), @r"
[build] llvm <host>
[build] rustc 0 <host> -> rustc 1 <host>
[build] rustc 0 <host> -> clippy-driver 1 <host>
[build] rustc 0 <host> -> cargo-clippy 1 <host>
[clippy] rustc 1 <host> -> std 1 <host>
");
}
// `x clippy std --stage 2`: requires the stage-2 compiler and stage-2
// clippy binaries, which in turn require a full stage-1 toolchain; the
// stage-2 std is then linted with the stage-2 toolchain.
#[test]
fn clippy_std_stage2() {
let ctx = TestCtx::new();
insta::assert_snapshot!(
ctx.config("clippy")
.path("std")
.stage(2)
.render_steps(), @r"
[build] llvm <host>
[build] rustc 0 <host> -> rustc 1 <host>
[build] rustc 1 <host> -> std 1 <host>
[build] rustc 1 <host> -> rustc 2 <host>
[build] rustc 1 <host> -> clippy-driver 2 <host>
[build] rustc 1 <host> -> cargo-clippy 2 <host>
[clippy] rustc 2 <host> -> std 2 <host>
");
}
// `x clippy miri --stage 1`: miri only needs the stage-1 compiler to be
// *checked* (not fully built) before it can be linted.
#[test]
fn clippy_miri_stage1() {
let ctx = TestCtx::new();
insta::assert_snapshot!(
ctx.config("clippy")
.path("miri")
.stage(1)
.render_steps(), @r"
[build] llvm <host>
[check] rustc 0 <host> -> rustc 1 <host>
[clippy] rustc 0 <host> -> miri 1 <host>
");
}
// `x clippy miri --stage 2`: the stage-1 toolchain is built, the stage-2
// compiler is only checked, and stage-2 miri is linted with stage-1 clippy.
#[test]
fn clippy_miri_stage2() {
let ctx = TestCtx::new();
insta::assert_snapshot!(
ctx.config("clippy")
.path("miri")
.stage(2)
.render_steps(), @r"
[build] llvm <host>
[build] rustc 0 <host> -> rustc 1 <host>
[build] rustc 1 <host> -> std 1 <host>
[check] rustc 1 <host> -> rustc 2 <host>
[build] rustc 0 <host> -> clippy-driver 1 <host>
[build] rustc 0 <host> -> cargo-clippy 1 <host>
[clippy] rustc 1 <host> -> miri 2 <host>
");
}
// `x clippy bootstrap`: bootstrap is linted with the stage-0 toolchain and
// needs no other build steps at all.
#[test]
fn clippy_bootstrap() {
let ctx = TestCtx::new();
insta::assert_snapshot!(
ctx.config("clippy")
.path("bootstrap")
.render_steps(), @"[clippy] rustc 0 <host> -> bootstrap 1 <host>");
}
}
struct ExecutedSteps {

View file

@ -0,0 +1,72 @@
"""
Start freeing disk space on Windows in the background by launching
the PowerShell cleanup script, and recording the PID in a file,
so later steps can wait for completion.
"""
import subprocess
from pathlib import Path
from free_disk_space_windows_util import get_pid_file, get_log_file, run_main
def get_cleanup_script() -> Path:
    """Locate the PowerShell cleanup script that lives next to this file.

    Raises an exception when the script is missing so the CI step fails
    loudly instead of silently skipping the disk cleanup.
    """
    script = Path(__file__).resolve().parent / "free-disk-space-windows.ps1"
    if script.exists():
        return script
    raise Exception(f"Cleanup script '{script}' not found")
def write_pid(pid: int):
    """Record `pid` in the well-known pid file for the later wait step.

    Refuses to overwrite an existing pid file, since that would indicate
    two concurrent cleanup launches.
    """
    target = get_pid_file()
    if target.exists():
        raise Exception(f"Pid file '{target}' already exists")
    target.write_text(str(pid))
    print(f"wrote pid {pid} in file {target}")
def launch_cleanup_process():
    """Start the PowerShell disk-cleanup script as a background process.

    The child's stdout and stderr are merged and redirected into the
    shared log file so a later CI step can print them.
    Returns the `subprocess.Popen` handle of the launched process.
    """
    command = [
        "pwsh",
        # Suppress PowerShell startup banner/logo for cleaner logs.
        "-NoLogo",
        # Don't load user/system profiles. Ensures a clean, predictable environment.
        "-NoProfile",
        # Disable interactive prompts. Required for CI to avoid hangs.
        "-NonInteractive",
        # Execute the specified script file (next argument).
        "-File",
        str(get_cleanup_script()),
    ]
    log_path = get_log_file()
    try:
        with open(log_path, "w", encoding="utf-8") as log_file:
            proc = subprocess.Popen(
                command,
                # Write child stdout to the log file.
                stdout=log_file,
                # Merge stderr into stdout for a single, ordered log stream.
                stderr=subprocess.STDOUT,
            )
    except FileNotFoundError as e:
        raise Exception("pwsh not found on PATH; cannot start disk cleanup.") from e
    print(
        f"Started free-disk-space cleanup in background. "
        f"pid={proc.pid}; log_file={log_path}"
    )
    return proc
def main() -> int:
    """Launch the background cleanup and record its PID for the wait step."""
    # Persist the child's pid so that a later workflow step can read it
    # and poll until the background cleanup process completes.
    write_pid(launch_cleanup_process().pid)
    return 0
if __name__ == "__main__":
run_main(main)

View file

@ -0,0 +1,77 @@
"""
Wait for the background Windows disk cleanup process.
"""
import ctypes
import time
from free_disk_space_windows_util import get_pid_file, get_log_file, run_main
def is_process_running(pid: int) -> bool:
    """Return True while a Windows process with the given PID is alive.

    Uses OpenProcess with the minimal access right; a null handle means
    the process is not running. (Zero is also returned when the caller
    lacks sufficient rights to query the process, but GitHub Actions
    runners have those rights.)
    """
    PROCESS_QUERY_LIMITED_INFORMATION = 0x1000
    kernel32 = ctypes.windll.kernel32
    handle = kernel32.OpenProcess(PROCESS_QUERY_LIMITED_INFORMATION, 0, pid)
    if handle == 0:
        return False
    kernel32.CloseHandle(handle)
    return True
def print_logs():
    """Print the logs produced by the cleanup script, if any."""
    log_file = get_log_file()
    if not log_file.exists():
        print(f"::warning::Log file '{log_file}' not found")
        return
    print("free-disk-space logs:")
    # Print entire log; replace undecodable bytes to avoid exceptions.
    try:
        contents = log_file.read_text(encoding="utf-8", errors="replace")
    except Exception as e:
        raise Exception(f"Failed to read log file '{log_file}'") from e
    print(contents)
def read_pid_from_file() -> int:
    """Read and consume the PID recorded by the start script.

    The pid file is deleted after reading so a stale pid is never
    waited on twice.
    """
    pid_file = get_pid_file()
    if not pid_file.exists():
        raise Exception(
            f"No background free-disk-space process to wait for: pid file {pid_file} not found"
        )
    content = pid_file.read_text().strip()
    # Consume the file so later runs cannot pick up a stale pid.
    pid_file.unlink(missing_ok=True)
    try:
        # The pid is on the first line; convert it to int.
        return int(content.splitlines()[0])
    except Exception as e:
        raise Exception(
            f"Error while parsing the pid file with content '{content!r}'"
        ) from e
def main() -> int:
    """Block until the background cleanup process exits, then show its logs."""
    pid = read_pid_from_file()
    # Poll until the process recorded in the pid file has terminated.
    while is_process_running(pid):
        time.sleep(3)
    print_logs()
    return 0
if __name__ == "__main__":
run_main(main)

View file

@ -4,7 +4,7 @@ set -euo pipefail
script_dir=$(dirname "$0")
if [[ "${RUNNER_OS:-}" == "Windows" ]]; then
pwsh $script_dir/free-disk-space-windows.ps1
python3 "$script_dir/free-disk-space-windows-start.py"
else
$script_dir/free-disk-space-linux.sh
fi

View file

@ -0,0 +1,29 @@
"""
Utilities for Windows disk space cleanup scripts.
"""
import os
from pathlib import Path
import sys
def get_temp_dir() -> Path:
    """Get the temporary directory set by GitHub Actions.

    Raises a clear error when the RUNNER_TEMP environment variable is not
    set (e.g. when run outside of GitHub Actions), instead of the cryptic
    `TypeError` that `Path(None)` would otherwise produce.
    """
    temp_dir = os.environ.get("RUNNER_TEMP")
    if temp_dir is None:
        raise Exception("RUNNER_TEMP environment variable is not set")
    return Path(temp_dir)
def get_pid_file() -> Path:
    """Path of the file that records the background cleanup process's PID."""
    return get_temp_dir() / "free-disk-space.pid"
def get_log_file() -> Path:
    """Path of the file that captures the cleanup script's merged output."""
    return get_temp_dir() / "free-disk-space.log"
def run_main(main_fn):
    """Run `main_fn`, report failures as GitHub Actions errors, and exit.

    Exits with the integer returned by `main_fn`, or with 1 when it
    raises any exception (after printing a `::error::` annotation so the
    failure shows up in the Actions UI).
    """
    try:
        exit_code = main_fn()
    except Exception as e:
        print(f"::error::{e}")
        exit_code = 1
    sys.exit(exit_code)

View file

@ -16,7 +16,7 @@ use std::{fmt, panic, str};
pub(crate) use make::{BuildDocTestBuilder, DocTestBuilder};
pub(crate) use markdown::test as test_markdown;
use rustc_data_structures::fx::{FxHashMap, FxHasher, FxIndexMap, FxIndexSet};
use rustc_data_structures::fx::{FxHashMap, FxHashSet, FxHasher, FxIndexMap, FxIndexSet};
use rustc_errors::emitter::HumanReadableErrorType;
use rustc_errors::{ColorConfig, DiagCtxtHandle};
use rustc_hir as hir;
@ -689,6 +689,10 @@ fn run_test(
"--extern=doctest_bundle_{edition}=",
edition = doctest.edition
));
// Deduplicate passed -L directory paths, since usually all dependencies will be in the
// same directory (e.g. target/debug/deps from Cargo).
let mut seen_search_dirs = FxHashSet::default();
for extern_str in &rustdoc_options.extern_strs {
if let Some((_cratename, path)) = extern_str.split_once('=') {
// Direct dependencies of the tests themselves are
@ -698,7 +702,9 @@ fn run_test(
.parent()
.filter(|x| x.components().count() > 0)
.unwrap_or(Path::new("."));
runner_compiler.arg("-L").arg(dir);
if seen_search_dirs.insert(dir) {
runner_compiler.arg("-L").arg(dir);
}
}
}
let output_bundle_file = doctest

View file

@ -10,6 +10,7 @@ use super::{
TestCx, TestOutput, Truncated, UI_FIXED, WillExecute,
};
use crate::json;
use crate::runtest::ProcRes;
impl TestCx<'_> {
pub(super) fn run_ui_test(&self) {
@ -127,6 +128,9 @@ impl TestCx<'_> {
);
}
// If the test is executed, capture its ProcRes separately so that
// pattern/forbid checks can report the *runtime* stdout/stderr when they fail.
let mut run_proc_res: Option<ProcRes> = None;
let output_to_check = if let WillExecute::Yes = should_run {
let proc_res = self.exec_compiled_test();
let run_output_errors = if self.props.check_run_results {
@ -189,7 +193,10 @@ impl TestCx<'_> {
unreachable!("run_ui_test() must not be called if the test should not run");
}
self.get_output(&proc_res)
let output = self.get_output(&proc_res);
// Move the proc_res into our option after we've extracted output.
run_proc_res = Some(proc_res);
output
} else {
self.get_output(&proc_res)
};
@ -200,9 +207,14 @@ impl TestCx<'_> {
explicit, self.config.compare_mode, proc_res.status, self.props.error_patterns
);
// Compiler diagnostics (expected errors) are always tied to the compile-time ProcRes.
self.check_expected_errors(&proc_res);
self.check_all_error_patterns(&output_to_check, &proc_res);
self.check_forbid_output(&output_to_check, &proc_res);
// For runtime pattern/forbid checks prefer the executed program's ProcRes if available
// so that missing pattern failures include the program's stdout/stderr.
let pattern_proc_res = run_proc_res.as_ref().unwrap_or(&proc_res);
self.check_all_error_patterns(&output_to_check, pattern_proc_res);
self.check_forbid_output(&output_to_check, pattern_proc_res);
if self.props.run_rustfix && self.config.compare_mode.is_none() {
// And finally, compile the fixed code and make sure it both

View file

@ -9,4 +9,7 @@ fn main() {
let mut msg = String::new();
let _ = writeln!(&mut msg, "test");
//~^ ERROR calls to `std::mem::drop`
let _ = format_args!("a");
//~^ ERROR calls to `std::mem::drop`
}

View file

@ -9,4 +9,7 @@ fn main() {
let mut msg = String::new();
drop(writeln!(&mut msg, "test"));
//~^ ERROR calls to `std::mem::drop`
drop(format_args!("a"));
//~^ ERROR calls to `std::mem::drop`
}

View file

@ -17,5 +17,19 @@ LL - drop(writeln!(&mut msg, "test"));
LL + let _ = writeln!(&mut msg, "test");
|
error: aborting due to 1 previous error
error: calls to `std::mem::drop` with a value that implements `Copy` does nothing
--> $DIR/dropping_copy_types-macros.rs:13:5
|
LL | drop(format_args!("a"));
| ^^^^^-----------------^
| |
| argument has type `Arguments<'_>`
|
help: use `let _ = ...` to ignore the expression or result
|
LL - drop(format_args!("a"));
LL + let _ = format_args!("a");
|
error: aborting due to 2 previous errors

View file

@ -627,6 +627,16 @@ trigger_files = [
"src/ci",
]
# Auto-apply the rust-analyzer team label when its subtree changes.
[autolabel."T-rust-analyzer"]
trigger_files = [
"src/tools/rust-analyzer",
]
# Auto-apply the rustfmt team label when its subtree changes.
[autolabel."T-rustfmt"]
trigger_files = [
"src/tools/rustfmt",
]
# ------------------------------------------------------------------------------
# Prioritization and team nominations
# ------------------------------------------------------------------------------