Merge from rust-lang/rust

This commit is contained in:
Laurențiu Nicola 2025-06-09 15:44:40 +03:00
commit 88223c56d9
3427 changed files with 107813 additions and 37739 deletions

View file

@ -89,9 +89,9 @@ dependencies = [
[[package]]
name = "cc"
version = "1.2.17"
version = "1.2.23"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1fcb57c740ae1daf453ae85f16e37396f672b039e00d9d866e07ddb24e328e3a"
checksum = "5f4ac86a9e5bc1e2b3449ab9d7d3a6a405e3d1bb28d7b9be8614f55846ae3766"
dependencies = [
"shlex",
]

View file

@ -32,7 +32,7 @@ test = false
# Most of the time updating these dependencies requires modifications to the
# bootstrap codebase(e.g., https://github.com/rust-lang/rust/issues/124565);
# otherwise, some targets will fail. That's why these dependencies are explicitly pinned.
cc = "=1.2.17"
cc = "=1.2.23"
cmake = "=0.1.54"
build_helper = { path = "../build_helper" }

View file

@ -394,6 +394,7 @@ def default_build_triple(verbose):
"i686": "i686",
"i686-AT386": "i686",
"i786": "i686",
"loongarch32": "loongarch32",
"loongarch64": "loongarch64",
"m68k": "m68k",
"csky": "csky",
@ -1118,7 +1119,6 @@ class RustBuild(object):
if "RUSTFLAGS_BOOTSTRAP" in env:
env["RUSTFLAGS"] += " " + env["RUSTFLAGS_BOOTSTRAP"]
env["PATH"] = os.path.join(self.bin_root(), "bin") + os.pathsep + env["PATH"]
if not os.path.isfile(self.cargo()):
raise Exception("no cargo executable found at `{}`".format(self.cargo()))
args = [

View file

@ -1,17 +1,19 @@
# These defaults are meant for contributors to the standard library and documentation.
[build]
# When building the standard library, you almost never want to build the compiler itself.
build-stage = 0
test-stage = 0
bench-stage = 0
bench-stage = 1
build-stage = 1
check-stage = 1
test-stage = 1
[rust]
# This greatly increases the speed of rebuilds, especially when there are only minor changes. However, it makes the initial build slightly slower.
incremental = true
# Make the compiler and standard library faster to build, at the expense of a ~20% runtime slowdown.
lto = "off"
# Download rustc by default for library profile if compiler-affecting
# directories are not modified. For CI this is disabled.
# When building the standard library, you almost never want to build the compiler itself.
#
# If compiler-affecting directories are not modified, use precompiled rustc to speed up
# library development by skipping compiler builds.
download-rustc = "if-unchanged"
[llvm]

View file

@ -51,16 +51,18 @@ check-aux:
$(Q)$(BOOTSTRAP) test --stage 2 \
src/tools/cargo \
src/tools/cargotest \
src/etc/test-float-parse \
src/tools/test-float-parse \
$(BOOTSTRAP_ARGS)
# Run standard library tests in Miri.
$(Q)$(BOOTSTRAP) miri --stage 2 \
$(Q)MIRIFLAGS="-Zmiri-strict-provenance" \
$(BOOTSTRAP) miri --stage 2 \
library/coretests \
library/alloctests \
library/alloc \
$(BOOTSTRAP_ARGS) \
--no-doc
# Some doctests use file system operations to demonstrate dealing with `Result`.
# Some doctests use file system operations to demonstrate dealing with `Result`,
# so we have to run them with isolation disabled.
$(Q)MIRIFLAGS="-Zmiri-disable-isolation" \
$(BOOTSTRAP) miri --stage 2 \
library/coretests \
@ -70,22 +72,19 @@ check-aux:
--doc
# In `std` we cannot test everything, so we skip some modules.
$(Q)MIRIFLAGS="-Zmiri-disable-isolation" \
$(BOOTSTRAP) miri --stage 2 library/std \
$(BOOTSTRAP) miri --stage 2 \
library/std \
$(BOOTSTRAP_ARGS) \
--no-doc -- \
--skip fs:: --skip net:: --skip process:: --skip sys::fd:: --skip sys::pal::
$(Q)MIRIFLAGS="-Zmiri-disable-isolation" \
$(BOOTSTRAP) miri --stage 2 library/std \
$(BOOTSTRAP_ARGS) \
--doc -- \
-- \
--skip fs:: --skip net:: --skip process:: --skip sys::fd:: --skip sys::pal::
# Also test some very target-specific modules on other targets
# (making sure to cover an i686 target as well).
$(Q)MIRIFLAGS="-Zmiri-disable-isolation" BOOTSTRAP_SKIP_TARGET_SANITY=1 \
$(BOOTSTRAP) miri --stage 2 library/std \
$(BOOTSTRAP_ARGS) \
$(BOOTSTRAP) miri --stage 2 \
library/std \
--target aarch64-apple-darwin,i686-pc-windows-msvc \
--no-doc -- \
$(BOOTSTRAP_ARGS) \
-- \
time:: sync:: thread:: env::
dist:
$(Q)$(BOOTSTRAP) dist $(BOOTSTRAP_ARGS)

View file

@ -120,14 +120,12 @@ fn main() {
};
cmd.args(&args).env(dylib_path_var(), env::join_paths(&dylib_path).unwrap());
if let Some(crate_name) = crate_name {
if let Some(target) = env::var_os("RUSTC_TIME") {
if target == "all"
|| target.into_string().unwrap().split(',').any(|c| c.trim() == crate_name)
{
cmd.arg("-Ztime-passes");
}
}
if let Some(crate_name) = crate_name
&& let Some(target) = env::var_os("RUSTC_TIME")
&& (target == "all"
|| target.into_string().unwrap().split(',').any(|c| c.trim() == crate_name))
{
cmd.arg("-Ztime-passes");
}
// Print backtrace in case of ICE
@ -242,10 +240,10 @@ fn main() {
}
}
if env::var_os("RUSTC_BOLT_LINK_FLAGS").is_some() {
if let Some("rustc_driver") = crate_name {
cmd.arg("-Clink-args=-Wl,-q");
}
if env::var_os("RUSTC_BOLT_LINK_FLAGS").is_some()
&& let Some("rustc_driver") = crate_name
{
cmd.arg("-Clink-args=-Wl,-q");
}
let is_test = args.iter().any(|a| a == "--test");
@ -282,25 +280,24 @@ fn main() {
(child, status)
};
if env::var_os("RUSTC_PRINT_STEP_TIMINGS").is_some()
|| env::var_os("RUSTC_PRINT_STEP_RUSAGE").is_some()
if (env::var_os("RUSTC_PRINT_STEP_TIMINGS").is_some()
|| env::var_os("RUSTC_PRINT_STEP_RUSAGE").is_some())
&& let Some(crate_name) = crate_name
{
if let Some(crate_name) = crate_name {
let dur = start.elapsed();
// If the user requested resource usage data, then
// include that in addition to the timing output.
let rusage_data =
env::var_os("RUSTC_PRINT_STEP_RUSAGE").and_then(|_| format_rusage_data(child));
eprintln!(
"[RUSTC-TIMING] {} test:{} {}.{:03}{}{}",
crate_name,
is_test,
dur.as_secs(),
dur.subsec_millis(),
if rusage_data.is_some() { " " } else { "" },
rusage_data.unwrap_or_default(),
);
}
let dur = start.elapsed();
// If the user requested resource usage data, then
// include that in addition to the timing output.
let rusage_data =
env::var_os("RUSTC_PRINT_STEP_RUSAGE").and_then(|_| format_rusage_data(child));
eprintln!(
"[RUSTC-TIMING] {} test:{} {}.{:03}{}{}",
crate_name,
is_test,
dur.as_secs(),
dur.subsec_millis(),
if rusage_data.is_some() { " " } else { "" },
rusage_data.unwrap_or_default(),
);
}
if status.success() {

View file

@ -1,5 +1,6 @@
//! Implementation of compiling the compiler and standard library, in "check"-based modes.
use crate::core::build_steps::compile;
use crate::core::build_steps::compile::{
add_to_sysroot, run_cargo, rustc_cargo, rustc_cargo_env, std_cargo, std_crates_for_run_make,
};
@ -27,11 +28,16 @@ pub struct Std {
/// passing `Builder::kind` to cargo invocations would run clippy on the entire compiler and library,
/// which is not useful if we only want to lint a few crates with specific rules.
override_build_kind: Option<Kind>,
/// Never use this from outside calls. It is intended for internal use only within `check::Std::make_run`
/// and `check::Std::run`.
custom_stage: Option<u32>,
}
impl Std {
const CRATE_OR_DEPS: &[&str] = &["sysroot", "coretests", "alloctests"];
pub fn new(target: TargetSelection) -> Self {
Self { target, crates: vec![], override_build_kind: None }
Self { target, crates: vec![], override_build_kind: None, custom_stage: None }
}
pub fn build_kind(mut self, kind: Option<Kind>) -> Self {
@ -45,22 +51,64 @@ impl Step for Std {
const DEFAULT: bool = true;
fn should_run(run: ShouldRun<'_>) -> ShouldRun<'_> {
run.crate_or_deps("sysroot")
.crate_or_deps("coretests")
.crate_or_deps("alloctests")
.path("library")
let mut run = run;
for c in Std::CRATE_OR_DEPS {
run = run.crate_or_deps(c);
}
run.path("library")
}
fn make_run(run: RunConfig<'_>) {
let crates = std_crates_for_run_make(&run);
run.builder.ensure(Std { target: run.target, crates, override_build_kind: None });
let stage = if run.builder.config.is_explicit_stage() || run.builder.top_stage >= 1 {
run.builder.top_stage
} else {
1
};
run.builder.ensure(Std {
target: run.target,
crates,
override_build_kind: None,
custom_stage: Some(stage),
});
}
fn run(self, builder: &Builder<'_>) {
if !builder.download_rustc() && builder.config.skip_std_check_if_no_download_rustc {
eprintln!(
"WARNING: `--skip-std-check-if-no-download-rustc` flag was passed and `rust.download-rustc` is not available. Skipping."
);
return;
}
builder.require_submodule("library/stdarch", None);
let stage = self.custom_stage.unwrap_or(builder.top_stage);
let target = self.target;
let compiler = builder.compiler(builder.top_stage, builder.config.build);
let compiler = builder.compiler(stage, builder.config.build);
if stage == 0 {
let mut is_explicitly_called =
builder.paths.iter().any(|p| p.starts_with("library") || p.starts_with("std"));
if !is_explicitly_called {
for c in Std::CRATE_OR_DEPS {
is_explicitly_called = builder.paths.iter().any(|p| p.starts_with(c));
}
}
if is_explicitly_called {
eprintln!("WARNING: stage 0 std is precompiled and does nothing during `x check`.");
}
// Reuse the stage0 libstd
builder.ensure(compile::Std::new(compiler, target));
return;
}
let mut cargo = builder::Cargo::new(
builder,
@ -84,6 +132,7 @@ impl Step for Std {
let _guard = builder.msg_check(
format_args!("library artifacts{}", crate_description(&self.crates)),
target,
Some(stage),
);
let stamp = build_stamp::libstd_stamp(builder, compiler, target).with_prefix("check");
@ -136,7 +185,7 @@ impl Step for Std {
}
let stamp = build_stamp::libstd_stamp(builder, compiler, target).with_prefix("check-test");
let _guard = builder.msg_check("library test/bench/example targets", target);
let _guard = builder.msg_check("library test/bench/example targets", target, Some(stage));
run_cargo(builder, cargo, builder.config.free_args.clone(), &stamp, vec![], true, false);
}
}
@ -237,6 +286,7 @@ impl Step for Rustc {
let _guard = builder.msg_check(
format_args!("compiler artifacts{}", crate_description(&self.crates)),
target,
None,
);
let stamp = build_stamp::librustc_stamp(builder, compiler, target).with_prefix("check");
@ -297,7 +347,7 @@ impl Step for CodegenBackend {
.arg(builder.src.join(format!("compiler/rustc_codegen_{backend}/Cargo.toml")));
rustc_cargo_env(builder, &mut cargo, target, compiler.stage);
let _guard = builder.msg_check(backend, target);
let _guard = builder.msg_check(backend, target, None);
let stamp = build_stamp::codegen_backend_stamp(builder, compiler, target, backend)
.with_prefix("check");
@ -364,7 +414,7 @@ impl Step for RustAnalyzer {
let stamp = BuildStamp::new(&builder.cargo_out(compiler, Mode::ToolRustc, target))
.with_prefix("rust-analyzer-check");
let _guard = builder.msg_check("rust-analyzer artifacts", target);
let _guard = builder.msg_check("rust-analyzer artifacts", target, None);
run_cargo(builder, cargo, builder.config.free_args.clone(), &stamp, vec![], true, false);
}
}
@ -427,7 +477,7 @@ impl Step for Compiletest {
let stamp = BuildStamp::new(&builder.cargo_out(compiler, mode, self.target))
.with_prefix("compiletest-check");
let _guard = builder.msg_check("compiletest artifacts", self.target);
let _guard = builder.msg_check("compiletest artifacts", self.target, None);
run_cargo(builder, cargo, builder.config.free_args.clone(), &stamp, vec![], true, false);
}
}
@ -505,7 +555,7 @@ fn run_tool_check_step(
let stamp = BuildStamp::new(&builder.cargo_out(compiler, Mode::ToolRustc, target))
.with_prefix(&format!("{}-check", step_type_name.to_lowercase()));
let _guard = builder.msg_check(format!("{display_name} artifacts"), target);
let _guard = builder.msg_check(format!("{display_name} artifacts"), target, None);
run_cargo(builder, cargo, builder.config.free_args.clone(), &stamp, vec![], true, false);
}
@ -519,7 +569,7 @@ tool_check_step!(Miri { path: "src/tools/miri" });
tool_check_step!(CargoMiri { path: "src/tools/miri/cargo-miri" });
tool_check_step!(Rustfmt { path: "src/tools/rustfmt" });
tool_check_step!(MiroptTestTools { path: "src/tools/miropt-test-tools" });
tool_check_step!(TestFloatParse { path: "src/etc/test-float-parse" });
tool_check_step!(TestFloatParse { path: "src/tools/test-float-parse" });
tool_check_step!(FeaturesStatusDump { path: "src/tools/features-status-dump" });
tool_check_step!(Bootstrap { path: "src/bootstrap", default: false });

View file

@ -207,16 +207,18 @@ impl Step for Rustc {
let compiler = builder.compiler(builder.top_stage, builder.config.build);
let target = self.target;
if compiler.stage != 0 {
// If we're not in stage 0, then we won't have a std from the beta
// compiler around. That means we need to make sure there's one in
// the sysroot for the compiler to find. Otherwise, we're going to
// fail when building crates that need to generate code (e.g., build
// scripts and their dependencies).
builder.ensure(compile::Std::new(compiler, compiler.host));
builder.ensure(compile::Std::new(compiler, target));
} else {
builder.ensure(check::Std::new(target).build_kind(Some(Kind::Check)));
if !builder.download_rustc() {
if compiler.stage != 0 {
// If we're not in stage 0, then we won't have a std from the beta
// compiler around. That means we need to make sure there's one in
// the sysroot for the compiler to find. Otherwise, we're going to
// fail when building crates that need to generate code (e.g., build
// scripts and their dependencies).
builder.ensure(compile::Std::new(compiler, compiler.host));
builder.ensure(compile::Std::new(compiler, target));
} else {
builder.ensure(check::Std::new(target).build_kind(Some(Kind::Check)));
}
}
let mut cargo = builder::Cargo::new(
@ -286,7 +288,9 @@ macro_rules! lint_any {
let compiler = builder.compiler(builder.top_stage, builder.config.build);
let target = self.target;
builder.ensure(check::Rustc::new(target, builder).build_kind(Some(Kind::Check)));
if !builder.download_rustc() {
builder.ensure(check::Rustc::new(target, builder).build_kind(Some(Kind::Check)));
};
let cargo = prepare_tool_cargo(
builder,
@ -351,7 +355,7 @@ lint_any!(
Rustfmt, "src/tools/rustfmt", "rustfmt";
RustInstaller, "src/tools/rust-installer", "rust-installer";
Tidy, "src/tools/tidy", "tidy";
TestFloatParse, "src/etc/test-float-parse", "test-float-parse";
TestFloatParse, "src/tools/test-float-parse", "test-float-parse";
);
#[derive(Debug, Clone, PartialEq, Eq, Hash)]

View file

@ -147,14 +147,27 @@ impl Step for Std {
)]
fn run(self, builder: &Builder<'_>) {
let target = self.target;
let compiler = self.compiler;
// We already have std ready to be used for stage 0.
if self.compiler.stage == 0 {
let compiler = self.compiler;
builder.ensure(StdLink::from_std(self, compiler));
return;
}
let compiler = if builder.download_rustc() && self.force_recompile {
// When there are changes in the library tree with CI-rustc, we want to build
// the stageN library and that requires using stageN-1 compiler.
builder.compiler(self.compiler.stage.saturating_sub(1), builder.config.build)
} else {
self.compiler
};
// When using `download-rustc`, we already have artifacts for the host available. Don't
// recompile them.
if builder.download_rustc() && builder.config.is_host_target(target)
// NOTE: the beta compiler may generate different artifacts than the downloaded compiler, so
// its artifacts can't be reused.
&& compiler.stage != 0
if builder.download_rustc()
&& builder.config.is_host_target(target)
&& !self.force_recompile
{
let sysroot = builder.ensure(Sysroot { compiler, force_recompile: false });
@ -189,7 +202,13 @@ impl Step for Std {
let compiler_to_use = builder.compiler_for(compiler.stage, compiler.host, target);
trace!(?compiler_to_use);
if compiler_to_use != compiler {
if compiler_to_use != compiler
// Never uplift std unless we have compiled stage 1; if stage 1 is compiled,
// uplift it from there.
//
// FIXME: improve `fn compiler_for` to avoid adding stage condition here.
&& compiler.stage > 1
{
trace!(?compiler_to_use, ?compiler, "compiler != compiler_to_use, uplifting library");
builder.ensure(Std::new(compiler_to_use, target));
@ -222,27 +241,6 @@ impl Step for Std {
target_deps.extend(self.copy_extra_objects(builder, &compiler, target));
// The LLD wrappers and `rust-lld` are self-contained linking components that can be
// necessary to link the stdlib on some targets. We'll also need to copy these binaries to
// the `stage0-sysroot` to ensure the linker is found when bootstrapping on such a target.
if compiler.stage == 0 && builder.config.is_host_target(compiler.host) {
trace!(
"(build == host) copying linking components to `stage0-sysroot` for bootstrapping"
);
// We want to copy the host `bin` folder within the `rustlib` folder in the sysroot.
let src_sysroot_bin = builder
.rustc_snapshot_sysroot()
.join("lib")
.join("rustlib")
.join(compiler.host)
.join("bin");
if src_sysroot_bin.exists() {
let target_sysroot_bin = builder.sysroot_target_bindir(compiler, target);
t!(fs::create_dir_all(&target_sysroot_bin));
builder.cp_link_r(&src_sysroot_bin, &target_sysroot_bin);
}
}
// We build a sysroot for mir-opt tests using the same trick that Miri does: A check build
// with -Zalways-encode-mir. This frees us from the need to have a target linker, and the
// fact that this is a check build integrates nicely with run_cargo.
@ -628,18 +626,18 @@ pub fn std_cargo(builder: &Builder<'_>, target: TargetSelection, stage: u32, car
// Help the libc crate compile by assisting it in finding various
// sysroot native libraries.
if target.contains("musl") {
if let Some(p) = builder.musl_libdir(target) {
let root = format!("native={}", p.to_str().unwrap());
cargo.rustflag("-L").rustflag(&root);
}
if target.contains("musl")
&& let Some(p) = builder.musl_libdir(target)
{
let root = format!("native={}", p.to_str().unwrap());
cargo.rustflag("-L").rustflag(&root);
}
if target.contains("-wasi") {
if let Some(dir) = builder.wasi_libdir(target) {
let root = format!("native={}", dir.to_str().unwrap());
cargo.rustflag("-L").rustflag(&root);
}
if target.contains("-wasi")
&& let Some(dir) = builder.wasi_libdir(target)
{
let root = format!("native={}", dir.to_str().unwrap());
cargo.rustflag("-L").rustflag(&root);
}
}
@ -670,7 +668,8 @@ pub fn std_cargo(builder: &Builder<'_>, target: TargetSelection, stage: u32, car
// Enable frame pointers by default for the library. Note that they are still controlled by a
// separate setting for the compiler.
cargo.rustflag("-Cforce-frame-pointers=yes");
cargo.rustflag("-Zunstable-options");
cargo.rustflag("-Cforce-frame-pointers=non-leaf");
let html_root =
format!("-Zcrate-attr=doc(html_root_url=\"{}/\")", builder.doc_rust_lang_org_channel(),);
@ -737,7 +736,7 @@ impl Step for StdLink {
let target = self.target;
// NOTE: intentionally does *not* check `target == builder.build` to avoid having to add the same check in `test::Crate`.
let (libdir, hostdir) = if self.force_recompile && builder.download_rustc() {
let (libdir, hostdir) = if !self.force_recompile && builder.download_rustc() {
// NOTE: copies part of `sysroot_libdir` to avoid having to add a new `force_recompile` argument there too
let lib = builder.sysroot_libdir_relative(self.compiler);
let sysroot = builder.ensure(crate::core::build_steps::compile::Sysroot {
@ -753,23 +752,16 @@ impl Step for StdLink {
(libdir, hostdir)
};
add_to_sysroot(
builder,
&libdir,
&hostdir,
&build_stamp::libstd_stamp(builder, compiler, target),
);
let is_downloaded_beta_stage0 = builder
.build
.config
.initial_rustc
.starts_with(builder.out.join(compiler.host).join("stage0/bin"));
// Special case for stage0, to make `rustup toolchain link` and `x dist --stage 0`
// work for stage0-sysroot. We only do this if the stage0 compiler comes from beta,
// and is not set to a custom path.
if compiler.stage == 0
&& builder
.build
.config
.initial_rustc
.starts_with(builder.out.join(compiler.host).join("stage0/bin"))
{
if compiler.stage == 0 && is_downloaded_beta_stage0 {
// Copy bin files from stage0/bin to stage0-sysroot/bin
let sysroot = builder.out.join(compiler.host).join("stage0-sysroot");
@ -779,21 +771,9 @@ impl Step for StdLink {
t!(fs::create_dir_all(&sysroot_bin_dir));
builder.cp_link_r(&stage0_bin_dir, &sysroot_bin_dir);
// Copy all files from stage0/lib to stage0-sysroot/lib
let stage0_lib_dir = builder.out.join(host).join("stage0/lib");
if let Ok(files) = fs::read_dir(stage0_lib_dir) {
for file in files {
let file = t!(file);
let path = file.path();
if path.is_file() {
builder.copy_link(
&path,
&sysroot.join("lib").join(path.file_name().unwrap()),
FileType::Regular,
);
}
}
}
t!(fs::create_dir_all(sysroot.join("lib")));
builder.cp_link_r(&stage0_lib_dir, &sysroot.join("lib"));
// Copy codegen-backends from stage0
let sysroot_codegen_backends = builder.sysroot_codegen_backends(compiler);
@ -807,6 +787,30 @@ impl Step for StdLink {
if stage0_codegen_backends.exists() {
builder.cp_link_r(&stage0_codegen_backends, &sysroot_codegen_backends);
}
} else if compiler.stage == 0 {
let sysroot = builder.out.join(compiler.host.triple).join("stage0-sysroot");
if builder.local_rebuild {
// On local rebuilds this path might be a symlink to the project root,
// which can be read-only (e.g., on CI). So remove it before copying
// the stage0 lib.
let _ = fs::remove_dir_all(sysroot.join("lib/rustlib/src/rust"));
}
builder.cp_link_r(&builder.initial_sysroot.join("lib"), &sysroot.join("lib"));
} else {
if builder.download_rustc() {
// Ensure there are no CI-rustc std artifacts.
let _ = fs::remove_dir_all(&libdir);
let _ = fs::remove_dir_all(&hostdir);
}
add_to_sysroot(
builder,
&libdir,
&hostdir,
&build_stamp::libstd_stamp(builder, compiler, target),
);
}
}
}
@ -1029,7 +1033,7 @@ impl Step for Rustc {
let compiler = self.compiler;
let target = self.target;
// NOTE: the ABI of the beta compiler is different from the ABI of the downloaded compiler,
// NOTE: the ABI of the stage0 compiler is different from the ABI of the downloaded compiler,
// so its artifacts can't be reused.
if builder.download_rustc() && compiler.stage != 0 {
trace!(stage = compiler.stage, "`download_rustc` requested");
@ -1275,6 +1279,17 @@ pub fn rustc_cargo(
));
}
// The stage0 compiler changes infrequently and does not directly depend on code
// in the current working directory. Therefore, caching it with sccache should be
// useful.
// This is only performed for non-incremental builds, as ccache cannot deal with these.
if let Some(ref ccache) = builder.config.ccache
&& compiler.stage == 0
&& !builder.config.incremental
{
cargo.env("RUSTC_WRAPPER", ccache);
}
rustc_cargo_env(builder, cargo, target, compiler.stage);
}
@ -1388,12 +1403,13 @@ fn rustc_llvm_env(builder: &Builder<'_>, cargo: &mut Cargo, target: TargetSelect
// found. This is to avoid the linker errors about undefined references to
// `__llvm_profile_instrument_memop` when linking `rustc_driver`.
let mut llvm_linker_flags = String::new();
if builder.config.llvm_profile_generate && target.is_msvc() {
if let Some(ref clang_cl_path) = builder.config.llvm_clang_cl {
// Add clang's runtime library directory to the search path
let clang_rt_dir = get_clang_cl_resource_dir(builder, clang_cl_path);
llvm_linker_flags.push_str(&format!("-L{}", clang_rt_dir.display()));
}
if builder.config.llvm_profile_generate
&& target.is_msvc()
&& let Some(ref clang_cl_path) = builder.config.llvm_clang_cl
{
// Add clang's runtime library directory to the search path
let clang_rt_dir = get_clang_cl_resource_dir(builder, clang_cl_path);
llvm_linker_flags.push_str(&format!("-L{}", clang_rt_dir.display()));
}
// The config can also specify its own llvm linker flags.
@ -1409,7 +1425,7 @@ fn rustc_llvm_env(builder: &Builder<'_>, cargo: &mut Cargo, target: TargetSelect
cargo.env("LLVM_LINKER_FLAGS", llvm_linker_flags);
}
// Building with a static libstdc++ is only supported on linux right now,
// Building with a static libstdc++ is only supported on Linux and windows-gnu* right now,
// not for MSVC or macOS
if builder.config.llvm_static_stdcpp
&& !target.contains("freebsd")
@ -1417,12 +1433,14 @@ fn rustc_llvm_env(builder: &Builder<'_>, cargo: &mut Cargo, target: TargetSelect
&& !target.contains("apple")
&& !target.contains("solaris")
{
let libstdcxx_name =
if target.contains("windows-gnullvm") { "libc++.a" } else { "libstdc++.a" };
let file = compiler_file(
builder,
&builder.cxx(target).unwrap(),
target,
CLang::Cxx,
"libstdc++.a",
libstdcxx_name,
);
cargo.env("LLVM_STATIC_STDCPP", file);
}
@ -1783,9 +1801,9 @@ impl Step for Sysroot {
t!(fs::create_dir_all(&sysroot));
// In some cases(see https://github.com/rust-lang/rust/issues/109314), when the stage0
// compiler relies on more recent version of LLVM than the beta compiler, it may not
// compiler relies on more recent version of LLVM than the stage0 compiler, it may not
// be able to locate the correct LLVM in the sysroot. This situation typically occurs
// when we upgrade LLVM version while the beta compiler continues to use an older version.
// when we upgrade LLVM version while the stage0 compiler continues to use an older version.
//
// Make sure to add the correct version of LLVM into the stage0 sysroot.
if compiler.stage == 0 {
@ -1861,23 +1879,27 @@ impl Step for Sysroot {
// so that any tools relying on `rust-src` also work for local builds,
// and also for translating the virtual `/rustc/$hash` back to the real
// directory (for running tests with `rust.remap-debuginfo = true`).
let sysroot_lib_rustlib_src = sysroot.join("lib/rustlib/src");
t!(fs::create_dir_all(&sysroot_lib_rustlib_src));
let sysroot_lib_rustlib_src_rust = sysroot_lib_rustlib_src.join("rust");
if let Err(e) = symlink_dir(&builder.config, &builder.src, &sysroot_lib_rustlib_src_rust) {
eprintln!(
"ERROR: creating symbolic link `{}` to `{}` failed with {}",
sysroot_lib_rustlib_src_rust.display(),
builder.src.display(),
e,
);
if builder.config.rust_remap_debuginfo {
if compiler.stage != 0 {
let sysroot_lib_rustlib_src = sysroot.join("lib/rustlib/src");
t!(fs::create_dir_all(&sysroot_lib_rustlib_src));
let sysroot_lib_rustlib_src_rust = sysroot_lib_rustlib_src.join("rust");
if let Err(e) =
symlink_dir(&builder.config, &builder.src, &sysroot_lib_rustlib_src_rust)
{
eprintln!(
"ERROR: some `tests/ui` tests will fail when lacking `{}`",
"ERROR: creating symbolic link `{}` to `{}` failed with {}",
sysroot_lib_rustlib_src_rust.display(),
builder.src.display(),
e,
);
if builder.config.rust_remap_debuginfo {
eprintln!(
"ERROR: some `tests/ui` tests will fail when lacking `{}`",
sysroot_lib_rustlib_src_rust.display(),
);
}
build_helper::exit!(1);
}
build_helper::exit!(1);
}
// rustc-src component is already part of CI rustc's sysroot
@ -1984,17 +2006,20 @@ impl Step for Assemble {
trace!("installing `{tool}`");
let tool_exe = exe(tool, target_compiler.host);
let src_path = llvm_bin_dir.join(&tool_exe);
// When using `download-ci-llvm`, some of the tools
// may not exist, so skip trying to copy them.
if src_path.exists() {
// There is a chance that these tools are being installed from an external LLVM.
// Use `Builder::resolve_symlink_and_copy` instead of `Builder::copy_link` to ensure
// we are copying the original file not the symlinked path, which causes issues for
// tarball distribution.
//
// See https://github.com/rust-lang/rust/issues/135554.
builder.resolve_symlink_and_copy(&src_path, &libdir_bin.join(&tool_exe));
// When using `download-ci-llvm`, some of the tools may not exist, so skip trying to copy them.
if !src_path.exists() && builder.config.llvm_from_ci {
eprintln!("{} does not exist; skipping copy", src_path.display());
continue;
}
// There is a chance that these tools are being installed from an external LLVM.
// Use `Builder::resolve_symlink_and_copy` instead of `Builder::copy_link` to ensure
// we are copying the original file not the symlinked path, which causes issues for
// tarball distribution.
//
// See https://github.com/rust-lang/rust/issues/135554.
builder.resolve_symlink_and_copy(&src_path, &libdir_bin.join(&tool_exe));
}
}
}

View file

@ -776,7 +776,8 @@ impl Step for RustcDev {
copy_src_dirs(
builder,
&builder.src,
&["compiler"],
// The compiler has a path dependency on proc_macro, so make sure to include it.
&["compiler", "library/proc_macro"],
&[],
&tarball.image_dir().join("lib/rustlib/rustc-src/rust"),
);
@ -2274,12 +2275,17 @@ impl Step for LlvmTools {
let target = self.target;
/* run only if llvm-config isn't used */
if let Some(config) = builder.config.target_config.get(&target) {
if let Some(ref _s) = config.llvm_config {
builder.info(&format!("Skipping LlvmTools ({target}): external LLVM"));
return None;
}
// Run only if a custom llvm-config is not used
if let Some(config) = builder.config.target_config.get(&target)
&& !builder.config.llvm_from_ci
&& config.llvm_config.is_some()
{
builder.info(&format!("Skipping LlvmTools ({target}): external LLVM"));
return None;
}
if !builder.config.dry_run() {
builder.require_submodule("src/llvm-project", None);
}
builder.ensure(crate::core::build_steps::llvm::Llvm { target });
@ -2294,6 +2300,12 @@ impl Step for LlvmTools {
let dst_bindir = format!("lib/rustlib/{}/bin", target.triple);
for tool in tools_to_install(&builder.paths) {
let exe = src_bindir.join(exe(tool, target));
// When using `download-ci-llvm`, some of the tools may not exist, so skip trying to copy them.
if !exe.exists() && builder.config.llvm_from_ci {
eprintln!("{} does not exist; skipping copy", exe.display());
continue;
}
tarball.add_file(&exe, &dst_bindir, FileType::Executable);
}
}
@ -2387,11 +2399,15 @@ impl Step for RustDev {
let target = self.target;
/* run only if llvm-config isn't used */
if let Some(config) = builder.config.target_config.get(&target) {
if let Some(ref _s) = config.llvm_config {
builder.info(&format!("Skipping RustDev ({target}): external LLVM"));
return None;
}
if let Some(config) = builder.config.target_config.get(&target)
&& let Some(ref _s) = config.llvm_config
{
builder.info(&format!("Skipping RustDev ({target}): external LLVM"));
return None;
}
if !builder.config.dry_run() {
builder.require_submodule("src/llvm-project", None);
}
let mut tarball = Tarball::new(builder, "rust-dev", &target.triple);

View file

@ -318,10 +318,10 @@ pub fn format(build: &Builder<'_>, check: bool, all: bool, paths: &[PathBuf]) {
// `into_path` produces an absolute path. Try to strip `cwd` to get a shorter
// relative path.
let mut path = entry.clone().into_path();
if let Ok(cwd) = cwd {
if let Ok(path2) = path.strip_prefix(cwd) {
path = path2.to_path_buf();
}
if let Ok(cwd) = cwd
&& let Ok(path2) = path.strip_prefix(cwd)
{
path = path2.to_path_buf();
}
path.display().to_string()
});

View file

@ -38,7 +38,9 @@ fn sanitize_sh(path: &Path, is_cygwin: bool) -> String {
if ch.next() != Some('/') {
return None;
}
Some(format!("/{}/{}", drive, &s[drive.len_utf8() + 2..]))
// The prefix for Windows drives in Cygwin/MSYS2 is configurable, but
// /proc/cygdrive is available regardless of configuration since 1.7.33
Some(format!("/proc/cygdrive/{}/{}", drive, &s[drive.len_utf8() + 2..]))
}
}
@ -244,7 +246,7 @@ install!((self, builder, _config),
);
}
};
LlvmTools, alias = "llvm-tools", Self::should_build(_config), only_hosts: true, {
LlvmTools, alias = "llvm-tools", _config.llvm_tools_enabled && _config.llvm_enabled(_config.build), only_hosts: true, {
if let Some(tarball) = builder.ensure(dist::LlvmTools { target: self.target }) {
install_sh(builder, "llvm-tools", self.compiler.stage, Some(self.target), &tarball);
} else {

View file

@ -107,18 +107,18 @@ pub fn prebuilt_llvm_config(
// If we're using a custom LLVM bail out here, but we can only use a
// custom LLVM for the build triple.
if let Some(config) = builder.config.target_config.get(&target) {
if let Some(ref s) = config.llvm_config {
check_llvm_version(builder, s);
let llvm_config = s.to_path_buf();
let mut llvm_cmake_dir = llvm_config.clone();
llvm_cmake_dir.pop();
llvm_cmake_dir.pop();
llvm_cmake_dir.push("lib");
llvm_cmake_dir.push("cmake");
llvm_cmake_dir.push("llvm");
return LlvmBuildStatus::AlreadyBuilt(LlvmResult { llvm_config, llvm_cmake_dir });
}
if let Some(config) = builder.config.target_config.get(&target)
&& let Some(ref s) = config.llvm_config
{
check_llvm_version(builder, s);
let llvm_config = s.to_path_buf();
let mut llvm_cmake_dir = llvm_config.clone();
llvm_cmake_dir.pop();
llvm_cmake_dir.pop();
llvm_cmake_dir.push("lib");
llvm_cmake_dir.push("cmake");
llvm_cmake_dir.push("llvm");
return LlvmBuildStatus::AlreadyBuilt(LlvmResult { llvm_config, llvm_cmake_dir });
}
if handle_submodule_when_needed {
@ -285,7 +285,8 @@ impl Step for Llvm {
LlvmBuildStatus::ShouldBuild(m) => m,
};
if builder.llvm_link_shared() && target.is_windows() {
if builder.llvm_link_shared() && target.is_windows() && !target.ends_with("windows-gnullvm")
{
panic!("shared linking to LLVM is not currently supported on {}", target.triple);
}
@ -442,23 +443,26 @@ impl Step for Llvm {
// See https://github.com/rust-lang/rust/pull/50104
cfg.define("LLVM_ENABLE_LIBXML2", "OFF");
if !enabled_llvm_projects.is_empty() {
enabled_llvm_projects.sort();
enabled_llvm_projects.dedup();
cfg.define("LLVM_ENABLE_PROJECTS", enabled_llvm_projects.join(";"));
}
let mut enabled_llvm_runtimes = Vec::new();
if helpers::forcing_clang_based_tests() {
enabled_llvm_runtimes.push("compiler-rt");
}
// This is an experimental flag, which likely builds more than necessary.
// We will optimize it when we get closer to releasing it on nightly.
if builder.config.llvm_offload {
enabled_llvm_runtimes.push("offload");
//FIXME(ZuseZ4): LLVM intends to drop the offload dependency on openmp.
//Remove this line once they achieved it.
enabled_llvm_runtimes.push("openmp");
enabled_llvm_projects.push("compiler-rt");
}
if !enabled_llvm_projects.is_empty() {
enabled_llvm_projects.sort();
enabled_llvm_projects.dedup();
cfg.define("LLVM_ENABLE_PROJECTS", enabled_llvm_projects.join(";"));
}
if !enabled_llvm_runtimes.is_empty() {
@ -467,10 +471,10 @@ impl Step for Llvm {
cfg.define("LLVM_ENABLE_RUNTIMES", enabled_llvm_runtimes.join(";"));
}
if let Some(num_linkers) = builder.config.llvm_link_jobs {
if num_linkers > 0 {
cfg.define("LLVM_PARALLEL_LINK_JOBS", num_linkers.to_string());
}
if let Some(num_linkers) = builder.config.llvm_link_jobs
&& num_linkers > 0
{
cfg.define("LLVM_PARALLEL_LINK_JOBS", num_linkers.to_string());
}
// https://llvm.org/docs/HowToCrossCompileLLVM.html
@ -596,10 +600,10 @@ fn check_llvm_version(builder: &Builder<'_>, llvm_config: &Path) {
let version = get_llvm_version(builder, llvm_config);
let mut parts = version.split('.').take(2).filter_map(|s| s.parse::<u32>().ok());
if let (Some(major), Some(_minor)) = (parts.next(), parts.next()) {
if major >= 19 {
return;
}
if let (Some(major), Some(_minor)) = (parts.next(), parts.next())
&& major >= 19
{
return;
}
panic!("\n\nbad LLVM version: {version}, need >=19\n\n")
}
@ -729,11 +733,9 @@ fn configure_cmake(
// If ccache is configured we inform the build a little differently how
// to invoke ccache while also invoking our compilers.
if use_compiler_launcher {
if let Some(ref ccache) = builder.config.ccache {
cfg.define("CMAKE_C_COMPILER_LAUNCHER", ccache)
.define("CMAKE_CXX_COMPILER_LAUNCHER", ccache);
}
if use_compiler_launcher && let Some(ref ccache) = builder.config.ccache {
cfg.define("CMAKE_C_COMPILER_LAUNCHER", ccache)
.define("CMAKE_CXX_COMPILER_LAUNCHER", ccache);
}
cfg.define("CMAKE_C_COMPILER", sanitize_cc(&cc))
.define("CMAKE_CXX_COMPILER", sanitize_cc(&cxx))
@ -791,20 +793,20 @@ fn configure_cmake(
cxxflags.push(format!(" --target={target}"));
}
cfg.define("CMAKE_CXX_FLAGS", cxxflags);
if let Some(ar) = builder.ar(target) {
if ar.is_absolute() {
// LLVM build breaks if `CMAKE_AR` is a relative path, for some reason it
// tries to resolve this path in the LLVM build directory.
cfg.define("CMAKE_AR", sanitize_cc(&ar));
}
if let Some(ar) = builder.ar(target)
&& ar.is_absolute()
{
// LLVM build breaks if `CMAKE_AR` is a relative path, for some reason it
// tries to resolve this path in the LLVM build directory.
cfg.define("CMAKE_AR", sanitize_cc(&ar));
}
if let Some(ranlib) = builder.ranlib(target) {
if ranlib.is_absolute() {
// LLVM build breaks if `CMAKE_RANLIB` is a relative path, for some reason it
// tries to resolve this path in the LLVM build directory.
cfg.define("CMAKE_RANLIB", sanitize_cc(&ranlib));
}
if let Some(ranlib) = builder.ranlib(target)
&& ranlib.is_absolute()
{
// LLVM build breaks if `CMAKE_RANLIB` is a relative path, for some reason it
// tries to resolve this path in the LLVM build directory.
cfg.define("CMAKE_RANLIB", sanitize_cc(&ranlib));
}
if let Some(ref flags) = builder.config.llvm_ldflags {
@ -1037,13 +1039,14 @@ impl Step for Lld {
// when doing PGO on CI, cmake or clang-cl don't automatically link clang's
// profiler runtime in. In that case, we need to manually ask cmake to do it, to avoid
// linking errors, much like LLVM's cmake setup does in that situation.
if builder.config.llvm_profile_generate && target.is_msvc() {
if let Some(clang_cl_path) = builder.config.llvm_clang_cl.as_ref() {
// Find clang's runtime library directory and push that as a search path to the
// cmake linker flags.
let clang_rt_dir = get_clang_cl_resource_dir(builder, clang_cl_path);
ldflags.push_all(format!("/libpath:{}", clang_rt_dir.display()));
}
if builder.config.llvm_profile_generate
&& target.is_msvc()
&& let Some(clang_cl_path) = builder.config.llvm_clang_cl.as_ref()
{
// Find clang's runtime library directory and push that as a search path to the
// cmake linker flags.
let clang_rt_dir = get_clang_cl_resource_dir(builder, clang_cl_path);
ldflags.push_all(format!("/libpath:{}", clang_rt_dir.display()));
}
// LLD is built as an LLVM tool, but is distributed outside of the `llvm-tools` component,
@ -1430,6 +1433,7 @@ impl Step for Libunwind {
cfg.flag("-funwind-tables");
cfg.flag("-fvisibility=hidden");
cfg.define("_LIBUNWIND_DISABLE_VISIBILITY_ANNOTATIONS", None);
cfg.define("_LIBUNWIND_IS_NATIVE_ONLY", "1");
cfg.include(root.join("include"));
cfg.cargo_metadata(false);
cfg.out_dir(&out_dir);
@ -1447,12 +1451,10 @@ impl Step for Libunwind {
cfg.define("__NO_STRING_INLINES", None);
cfg.define("__NO_MATH_INLINES", None);
cfg.define("_LIBUNWIND_IS_BAREMETAL", None);
cfg.define("__LIBUNWIND_IS_NATIVE_ONLY", None);
cfg.define("NDEBUG", None);
}
if self.target.is_windows() {
cfg.define("_LIBUNWIND_HIDE_SYMBOLS", "1");
cfg.define("_LIBUNWIND_IS_NATIVE_ONLY", "1");
}
}

View file

@ -1,3 +1,4 @@
use std::env::consts::EXE_EXTENSION;
use std::fmt::{Display, Formatter};
use crate::core::build_steps::compile::{Std, Sysroot};
@ -153,14 +154,17 @@ Consider setting `rust.debuginfo-level = 1` in `bootstrap.toml`."#);
let compiler = builder.compiler(builder.top_stage, builder.config.build);
builder.ensure(Std::new(compiler, builder.config.build));
if let Some(opts) = args.cmd.shared_opts() {
if opts.profiles.contains(&Profile::Doc) {
builder.ensure(Rustdoc { compiler });
}
if let Some(opts) = args.cmd.shared_opts()
&& opts.profiles.contains(&Profile::Doc)
{
builder.ensure(Rustdoc { compiler });
}
let sysroot = builder.ensure(Sysroot::new(compiler));
let rustc = sysroot.join("bin/rustc");
let mut rustc = sysroot.clone();
rustc.push("bin");
rustc.push("rustc");
rustc.set_extension(EXE_EXTENSION);
let rustc_perf_dir = builder.build.tempdir().join("rustc-perf");
let results_dir = rustc_perf_dir.join("results");

View file

@ -5,7 +5,6 @@
use std::path::PathBuf;
use crate::Mode;
use crate::core::build_steps::dist::distdir;
use crate::core::build_steps::test;
use crate::core::build_steps::tool::{self, SourceType, Tool};
@ -14,6 +13,7 @@ use crate::core::builder::{Builder, Kind, RunConfig, ShouldRun, Step};
use crate::core::config::TargetSelection;
use crate::core::config::flags::get_completion;
use crate::utils::exec::command;
use crate::{Mode, t};
#[derive(Debug, PartialOrd, Ord, Clone, Hash, PartialEq, Eq)]
pub struct BuildManifest;
@ -118,15 +118,25 @@ impl Step for Miri {
fn run(self, builder: &Builder<'_>) {
let host = builder.build.build;
let target = self.target;
let stage = builder.top_stage;
// `x run` uses stage 0 by default but miri does not work well with stage 0.
// Change the stage to 1 if it's not set explicitly.
let stage = if builder.config.is_explicit_stage() || builder.top_stage >= 1 {
builder.top_stage
} else {
1
};
if stage == 0 {
eprintln!("miri cannot be run at stage 0");
std::process::exit(1);
}
// This compiler runs on the host, we'll just use it for the target.
let target_compiler = builder.compiler(stage, host);
let host_compiler = tool::get_tool_rustc_compiler(builder, target_compiler);
let target_compiler = builder.compiler(stage, target);
let miri_build = builder.ensure(tool::Miri { compiler: target_compiler, target });
// Rustc tools are off by one stage, so use the build compiler to run miri.
let host_compiler = miri_build.build_compiler;
// Get a target sysroot for Miri.
let miri_sysroot = test::Miri::build_miri_sysroot(builder, target_compiler, target);
@ -243,6 +253,7 @@ impl Step for GenerateCopyright {
cmd.env("SRC_DIR", &builder.src);
cmd.env("VENDOR_DIR", &vendored_sources);
cmd.env("CARGO", &builder.initial_cargo);
cmd.env("CARGO_HOME", t!(home::cargo_home()));
// it is important that generate-copyright runs from the root of the
// source tree, because it uses relative paths
cmd.current_dir(&builder.src);

View file

@ -241,10 +241,10 @@ impl Step for Link {
if run.builder.config.dry_run() {
return;
}
if let [cmd] = &run.paths[..] {
if cmd.assert_single_path().path.as_path().as_os_str() == "link" {
run.builder.ensure(Link);
}
if let [cmd] = &run.paths[..]
&& cmd.assert_single_path().path.as_path().as_os_str() == "link"
{
run.builder.ensure(Link);
}
}
fn run(self, builder: &Builder<'_>) -> Self::Output {
@ -457,10 +457,10 @@ impl Step for Hook {
}
fn make_run(run: RunConfig<'_>) {
if let [cmd] = &run.paths[..] {
if cmd.assert_single_path().path.as_path().as_os_str() == "hook" {
run.builder.ensure(Hook);
}
if let [cmd] = &run.paths[..]
&& cmd.assert_single_path().path.as_path().as_os_str() == "hook"
{
run.builder.ensure(Hook);
}
}
@ -585,11 +585,13 @@ Select which editor you would like to set up [default: None]: ";
"d29af4d949bbe2371eac928a3c31cf9496b1701aa1c45f11cd6c759865ad5c45",
"b5dd299b93dca3ceeb9b335f929293cb3d4bf4977866fbe7ceeac2a8a9f99088",
"631c837b0e98ae35fd48b0e5f743b1ca60adadf2d0a2b23566ba25df372cf1a9",
"080955765db84bb6cbf178879f489c4e2369397626a6ecb3debedb94a9d0b3ce",
],
EditorKind::Helix => &[
"2d3069b8cf1b977e5d4023965eb6199597755e6c96c185ed5f2854f98b83d233",
"6736d61409fbebba0933afd2e4c44ff2f97c1cb36cf0299a7f4a7819b8775040",
"f252dcc30ca85a193a699581e5e929d5bd6c19d40d7a7ade5e257a9517a124a5",
"198c195ed0c070d15907b279b8b4ea96198ca71b939f5376454f3d636ab54da5",
],
EditorKind::Vim | EditorKind::VsCode => &[
"ea67e259dedf60d4429b6c349a564ffcd1563cf41c920a856d1f5b16b4701ac8",
@ -604,11 +606,13 @@ Select which editor you would like to set up [default: None]: ";
"c394386e6133bbf29ffd32c8af0bb3d4aac354cba9ee051f29612aa9350f8f8d",
"e53e9129ca5ee5dcbd6ec8b68c2d87376474eb154992deba3c6d9ab1703e0717",
"f954316090936c7e590c253ca9d524008375882fa13c5b41d7e2547a896ff893",
"701b73751efd7abd6487f2c79348dab698af7ac4427b79fa3d2087c867144b12",
],
EditorKind::Zed => &[
"bbce727c269d1bd0c98afef4d612eb4ce27aea3c3a8968c5f10b31affbc40b6c",
"a5380cf5dd9328731aecc5dfb240d16dac46ed272126b9728006151ef42f5909",
"2e96bf0d443852b12f016c8fc9840ab3d0a2b4fe0b0fb3a157e8d74d5e7e0e26",
"4fadd4c87389a601a27db0d3d74a142fa3a2e656ae78982e934dbe24bee32ad6",
],
}
}
@ -668,10 +672,10 @@ impl Step for Editor {
if run.builder.config.dry_run() {
return;
}
if let [cmd] = &run.paths[..] {
if cmd.assert_single_path().path.as_path().as_os_str() == "editor" {
run.builder.ensure(Editor);
}
if let [cmd] = &run.paths[..]
&& cmd.assert_single_path().path.as_path().as_os_str() == "editor"
{
run.builder.ensure(Editor);
}
}

View file

@ -739,7 +739,7 @@ impl Step for Clippy {
const DEFAULT: bool = false;
fn should_run(run: ShouldRun<'_>) -> ShouldRun<'_> {
run.path("src/tools/clippy")
run.suite_path("src/tools/clippy/tests").path("src/tools/clippy")
}
fn make_run(run: RunConfig<'_>) {
@ -783,6 +783,23 @@ impl Step for Clippy {
let host_libs = builder.stage_out(compiler, Mode::ToolRustc).join(builder.cargo_dir());
cargo.env("HOST_LIBS", host_libs);
// Collect paths of tests to run
'partially_test: {
let paths = &builder.config.paths[..];
let mut test_names = Vec::new();
for path in paths {
if let Some(path) =
helpers::is_valid_test_suite_arg(path, "src/tools/clippy/tests", builder)
{
test_names.push(path);
} else if path.ends_with("src/tools/clippy") {
// When src/tools/clippy is called directly, all tests should be run.
break 'partially_test;
}
}
cargo.env("TESTNAME", test_names.join(","));
}
cargo.add_rustc_lib_path(builder);
let cargo = prepare_cargo_test(cargo, &[], &[], host, builder);
@ -1559,7 +1576,7 @@ impl Step for Compiletest {
if builder.top_stage == 0 && env::var("COMPILETEST_FORCE_STAGE0").is_err() {
eprintln!("\
ERROR: `--stage 0` runs compiletest on the beta compiler, not your local changes, and will almost always cause tests to fail
ERROR: `--stage 0` runs compiletest on the stage0 (precompiled) compiler, not your local changes, and will almost always cause tests to fail
HELP: to test the compiler, use `--stage 1` instead
HELP: to test the standard library, use `--stage 0 library/std` instead
NOTE: if you're sure you want to do this, please open an issue as to why. In the meantime, you can override this with `COMPILETEST_FORCE_STAGE0=1`."
@ -1587,9 +1604,9 @@ NOTE: if you're sure you want to do this, please open an issue as to why. In the
// NOTE: Only stage 1 is special cased because we need the rustc_private artifacts to match the
// running compiler in stage 2 when plugins run.
let (stage, stage_id) = if suite == "ui-fulldeps" && compiler.stage == 1 {
// At stage 0 (stage - 1) we are using the beta compiler. Using `self.target` can lead
// finding an incorrect compiler path on cross-targets, as the stage 0 beta compiler is
// always equal to `build.build` in the configuration.
// At stage 0 (stage - 1) we are using the stage0 compiler. Using `self.target` can lead
// finding an incorrect compiler path on cross-targets, as the stage 0 is always equal to
// `build.build` in the configuration.
let build = builder.build.build;
compiler = builder.compiler(compiler.stage - 1, build);
let test_stage = compiler.stage + 1;
@ -1675,7 +1692,7 @@ NOTE: if you're sure you want to do this, please open an issue as to why. In the
}
if mode == "rustdoc-json" {
// Use the beta compiler for jsondocck
// Use the stage0 compiler for jsondocck
let json_compiler = compiler.with_stage(0);
cmd.arg("--jsondocck-path")
.arg(builder.ensure(tool::JsonDocCk { compiler: json_compiler, target }).tool_path);
@ -2400,10 +2417,10 @@ impl Step for ErrorIndex {
}
fn markdown_test(builder: &Builder<'_>, compiler: Compiler, markdown: &Path) -> bool {
if let Ok(contents) = fs::read_to_string(markdown) {
if !contents.contains("```") {
return true;
}
if let Ok(contents) = fs::read_to_string(markdown)
&& !contents.contains("```")
{
return true;
}
builder.verbose(|| println!("doc tests for: {}", markdown.display()));
@ -2690,16 +2707,6 @@ impl Step for Crate {
.arg(builder.src.join("library/sysroot/Cargo.toml"));
} else {
compile::std_cargo(builder, target, compiler.stage, &mut cargo);
// `std_cargo` actually does the wrong thing: it passes `--sysroot build/host/stage2`,
// but we want to use the force-recompile std we just built in `build/host/stage2-test-sysroot`.
// Override it.
if builder.download_rustc() && compiler.stage > 0 {
let sysroot = builder
.out
.join(compiler.host)
.join(format!("stage{}-test-sysroot", compiler.stage));
cargo.env("RUSTC_SYSROOT", sysroot);
}
}
}
Mode::Rustc => {
@ -2947,7 +2954,14 @@ impl Step for Distcheck {
run.builder.ensure(Distcheck);
}
/// Runs "distcheck", a 'make check' from a tarball
/// Runs `distcheck`, which is a collection of smoke tests:
///
/// - Run `make check` from an unpacked dist tarball to make sure we can at the minimum run
/// check steps from those sources.
/// - Check that selected dist components (`rust-src` only at the moment) at least have expected
/// directory shape and crate manifests that cargo can generate a lockfile from.
///
/// FIXME(#136822): dist components are under-tested.
fn run(self, builder: &Builder<'_>) {
builder.info("Distcheck");
let dir = builder.tempdir().join("distcheck");
@ -3520,7 +3534,7 @@ impl Step for CodegenGCC {
}
/// Test step that does two things:
/// - Runs `cargo test` for the `src/etc/test-float-parse` tool.
/// - Runs `cargo test` for the `src/tools/test-float-parse` tool.
/// - Invokes the `test-float-parse` tool to test the standard library's
/// float parsing routines.
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
@ -3535,7 +3549,7 @@ impl Step for TestFloatParse {
const DEFAULT: bool = true;
fn should_run(run: ShouldRun<'_>) -> ShouldRun<'_> {
run.path("src/etc/test-float-parse")
run.path("src/tools/test-float-parse")
}
fn make_run(run: RunConfig<'_>) {

View file

@ -148,6 +148,17 @@ impl Step for ToolBuild {
&self.extra_features,
);
// The stage0 compiler changes infrequently and does not directly depend on code
// in the current working directory. Therefore, caching it with sccache should be
// useful.
// This is only performed for non-incremental builds, as ccache cannot deal with these.
if let Some(ref ccache) = builder.config.ccache
&& matches!(self.mode, Mode::ToolBootstrap)
&& !builder.config.incremental
{
cargo.env("RUSTC_WRAPPER", ccache);
}
// Rustc tools (miri, clippy, cargo, rustfmt, rust-analyzer)
// could use the additional optimizations.
if self.mode == Mode::ToolRustc && is_lto_stage(&self.compiler) {
@ -329,9 +340,9 @@ pub(crate) fn get_tool_rustc_compiler(
return target_compiler;
}
if builder.download_rustc() && target_compiler.stage > 0 {
// We already have the stage N compiler, we don't need to cut the stage.
return builder.compiler(target_compiler.stage, builder.config.build);
if builder.download_rustc() && target_compiler.stage == 1 {
// We shouldn't drop to stage0 compiler when using CI rustc.
return builder.compiler(1, builder.config.build);
}
// Similar to `compile::Assemble`, build with the previous stage's compiler. Otherwise
@ -1197,9 +1208,9 @@ fn run_tool_build_step(
artifact_kind: ToolArtifactKind::Binary,
});
// FIXME: This should just be an if-let-chain, but those are unstable.
if let Some(add_bins_to_sysroot) =
add_bins_to_sysroot.filter(|bins| !bins.is_empty() && target_compiler.stage > 0)
if let Some(add_bins_to_sysroot) = add_bins_to_sysroot
&& !add_bins_to_sysroot.is_empty()
&& target_compiler.stage > 0
{
let bindir = builder.sysroot(target_compiler).join("bin");
t!(fs::create_dir_all(&bindir));
@ -1269,7 +1280,7 @@ impl Step for TestFloatParse {
const DEFAULT: bool = false;
fn should_run(run: ShouldRun<'_>) -> ShouldRun<'_> {
run.path("src/etc/test-float-parse")
run.path("src/tools/test-float-parse")
}
fn run(self, builder: &Builder<'_>) -> ToolBuildResult {
@ -1281,7 +1292,7 @@ impl Step for TestFloatParse {
target: bootstrap_host,
tool: "test-float-parse",
mode: Mode::ToolStd,
path: "src/etc/test-float-parse",
path: "src/tools/test-float-parse",
source_type: SourceType::InTree,
extra_features: Vec::new(),
allow_features: Self::ALLOW_FEATURES,
@ -1302,10 +1313,8 @@ impl Builder<'_> {
//
// Notably this munges the dynamic library lookup path to point to the
// right location to run `compiler`.
let mut lib_paths: Vec<PathBuf> = vec![
self.build.rustc_snapshot_libdir(),
self.cargo_out(compiler, Mode::ToolBootstrap, *host).join("deps"),
];
let mut lib_paths: Vec<PathBuf> =
vec![self.cargo_out(compiler, Mode::ToolBootstrap, *host).join("deps")];
// On MSVC a tool may invoke a C compiler (e.g., compiletest in run-make
// mode) and that C compiler may need some extra PATH modification. Do

View file

@ -11,7 +11,7 @@ use crate::utils::build_stamp;
use crate::utils::helpers::{self, LldThreads, check_cfg_arg, linker_args, linker_flags};
use crate::{
BootstrapCommand, CLang, Compiler, Config, DocTests, DryRun, EXTRA_CHECK_CFGS, GitRepo, Mode,
TargetSelection, command, prepare_behaviour_dump_dir, t,
RemapScheme, TargetSelection, command, prepare_behaviour_dump_dir, t,
};
/// Represents flag values in `String` form with whitespace delimiter to pass it to the compiler
@ -636,6 +636,15 @@ impl Builder<'_> {
for (restricted_mode, name, values) in EXTRA_CHECK_CFGS {
if restricted_mode.is_none() || *restricted_mode == Some(mode) {
rustflags.arg(&check_cfg_arg(name, *values));
if *name == "bootstrap" {
// Cargo doesn't pass RUSTFLAGS to proc_macros:
// https://github.com/rust-lang/cargo/issues/4423
// Thus, if we are on stage 0, we explicitly set `--cfg=bootstrap`.
// We also declare that the flag is expected, which we need to do to not
// get warnings about it being unexpected.
hostflags.arg(check_cfg_arg(name, *values));
}
}
}
@ -645,13 +654,6 @@ impl Builder<'_> {
if stage == 0 {
hostflags.arg("--cfg=bootstrap");
}
// Cargo doesn't pass RUSTFLAGS to proc_macros:
// https://github.com/rust-lang/cargo/issues/4423
// Thus, if we are on stage 0, we explicitly set `--cfg=bootstrap`.
// We also declare that the flag is expected, which we need to do to not
// get warnings about it being unexpected.
hostflags.arg("-Zunstable-options");
hostflags.arg("--check-cfg=cfg(bootstrap)");
// FIXME: It might be better to use the same value for both `RUSTFLAGS` and `RUSTDOCFLAGS`,
// but this breaks CI. At the very least, stage0 `rustdoc` needs `--cfg bootstrap`. See
@ -920,13 +922,46 @@ impl Builder<'_> {
hostflags.arg(format!("-Ctarget-feature={sign}crt-static"));
}
if let Some(map_to) = self.build.debuginfo_map_to(GitRepo::Rustc) {
let map = format!("{}={}", self.build.src.display(), map_to);
cargo.env("RUSTC_DEBUGINFO_MAP", map);
// `rustc` needs to know the remapping scheme, in order to know how to reverse it (unremap)
// later. Two env vars are set and made available to the compiler
//
// - `CFG_VIRTUAL_RUST_SOURCE_BASE_DIR`: `rust-src` remap scheme (`NonCompiler`)
// - `CFG_VIRTUAL_RUSTC_DEV_SOURCE_BASE_DIR`: `rustc-dev` remap scheme (`Compiler`)
//
// Keep this scheme in sync with `rustc_metadata::rmeta::decoder`'s
// `try_to_translate_virtual_to_real`.
//
// `RUSTC_DEBUGINFO_MAP` is used to pass through to the underlying rustc
// `--remap-path-prefix`.
match mode {
Mode::Rustc | Mode::Codegen => {
if let Some(ref map_to) =
self.build.debuginfo_map_to(GitRepo::Rustc, RemapScheme::NonCompiler)
{
cargo.env("CFG_VIRTUAL_RUST_SOURCE_BASE_DIR", map_to);
}
// `rustc` needs to know the virtual `/rustc/$hash` we're mapping to,
// in order to opportunistically reverse it later.
cargo.env("CFG_VIRTUAL_RUST_SOURCE_BASE_DIR", map_to);
if let Some(ref map_to) =
self.build.debuginfo_map_to(GitRepo::Rustc, RemapScheme::Compiler)
{
// When building compiler sources, we want to apply the compiler remap scheme.
cargo.env(
"RUSTC_DEBUGINFO_MAP",
format!("{}={}", self.build.src.display(), map_to),
);
cargo.env("CFG_VIRTUAL_RUSTC_DEV_SOURCE_BASE_DIR", map_to);
}
}
Mode::Std | Mode::ToolBootstrap | Mode::ToolRustc | Mode::ToolStd => {
if let Some(ref map_to) =
self.build.debuginfo_map_to(GitRepo::Rustc, RemapScheme::NonCompiler)
{
cargo.env(
"RUSTC_DEBUGINFO_MAP",
format!("{}={}", self.build.src.display(), map_to),
);
}
}
}
if self.config.rust_remap_debuginfo {
@ -988,15 +1023,15 @@ impl Builder<'_> {
// requirement, but the `-L` library path is not propagated across
// separate Cargo projects. We can add LLVM's library path to the
// rustc args as a workaround.
if mode == Mode::ToolRustc || mode == Mode::Codegen {
if let Some(llvm_config) = self.llvm_config(target) {
let llvm_libdir =
command(llvm_config).arg("--libdir").run_capture_stdout(self).stdout();
if target.is_msvc() {
rustflags.arg(&format!("-Clink-arg=-LIBPATH:{llvm_libdir}"));
} else {
rustflags.arg(&format!("-Clink-arg=-L{llvm_libdir}"));
}
if (mode == Mode::ToolRustc || mode == Mode::Codegen)
&& let Some(llvm_config) = self.llvm_config(target)
{
let llvm_libdir =
command(llvm_config).arg("--libdir").run_capture_stdout(self).stdout();
if target.is_msvc() {
rustflags.arg(&format!("-Clink-arg=-LIBPATH:{llvm_libdir}"));
} else {
rustflags.arg(&format!("-Clink-arg=-L{llvm_libdir}"));
}
}
@ -1004,7 +1039,12 @@ impl Builder<'_> {
// efficient initial-exec TLS model. This doesn't work with `dlopen`,
// so we can't use it by default in general, but we can use it for tools
// and our own internal libraries.
if !mode.must_support_dlopen() && !target.triple.starts_with("powerpc-") {
//
// Cygwin only supports emutls.
if !mode.must_support_dlopen()
&& !target.triple.starts_with("powerpc-")
&& !target.triple.contains("cygwin")
{
cargo.env("RUSTC_TLS_MODEL_INITIAL_EXEC", "1");
}
@ -1226,12 +1266,11 @@ impl Builder<'_> {
_ => None,
};
if let Some(limit) = limit {
if stage == 0
|| self.config.default_codegen_backend(target).unwrap_or_default() == "llvm"
{
rustflags.arg(&format!("-Cllvm-args=-import-instr-limit={limit}"));
}
if let Some(limit) = limit
&& (stage == 0
|| self.config.default_codegen_backend(target).unwrap_or_default() == "llvm")
{
rustflags.arg(&format!("-Cllvm-args=-import-instr-limit={limit}"));
}
}

View file

@ -945,7 +945,6 @@ impl<'a> Builder<'a> {
clippy::CI,
),
Kind::Check | Kind::Fix => describe!(
check::Std,
check::Rustc,
check::Rustdoc,
check::CodegenBackend,
@ -961,6 +960,13 @@ impl<'a> Builder<'a> {
check::Compiletest,
check::FeaturesStatusDump,
check::CoverageDump,
// This has special staging logic, it may run on stage 1 while others run on stage 0.
// It takes quite some time to build stage 1, so put this at the end.
//
// FIXME: This also helps bootstrap to not interfere with stage 0 builds. We should probably fix
// that issue somewhere else, but we still want to keep `check::Std` at the end so that the
// quicker steps run before this.
check::Std,
),
Kind::Test => describe!(
crate::core::build_steps::toolstate::ToolStateCheck,

View file

@ -237,7 +237,7 @@ fn alias_and_path_for_library() {
);
assert_eq!(
first(cache.all::<doc::Std>()),
&[doc_std!(TEST_TRIPLE_1 => TEST_TRIPLE_1, stage = 0)]
&[doc_std!(TEST_TRIPLE_1 => TEST_TRIPLE_1, stage = 1)]
);
}
@ -254,19 +254,6 @@ fn ci_rustc_if_unchanged_invalidate_on_compiler_changes() {
});
}
#[test]
fn ci_rustc_if_unchanged_invalidate_on_library_changes_in_ci() {
git_test(|ctx| {
prepare_rustc_checkout(ctx);
ctx.create_upstream_merge(&["compiler/bar"]);
// This change should invalidate download-ci-rustc
ctx.create_nonupstream_merge(&["library/foo"]);
let config = parse_config_download_rustc_at(ctx.get_path(), "if-unchanged", true);
assert_eq!(config.download_rustc_commit, None);
});
}
#[test]
fn ci_rustc_if_unchanged_do_not_invalidate_on_library_changes_outside_ci() {
git_test(|ctx| {
@ -433,14 +420,14 @@ mod defaults {
assert_eq!(first(cache.all::<doc::ErrorIndex>()), &[doc::ErrorIndex { target: a },]);
assert_eq!(
first(cache.all::<tool::ErrorIndex>()),
&[tool::ErrorIndex { compiler: Compiler::new(0, a) }]
&[tool::ErrorIndex { compiler: Compiler::new(1, a) }]
);
// docs should be built with the beta compiler, not with the stage0 artifacts.
// docs should be built with the stage0 compiler, not with the stage0 artifacts.
// recall that rustdoc is off-by-one: `stage` is the compiler rustdoc is _linked_ to,
// not the one it was built by.
assert_eq!(
first(cache.all::<tool::Rustdoc>()),
&[tool::Rustdoc { compiler: Compiler::new(0, a) },]
&[tool::Rustdoc { compiler: Compiler::new(1, a) },]
);
}
}

File diff suppressed because it is too large Load diff

View file

@ -12,7 +12,8 @@ use tracing::instrument;
use crate::core::build_steps::perf::PerfArgs;
use crate::core::build_steps::setup::Profile;
use crate::core::builder::{Builder, Kind};
use crate::core::config::{Config, TargetSelectionList, target_selection_list};
use crate::core::config::Config;
use crate::core::config::target_selection::{TargetSelectionList, target_selection_list};
use crate::{Build, DocTests};
#[derive(Copy, Clone, Default, Debug, ValueEnum)]
@ -182,6 +183,11 @@ pub struct Flags {
/// Make bootstrap to behave as it's running on the CI environment or not.
#[arg(global = true, long, value_name = "bool")]
pub ci: Option<bool>,
/// Skip checking the standard library if `rust.download-rustc` isn't available.
/// This is mostly for RA as building the stage1 compiler to check the library tree
/// on each code change might be too much for some computers.
#[arg(global = true, long)]
pub skip_std_check_if_no_download_rustc: bool,
}
impl Flags {

View file

@ -1,7 +1,416 @@
//! Entry point for the `config` module.
//!
//! This module defines two macros:
//!
//! - `define_config!`: A declarative macro used instead of `#[derive(Deserialize)]` to reduce
//! compile time and binary size, especially for the bootstrap binary.
//!
//! - `check_ci_llvm!`: A compile-time assertion macro that ensures certain settings are
//! not enabled when `download-ci-llvm` is active.
//!
//! A declarative macro is used here in place of a procedural derive macro to minimize
//! the compile time of the bootstrap process.
//!
//! Additionally, this module defines common types, enums, and helper functions used across
//! various TOML configuration sections in `bootstrap.toml`.
//!
//! It provides shared definitions for:
//! - Data types deserialized from TOML.
//! - Utility enums for specific configuration options.
//! - Helper functions for managing configuration values.
#[expect(clippy::module_inception)]
mod config;
pub mod flags;
pub mod target_selection;
#[cfg(test)]
mod tests;
pub mod toml;
use std::collections::HashSet;
use std::path::PathBuf;
use build_helper::exit;
pub use config::*;
use serde::{Deserialize, Deserializer};
use serde_derive::Deserialize;
pub use target_selection::TargetSelection;
pub use toml::BUILDER_CONFIG_FILENAME;
pub use toml::change_id::ChangeId;
pub use toml::rust::LldMode;
pub use toml::target::Target;
use crate::Display;
use crate::str::FromStr;
// We are using a decl macro instead of a derive proc macro here to reduce the compile time of bootstrap.
#[macro_export]
macro_rules! define_config {
($(#[$attr:meta])* struct $name:ident {
$($field:ident: Option<$field_ty:ty> = $field_key:literal,)*
}) => {
$(#[$attr])*
pub struct $name {
$(pub $field: Option<$field_ty>,)*
}
impl Merge for $name {
fn merge(
&mut self,
_parent_config_path: Option<PathBuf>,
_included_extensions: &mut HashSet<PathBuf>,
other: Self,
replace: ReplaceOpt
) {
$(
match replace {
ReplaceOpt::IgnoreDuplicate => {
if self.$field.is_none() {
self.$field = other.$field;
}
},
ReplaceOpt::Override => {
if other.$field.is_some() {
self.$field = other.$field;
}
}
ReplaceOpt::ErrorOnDuplicate => {
if other.$field.is_some() {
if self.$field.is_some() {
if cfg!(test) {
panic!("overriding existing option")
} else {
eprintln!("overriding existing option: `{}`", stringify!($field));
exit!(2);
}
} else {
self.$field = other.$field;
}
}
}
}
)*
}
}
// The following is a trimmed version of what serde_derive generates. All parts not relevant
// for toml deserialization have been removed. This reduces the binary size and improves
// compile time of bootstrap.
impl<'de> Deserialize<'de> for $name {
fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
where
D: Deserializer<'de>,
{
struct Field;
impl<'de> serde::de::Visitor<'de> for Field {
type Value = $name;
fn expecting(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
f.write_str(concat!("struct ", stringify!($name)))
}
#[inline]
fn visit_map<A>(self, mut map: A) -> Result<Self::Value, A::Error>
where
A: serde::de::MapAccess<'de>,
{
$(let mut $field: Option<$field_ty> = None;)*
while let Some(key) =
match serde::de::MapAccess::next_key::<String>(&mut map) {
Ok(val) => val,
Err(err) => {
return Err(err);
}
}
{
match &*key {
$($field_key => {
if $field.is_some() {
return Err(<A::Error as serde::de::Error>::duplicate_field(
$field_key,
));
}
$field = match serde::de::MapAccess::next_value::<$field_ty>(
&mut map,
) {
Ok(val) => Some(val),
Err(err) => {
return Err(err);
}
};
})*
key => {
return Err(serde::de::Error::unknown_field(key, FIELDS));
}
}
}
Ok($name { $($field),* })
}
}
const FIELDS: &'static [&'static str] = &[
$($field_key,)*
];
Deserializer::deserialize_struct(
deserializer,
stringify!($name),
FIELDS,
Field,
)
}
}
}
}
/// Asserts that a `llvm.*` config field is unset, because setting it would
/// conflict with `download-ci-llvm`. The field expression is also stringified
/// (with `_` turned into `-`) to name the offending option in the panic message.
#[macro_export]
macro_rules! check_ci_llvm {
    ($name:expr) => {
        assert!(
            $name.is_none(),
            "setting {} is incompatible with download-ci-llvm.",
            stringify!($name).replace("_", "-")
        );
    };
}
/// Merging logic for configuration values, used to combine multiple layers of
/// `bootstrap.toml` (included files, profiles, CLI overrides) into one value.
pub(crate) trait Merge {
    /// Merges `other` into `self`.
    ///
    /// `parent_config_path` is the path of the config file being merged (used to
    /// resolve relative `include` paths), and `included_extensions` tracks files
    /// currently being included so that inclusion cycles can be detected.
    /// `replace` decides how a conflict between two set values is handled.
    fn merge(
        &mut self,
        parent_config_path: Option<PathBuf>,
        included_extensions: &mut HashSet<PathBuf>,
        other: Self,
        replace: ReplaceOpt,
    );
}
impl<T> Merge for Option<T> {
    /// Merges another optional value into this one, resolving conflicts
    /// according to `replace`. The path/extension bookkeeping parameters are
    /// unused for leaf values.
    fn merge(
        &mut self,
        _parent_config_path: Option<PathBuf>,
        _included_extensions: &mut HashSet<PathBuf>,
        other: Self,
        replace: ReplaceOpt,
    ) {
        match replace {
            // Only fill in a value that is currently missing.
            ReplaceOpt::IgnoreDuplicate if self.is_none() => *self = other,
            ReplaceOpt::IgnoreDuplicate => {}
            // An incoming value always wins; an absent one changes nothing.
            ReplaceOpt::Override if other.is_some() => *self = other,
            ReplaceOpt::Override => {}
            // Setting the same option twice is an error (a panic under tests).
            ReplaceOpt::ErrorOnDuplicate => {
                if other.is_none() {
                    return;
                }
                if self.is_none() {
                    *self = other;
                } else if cfg!(test) {
                    panic!("overriding existing option")
                } else {
                    eprintln!("overriding existing option");
                    exit!(2);
                }
            }
        }
    }
}
/// Amount of debug information to request from rustc; the `Display` impl below
/// maps each variant to the value passed to `-C debuginfo`.
#[derive(Copy, Clone, Default, Debug, Eq, PartialEq)]
pub enum DebuginfoLevel {
    /// No debuginfo (`-C debuginfo=0`).
    #[default]
    None,
    /// Passed through as `line-directives-only`.
    LineDirectivesOnly,
    /// Passed through as `line-tables-only`.
    LineTablesOnly,
    /// Limited debuginfo (`-C debuginfo=1`).
    Limited,
    /// Full debuginfo (`-C debuginfo=2`).
    Full,
}
// NOTE: can't derive(Deserialize) because the intermediate trip through toml::Value only
// deserializes i64, and derive() only generates visit_u64
impl<'de> Deserialize<'de> for DebuginfoLevel {
    /// Accepts either the string names (`"none"`, `"line-directives-only"`,
    /// `"line-tables-only"`, `"limited"`, `"full"`) or the numeric levels 0/1/2,
    /// going through [`StringOrInt`] as an intermediate representation.
    fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
    where
        D: Deserializer<'de>,
    {
        use serde::de::Error;
        Ok(match Deserialize::deserialize(deserializer)? {
            StringOrInt::String(s) if s == "none" => DebuginfoLevel::None,
            StringOrInt::Int(0) => DebuginfoLevel::None,
            StringOrInt::String(s) if s == "line-directives-only" => {
                DebuginfoLevel::LineDirectivesOnly
            }
            StringOrInt::String(s) if s == "line-tables-only" => DebuginfoLevel::LineTablesOnly,
            StringOrInt::String(s) if s == "limited" => DebuginfoLevel::Limited,
            StringOrInt::Int(1) => DebuginfoLevel::Limited,
            StringOrInt::String(s) if s == "full" => DebuginfoLevel::Full,
            StringOrInt::Int(2) => DebuginfoLevel::Full,
            StringOrInt::Int(n) => {
                let other = serde::de::Unexpected::Signed(n);
                return Err(D::Error::invalid_value(other, &"expected 0, 1, or 2"));
            }
            StringOrInt::String(s) => {
                let other = serde::de::Unexpected::Str(&s);
                // Fix: the message previously omitted `line-directives-only`,
                // even though that string is accepted above.
                return Err(D::Error::invalid_value(
                    other,
                    &"expected none, line-directives-only, line-tables-only, limited, or full",
                ));
            }
        })
    }
}
/// Suitable for passing to `-C debuginfo`
impl Display for DebuginfoLevel {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
use DebuginfoLevel::*;
f.write_str(match self {
None => "0",
LineDirectivesOnly => "line-directives-only",
LineTablesOnly => "line-tables-only",
Limited => "1",
Full => "2",
})
}
}
/// A TOML value that may be written either as a boolean or as a string
/// (e.g. `ccache = true` or `ccache = "/path/to/ccache"`; see the `ccache` fields).
#[derive(Clone, Debug, Deserialize, PartialEq, Eq)]
#[serde(untagged)]
pub enum StringOrBool {
    String(String),
    Bool(bool),
}
impl Default for StringOrBool {
fn default() -> StringOrBool {
StringOrBool::Bool(false)
}
}
impl StringOrBool {
    /// Returns `true` for any string value, and for the boolean `true`.
    pub fn is_string_or_true(&self) -> bool {
        match self {
            Self::String(_) => true,
            Self::Bool(b) => *b,
        }
    }
}
/// A TOML value that may be either a string or an integer; used as an
/// intermediate when deserializing values such as `debuginfo-level`.
#[derive(Deserialize)]
#[serde(untagged)]
pub enum StringOrInt {
    String(String),
    Int(i64),
}
/// Value of the `rust.llvm-libunwind` option; parsed from the strings
/// `"no"` / `"in-tree"` / `"system"` (see the `FromStr` impl below).
#[derive(Debug, Default, Clone, Copy, PartialEq, Eq)]
pub enum LlvmLibunwind {
    #[default]
    No,
    InTree,
    System,
}
impl FromStr for LlvmLibunwind {
    type Err = String;
    /// Parses `"no"`, `"in-tree"` or `"system"`; anything else is an error.
    fn from_str(value: &str) -> Result<Self, Self::Err> {
        let parsed = match value {
            "no" => Self::No,
            "in-tree" => Self::InTree,
            "system" => Self::System,
            invalid => {
                return Err(format!("Invalid value '{invalid}' for rust.llvm-libunwind config."));
            }
        };
        Ok(parsed)
    }
}
/// How debuginfo is split across artifacts; parsed from `"packed"` /
/// `"unpacked"` / `"off"` (see the `FromStr` impl below). Platform defaults
/// are chosen by `SplitDebuginfo::default_for_platform`.
#[derive(Debug, Default, Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)]
pub enum SplitDebuginfo {
    Packed,
    Unpacked,
    #[default]
    Off,
}
impl std::str::FromStr for SplitDebuginfo {
    type Err = ();
    /// Parses `"packed"`, `"unpacked"` or `"off"`; any other string fails.
    fn from_str(s: &str) -> Result<Self, Self::Err> {
        Ok(match s {
            "packed" => SplitDebuginfo::Packed,
            "unpacked" => SplitDebuginfo::Unpacked,
            "off" => SplitDebuginfo::Off,
            _ => return Err(()),
        })
    }
}
/// Describes how to handle conflicts in merging two `TomlConfig`
#[derive(Copy, Clone, Debug)]
pub enum ReplaceOpt {
    /// Silently ignore a duplicated value
    IgnoreDuplicate,
    /// Override the current value, even if it's `Some`
    Override,
    /// Exit with an error on duplicate values
    /// (panics instead under `cfg(test)`; see the `Merge` impls).
    ErrorOnDuplicate,
}
/// Whether, and why, bootstrap is running in dry-run mode.
#[derive(Clone, Default)]
pub enum DryRun {
    /// This isn't a dry run.
    #[default]
    Disabled,
    /// This is a dry run enabled by bootstrap itself, so it can verify that no work is done.
    SelfCheck,
    /// This is a dry run enabled by the `--dry-run` flag.
    UserSelected,
}
/// LTO mode used for compiling rustc itself.
/// Parsed from `"off"` / `"thin-local"` / `"thin"` / `"fat"` (see `FromStr` below).
#[derive(Default, Clone, PartialEq, Debug)]
pub enum RustcLto {
    Off,
    #[default]
    ThinLocal,
    Thin,
    Fat,
}
impl std::str::FromStr for RustcLto {
    type Err = String;
    /// Parses one of `"thin-local"`, `"thin"`, `"fat"` or `"off"`.
    fn from_str(s: &str) -> Result<Self, Self::Err> {
        let mode = match s {
            "thin-local" => RustcLto::ThinLocal,
            "thin" => RustcLto::Thin,
            "fat" => RustcLto::Fat,
            "off" => RustcLto::Off,
            _ => return Err(format!("Invalid value for rustc LTO: {s}")),
        };
        Ok(mode)
    }
}
/// Determines how will GCC be provided.
#[derive(Default, Clone)]
pub enum GccCiMode {
    /// Build GCC from the local `src/gcc` submodule.
    #[default]
    BuildLocally,
    /// Try to download GCC from CI.
    /// If it is not available on CI, it will be built locally instead.
    DownloadFromCi,
}
/// Overwrites `*field` with the payload of `val` when one is present;
/// a `None` leaves the current value untouched.
pub fn set<T>(field: &mut T, val: Option<T>) {
    match val {
        Some(value) => *field = value,
        None => {}
    }
}
/// Resolves a configured thread count: `0` means "use all available
/// parallelism" (falling back to 1 if it cannot be queried).
pub fn threads_from_config(v: u32) -> u32 {
    if v != 0 {
        return v;
    }
    std::thread::available_parallelism().map_or(1, std::num::NonZeroUsize::get) as u32
}

View file

@ -0,0 +1,147 @@
use std::fmt;
use crate::core::config::SplitDebuginfo;
use crate::utils::cache::{INTERNER, Interned};
use crate::{Path, env};
/// A build/host/target platform selected by the user, either a plain triple
/// or a custom target-spec file (see `TargetSelection::from_user`).
#[derive(Copy, Clone, Default, PartialEq, Eq, PartialOrd, Ord, Hash)]
// N.B.: This type is used everywhere, and the entire codebase relies on it being Copy.
// Making !Copy is highly nontrivial!
pub struct TargetSelection {
    /// The interned target triple (for spec files, the file stem).
    pub triple: Interned<String>,
    /// Path to the custom target-spec file, when the selection named an existing file.
    pub file: Option<Interned<String>>,
    /// Set only by `create_synthetic`; see src/bootstrap/synthetic_targets.rs.
    pub synthetic: bool,
}
/// Newtype over `Vec<TargetSelection>` so we can implement custom parsing logic
/// (see `target_selection_list`, which splits a comma-separated string).
#[derive(Clone, Default, PartialEq, Eq, PartialOrd, Ord, Hash, Debug)]
pub struct TargetSelectionList(pub Vec<TargetSelection>);
/// Parses a comma-separated list of target selections; empty segments are skipped.
pub fn target_selection_list(s: &str) -> Result<TargetSelectionList, String> {
    let targets = s
        .split(',')
        .filter(|part| !part.is_empty())
        .map(TargetSelection::from_user)
        .collect();
    Ok(TargetSelectionList(targets))
}
impl TargetSelection {
    /// Parses a user-supplied target selection. If `selection` names an existing
    /// file it is treated as a custom target-spec file: the triple becomes the
    /// file stem and the path is remembered in `file`. Otherwise `selection` is
    /// taken to be the triple itself.
    pub fn from_user(selection: &str) -> Self {
        let path = Path::new(selection);
        let (triple, file) = if path.exists() {
            let triple = path
                .file_stem()
                .expect("Target specification file has no file stem")
                .to_str()
                .expect("Target specification file stem is not UTF-8");
            (triple, Some(selection))
        } else {
            (selection, None)
        };
        // Intern both strings so the resulting struct stays `Copy`.
        let triple = INTERNER.intern_str(triple);
        let file = file.map(|f| INTERNER.intern_str(f));
        Self { triple, file, synthetic: false }
    }
    /// Builds a selection flagged as synthetic; see src/bootstrap/synthetic_targets.rs.
    pub fn create_synthetic(triple: &str, file: &str) -> Self {
        Self {
            triple: INTERNER.intern_str(triple),
            file: Some(INTERNER.intern_str(file)),
            synthetic: true,
        }
    }
    /// The value to pass to rustc's `--target`: the spec-file path when one was
    /// given, the plain triple otherwise.
    pub fn rustc_target_arg(&self) -> &str {
        self.file.as_ref().unwrap_or(&self.triple)
    }
    // The following helpers all inspect the triple text only, never the spec file.
    pub fn contains(&self, needle: &str) -> bool {
        self.triple.contains(needle)
    }
    pub fn starts_with(&self, needle: &str) -> bool {
        self.triple.starts_with(needle)
    }
    pub fn ends_with(&self, needle: &str) -> bool {
        self.triple.ends_with(needle)
    }
    // See src/bootstrap/synthetic_targets.rs
    pub fn is_synthetic(&self) -> bool {
        self.synthetic
    }
    pub fn is_msvc(&self) -> bool {
        self.contains("msvc")
    }
    pub fn is_windows(&self) -> bool {
        self.contains("windows")
    }
    pub fn is_windows_gnu(&self) -> bool {
        self.ends_with("windows-gnu")
    }
    /// True when targeting Windows *and* the build is running under a Cygwin
    /// shell (detected via the `OSTYPE` environment variable).
    pub fn is_cygwin(&self) -> bool {
        self.is_windows() &&
        // ref. https://cygwin.com/pipermail/cygwin/2022-February/250802.html
        env::var("OSTYPE").is_ok_and(|v| v.to_lowercase().contains("cygwin"))
    }
    pub fn needs_crt_begin_end(&self) -> bool {
        self.contains("musl") && !self.contains("unikraft")
    }
    /// Path to the file defining the custom target, if any.
    pub fn filepath(&self) -> Option<&Path> {
        self.file.as_ref().map(Path::new)
    }
}
impl fmt::Display for TargetSelection {
    /// Renders the triple, followed by `(spec-file)` when a custom spec is set.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        match self.file {
            Some(file) => write!(f, "{}({file})", self.triple),
            None => write!(f, "{}", self.triple),
        }
    }
}
impl fmt::Debug for TargetSelection {
    /// Debug output is identical to the `Display` output.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        fmt::Display::fmt(self, f)
    }
}
// Allows comparisons like `target == "x86_64-unknown-linux-gnu"`.
// Only the triple text is compared; the custom spec `file`, if any, is not
// part of the comparison. Relies on `Interned<String>`'s `PartialEq` impls.
impl PartialEq<&str> for TargetSelection {
    fn eq(&self, other: &&str) -> bool {
        self.triple == *other
    }
}
// Targets are often used as directory names throughout bootstrap.
// This impl makes it more ergonomic to use them as such.
// Only the triple is used as the path; the spec file is ignored.
impl AsRef<Path> for TargetSelection {
    fn as_ref(&self) -> &Path {
        self.triple.as_ref()
    }
}
impl SplitDebuginfo {
    /// Returns the default `-Csplit-debuginfo` value for the current target. See the comment for
    /// `rust.split-debuginfo` in `bootstrap.example.toml`.
    pub fn default_for_platform(target: TargetSelection) -> Self {
        // Apple targets are checked first; everything else is keyed off Windows.
        if target.contains("apple") {
            return SplitDebuginfo::Unpacked;
        }
        if target.is_windows() { SplitDebuginfo::Packed } else { SplitDebuginfo::Off }
    }
}

View file

@ -10,12 +10,14 @@ use clap::CommandFactory;
use serde::Deserialize;
use super::flags::Flags;
use super::{ChangeIdWrapper, Config, RUSTC_IF_UNCHANGED_ALLOWED_PATHS};
use super::toml::change_id::ChangeIdWrapper;
use super::{Config, RUSTC_IF_UNCHANGED_ALLOWED_PATHS};
use crate::ChangeId;
use crate::core::build_steps::clippy::{LintConfig, get_clippy_rules_in_order};
use crate::core::build_steps::llvm;
use crate::core::build_steps::llvm::LLVM_INVALIDATION_PATHS;
use crate::core::config::{LldMode, Target, TargetSelection, TomlConfig};
use crate::core::config::toml::TomlConfig;
use crate::core::config::{LldMode, Target, TargetSelection};
use crate::utils::tests::git::git_test;
pub(crate) fn parse(config: &str) -> Config {

View file

@ -0,0 +1,72 @@
//! This module defines the `Build` struct, which represents the `[build]` table
//! in the `bootstrap.toml` configuration file.
//!
//! The `[build]` table contains global options that influence the overall build process,
//! such as default host and target triples, paths to tools, build directories, and
//! various feature flags. These options apply across different stages and components
//! unless specifically overridden by other configuration sections or command-line flags.
use serde::{Deserialize, Deserializer};
use crate::core::config::toml::ReplaceOpt;
use crate::core::config::{Merge, StringOrBool};
use crate::{HashSet, PathBuf, define_config, exit};
define_config! {
    /// TOML representation of various global build decisions.
    #[derive(Default)]
    struct Build {
        // NOTE(review): fields carry `//` comments because the `define_config!`
        // field grammar does not accept attributes (and `///` docs are attributes).
        // Host/target selection.
        build: Option<String> = "build",
        description: Option<String> = "description",
        host: Option<Vec<String>> = "host",
        target: Option<Vec<String>> = "target",
        build_dir: Option<String> = "build-dir",
        // Paths to external tools used during the build.
        cargo: Option<PathBuf> = "cargo",
        rustc: Option<PathBuf> = "rustc",
        rustfmt: Option<PathBuf> = "rustfmt",
        cargo_clippy: Option<PathBuf> = "cargo-clippy",
        docs: Option<bool> = "docs",
        compiler_docs: Option<bool> = "compiler-docs",
        library_docs_private_items: Option<bool> = "library-docs-private-items",
        docs_minification: Option<bool> = "docs-minification",
        submodules: Option<bool> = "submodules",
        gdb: Option<String> = "gdb",
        lldb: Option<String> = "lldb",
        nodejs: Option<String> = "nodejs",
        npm: Option<String> = "npm",
        python: Option<String> = "python",
        reuse: Option<String> = "reuse",
        locked_deps: Option<bool> = "locked-deps",
        vendor: Option<bool> = "vendor",
        full_bootstrap: Option<bool> = "full-bootstrap",
        bootstrap_cache_path: Option<PathBuf> = "bootstrap-cache-path",
        extended: Option<bool> = "extended",
        tools: Option<HashSet<String>> = "tools",
        verbose: Option<usize> = "verbose",
        sanitizers: Option<bool> = "sanitizers",
        profiler: Option<bool> = "profiler",
        cargo_native_static: Option<bool> = "cargo-native-static",
        low_priority: Option<bool> = "low-priority",
        configure_args: Option<Vec<String>> = "configure-args",
        local_rebuild: Option<bool> = "local-rebuild",
        print_step_timings: Option<bool> = "print-step-timings",
        print_step_rusage: Option<bool> = "print-step-rusage",
        // Per-command default stages (e.g. `x.py check`/`doc`/`build`/`test`).
        check_stage: Option<u32> = "check-stage",
        doc_stage: Option<u32> = "doc-stage",
        build_stage: Option<u32> = "build-stage",
        test_stage: Option<u32> = "test-stage",
        install_stage: Option<u32> = "install-stage",
        dist_stage: Option<u32> = "dist-stage",
        bench_stage: Option<u32> = "bench-stage",
        patch_binaries_for_nix: Option<bool> = "patch-binaries-for-nix",
        // NOTE: only parsed by bootstrap.py, `--feature build-metrics` enables metrics unconditionally
        metrics: Option<bool> = "metrics",
        android_ndk: Option<PathBuf> = "android-ndk",
        optimized_compiler_builtins: Option<bool> = "optimized-compiler-builtins",
        jobs: Option<u32> = "jobs",
        compiletest_diff_tool: Option<String> = "compiletest-diff-tool",
        compiletest_use_stage0_libtest: Option<bool> = "compiletest-use-stage0-libtest",
        ccache: Option<StringOrBool> = "ccache",
        exclude: Option<Vec<PathBuf>> = "exclude",
    }
}

View file

@ -0,0 +1,34 @@
use serde::{Deserialize, Deserializer};
use serde_derive::Deserialize;
/// This enum is used for deserializing change IDs from TOML, allowing both numeric values and the string `"ignore"`.
#[derive(Clone, Debug, PartialEq)]
pub enum ChangeId {
    /// The user opted out of change tracking via `change-id = "ignore"`.
    Ignore,
    /// A concrete change ID.
    Id(usize),
}
/// Since we use `#[serde(deny_unknown_fields)]` on `TomlConfig`, we need a wrapper type
/// for the "change-id" field to parse it even if other fields are invalid. This ensures
/// that if deserialization fails due to other fields, we can still provide the changelogs
/// to allow developers to potentially find the reason for the failure in the logs.
#[derive(Deserialize, Default)]
pub(crate) struct ChangeIdWrapper {
    #[serde(alias = "change-id", default, deserialize_with = "deserialize_change_id")]
    pub(crate) inner: Option<ChangeId>,
}
fn deserialize_change_id<'de, D: Deserializer<'de>>(
deserializer: D,
) -> Result<Option<ChangeId>, D::Error> {
let value = toml::Value::deserialize(deserializer)?;
Ok(match value {
toml::Value::String(s) if s == "ignore" => Some(ChangeId::Ignore),
toml::Value::Integer(i) => Some(ChangeId::Id(i as usize)),
_ => {
return Err(serde::de::Error::custom(
"expected \"ignore\" or an integer for change-id",
));
}
})
}

View file

@ -0,0 +1,52 @@
//! This module defines the `Dist` struct, which represents the `[dist]` table
//! in the `bootstrap.toml` configuration file.
//!
//! The `[dist]` table contains options related to the distribution process,
//! including signing, uploading artifacts, source tarballs, compression settings,
//! and inclusion of specific tools.
use serde::{Deserialize, Deserializer};
use crate::core::config::toml::ReplaceOpt;
use crate::core::config::{Merge, set};
use crate::{Config, HashSet, PathBuf, define_config, exit};
define_config! {
    /// TOML representation of the `[dist]` table: signing, upload address,
    /// source tarballs, compression, and tool inclusion.
    struct Dist {
        sign_folder: Option<String> = "sign-folder",
        upload_addr: Option<String> = "upload-addr",
        src_tarball: Option<bool> = "src-tarball",
        compression_formats: Option<Vec<String>> = "compression-formats",
        compression_profile: Option<String> = "compression-profile",
        include_mingw_linker: Option<bool> = "include-mingw-linker",
        vendor: Option<bool> = "vendor",
    }
}
impl Config {
    /// Applies distribution-related configuration from the `Dist` struct
    /// to the global `Config` structure. Does nothing when no `[dist]`
    /// table was present.
    pub fn apply_dist_config(&mut self, toml_dist: Option<Dist>) {
        let Some(dist) = toml_dist else { return };
        // Destructure so a newly added field can't be silently ignored here.
        let Dist {
            sign_folder,
            upload_addr,
            src_tarball,
            compression_formats,
            compression_profile,
            include_mingw_linker,
            vendor,
        } = dist;
        self.dist_sign_folder = sign_folder.map(PathBuf::from);
        self.dist_upload_addr = upload_addr;
        self.dist_compression_formats = compression_formats;
        set(&mut self.dist_compression_profile, compression_profile);
        set(&mut self.rust_dist_src, src_tarball);
        set(&mut self.dist_include_mingw_linker, include_mingw_linker);
        self.dist_vendor = vendor.unwrap_or_else(|| {
            // If we're building from git or tarball sources, enable it by default.
            self.rust_info.is_managed_git_subrepository() || self.rust_info.is_from_tarball()
        });
    }
}

View file

@ -0,0 +1,34 @@
//! This module defines the `Gcc` struct, which represents the `[gcc]` table
//! in the `bootstrap.toml` configuration file.
//!
//! The `[gcc]` table contains options specifically related to building or
//! acquiring the GCC compiler for use within the Rust build process.
use serde::{Deserialize, Deserializer};
use crate::core::config::toml::ReplaceOpt;
use crate::core::config::{GccCiMode, Merge};
use crate::{Config, HashSet, PathBuf, define_config, exit};
define_config! {
    /// TOML representation of how the GCC build is configured.
    struct Gcc {
        // When `true`, GCC is downloaded from CI instead of being built locally.
        download_ci_gcc: Option<bool> = "download-ci-gcc",
    }
}
impl Config {
    /// Applies GCC-related configuration from the `TomlGcc` struct to the
    /// global `Config` structure. Does nothing when no `[gcc]` table was present.
    pub fn apply_gcc_config(&mut self, toml_gcc: Option<Gcc>) {
        let Some(gcc) = toml_gcc else { return };
        self.gcc_ci_mode = match gcc.download_ci_gcc {
            Some(true) => GccCiMode::DownloadFromCi,
            Some(false) => GccCiMode::BuildLocally,
            None => GccCiMode::default(),
        };
    }
}

View file

@ -0,0 +1,43 @@
//! This module defines the `Install` struct, which represents the `[install]` table
//! in the `bootstrap.toml` configuration file.
//!
//! The `[install]` table contains options that specify the installation paths
//! for various components of the Rust toolchain. These paths determine where
//! executables, libraries, documentation, and other files will be placed
//! during the `install` stage of the build.
use serde::{Deserialize, Deserializer};
use crate::core::config::toml::ReplaceOpt;
use crate::core::config::{Merge, set};
use crate::{Config, HashSet, PathBuf, define_config, exit};
define_config! {
    /// TOML representation of various global install decisions.
    struct Install {
        // Installation directories; all are plain strings converted to paths
        // in `Config::apply_install_config`.
        prefix: Option<String> = "prefix",
        sysconfdir: Option<String> = "sysconfdir",
        docdir: Option<String> = "docdir",
        bindir: Option<String> = "bindir",
        libdir: Option<String> = "libdir",
        mandir: Option<String> = "mandir",
        datadir: Option<String> = "datadir",
    }
}
impl Config {
    /// Applies installation-related configuration from the `Install` struct
    /// to the global `Config` structure. Does nothing when no `[install]`
    /// table was present.
    pub fn apply_install_config(&mut self, toml_install: Option<Install>) {
        let Some(install) = toml_install else { return };
        let Install { prefix, sysconfdir, docdir, bindir, libdir, mandir, datadir } = install;
        self.prefix = prefix.map(PathBuf::from);
        self.sysconfdir = sysconfdir.map(PathBuf::from);
        self.datadir = datadir.map(PathBuf::from);
        self.docdir = docdir.map(PathBuf::from);
        // Unlike the others, `bindir` keeps its current value when unset.
        set(&mut self.bindir, bindir.map(PathBuf::from));
        self.libdir = libdir.map(PathBuf::from);
        self.mandir = mandir.map(PathBuf::from);
    }
}

View file

@ -0,0 +1,284 @@
//! This module defines the `Llvm` struct, which represents the `[llvm]` table
//! in the `bootstrap.toml` configuration file.
use serde::{Deserialize, Deserializer};
use crate::core::config::toml::{Merge, ReplaceOpt, TomlConfig};
use crate::core::config::{StringOrBool, set};
use crate::{Config, HashMap, HashSet, PathBuf, define_config, exit};
define_config! {
    /// TOML representation of how the LLVM build is configured.
    struct Llvm {
        optimize: Option<bool> = "optimize",
        thin_lto: Option<bool> = "thin-lto",
        release_debuginfo: Option<bool> = "release-debuginfo",
        assertions: Option<bool> = "assertions",
        tests: Option<bool> = "tests",
        enzyme: Option<bool> = "enzyme",
        plugins: Option<bool> = "plugins",
        // FIXME: Remove this field at Q2 2025, it has been replaced by build.ccache
        ccache: Option<StringOrBool> = "ccache",
        static_libstdcpp: Option<bool> = "static-libstdcpp",
        libzstd: Option<bool> = "libzstd",
        ninja: Option<bool> = "ninja",
        targets: Option<String> = "targets",
        experimental_targets: Option<String> = "experimental-targets",
        link_jobs: Option<u32> = "link-jobs",
        link_shared: Option<bool> = "link-shared",
        version_suffix: Option<String> = "version-suffix",
        clang_cl: Option<String> = "clang-cl",
        cflags: Option<String> = "cflags",
        cxxflags: Option<String> = "cxxflags",
        ldflags: Option<String> = "ldflags",
        use_libcxx: Option<bool> = "use-libcxx",
        use_linker: Option<String> = "use-linker",
        allow_old_toolchain: Option<bool> = "allow-old-toolchain",
        offload: Option<bool> = "offload",
        polly: Option<bool> = "polly",
        clang: Option<bool> = "clang",
        enable_warnings: Option<bool> = "enable-warnings",
        // May be `true`/`false` or a string; see `Config::parse_download_ci_llvm`.
        download_ci_llvm: Option<StringOrBool> = "download-ci-llvm",
        build_config: Option<HashMap<String, String>> = "build-config",
    }
}
/// Compares the current `Llvm` options against those in the CI LLVM builder and detects any incompatible options.
/// It does this by destructuring the `Llvm` instance to make sure every `Llvm` field is covered and not missing.
#[cfg(not(test))]
pub fn check_incompatible_options_for_ci_llvm(
    current_config_toml: TomlConfig,
    ci_config_toml: TomlConfig,
) -> Result<(), String> {
    // Hard error: the locally-set value must match the CI builder's value (or be unset).
    macro_rules! err {
        ($current:expr, $expected:expr) => {
            if let Some(current) = &$current {
                if Some(current) != $expected.as_ref() {
                    return Err(format!(
                        "ERROR: Setting `llvm.{}` is incompatible with `llvm.download-ci-llvm`. \
                        Current value: {:?}, Expected value(s): {}{:?}",
                        stringify!($expected).replace("_", "-"),
                        $current,
                        if $expected.is_some() { "None/" } else { "" },
                        $expected,
                    ));
                };
            };
        };
    }
    // Soft mismatch: printed as a warning instead of failing.
    macro_rules! warn {
        ($current:expr, $expected:expr) => {
            if let Some(current) = &$current {
                if Some(current) != $expected.as_ref() {
                    println!(
                        "WARNING: `llvm.{}` has no effect with `llvm.download-ci-llvm`. \
                        Current value: {:?}, Expected value(s): {}{:?}",
                        stringify!($expected).replace("_", "-"),
                        $current,
                        if $expected.is_some() { "None/" } else { "" },
                        $expected,
                    );
                };
            };
        };
    }
    // If either side lacks an `[llvm]` table there is nothing to compare.
    let (Some(current_llvm_config), Some(ci_llvm_config)) =
        (current_config_toml.llvm, ci_config_toml.llvm)
    else {
        return Ok(());
    };
    // Destructure so that adding a field to `Llvm` forces this check to be revisited.
    // Fields bound to `_` are intentionally not compared.
    let Llvm {
        optimize,
        thin_lto,
        release_debuginfo,
        assertions: _,
        tests: _,
        plugins,
        ccache: _,
        static_libstdcpp: _,
        libzstd,
        ninja: _,
        targets,
        experimental_targets,
        link_jobs: _,
        link_shared: _,
        version_suffix,
        clang_cl,
        cflags,
        cxxflags,
        ldflags,
        use_libcxx,
        use_linker,
        allow_old_toolchain,
        offload,
        polly,
        clang,
        enable_warnings,
        download_ci_llvm: _,
        build_config,
        enzyme,
    } = ci_llvm_config;
    err!(current_llvm_config.optimize, optimize);
    err!(current_llvm_config.thin_lto, thin_lto);
    err!(current_llvm_config.release_debuginfo, release_debuginfo);
    err!(current_llvm_config.libzstd, libzstd);
    err!(current_llvm_config.targets, targets);
    err!(current_llvm_config.experimental_targets, experimental_targets);
    err!(current_llvm_config.clang_cl, clang_cl);
    err!(current_llvm_config.version_suffix, version_suffix);
    err!(current_llvm_config.cflags, cflags);
    err!(current_llvm_config.cxxflags, cxxflags);
    err!(current_llvm_config.ldflags, ldflags);
    err!(current_llvm_config.use_libcxx, use_libcxx);
    err!(current_llvm_config.use_linker, use_linker);
    err!(current_llvm_config.allow_old_toolchain, allow_old_toolchain);
    err!(current_llvm_config.offload, offload);
    err!(current_llvm_config.polly, polly);
    err!(current_llvm_config.clang, clang);
    err!(current_llvm_config.build_config, build_config);
    err!(current_llvm_config.plugins, plugins);
    err!(current_llvm_config.enzyme, enzyme);
    // `enable-warnings` only warns on mismatch.
    warn!(current_llvm_config.enable_warnings, enable_warnings);
    Ok(())
}
impl Config {
    /// Applies `[llvm]` table configuration to the global `Config` structure.
    ///
    /// `ccache` is an in/out parameter: if it is still unset, the deprecated
    /// `llvm.ccache` value (if any) is moved into it.
    pub fn apply_llvm_config(
        &mut self,
        toml_llvm: Option<Llvm>,
        ccache: &mut Option<StringOrBool>,
    ) {
        // Collected inside the `if let` below, applied unconditionally at the end
        // so the fields get their `false` defaults even without an `[llvm]` table.
        let mut llvm_tests = None;
        let mut llvm_enzyme = None;
        let mut llvm_offload = None;
        let mut llvm_plugins = None;
        if let Some(llvm) = toml_llvm {
            // Destructure so that a newly added field can't be silently ignored here.
            let Llvm {
                optimize: optimize_toml,
                thin_lto,
                release_debuginfo,
                assertions: _,
                tests,
                enzyme,
                plugins,
                ccache: llvm_ccache,
                static_libstdcpp,
                libzstd,
                ninja,
                targets,
                experimental_targets,
                link_jobs,
                link_shared,
                version_suffix,
                clang_cl,
                cflags,
                cxxflags,
                ldflags,
                use_libcxx,
                use_linker,
                allow_old_toolchain,
                offload,
                polly,
                clang,
                enable_warnings,
                download_ci_llvm,
                build_config,
            } = llvm;
            // `llvm.ccache` is deprecated in favor of `build.ccache`, but still
            // honored when the latter is unset.
            if llvm_ccache.is_some() {
                eprintln!("Warning: llvm.ccache is deprecated. Use build.ccache instead.");
            }
            if ccache.is_none() {
                *ccache = llvm_ccache;
            }
            set(&mut self.ninja_in_file, ninja);
            llvm_tests = tests;
            llvm_enzyme = enzyme;
            llvm_offload = offload;
            llvm_plugins = plugins;
            set(&mut self.llvm_optimize, optimize_toml);
            set(&mut self.llvm_thin_lto, thin_lto);
            set(&mut self.llvm_release_debuginfo, release_debuginfo);
            set(&mut self.llvm_static_stdcpp, static_libstdcpp);
            set(&mut self.llvm_libzstd, libzstd);
            if let Some(v) = link_shared {
                self.llvm_link_shared.set(Some(v));
            }
            self.llvm_targets.clone_from(&targets);
            self.llvm_experimental_targets.clone_from(&experimental_targets);
            self.llvm_link_jobs = link_jobs;
            self.llvm_version_suffix.clone_from(&version_suffix);
            self.llvm_clang_cl.clone_from(&clang_cl);
            self.llvm_cflags.clone_from(&cflags);
            self.llvm_cxxflags.clone_from(&cxxflags);
            self.llvm_ldflags.clone_from(&ldflags);
            set(&mut self.llvm_use_libcxx, use_libcxx);
            self.llvm_use_linker.clone_from(&use_linker);
            self.llvm_allow_old_toolchain = allow_old_toolchain.unwrap_or(false);
            self.llvm_offload = offload.unwrap_or(false);
            self.llvm_polly = polly.unwrap_or(false);
            self.llvm_clang = clang.unwrap_or(false);
            self.llvm_enable_warnings = enable_warnings.unwrap_or(false);
            self.llvm_build_config = build_config.clone().unwrap_or(Default::default());
            self.llvm_from_ci = self.parse_download_ci_llvm(download_ci_llvm, self.llvm_assertions);
            // Warn about options that are ignored (or only partially used) when
            // LLVM is downloaded from CI instead of built locally.
            if self.llvm_from_ci {
                let warn = |option: &str| {
                    println!(
                        "WARNING: `{option}` will only be used on `compiler/rustc_llvm` build, not for the LLVM build."
                    );
                    println!(
                        "HELP: To use `{option}` for LLVM builds, set `download-ci-llvm` option to false."
                    );
                };
                if static_libstdcpp.is_some() {
                    warn("static-libstdcpp");
                }
                if link_shared.is_some() {
                    warn("link-shared");
                }
                // FIXME(#129153): instead of all the ad-hoc `download-ci-llvm` checks that follow,
                // use the `builder-config` present in tarballs since #128822 to compare the local
                // config to the ones used to build the LLVM artifacts on CI, and only notify users
                // if they've chosen a different value.
                if libzstd.is_some() {
                    println!(
                        "WARNING: when using `download-ci-llvm`, the local `llvm.libzstd` option, \
                        like almost all `llvm.*` options, will be ignored and set by the LLVM CI \
                        artifacts builder config."
                    );
                    println!(
                        "HELP: To use `llvm.libzstd` for LLVM/LLD builds, set `download-ci-llvm` option to false."
                    );
                }
            }
            if !self.llvm_from_ci && self.llvm_thin_lto && link_shared.is_none() {
                // If we're building with ThinLTO on, by default we want to link
                // to LLVM shared, to avoid re-doing ThinLTO (which happens in
                // the link step) with each stage.
                self.llvm_link_shared.set(Some(true));
            }
        } else {
            self.llvm_from_ci = self.parse_download_ci_llvm(None, false);
        }
        self.llvm_tests = llvm_tests.unwrap_or(false);
        self.llvm_enzyme = llvm_enzyme.unwrap_or(false);
        self.llvm_offload = llvm_offload.unwrap_or(false);
        self.llvm_plugins = llvm_plugins.unwrap_or(false);
    }
}

View file

@ -0,0 +1,184 @@
//! This module defines the structures that directly mirror the `bootstrap.toml`
//! file's format. These types are used for `serde` deserialization.
//!
//! Crucially, this module also houses the core logic for loading, parsing, and merging
//! these raw TOML configurations from various sources (the main `bootstrap.toml`,
//! included files, profile defaults, and command-line overrides). This processed
//! TOML data then serves as an intermediate representation, which is further
//! transformed and applied to the final [`Config`] struct.
use serde::Deserialize;
use serde_derive::Deserialize;
pub mod build;
pub mod change_id;
pub mod dist;
pub mod gcc;
pub mod install;
pub mod llvm;
pub mod rust;
pub mod target;
use build::Build;
use change_id::{ChangeId, ChangeIdWrapper};
use dist::Dist;
use gcc::Gcc;
use install::Install;
use llvm::Llvm;
use rust::Rust;
use target::TomlTarget;
use crate::core::config::{Merge, ReplaceOpt};
use crate::{Config, HashMap, HashSet, Path, PathBuf, exit, fs, t};
/// Structure of the `bootstrap.toml` file that configuration is read from.
///
/// This structure uses `Decodable` to automatically decode a TOML configuration
/// file into this format, and then this is traversed and written into the above
/// `Config` structure.
#[derive(Deserialize, Default)]
#[serde(deny_unknown_fields, rename_all = "kebab-case")]
pub(crate) struct TomlConfig {
    /// Flattened so the top-level `change-id` key lands here even when other
    /// fields fail to parse (see `ChangeIdWrapper`).
    #[serde(flatten)]
    pub(crate) change_id: ChangeIdWrapper,
    pub(super) build: Option<Build>,
    pub(super) install: Option<Install>,
    pub(super) llvm: Option<Llvm>,
    pub(super) gcc: Option<Gcc>,
    pub(super) rust: Option<Rust>,
    pub(super) target: Option<HashMap<String, TomlTarget>>,
    pub(super) dist: Option<Dist>,
    /// Named defaults profile to layer underneath this config.
    pub(super) profile: Option<String>,
    /// Additional config files to merge in; see `Merge for TomlConfig`.
    pub(super) include: Option<Vec<PathBuf>>,
}
impl Merge for TomlConfig {
    /// Merges another parsed config into this one section-by-section, then
    /// recursively merges any files listed in the incoming `include` key.
    fn merge(
        &mut self,
        parent_config_path: Option<PathBuf>,
        included_extensions: &mut HashSet<PathBuf>,
        TomlConfig { build, install, llvm, gcc, rust, dist, target, profile, change_id, include }: Self,
        replace: ReplaceOpt,
    ) {
        // Merges one optional section: recurse when both sides have it,
        // otherwise adopt the incoming one.
        fn do_merge<T: Merge>(x: &mut Option<T>, y: Option<T>, replace: ReplaceOpt) {
            if let Some(new) = y {
                if let Some(original) = x {
                    original.merge(None, &mut Default::default(), new, replace);
                } else {
                    *x = Some(new);
                }
            }
        }
        self.change_id.inner.merge(None, &mut Default::default(), change_id.inner, replace);
        self.profile.merge(None, &mut Default::default(), profile, replace);
        do_merge(&mut self.build, build, replace);
        do_merge(&mut self.install, install, replace);
        do_merge(&mut self.llvm, llvm, replace);
        do_merge(&mut self.gcc, gcc, replace);
        do_merge(&mut self.rust, rust, replace);
        do_merge(&mut self.dist, dist, replace);
        // `[target.*]` tables are merged per-triple.
        match (self.target.as_mut(), target) {
            (_, None) => {}
            (None, Some(target)) => self.target = Some(target),
            (Some(original_target), Some(new_target)) => {
                for (triple, new) in new_target {
                    if let Some(original) = original_target.get_mut(&triple) {
                        original.merge(None, &mut Default::default(), new, replace);
                    } else {
                        original_target.insert(triple, new);
                    }
                }
            }
        }
        // Relative include paths are resolved against the including file's directory.
        let parent_dir = parent_config_path
            .as_ref()
            .and_then(|p| p.parent().map(ToOwned::to_owned))
            .unwrap_or_default();
        // `include` handled later since we ignore duplicates using `ReplaceOpt::IgnoreDuplicate` to
        // keep the upper-level configuration to take precedence.
        // Iterated in reverse so that earlier entries in the list win.
        for include_path in include.clone().unwrap_or_default().iter().rev() {
            let include_path = parent_dir.join(include_path);
            let include_path = include_path.canonicalize().unwrap_or_else(|e| {
                eprintln!("ERROR: Failed to canonicalize '{}' path: {e}", include_path.display());
                exit!(2);
            });
            let included_toml = Config::get_toml_inner(&include_path).unwrap_or_else(|e| {
                eprintln!("ERROR: Failed to parse '{}': {e}", include_path.display());
                exit!(2);
            });
            // `insert` returning false means this path is already on the include
            // stack, i.e. the includes form a cycle.
            assert!(
                included_extensions.insert(include_path.clone()),
                "Cyclic inclusion detected: '{}' is being included again before its previous inclusion was fully processed.",
                include_path.display()
            );
            self.merge(
                Some(include_path.clone()),
                included_extensions,
                included_toml,
                // Ensures that parent configuration always takes precedence
                // over child configurations.
                ReplaceOpt::IgnoreDuplicate,
            );
            included_extensions.remove(&include_path);
        }
    }
}
/// This file is embedded in the overlay directory of the tarball sources. It is
/// useful in scenarios where developers want to see how the tarball sources were
/// generated.
///
/// We also use this file to compare the host's bootstrap.toml against the CI rustc builder
/// configuration to detect any incompatible options.
pub const BUILDER_CONFIG_FILENAME: &str = "builder-config";
impl Config {
    /// Reads the builder config embedded in a downloaded artifact dir named
    /// `build_name`; returns an empty default during dry runs.
    pub(crate) fn get_builder_toml(&self, build_name: &str) -> Result<TomlConfig, toml::de::Error> {
        if self.dry_run() {
            return Ok(TomlConfig::default());
        }
        let builder_config_path =
            self.out.join(self.build.triple).join(build_name).join(BUILDER_CONFIG_FILENAME);
        Self::get_toml(&builder_config_path)
    }
    /// Parses a config file; stubbed to an empty default under `cfg(test)` so
    /// unit tests never read files from disk.
    pub(crate) fn get_toml(file: &Path) -> Result<TomlConfig, toml::de::Error> {
        #[cfg(test)]
        return Ok(TomlConfig::default());
        #[cfg(not(test))]
        Self::get_toml_inner(file)
    }
    /// Reads and parses `file` into a `TomlConfig`. On a parse failure, it
    /// additionally tries to extract just the `change-id` so recent config
    /// changes can be printed as a hint for why parsing failed.
    pub(crate) fn get_toml_inner(file: &Path) -> Result<TomlConfig, toml::de::Error> {
        let contents =
            t!(fs::read_to_string(file), format!("config file {} not found", file.display()));
        // Deserialize to Value and then TomlConfig to prevent the Deserialize impl of
        // TomlConfig and sub types to be monomorphized 5x by toml.
        toml::from_str(&contents)
            .and_then(|table: toml::Value| TomlConfig::deserialize(table))
            .inspect_err(|_| {
                if let Ok(ChangeIdWrapper { inner: Some(ChangeId::Id(id)) }) =
                    toml::from_str::<toml::Value>(&contents)
                        .and_then(|table: toml::Value| ChangeIdWrapper::deserialize(table))
                {
                    let changes = crate::find_recent_config_change_ids(id);
                    if !changes.is_empty() {
                        println!(
                            "WARNING: There have been changes to x.py since you last updated:\n{}",
                            crate::human_readable_changes(changes)
                        );
                    }
                }
            })
    }
}

View file

@ -0,0 +1,664 @@
//! This module defines the `Rust` struct, which represents the `[rust]` table
//! in the `bootstrap.toml` configuration file.
use std::str::FromStr;
use serde::{Deserialize, Deserializer};
use crate::core::build_steps::compile::CODEGEN_BACKEND_PREFIX;
use crate::core::config::toml::TomlConfig;
use crate::core::config::{
DebuginfoLevel, Merge, ReplaceOpt, RustcLto, StringOrBool, set, threads_from_config,
};
use crate::flags::Warnings;
use crate::{BTreeSet, Config, HashSet, PathBuf, TargetSelection, define_config, exit};
define_config! {
    /// TOML representation of how the Rust build is configured.
    ///
    /// Field names are the Rust-side identifiers; the string on the right of `=`
    /// is the key as it appears in `bootstrap.toml` under `[rust]`.
    struct Rust {
        // Optimization / debug build profile.
        optimize: Option<RustOptimize> = "optimize",
        debug: Option<bool> = "debug",
        codegen_units: Option<u32> = "codegen-units",
        codegen_units_std: Option<u32> = "codegen-units-std",
        rustc_debug_assertions: Option<bool> = "debug-assertions",
        randomize_layout: Option<bool> = "randomize-layout",
        std_debug_assertions: Option<bool> = "debug-assertions-std",
        tools_debug_assertions: Option<bool> = "debug-assertions-tools",
        overflow_checks: Option<bool> = "overflow-checks",
        overflow_checks_std: Option<bool> = "overflow-checks-std",
        debug_logging: Option<bool> = "debug-logging",
        // Debuginfo levels, overridable per artifact kind (rustc/std/tools/tests).
        debuginfo_level: Option<DebuginfoLevel> = "debuginfo-level",
        debuginfo_level_rustc: Option<DebuginfoLevel> = "debuginfo-level-rustc",
        debuginfo_level_std: Option<DebuginfoLevel> = "debuginfo-level-std",
        debuginfo_level_tools: Option<DebuginfoLevel> = "debuginfo-level-tools",
        debuginfo_level_tests: Option<DebuginfoLevel> = "debuginfo-level-tests",
        backtrace: Option<bool> = "backtrace",
        incremental: Option<bool> = "incremental",
        default_linker: Option<String> = "default-linker",
        channel: Option<String> = "channel",
        // FIXME: Remove this field at Q2 2025, it has been replaced by build.description
        description: Option<String> = "description",
        musl_root: Option<String> = "musl-root",
        rpath: Option<bool> = "rpath",
        strip: Option<bool> = "strip",
        frame_pointers: Option<bool> = "frame-pointers",
        stack_protector: Option<String> = "stack-protector",
        verbose_tests: Option<bool> = "verbose-tests",
        optimize_tests: Option<bool> = "optimize-tests",
        codegen_tests: Option<bool> = "codegen-tests",
        omit_git_hash: Option<bool> = "omit-git-hash",
        dist_src: Option<bool> = "dist-src",
        save_toolstates: Option<String> = "save-toolstates",
        codegen_backends: Option<Vec<String>> = "codegen-backends",
        // Linker-related toggles.
        llvm_bitcode_linker: Option<bool> = "llvm-bitcode-linker",
        lld: Option<bool> = "lld",
        lld_mode: Option<LldMode> = "use-lld",
        llvm_tools: Option<bool> = "llvm-tools",
        deny_warnings: Option<bool> = "deny-warnings",
        backtrace_on_ice: Option<bool> = "backtrace-on-ice",
        verify_llvm_ir: Option<bool> = "verify-llvm-ir",
        thin_lto_import_instr_limit: Option<u32> = "thin-lto-import-instr-limit",
        remap_debuginfo: Option<bool> = "remap-debuginfo",
        jemalloc: Option<bool> = "jemalloc",
        test_compare_mode: Option<bool> = "test-compare-mode",
        llvm_libunwind: Option<String> = "llvm-libunwind",
        control_flow_guard: Option<bool> = "control-flow-guard",
        ehcont_guard: Option<bool> = "ehcont-guard",
        new_symbol_mangling: Option<bool> = "new-symbol-mangling",
        profile_generate: Option<String> = "profile-generate",
        profile_use: Option<String> = "profile-use",
        // ignored; this is set from an env var set by bootstrap.py
        download_rustc: Option<StringOrBool> = "download-rustc",
        lto: Option<String> = "lto",
        validate_mir_opts: Option<u32> = "validate-mir-opts",
        std_features: Option<BTreeSet<String>> = "std-features",
    }
}
/// LLD in bootstrap works like this:
/// - Self-contained lld: use `rust-lld` from the compiler's sysroot
/// - External: use an external `lld` binary
///
/// It is configured depending on the target:
/// 1) Everything except MSVC
/// - Self-contained: `-Clinker-flavor=gnu-lld-cc -Clink-self-contained=+linker`
/// - External: `-Clinker-flavor=gnu-lld-cc`
/// 2) MSVC
/// - Self-contained: `-Clinker=<path to rust-lld>`
/// - External: `-Clinker=lld`
#[derive(Copy, Clone, Default, Debug, PartialEq)]
pub enum LldMode {
    /// Do not use LLD
    #[default]
    Unused,
    /// Use `rust-lld` from the compiler's sysroot
    SelfContained,
    /// Use an externally provided `lld` binary.
    /// Note that the linker name cannot be overridden, the binary has to be named `lld` and it has
    /// to be in $PATH.
    External,
}
impl LldMode {
    /// Reports whether any flavor of LLD (self-contained or external) is enabled.
    pub fn is_used(&self) -> bool {
        !matches!(self, LldMode::Unused)
    }
}
impl<'de> Deserialize<'de> for LldMode {
    /// Accepts either a boolean (`true` selects the external flavor, `false`
    /// disables LLD) or one of the strings `"external"` / `"self-contained"`.
    fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
    where
        D: Deserializer<'de>,
    {
        struct LldModeVisitor;
        impl serde::de::Visitor<'_> for LldModeVisitor {
            type Value = LldMode;
            fn expecting(&self, formatter: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
                formatter.write_str("one of true, 'self-contained' or 'external'")
            }
            // A bare `true` maps to the external `lld` binary; `false` to Unused.
            fn visit_bool<E>(self, v: bool) -> Result<Self::Value, E>
            where
                E: serde::de::Error,
            {
                Ok(if v { LldMode::External } else { LldMode::Unused })
            }
            fn visit_str<E>(self, v: &str) -> Result<Self::Value, E>
            where
                E: serde::de::Error,
            {
                match v {
                    "external" => Ok(LldMode::External),
                    "self-contained" => Ok(LldMode::SelfContained),
                    _ => Err(E::custom(format!("unknown mode {v}"))),
                }
            }
        }
        deserializer.deserialize_any(LldModeVisitor)
    }
}
/// Value of the `rust.optimize` setting: a named level (`"s"`/`"z"`), a numeric
/// level (0-3), or a plain on/off boolean.
#[derive(Clone, Debug, PartialEq, Eq)]
pub enum RustOptimize {
    String(String),
    Int(u8),
    Bool(bool),
}
impl Default for RustOptimize {
    /// Optimization is disabled unless the configuration asks for it.
    fn default() -> RustOptimize {
        Self::Bool(false)
    }
}
impl<'de> Deserialize<'de> for RustOptimize {
    /// Delegates to [`OptimizeVisitor`], which accepts booleans, the integers
    /// 0 through 3, or the strings `"s"` / `"z"`.
    fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
    where
        D: Deserializer<'de>,
    {
        deserializer.deserialize_any(OptimizeVisitor)
    }
}
// Serde visitor that validates and converts the raw TOML value for
// `rust.optimize` into a `RustOptimize`.
struct OptimizeVisitor;
impl serde::de::Visitor<'_> for OptimizeVisitor {
    type Value = RustOptimize;
    fn expecting(&self, formatter: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        formatter.write_str(r#"one of: 0, 1, 2, 3, "s", "z", true, false"#)
    }
    // Only the size-optimization levels "s" and "z" are valid string inputs.
    fn visit_str<E>(self, value: &str) -> Result<Self::Value, E>
    where
        E: serde::de::Error,
    {
        if matches!(value, "s" | "z") {
            Ok(RustOptimize::String(value.to_string()))
        } else {
            Err(serde::de::Error::custom(format_optimize_error_msg(value)))
        }
    }
    // Numeric levels are restricted to rustc's 0..=3 range.
    fn visit_i64<E>(self, value: i64) -> Result<Self::Value, E>
    where
        E: serde::de::Error,
    {
        if matches!(value, 0..=3) {
            Ok(RustOptimize::Int(value as u8))
        } else {
            Err(serde::de::Error::custom(format_optimize_error_msg(value)))
        }
    }
    // Any boolean is accepted as a plain on/off switch.
    fn visit_bool<E>(self, value: bool) -> Result<Self::Value, E>
    where
        E: serde::de::Error,
    {
        Ok(RustOptimize::Bool(value))
    }
}
/// Builds the diagnostic shown when `rust.optimize` holds an unsupported value.
fn format_optimize_error_msg(v: impl std::fmt::Display) -> String {
    const EXPECTED: &str = r#"expected one of 0, 1, 2, 3, "s", "z", true, false"#;
    format!(r#"unrecognized option for rust optimize: "{}", {}"#, v, EXPECTED)
}
impl RustOptimize {
    /// True when the setting produces optimized artifacts: any named level,
    /// a numeric level above zero, or an explicit `true`.
    pub(crate) fn is_release(&self) -> bool {
        match self {
            RustOptimize::String(_) => true,
            RustOptimize::Bool(enabled) => *enabled,
            RustOptimize::Int(level) => *level > 0,
        }
    }
    /// The explicit `-Copt-level` value, if one was given; booleans carry none.
    pub(crate) fn get_opt_level(&self) -> Option<String> {
        match self {
            RustOptimize::String(level) => Some(level.clone()),
            RustOptimize::Int(level) => Some(level.to_string()),
            RustOptimize::Bool(_) => None,
        }
    }
}
/// Compares the current Rust options against those in the CI rustc builder and detects any incompatible options.
/// It does this by destructuring the `Rust` instance to make sure every `Rust` field is covered and not missing.
pub fn check_incompatible_options_for_ci_rustc(
    host: TargetSelection,
    current_config_toml: TomlConfig,
    ci_config_toml: TomlConfig,
) -> Result<(), String> {
    // Hard error: a mismatch on this option would change the compiler's
    // behaviour/output relative to the CI-built rustc.
    macro_rules! err {
        ($current:expr, $expected:expr, $config_section:expr) => {
            if let Some(current) = &$current {
                if Some(current) != $expected.as_ref() {
                    return Err(format!(
                        "ERROR: Setting `{}` is incompatible with `rust.download-rustc`. \
                        Current value: {:?}, Expected value(s): {}{:?}",
                        format!("{}.{}", $config_section, stringify!($expected).replace("_", "-")),
                        $current,
                        if $expected.is_some() { "None/" } else { "" },
                        $expected,
                    ));
                };
            };
        };
    }
    // Warning only: the option is ignored when downloading rustc, so just
    // inform the user instead of failing.
    macro_rules! warn {
        ($current:expr, $expected:expr, $config_section:expr) => {
            if let Some(current) = &$current {
                if Some(current) != $expected.as_ref() {
                    println!(
                        "WARNING: `{}` has no effect with `rust.download-rustc`. \
                        Current value: {:?}, Expected value(s): {}{:?}",
                        format!("{}.{}", $config_section, stringify!($expected).replace("_", "-")),
                        $current,
                        if $expected.is_some() { "None/" } else { "" },
                        $expected,
                    );
                };
            };
        };
    }
    let current_profiler = current_config_toml.build.as_ref().and_then(|b| b.profiler);
    let profiler = ci_config_toml.build.as_ref().and_then(|b| b.profiler);
    err!(current_profiler, profiler, "build");
    let current_optimized_compiler_builtins =
        current_config_toml.build.as_ref().and_then(|b| b.optimized_compiler_builtins);
    let optimized_compiler_builtins =
        ci_config_toml.build.as_ref().and_then(|b| b.optimized_compiler_builtins);
    err!(current_optimized_compiler_builtins, optimized_compiler_builtins, "build");
    // We always build the in-tree compiler on cross targets, so we only care
    // about the host target here.
    let host_str = host.to_string();
    if let Some(current_cfg) = current_config_toml.target.as_ref().and_then(|c| c.get(&host_str))
        && current_cfg.profiler.is_some()
    {
        let ci_target_toml = ci_config_toml.target.as_ref().and_then(|c| c.get(&host_str));
        let ci_cfg = ci_target_toml.ok_or(format!(
            "Target specific config for '{host_str}' is not present for CI-rustc"
        ))?;
        let profiler = &ci_cfg.profiler;
        err!(current_cfg.profiler, profiler, "build");
        let optimized_compiler_builtins = &ci_cfg.optimized_compiler_builtins;
        err!(current_cfg.optimized_compiler_builtins, optimized_compiler_builtins, "build");
    }
    // If either side lacks a `[rust]` table there is nothing rust-specific to compare.
    let (Some(current_rust_config), Some(ci_rust_config)) =
        (current_config_toml.rust, ci_config_toml.rust)
    else {
        return Ok(());
    };
    let Rust {
        // Following options are the CI rustc incompatible ones.
        optimize,
        randomize_layout,
        debug_logging,
        debuginfo_level_rustc,
        llvm_tools,
        llvm_bitcode_linker,
        lto,
        stack_protector,
        strip,
        lld_mode,
        jemalloc,
        rpath,
        channel,
        description,
        incremental,
        default_linker,
        std_features,
        // Rest of the options can simply be ignored.
        debug: _,
        codegen_units: _,
        codegen_units_std: _,
        rustc_debug_assertions: _,
        std_debug_assertions: _,
        tools_debug_assertions: _,
        overflow_checks: _,
        overflow_checks_std: _,
        debuginfo_level: _,
        debuginfo_level_std: _,
        debuginfo_level_tools: _,
        debuginfo_level_tests: _,
        backtrace: _,
        musl_root: _,
        verbose_tests: _,
        optimize_tests: _,
        codegen_tests: _,
        omit_git_hash: _,
        dist_src: _,
        save_toolstates: _,
        codegen_backends: _,
        lld: _,
        deny_warnings: _,
        backtrace_on_ice: _,
        verify_llvm_ir: _,
        thin_lto_import_instr_limit: _,
        remap_debuginfo: _,
        test_compare_mode: _,
        llvm_libunwind: _,
        control_flow_guard: _,
        ehcont_guard: _,
        new_symbol_mangling: _,
        profile_generate: _,
        profile_use: _,
        download_rustc: _,
        validate_mir_opts: _,
        frame_pointers: _,
    } = ci_rust_config;
    // There are two kinds of checks for CI rustc incompatible options:
    // 1. Checking an option that may change the compiler behaviour/output.
    // 2. Checking an option that has no effect on the compiler behaviour/output.
    //
    // If the option belongs to the first category, we call `err` macro for a hard error;
    // otherwise, we just print a warning with `warn` macro.
    err!(current_rust_config.optimize, optimize, "rust");
    err!(current_rust_config.randomize_layout, randomize_layout, "rust");
    err!(current_rust_config.debug_logging, debug_logging, "rust");
    err!(current_rust_config.debuginfo_level_rustc, debuginfo_level_rustc, "rust");
    err!(current_rust_config.rpath, rpath, "rust");
    err!(current_rust_config.strip, strip, "rust");
    err!(current_rust_config.lld_mode, lld_mode, "rust");
    err!(current_rust_config.llvm_tools, llvm_tools, "rust");
    err!(current_rust_config.llvm_bitcode_linker, llvm_bitcode_linker, "rust");
    err!(current_rust_config.jemalloc, jemalloc, "rust");
    err!(current_rust_config.default_linker, default_linker, "rust");
    err!(current_rust_config.stack_protector, stack_protector, "rust");
    err!(current_rust_config.lto, lto, "rust");
    err!(current_rust_config.std_features, std_features, "rust");
    warn!(current_rust_config.channel, channel, "rust");
    warn!(current_rust_config.description, description, "rust");
    warn!(current_rust_config.incremental, incremental, "rust");
    Ok(())
}
impl Config {
    /// Applies the `[rust]` table of bootstrap.toml onto the global `Config`.
    ///
    /// Resolves interdependent defaults (debug implies debug-assertions and
    /// limited debuginfo, std settings inherit from rustc settings), decides
    /// whether `download-rustc` and lld are enabled, and validates the
    /// `codegen-backends` names.
    pub fn apply_rust_config(
        &mut self,
        toml_rust: Option<Rust>,
        warnings: Warnings,
        description: &mut Option<String>,
    ) {
        // Captured first and resolved after the `if let` because their final
        // defaults depend on each other (e.g. std inherits from rustc).
        let mut debug = None;
        let mut rustc_debug_assertions = None;
        let mut std_debug_assertions = None;
        let mut tools_debug_assertions = None;
        let mut overflow_checks = None;
        let mut overflow_checks_std = None;
        let mut debug_logging = None;
        let mut debuginfo_level = None;
        let mut debuginfo_level_rustc = None;
        let mut debuginfo_level_std = None;
        let mut debuginfo_level_tools = None;
        let mut debuginfo_level_tests = None;
        let mut optimize = None;
        let mut lld_enabled = None;
        let mut std_features = None;
        if let Some(rust) = toml_rust {
            // Exhaustive destructuring so that adding a field to `Rust` forces
            // this function to handle (or explicitly ignore) it.
            let Rust {
                optimize: optimize_toml,
                debug: debug_toml,
                codegen_units,
                codegen_units_std,
                rustc_debug_assertions: rustc_debug_assertions_toml,
                std_debug_assertions: std_debug_assertions_toml,
                tools_debug_assertions: tools_debug_assertions_toml,
                overflow_checks: overflow_checks_toml,
                overflow_checks_std: overflow_checks_std_toml,
                debug_logging: debug_logging_toml,
                debuginfo_level: debuginfo_level_toml,
                debuginfo_level_rustc: debuginfo_level_rustc_toml,
                debuginfo_level_std: debuginfo_level_std_toml,
                debuginfo_level_tools: debuginfo_level_tools_toml,
                debuginfo_level_tests: debuginfo_level_tests_toml,
                backtrace,
                incremental,
                randomize_layout,
                default_linker,
                channel: _, // already handled above
                description: rust_description,
                musl_root,
                rpath,
                verbose_tests,
                optimize_tests,
                codegen_tests,
                omit_git_hash: _, // already handled above
                dist_src,
                save_toolstates,
                codegen_backends,
                lld: lld_enabled_toml,
                llvm_tools,
                llvm_bitcode_linker,
                deny_warnings,
                backtrace_on_ice,
                verify_llvm_ir,
                thin_lto_import_instr_limit,
                remap_debuginfo,
                jemalloc,
                test_compare_mode,
                llvm_libunwind,
                control_flow_guard,
                ehcont_guard,
                new_symbol_mangling,
                profile_generate,
                profile_use,
                download_rustc,
                lto,
                validate_mir_opts,
                frame_pointers,
                stack_protector,
                strip,
                lld_mode,
                std_features: std_features_toml,
            } = rust;
            // FIXME(#133381): alt rustc builds currently do *not* have rustc debug assertions
            // enabled. We should not download a CI alt rustc if we need rustc to have debug
            // assertions (e.g. for crashes test suite). This can be changed once something like
            // [Enable debug assertions on alt
            // builds](https://github.com/rust-lang/rust/pull/131077) lands.
            //
            // Note that `rust.debug = true` currently implies `rust.debug-assertions = true`!
            //
            // This relies also on the fact that the global default for `download-rustc` will be
            // `false` if it's not explicitly set.
            let debug_assertions_requested = matches!(rustc_debug_assertions_toml, Some(true))
                || (matches!(debug_toml, Some(true))
                    && !matches!(rustc_debug_assertions_toml, Some(false)));
            if debug_assertions_requested
                && let Some(ref opt) = download_rustc
                && opt.is_string_or_true()
            {
                eprintln!(
                    "WARN: currently no CI rustc builds have rustc debug assertions \
                    enabled. Please either set `rust.debug-assertions` to `false` if you \
                    want to use download CI rustc or set `rust.download-rustc` to `false`."
                );
            }
            self.download_rustc_commit = self.download_ci_rustc_commit(
                download_rustc,
                debug_assertions_requested,
                self.llvm_assertions,
            );
            debug = debug_toml;
            rustc_debug_assertions = rustc_debug_assertions_toml;
            std_debug_assertions = std_debug_assertions_toml;
            tools_debug_assertions = tools_debug_assertions_toml;
            overflow_checks = overflow_checks_toml;
            overflow_checks_std = overflow_checks_std_toml;
            debug_logging = debug_logging_toml;
            debuginfo_level = debuginfo_level_toml;
            debuginfo_level_rustc = debuginfo_level_rustc_toml;
            debuginfo_level_std = debuginfo_level_std_toml;
            debuginfo_level_tools = debuginfo_level_tools_toml;
            debuginfo_level_tests = debuginfo_level_tests_toml;
            lld_enabled = lld_enabled_toml;
            std_features = std_features_toml;
            optimize = optimize_toml;
            self.rust_new_symbol_mangling = new_symbol_mangling;
            set(&mut self.rust_optimize_tests, optimize_tests);
            set(&mut self.codegen_tests, codegen_tests);
            set(&mut self.rust_rpath, rpath);
            set(&mut self.rust_strip, strip);
            set(&mut self.rust_frame_pointers, frame_pointers);
            self.rust_stack_protector = stack_protector;
            set(&mut self.jemalloc, jemalloc);
            set(&mut self.test_compare_mode, test_compare_mode);
            set(&mut self.backtrace, backtrace);
            if rust_description.is_some() {
                eprintln!(
                    "Warning: rust.description is deprecated. Use build.description instead."
                );
            }
            // build.description wins over the deprecated rust.description.
            if description.is_none() {
                *description = rust_description;
            }
            set(&mut self.rust_dist_src, dist_src);
            set(&mut self.verbose_tests, verbose_tests);
            // in the case "false" is set explicitly, do not overwrite the command line args
            if let Some(true) = incremental {
                self.incremental = true;
            }
            set(&mut self.lld_mode, lld_mode);
            set(&mut self.llvm_bitcode_linker_enabled, llvm_bitcode_linker);
            self.rust_randomize_layout = randomize_layout.unwrap_or_default();
            self.llvm_tools_enabled = llvm_tools.unwrap_or(true);
            self.llvm_enzyme = self.channel == "dev" || self.channel == "nightly";
            self.rustc_default_linker = default_linker;
            self.musl_root = musl_root.map(PathBuf::from);
            self.save_toolstates = save_toolstates.map(PathBuf::from);
            // CLI warning flags override the TOML value.
            set(
                &mut self.deny_warnings,
                match warnings {
                    Warnings::Deny => Some(true),
                    Warnings::Warn => Some(false),
                    Warnings::Default => deny_warnings,
                },
            );
            set(&mut self.backtrace_on_ice, backtrace_on_ice);
            set(&mut self.rust_verify_llvm_ir, verify_llvm_ir);
            self.rust_thin_lto_import_instr_limit = thin_lto_import_instr_limit;
            set(&mut self.rust_remap_debuginfo, remap_debuginfo);
            set(&mut self.control_flow_guard, control_flow_guard);
            set(&mut self.ehcont_guard, ehcont_guard);
            self.llvm_libunwind_default =
                llvm_libunwind.map(|v| v.parse().expect("failed to parse rust.llvm-libunwind"));
            // Reject backend names written with the internal `rustc_codegen_` prefix.
            if let Some(ref backends) = codegen_backends {
                let available_backends = ["llvm", "cranelift", "gcc"];
                self.rust_codegen_backends = backends.iter().map(|s| {
                    if let Some(backend) = s.strip_prefix(CODEGEN_BACKEND_PREFIX) {
                        if available_backends.contains(&backend) {
                            panic!("Invalid value '{s}' for 'rust.codegen-backends'. Instead, please use '{backend}'.");
                        } else {
                            println!("HELP: '{s}' for 'rust.codegen-backends' might fail. \
                                Codegen backends are mostly defined without the '{CODEGEN_BACKEND_PREFIX}' prefix. \
                                In this case, it would be referred to as '{backend}'.");
                        }
                    }
                    s.clone()
                }).collect();
            }
            self.rust_codegen_units = codegen_units.map(threads_from_config);
            self.rust_codegen_units_std = codegen_units_std.map(threads_from_config);
            // Values already present (e.g. from CLI) take precedence over TOML.
            if self.rust_profile_use.is_none() {
                self.rust_profile_use = profile_use;
            }
            if self.rust_profile_generate.is_none() {
                self.rust_profile_generate = profile_generate;
            }
            self.rust_lto =
                lto.as_deref().map(|value| RustcLto::from_str(value).unwrap()).unwrap_or_default();
            self.rust_validate_mir_opts = validate_mir_opts;
        }
        self.rust_optimize = optimize.unwrap_or(RustOptimize::Bool(true));
        // We make `x86_64-unknown-linux-gnu` use the self-contained linker by default, so we will
        // build our internal lld and use it as the default linker, by setting the `rust.lld` config
        // to true by default:
        // - on the `x86_64-unknown-linux-gnu` target
        // - on the `dev` and `nightly` channels
        // - when building our in-tree llvm (i.e. the target has not set an `llvm-config`), so that
        //   we're also able to build the corresponding lld
        // - or when using an external llvm that's downloaded from CI, which also contains our prebuilt
        //   lld
        // - otherwise, we'd be using an external llvm, and lld would not necessarily available and
        //   thus, disabled
        // - similarly, lld will not be built nor used by default when explicitly asked not to, e.g.
        //   when the config sets `rust.lld = false`
        if self.build.triple == "x86_64-unknown-linux-gnu"
            && self.hosts == [self.build]
            && (self.channel == "dev" || self.channel == "nightly")
        {
            let no_llvm_config = self
                .target_config
                .get(&self.build)
                .is_some_and(|target_config| target_config.llvm_config.is_none());
            let enable_lld = self.llvm_from_ci || no_llvm_config;
            // Prefer the config setting in case an explicit opt-out is needed.
            self.lld_enabled = lld_enabled.unwrap_or(enable_lld);
        } else {
            set(&mut self.lld_enabled, lld_enabled);
        }
        let default_std_features = BTreeSet::from([String::from("panic-unwind")]);
        self.rust_std_features = std_features.unwrap_or(default_std_features);
        // `rust.debug = true` turns on assertions/overflow checks unless overridden.
        let default = debug == Some(true);
        self.rustc_debug_assertions = rustc_debug_assertions.unwrap_or(default);
        self.std_debug_assertions = std_debug_assertions.unwrap_or(self.rustc_debug_assertions);
        self.tools_debug_assertions = tools_debug_assertions.unwrap_or(self.rustc_debug_assertions);
        self.rust_overflow_checks = overflow_checks.unwrap_or(default);
        self.rust_overflow_checks_std = overflow_checks_std.unwrap_or(self.rust_overflow_checks);
        self.rust_debug_logging = debug_logging.unwrap_or(self.rustc_debug_assertions);
        // Per-artifact debuginfo levels fall back to the global level, then to
        // Limited when `debug = true`, otherwise None.
        let with_defaults = |debuginfo_level_specific: Option<_>| {
            debuginfo_level_specific.or(debuginfo_level).unwrap_or(if debug == Some(true) {
                DebuginfoLevel::Limited
            } else {
                DebuginfoLevel::None
            })
        };
        self.rust_debuginfo_level_rustc = with_defaults(debuginfo_level_rustc);
        self.rust_debuginfo_level_std = with_defaults(debuginfo_level_std);
        self.rust_debuginfo_level_tools = with_defaults(debuginfo_level_tools);
        self.rust_debuginfo_level_tests = debuginfo_level_tests.unwrap_or(DebuginfoLevel::None);
    }
}

View file

@ -0,0 +1,174 @@
//! This module defines the structures and logic for handling target-specific configuration
//! within the `bootstrap.toml` file. This allows you to customize build settings, tools,
//! and flags for individual compilation targets.
//!
//! It includes:
//!
//! * [`TomlTarget`]: This struct directly mirrors the `[target.<triple>]` sections in your
//! `bootstrap.toml`. It's used for deserializing raw TOML data for a specific target.
//! * [`Target`]: This struct represents the processed and validated configuration for a
//! build target, which is stored in the main [`Config`] structure.
//! * [`Config::apply_target_config`]: This method processes the `TomlTarget` data and
//! applies it to the global [`Config`], ensuring proper path resolution, validation,
//! and integration with other build settings.
use std::collections::HashMap;
use serde::{Deserialize, Deserializer};
use crate::core::build_steps::compile::CODEGEN_BACKEND_PREFIX;
use crate::core::config::{LlvmLibunwind, Merge, ReplaceOpt, SplitDebuginfo, StringOrBool};
use crate::{Config, HashSet, PathBuf, TargetSelection, define_config, exit};
define_config! {
    /// TOML representation of how each build target is configured.
    ///
    /// Mirrors a `[target.<triple>]` section; the string on the right of `=` is
    /// the key as it appears in `bootstrap.toml`.
    struct TomlTarget {
        // Toolchain binaries used for this target.
        cc: Option<String> = "cc",
        cxx: Option<String> = "cxx",
        ar: Option<String> = "ar",
        ranlib: Option<String> = "ranlib",
        default_linker: Option<PathBuf> = "default-linker",
        linker: Option<String> = "linker",
        split_debuginfo: Option<String> = "split-debuginfo",
        // External LLVM configuration.
        llvm_config: Option<String> = "llvm-config",
        llvm_has_rust_patches: Option<bool> = "llvm-has-rust-patches",
        llvm_filecheck: Option<String> = "llvm-filecheck",
        llvm_libunwind: Option<String> = "llvm-libunwind",
        sanitizers: Option<bool> = "sanitizers",
        profiler: Option<StringOrBool> = "profiler",
        rpath: Option<bool> = "rpath",
        crt_static: Option<bool> = "crt-static",
        // Sysroots for cross targets.
        musl_root: Option<String> = "musl-root",
        musl_libdir: Option<String> = "musl-libdir",
        wasi_root: Option<String> = "wasi-root",
        qemu_rootfs: Option<String> = "qemu-rootfs",
        no_std: Option<bool> = "no-std",
        codegen_backends: Option<Vec<String>> = "codegen-backends",
        runner: Option<String> = "runner",
        optimized_compiler_builtins: Option<bool> = "optimized-compiler-builtins",
        jemalloc: Option<bool> = "jemalloc",
    }
}
/// Per-target configuration stored in the global configuration structure.
#[derive(Debug, Default, Clone, PartialEq, Eq)]
pub struct Target {
    /// Some(path to llvm-config) if using an external LLVM.
    pub llvm_config: Option<PathBuf>,
    pub llvm_has_rust_patches: Option<bool>,
    /// Some(path to FileCheck) if one was specified.
    pub llvm_filecheck: Option<PathBuf>,
    pub llvm_libunwind: Option<LlvmLibunwind>,
    // Toolchain binary overrides; `None` means use the default discovery.
    pub cc: Option<PathBuf>,
    pub cxx: Option<PathBuf>,
    pub ar: Option<PathBuf>,
    pub ranlib: Option<PathBuf>,
    pub default_linker: Option<PathBuf>,
    pub linker: Option<PathBuf>,
    pub split_debuginfo: Option<SplitDebuginfo>,
    pub sanitizers: Option<bool>,
    pub profiler: Option<StringOrBool>,
    pub rpath: Option<bool>,
    pub crt_static: Option<bool>,
    // Sysroot locations for musl/wasi cross targets.
    pub musl_root: Option<PathBuf>,
    pub musl_libdir: Option<PathBuf>,
    pub wasi_root: Option<PathBuf>,
    pub qemu_rootfs: Option<PathBuf>,
    // Command used to run test binaries for this target (e.g. an emulator).
    pub runner: Option<String>,
    // Unlike the other fields this is not optional: `Target::from_triple`
    // derives a default from the triple itself.
    pub no_std: bool,
    pub codegen_backends: Option<Vec<String>>,
    pub optimized_compiler_builtins: Option<bool>,
    pub jemalloc: Option<bool>,
}
impl Target {
    /// Derives baseline settings from the triple alone: triples containing
    /// `-none`, `nvptx`, or `switch` default to `no_std`, and Emscripten
    /// binaries run under `node`.
    pub fn from_triple(triple: &str) -> Self {
        let mut target = Self::default();
        target.no_std = ["-none", "nvptx", "switch"].iter().any(|needle| triple.contains(needle));
        if triple.contains("emscripten") {
            target.runner = Some("node".into());
        }
        target
    }
}
impl Config {
    /// Applies every `[target.<triple>]` table onto the global `Config`,
    /// converting raw strings into typed values and resolving paths relative
    /// to the source root where applicable.
    pub fn apply_target_config(&mut self, toml_target: Option<HashMap<String, TomlTarget>>) {
        if let Some(t) = toml_target {
            for (triple, cfg) in t {
                // Start from triple-derived defaults (no_std, runner).
                let mut target = Target::from_triple(&triple);
                if let Some(ref s) = cfg.llvm_config {
                    // A host llvm-config would conflict with the LLVM that ships
                    // with the downloaded CI rustc.
                    if self.download_rustc_commit.is_some() && triple == *self.build.triple {
                        panic!(
                            "setting llvm_config for the host is incompatible with download-rustc"
                        );
                    }
                    target.llvm_config = Some(self.src.join(s));
                }
                if let Some(patches) = cfg.llvm_has_rust_patches {
                    assert!(
                        self.submodules == Some(false) || cfg.llvm_config.is_some(),
                        "use of `llvm-has-rust-patches` is restricted to cases where either submodules are disabled or llvm-config been provided"
                    );
                    target.llvm_has_rust_patches = Some(patches);
                }
                if let Some(ref s) = cfg.llvm_filecheck {
                    target.llvm_filecheck = Some(self.src.join(s));
                }
                target.llvm_libunwind = cfg.llvm_libunwind.as_ref().map(|v| {
                    v.parse().unwrap_or_else(|_| {
                        panic!("failed to parse target.{triple}.llvm-libunwind")
                    })
                });
                // Only override the triple-derived no_std default when set explicitly.
                if let Some(s) = cfg.no_std {
                    target.no_std = s;
                }
                target.cc = cfg.cc.map(PathBuf::from);
                target.cxx = cfg.cxx.map(PathBuf::from);
                target.ar = cfg.ar.map(PathBuf::from);
                target.ranlib = cfg.ranlib.map(PathBuf::from);
                target.linker = cfg.linker.map(PathBuf::from);
                target.crt_static = cfg.crt_static;
                target.musl_root = cfg.musl_root.map(PathBuf::from);
                target.musl_libdir = cfg.musl_libdir.map(PathBuf::from);
                target.wasi_root = cfg.wasi_root.map(PathBuf::from);
                target.qemu_rootfs = cfg.qemu_rootfs.map(PathBuf::from);
                target.runner = cfg.runner;
                target.sanitizers = cfg.sanitizers;
                target.profiler = cfg.profiler;
                target.rpath = cfg.rpath;
                target.optimized_compiler_builtins = cfg.optimized_compiler_builtins;
                target.jemalloc = cfg.jemalloc;
                // Reject backend names written with the internal `rustc_codegen_` prefix.
                if let Some(ref backends) = cfg.codegen_backends {
                    let available_backends = ["llvm", "cranelift", "gcc"];
                    target.codegen_backends = Some(backends.iter().map(|s| {
                        if let Some(backend) = s.strip_prefix(CODEGEN_BACKEND_PREFIX) {
                            if available_backends.contains(&backend) {
                                panic!("Invalid value '{s}' for 'target.{triple}.codegen-backends'. Instead, please use '{backend}'.");
                            } else {
                                println!("HELP: '{s}' for 'target.{triple}.codegen-backends' might fail. \
                                    Codegen backends are mostly defined without the '{CODEGEN_BACKEND_PREFIX}' prefix. \
                                    In this case, it would be referred to as '{backend}'.");
                            }
                        }
                        s.clone()
                    }).collect());
                }
                target.split_debuginfo = cfg.split_debuginfo.as_ref().map(|v| {
                    v.parse().unwrap_or_else(|_| {
                        panic!("invalid value for target.{triple}.split-debuginfo")
                    })
                });
                self.target_config.insert(TargetSelection::from_user(&triple), target);
            }
        }
    }
}

View file

@ -666,7 +666,7 @@ impl Config {
}
};
// For the beta compiler, put special effort into ensuring the checksums are valid.
// For the stage0 compiler, put special effort into ensuring the checksums are valid.
let checksum = if should_verify {
let error = format!(
"src/stage0 doesn't contain a checksum for {url}. \
@ -709,10 +709,10 @@ download-rustc = false
";
}
self.download_file(&format!("{base_url}/{url}"), &tarball, help_on_error);
if let Some(sha256) = checksum {
if !self.verify(&tarball, sha256) {
panic!("failed to verify {}", tarball.display());
}
if let Some(sha256) = checksum
&& !self.verify(&tarball, sha256)
{
panic!("failed to verify {}", tarball.display());
}
self.unpack(&tarball, &bin_root, prefix);
@ -727,7 +727,7 @@ download-rustc = false
use build_helper::git::PathFreshness;
use crate::core::build_steps::llvm::detect_llvm_freshness;
use crate::core::config::check_incompatible_options_for_ci_llvm;
use crate::core::config::toml::llvm::check_incompatible_options_for_ci_llvm;
if !self.llvm_from_ci {
return;

View file

@ -34,6 +34,8 @@ pub struct Finder {
// Targets can be removed from this list once they are present in the stage0 compiler (usually by updating the beta compiler of the bootstrap).
const STAGE0_MISSING_TARGETS: &[&str] = &[
// just a dummy comment so the list doesn't get onelined
"loongarch32-unknown-none",
"loongarch32-unknown-none-softfloat",
];
/// Minimum version threshold for libstdc++ required when using prebuilt LLVM

View file

@ -81,7 +81,10 @@ const LLD_FILE_NAMES: &[&str] = &["ld.lld", "ld64.lld", "lld-link", "wasm-ld"];
/// (Mode restriction, config name, config values (if any))
#[expect(clippy::type_complexity)] // It's fine for hard-coded list and type is explained above.
const EXTRA_CHECK_CFGS: &[(Option<Mode>, &str, Option<&[&'static str]>)] = &[
(None, "bootstrap", None),
(Some(Mode::Rustc), "bootstrap", None),
(Some(Mode::Codegen), "bootstrap", None),
(Some(Mode::ToolRustc), "bootstrap", None),
(Some(Mode::ToolStd), "bootstrap", None),
(Some(Mode::Rustc), "llvm_enzyme", None),
(Some(Mode::Codegen), "llvm_enzyme", None),
(Some(Mode::ToolRustc), "llvm_enzyme", None),
@ -272,6 +275,16 @@ impl Mode {
}
}
/// When `rust.rust_remap_debuginfo` is requested, the compiler needs to know how to
/// opportunistically unremap compiler vs non-compiler sources. We use two schemes,
/// [`RemapScheme::Compiler`] and [`RemapScheme::NonCompiler`].
pub enum RemapScheme {
/// The [`RemapScheme::Compiler`] scheme will remap to `/rustc-dev/{hash}`.
Compiler,
/// The [`RemapScheme::NonCompiler`] scheme will remap to `/rustc/{hash}`.
NonCompiler,
}
#[derive(Debug, Hash, Clone, Copy, PartialEq, Eq, PartialOrd, Ord)]
pub enum CLang {
C,
@ -363,19 +376,35 @@ impl Build {
let in_tree_llvm_info = config.in_tree_llvm_info.clone();
let in_tree_gcc_info = config.in_tree_gcc_info.clone();
let initial_target_libdir_str =
config.initial_sysroot.join("lib/rustlib").join(config.build).join("lib");
let initial_target_libdir =
output(Command::new(&config.initial_rustc).args(["--print", "target-libdir"]))
.trim()
.to_owned();
let initial_target_dir = Path::new(&initial_target_libdir)
.parent()
.unwrap_or_else(|| panic!("{initial_target_libdir} has no parent"));
let initial_target_dir = Path::new(&initial_target_libdir_str).parent().unwrap();
let initial_lld = initial_target_dir.join("bin").join("rust-lld");
let initial_relative_libdir = initial_target_dir
.ancestors()
.nth(2)
.unwrap()
.strip_prefix(&config.initial_sysroot)
.expect("Couldnt determine initial relative libdir.")
.to_path_buf();
let initial_relative_libdir = if cfg!(test) {
// On tests, bootstrap uses the shim rustc, not the one from the stage0 toolchain.
PathBuf::default()
} else {
let ancestor = initial_target_dir.ancestors().nth(2).unwrap_or_else(|| {
panic!("Not enough ancestors for {}", initial_target_dir.display())
});
ancestor
.strip_prefix(&config.initial_sysroot)
.unwrap_or_else(|_| {
panic!(
"Couldnt resolve the initial relative libdir from {}",
initial_target_dir.display()
)
})
.to_path_buf()
};
let version = std::fs::read_to_string(src.join("src").join("version"))
.expect("failed to read src/version");
@ -1088,8 +1117,15 @@ Executed at: {executed_at}"#,
&self,
what: impl Display,
target: impl Into<Option<TargetSelection>>,
custom_stage: Option<u32>,
) -> Option<gha::Group> {
self.msg(Kind::Check, self.config.stage, what, self.config.build, target)
self.msg(
Kind::Check,
custom_stage.unwrap_or(self.config.stage),
what,
self.config.build,
target,
)
}
#[must_use = "Groups should not be dropped until the Step finishes running"]
@ -1194,7 +1230,7 @@ Executed at: {executed_at}"#,
})
}
fn debuginfo_map_to(&self, which: GitRepo) -> Option<String> {
fn debuginfo_map_to(&self, which: GitRepo, remap_scheme: RemapScheme) -> Option<String> {
if !self.config.rust_remap_debuginfo {
return None;
}
@ -1202,7 +1238,24 @@ Executed at: {executed_at}"#,
match which {
GitRepo::Rustc => {
let sha = self.rust_sha().unwrap_or(&self.version);
Some(format!("/rustc/{sha}"))
match remap_scheme {
RemapScheme::Compiler => {
// For compiler sources, remap via `/rustc-dev/{sha}` to allow
// distinguishing between compiler sources vs library sources, since
// `rustc-dev` dist component places them under
// `$sysroot/lib/rustlib/rustc-src/rust` as opposed to `rust-src`'s
// `$sysroot/lib/rustlib/src/rust`.
//
// Keep this scheme in sync with `rustc_metadata::rmeta::decoder`'s
// `try_to_translate_virtual_to_real`.
Some(format!("/rustc-dev/{sha}"))
}
RemapScheme::NonCompiler => {
// For non-compiler sources, use `/rustc/{sha}` remapping scheme.
Some(format!("/rustc/{sha}"))
}
}
}
GitRepo::Llvm => Some(String::from("/rustc/llvm")),
}
@ -1269,7 +1322,7 @@ Executed at: {executed_at}"#,
base.push("-fno-omit-frame-pointer".into());
}
if let Some(map_to) = self.debuginfo_map_to(which) {
if let Some(map_to) = self.debuginfo_map_to(which, RemapScheme::NonCompiler) {
let map = format!("{}={}", self.src.display(), map_to);
let cc = self.cc(target);
if cc.ends_with("clang") || cc.ends_with("gcc") {
@ -1435,23 +1488,23 @@ Executed at: {executed_at}"#,
// Look for Wasmtime, and for its default options be sure to disable
// its caching system since we're executing quite a lot of tests and
// ideally shouldn't pollute the cache too much.
if let Some(path) = finder.maybe_have("wasmtime") {
if let Ok(mut path) = path.into_os_string().into_string() {
path.push_str(" run -C cache=n --dir .");
// Make sure that tests have access to RUSTC_BOOTSTRAP. This (for example) is
// required for libtest to work on beta/stable channels.
//
// NB: with Wasmtime 20 this can change to `-S inherit-env` to
// inherit the entire environment rather than just this single
// environment variable.
path.push_str(" --env RUSTC_BOOTSTRAP");
if let Some(path) = finder.maybe_have("wasmtime")
&& let Ok(mut path) = path.into_os_string().into_string()
{
path.push_str(" run -C cache=n --dir .");
// Make sure that tests have access to RUSTC_BOOTSTRAP. This (for example) is
// required for libtest to work on beta/stable channels.
//
// NB: with Wasmtime 20 this can change to `-S inherit-env` to
// inherit the entire environment rather than just this single
// environment variable.
path.push_str(" --env RUSTC_BOOTSTRAP");
if target.contains("wasip2") {
path.push_str(" --wasi inherit-network --wasi allow-ip-name-lookup");
}
return Some(path);
if target.contains("wasip2") {
path.push_str(" --wasi inherit-network --wasi allow-ip-name-lookup");
}
return Some(path);
}
None
@ -1621,12 +1674,12 @@ Executed at: {executed_at}"#,
/// sha, version, etc.
fn rust_version(&self) -> String {
let mut version = self.rust_info().version(self, &self.version);
if let Some(ref s) = self.config.description {
if !s.is_empty() {
version.push_str(" (");
version.push_str(s);
version.push(')');
}
if let Some(ref s) = self.config.description
&& !s.is_empty()
{
version.push_str(" (");
version.push_str(s);
version.push(')');
}
version
}
@ -1744,14 +1797,14 @@ Executed at: {executed_at}"#,
pub fn copy_link(&self, src: &Path, dst: &Path, file_type: FileType) {
self.copy_link_internal(src, dst, false);
if file_type.could_have_split_debuginfo() {
if let Some(dbg_file) = split_debuginfo(src) {
self.copy_link_internal(
&dbg_file,
&dst.with_extension(dbg_file.extension().unwrap()),
false,
);
}
if file_type.could_have_split_debuginfo()
&& let Some(dbg_file) = split_debuginfo(src)
{
self.copy_link_internal(
&dbg_file,
&dst.with_extension(dbg_file.extension().unwrap()),
false,
);
}
}
@ -1763,19 +1816,21 @@ Executed at: {executed_at}"#,
if src == dst {
return;
}
if let Err(e) = fs::remove_file(dst) {
if cfg!(windows) && e.kind() != io::ErrorKind::NotFound {
// workaround for https://github.com/rust-lang/rust/issues/127126
// if removing the file fails, attempt to rename it instead.
let now = t!(SystemTime::now().duration_since(SystemTime::UNIX_EPOCH));
let _ = fs::rename(dst, format!("{}-{}", dst.display(), now.as_nanos()));
}
if let Err(e) = fs::remove_file(dst)
&& cfg!(windows)
&& e.kind() != io::ErrorKind::NotFound
{
// workaround for https://github.com/rust-lang/rust/issues/127126
// if removing the file fails, attempt to rename it instead.
let now = t!(SystemTime::now().duration_since(SystemTime::UNIX_EPOCH));
let _ = fs::rename(dst, format!("{}-{}", dst.display(), now.as_nanos()));
}
let metadata = t!(src.symlink_metadata(), format!("src = {}", src.display()));
let mut metadata = t!(src.symlink_metadata(), format!("src = {}", src.display()));
let mut src = src.to_path_buf();
if metadata.file_type().is_symlink() {
if dereference_symlinks {
src = t!(fs::canonicalize(src));
metadata = t!(fs::metadata(&src), format!("target = {}", src.display()));
} else {
let link = t!(fs::read_link(src));
t!(self.symlink_file(link, dst));
@ -1878,10 +1933,10 @@ Executed at: {executed_at}"#,
chmod(&dst, file_type.perms());
// If this file can have debuginfo, look for split debuginfo and install it too.
if file_type.could_have_split_debuginfo() {
if let Some(dbg_file) = split_debuginfo(src) {
self.install(&dbg_file, dstdir, FileType::Regular);
}
if file_type.could_have_split_debuginfo()
&& let Some(dbg_file) = split_debuginfo(src)
{
self.install(&dbg_file, dstdir, FileType::Regular);
}
}

View file

@ -22,43 +22,13 @@
//! everything.
use std::collections::HashSet;
use std::iter;
use std::path::{Path, PathBuf};
use std::{env, iter};
use crate::core::config::TargetSelection;
use crate::utils::exec::{BootstrapCommand, command};
use crate::{Build, CLang, GitRepo};
/// Finds archiver tool for the given target if possible.
/// FIXME(onur-ozkan): This logic should be replaced by calling into the `cc` crate.
fn cc2ar(cc: &Path, target: TargetSelection, default_ar: PathBuf) -> Option<PathBuf> {
    // Explicit environment overrides win over every heuristic below: first the
    // target-scoped `AR_<triple>` variable, then the generic `AR` one.
    let env_ar = env::var_os(format!("AR_{}", target.triple.replace('-', "_")))
        .or_else(|| env::var_os("AR"));
    if let Some(ar) = env_ar {
        return Some(PathBuf::from(ar));
    }

    // MSVC targets have no `ar`-style archiver to report.
    if target.is_msvc() {
        return None;
    }

    let tool = if target.contains("musl") || target.contains("openbsd") {
        "ar"
    } else if target.contains("vxworks") {
        "wr-ar"
    } else if target.contains("-nto-") {
        // Neutrino QNX ships a separately-named archiver per architecture.
        if target.starts_with("i586") {
            "ntox86-ar"
        } else if target.starts_with("aarch64") {
            "ntoaarch64-ar"
        } else if target.starts_with("x86_64") {
            "ntox86_64-ar"
        } else {
            panic!("Unknown architecture, cannot determine archiver for Neutrino QNX");
        }
    } else if target.contains("android") || target.contains("-wasi") {
        // These toolchains keep `llvm-ar` in the same directory as the C compiler.
        return Some(cc.parent().unwrap().join(PathBuf::from("llvm-ar")));
    } else {
        return Some(default_ar);
    };
    Some(PathBuf::from(tool))
}
/// Creates and configures a new [`cc::Build`] instance for the given target.
fn new_cc_build(build: &Build, target: TargetSelection) -> cc::Build {
let mut cfg = cc::Build::new();
@ -140,7 +110,7 @@ pub fn find_target(build: &Build, target: TargetSelection) {
let ar = if let ar @ Some(..) = config.and_then(|c| c.ar.clone()) {
ar
} else {
cc2ar(compiler.path(), target, PathBuf::from(cfg.get_archiver().get_program()))
cfg.try_get_archiver().map(|c| PathBuf::from(c.get_program())).ok()
};
build.cc.borrow_mut().insert(target, compiler.clone());

View file

@ -5,119 +5,6 @@ use super::*;
use crate::core::config::{Target, TargetSelection};
use crate::{Build, Config, Flags};
#[test]
fn test_cc2ar_env_specific() {
    // A target-scoped `AR_<triple>` variable must override every heuristic.
    let key = "AR_x86_64_unknown_linux_gnu";
    // SAFETY: bootstrap tests run on a single thread
    unsafe { env::set_var(key, "custom-ar") };
    let result = cc2ar(
        Path::new("/usr/bin/clang"),
        TargetSelection::from_user("x86_64-unknown-linux-gnu"),
        PathBuf::from("default-ar"),
    );
    // SAFETY: bootstrap tests run on a single thread
    unsafe { env::remove_var(key) };
    assert_eq!(result, Some(PathBuf::from("custom-ar")));
}
#[test]
fn test_cc2ar_musl() {
    // With no AR overrides in the environment, musl targets use plain `ar`.
    // SAFETY: bootstrap tests run on a single thread
    unsafe { env::remove_var("AR_x86_64_unknown_linux_musl") };
    // SAFETY: bootstrap tests run on a single thread
    unsafe { env::remove_var("AR") };
    let ar = cc2ar(
        Path::new("/usr/bin/clang"),
        TargetSelection::from_user("x86_64-unknown-linux-musl"),
        PathBuf::from("default-ar"),
    );
    assert_eq!(ar, Some(PathBuf::from("ar")));
}
#[test]
fn test_cc2ar_openbsd() {
    // With no AR overrides in the environment, OpenBSD targets use plain `ar`.
    // SAFETY: bootstrap tests run on a single thread
    unsafe { env::remove_var("AR_x86_64_unknown_openbsd") };
    // SAFETY: bootstrap tests run on a single thread
    unsafe { env::remove_var("AR") };
    let ar = cc2ar(
        Path::new("/usr/bin/cc"),
        TargetSelection::from_user("x86_64-unknown-openbsd"),
        PathBuf::from("default-ar"),
    );
    assert_eq!(ar, Some(PathBuf::from("ar")));
}
#[test]
fn test_cc2ar_vxworks() {
    // VxWorks targets use the Wind River archiver `wr-ar`.
    // SAFETY: bootstrap tests run on a single thread
    unsafe { env::remove_var("AR_armv7_wrs_vxworks") };
    // SAFETY: bootstrap tests run on a single thread
    unsafe { env::remove_var("AR") };
    let ar = cc2ar(
        Path::new("/usr/bin/clang"),
        TargetSelection::from_user("armv7-wrs-vxworks"),
        PathBuf::from("default-ar"),
    );
    assert_eq!(ar, Some(PathBuf::from("wr-ar")));
}
#[test]
fn test_cc2ar_nto_i586() {
    // Neutrino QNX i586 targets map to the x86-specific archiver.
    // SAFETY: bootstrap tests run on a single thread
    unsafe { env::remove_var("AR_i586_unknown_nto_something") };
    // SAFETY: bootstrap tests run on a single thread
    unsafe { env::remove_var("AR") };
    let ar = cc2ar(
        Path::new("/usr/bin/clang"),
        TargetSelection::from_user("i586-unknown-nto-something"),
        PathBuf::from("default-ar"),
    );
    assert_eq!(ar, Some(PathBuf::from("ntox86-ar")));
}
#[test]
fn test_cc2ar_nto_aarch64() {
    // Neutrino QNX aarch64 targets map to the aarch64-specific archiver.
    // SAFETY: bootstrap tests run on a single thread
    unsafe { env::remove_var("AR_aarch64_unknown_nto_something") };
    // SAFETY: bootstrap tests run on a single thread
    unsafe { env::remove_var("AR") };
    let ar = cc2ar(
        Path::new("/usr/bin/clang"),
        TargetSelection::from_user("aarch64-unknown-nto-something"),
        PathBuf::from("default-ar"),
    );
    assert_eq!(ar, Some(PathBuf::from("ntoaarch64-ar")));
}
#[test]
fn test_cc2ar_nto_x86_64() {
    // Neutrino QNX x86_64 targets map to the x86_64-specific archiver.
    // SAFETY: bootstrap tests run on a single thread
    unsafe { env::remove_var("AR_x86_64_unknown_nto_something") };
    // SAFETY: bootstrap tests run on a single thread
    unsafe { env::remove_var("AR") };
    let ar = cc2ar(
        Path::new("/usr/bin/clang"),
        TargetSelection::from_user("x86_64-unknown-nto-something"),
        PathBuf::from("default-ar"),
    );
    assert_eq!(ar, Some(PathBuf::from("ntox86_64-ar")));
}
#[test]
#[should_panic(expected = "Unknown architecture, cannot determine archiver for Neutrino QNX")]
fn test_cc2ar_nto_unknown() {
    // An unrecognized Neutrino QNX architecture must abort rather than guess.
    // SAFETY: bootstrap tests run on a single thread
    unsafe { env::remove_var("AR_powerpc_unknown_nto_something") };
    // SAFETY: bootstrap tests run on a single thread
    unsafe { env::remove_var("AR") };
    let _ = cc2ar(
        Path::new("/usr/bin/clang"),
        TargetSelection::from_user("powerpc-unknown-nto-something"),
        PathBuf::from("default-ar"),
    );
}
#[test]
fn test_ndk_compiler_c() {
let ndk_path = PathBuf::from("/ndk");

View file

@ -46,10 +46,10 @@ pub fn find_recent_config_change_ids(current_id: usize) -> &'static [ChangeInfo]
// an empty list (it may be due to switching from a recent branch to an
// older one); otherwise, return the full list (assuming the user provided
// the incorrect change-id by accident).
if let Some(config) = CONFIG_CHANGE_HISTORY.iter().max_by_key(|config| config.change_id) {
if current_id > config.change_id {
return &[];
}
if let Some(config) = CONFIG_CHANGE_HISTORY.iter().max_by_key(|config| config.change_id)
&& current_id > config.change_id
{
return &[];
}
CONFIG_CHANGE_HISTORY
@ -411,4 +411,14 @@ pub const CONFIG_CHANGE_HISTORY: &[ChangeInfo] = &[
severity: ChangeSeverity::Info,
summary: "`./x run` now supports running in-tree `rustfmt`, e.g., `./x run rustfmt -- --check /path/to/file.rs`.",
},
ChangeInfo {
change_id: 119899,
severity: ChangeSeverity::Warning,
summary: "Stage0 library no longer matches the in-tree library, which means stage1 compiler now uses the beta library.",
},
ChangeInfo {
change_id: 141970,
severity: ChangeSeverity::Info,
summary: "Added new bootstrap flag `--skip-std-check-if-no-download-rustc` that skips std checks when download-rustc is unavailable. Mainly intended for developers to reduce RA overhead.",
},
];

View file

@ -332,16 +332,19 @@ impl Default for CommandOutput {
/// Helper trait to format both Command and BootstrapCommand as a short execution line,
/// without all the other details (e.g. environment variables).
#[cfg(feature = "tracing")]
pub trait FormatShortCmd {
    /// Renders just the program and its arguments on one line, omitting
    /// environment variables and other spawn details.
    fn format_short_cmd(&self) -> String;
}
// `BootstrapCommand` wraps a `std::process::Command`; formatting simply
// delegates to the wrapped command's implementation.
#[cfg(feature = "tracing")]
impl FormatShortCmd for BootstrapCommand {
    fn format_short_cmd(&self) -> String {
        self.command.format_short_cmd()
    }
}
#[cfg(feature = "tracing")]
impl FormatShortCmd for Command {
fn format_short_cmd(&self) -> String {
let program = Path::new(self.get_program());

View file

@ -46,7 +46,16 @@ pub fn dylib_path() -> Vec<std::path::PathBuf> {
/// Given an executable called `name`, return the filename for the
/// executable for a particular target.
pub fn exe(name: &str, target: &str) -> String {
if target.contains("windows") {
// On Cygwin, the decision to append .exe or not is not as straightforward.
// Executable files do actually have .exe extensions so on hosts other than
// Cygwin it is necessary. But on a Cygwin host there is magic happening
// that redirects requests for file X to file X.exe if it exists, and
// furthermore /proc/self/exe (and thus std::env::current_exe) always
// returns the name *without* the .exe extension. For comparisons against
// that to match, we therefore do not append .exe for Cygwin targets on
// a Cygwin host.
if target.contains("windows") || (cfg!(not(target_os = "cygwin")) && target.contains("cygwin"))
{
format!("{name}.exe")
} else if target.contains("uefi") {
format!("{name}.efi")

View file

@ -10,23 +10,24 @@ pub mod util;
/// The default set of crates for opt-dist to collect LLVM profiles.
pub const LLVM_PGO_CRATES: &[&str] = &[
"syn-1.0.89",
"cargo-0.60.0",
"serde-1.0.136",
"ripgrep-13.0.0",
"regex-1.5.5",
"clap-3.1.6",
"hyper-0.14.18",
"syn-2.0.101",
"cargo-0.87.1",
"serde-1.0.219",
"ripgrep-14.1.1",
"regex-automata-0.4.8",
"clap_derive-4.5.32",
"hyper-1.6.0",
];
/// The default set of crates for opt-dist to collect rustc profiles.
pub const RUSTC_PGO_CRATES: &[&str] = &[
"externs",
"ctfe-stress-5",
"cargo-0.60.0",
"cargo-0.87.1",
"token-stream-stress",
"match-stress",
"tuple-stress",
"diesel-1.4.8",
"bitmaps-3.1.0",
"diesel-2.2.10",
"bitmaps-3.2.1",
"serde-1.0.219-new-solver",
];

View file

@ -66,9 +66,9 @@ checksum = "34ac096ce696dc2fcabef30516bb13c0a68a11d30131d3df6f04711467681b04"
[[package]]
name = "askama"
version = "0.13.1"
version = "0.14.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5d4744ed2eef2645831b441d8f5459689ade2ab27c854488fbab1fbe94fce1a7"
checksum = "f75363874b771be265f4ffe307ca705ef6f3baa19011c149da8674a87f1b75c4"
dependencies = [
"askama_derive",
"itoa",
@ -79,9 +79,9 @@ dependencies = [
[[package]]
name = "askama_derive"
version = "0.13.1"
version = "0.14.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d661e0f57be36a5c14c48f78d09011e67e0cb618f269cca9f2fd8d15b68c46ac"
checksum = "129397200fe83088e8a68407a8e2b1f826cf0086b21ccdb866a722c8bcd3a94f"
dependencies = [
"askama_parser",
"basic-toml",
@ -96,9 +96,9 @@ dependencies = [
[[package]]
name = "askama_parser"
version = "0.13.0"
version = "0.14.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "cf315ce6524c857bb129ff794935cf6d42c82a6cff60526fe2a63593de4d0d4f"
checksum = "d6ab5630b3d5eaf232620167977f95eb51f3432fc76852328774afbd242d4358"
dependencies = [
"memchr",
"serde",

View file

@ -5,7 +5,7 @@ edition = "2021"
[dependencies]
anyhow = "1"
askama = "0.13"
askama = "0.14"
clap = { version = "4.5", features = ["derive"] }
csv = "1"
diff = "0.1"

View file

@ -13,7 +13,7 @@ use crate::utils::load_env_var;
#[derive(serde::Deserialize, Debug, Clone)]
#[serde(deny_unknown_fields)]
pub struct Job {
/// Name of the job, e.g. mingw-check
/// Name of the job, e.g. mingw-check-1
pub name: String,
/// GitHub runner on which the job should be executed
pub os: String,
@ -85,14 +85,20 @@ impl JobDatabase {
}
pub fn load_job_db(db: &str) -> anyhow::Result<JobDatabase> {
let mut db: Value = serde_yaml::from_str(db)?;
let mut db: Value = serde_yaml::from_str(db).context("failed to parse YAML content")?;
// We need to expand merge keys (<<), because serde_yaml can't deal with them
// `apply_merge` only applies the merge once, so do it a few times to unwrap nested merges.
db.apply_merge()?;
db.apply_merge()?;
let db: JobDatabase = serde_yaml::from_value(db)?;
let apply_merge = |db: &mut Value| -> anyhow::Result<()> {
db.apply_merge().context("failed to apply merge keys")
};
// Apply merge twice to handle nested merges
apply_merge(&mut db)?;
apply_merge(&mut db)?;
let db: JobDatabase = serde_yaml::from_value(db).context("failed to parse job database")?;
Ok(db)
}

View file

@ -1,4 +1,8 @@
use std::path::Path;
use super::Job;
use crate::jobs::{JobDatabase, load_job_db};
use crate::{DOCKER_DIRECTORY, JOBS_YML_PATH, utils};
#[test]
fn lookup_job_pattern() {
@ -62,3 +66,65 @@ fn check_pattern(db: &JobDatabase, pattern: &str, expected: &[&str]) {
assert_eq!(jobs, expected);
}
/// Validate that CodeBuild jobs use Docker images from ghcr.io registry.
/// This is needed because otherwise from CodeBuild we get rate limited by Docker Hub.
fn validate_codebuild_image(job: &Job) -> anyhow::Result<()> {
    if !job.codebuild.unwrap_or(false) {
        // Jobs in GitHub Actions don't get rate limited by Docker Hub.
        return Ok(());
    }

    // we hardcode host-x86_64 here, because in codebuild we only run jobs for this architecture.
    let image = job.image();
    let dockerfile_path =
        Path::new(DOCKER_DIRECTORY).join("host-x86_64").join(&image).join("Dockerfile");
    if !dockerfile_path.exists() {
        anyhow::bail!(
            "Dockerfile not found for CodeBuild job '{}' at path: {}",
            job.name,
            dockerfile_path.display()
        );
    }

    // Every FROM line (matched case-insensitively) must pull from ghcr.io.
    let all_from_ghcr = utils::read_to_string(&dockerfile_path)?
        .lines()
        .filter(|line| line.trim_start().to_lowercase().starts_with("from "))
        .all(|line| line.contains("ghcr.io"));
    if !all_from_ghcr {
        anyhow::bail!(
            "CodeBuild job '{}' must use ghcr.io registry in its Dockerfile FROM statement. \
            Dockerfile path: {dockerfile_path:?}",
            job.name,
        );
    }

    Ok(())
}
#[test]
fn validate_jobs() {
    // Load the real jobs.yml that CI runs from.
    let db_str = utils::read_to_string(Path::new(JOBS_YML_PATH)).unwrap();
    let db = load_job_db(&db_str).expect("Failed to load job database");

    // Check every job across all three pools, collecting failures so a single
    // run reports all of them at once.
    let mut messages = Vec::new();
    for job in db.pr_jobs.iter().chain(db.try_jobs.iter()).chain(db.auto_jobs.iter()) {
        if let Err(e) = validate_codebuild_image(job) {
            messages.push(format!("- {e}"));
        }
    }

    if !messages.is_empty() {
        let error_messages = messages.join("\n");
        panic!("Job validation failed:\n{error_messages}");
    }
}

View file

@ -27,7 +27,7 @@ use crate::test_dashboard::generate_test_dashboard;
use crate::utils::{load_env_var, output_details};
const CI_DIRECTORY: &str = concat!(env!("CARGO_MANIFEST_DIR"), "/..");
const DOCKER_DIRECTORY: &str = concat!(env!("CARGO_MANIFEST_DIR"), "/../docker");
pub const DOCKER_DIRECTORY: &str = concat!(env!("CARGO_MANIFEST_DIR"), "/../docker");
const JOBS_YML_PATH: &str = concat!(env!("CARGO_MANIFEST_DIR"), "/../github-actions/jobs.yml");
struct GitHubContext {

View file

@ -40,7 +40,7 @@ try-job: dist-i686-msvc"#,
fn pr_jobs() {
let stdout = get_matrix("pull_request", "commit", "refs/heads/pr/1234");
insta::assert_snapshot!(stdout, @r#"
jobs=[{"name":"mingw-check","full_name":"PR - mingw-check","os":"ubuntu-24.04","env":{"PR_CI_JOB":1},"free_disk":true},{"name":"mingw-check-tidy","full_name":"PR - mingw-check-tidy","os":"ubuntu-24.04","env":{"PR_CI_JOB":1},"continue_on_error":true,"free_disk":true,"doc_url":"https://foo.bar"}]
jobs=[{"name":"mingw-check-1","full_name":"PR - mingw-check-1","os":"ubuntu-24.04","env":{"PR_CI_JOB":1},"free_disk":true},{"name":"mingw-check-2","full_name":"PR - mingw-check-2","os":"ubuntu-24.04","env":{"PR_CI_JOB":1},"free_disk":true},{"name":"mingw-check-tidy","full_name":"PR - mingw-check-tidy","os":"ubuntu-24.04","env":{"PR_CI_JOB":1},"continue_on_error":true,"free_disk":true,"doc_url":"https://foo.bar"}]
run_type=pr
"#);
}
@ -51,6 +51,8 @@ fn get_matrix(event_name: &str, commit_msg: &str, branch_ref: &str) -> String {
.env("GITHUB_EVENT_NAME", event_name)
.env("COMMIT_MESSAGE", commit_msg)
.env("GITHUB_REF", branch_ref)
.env("GITHUB_RUN_ID", "123")
.env("GITHUB_RUN_ATTEMPT", "1")
.stdout(Stdio::piped())
.output()
.expect("Failed to execute command");

View file

@ -64,7 +64,9 @@ envs:
# These jobs automatically inherit envs.pr, to avoid repeating
# it in each job definition.
pr:
- name: mingw-check
- name: mingw-check-1
<<: *job-linux-4c
- name: mingw-check-2
<<: *job-linux-4c
- name: mingw-check-tidy
continue_on_error: true

View file

@ -0,0 +1,58 @@
FROM ubuntu:24.10
ARG DEBIAN_FRONTEND=noninteractive
RUN apt-get update && apt-get install -y --no-install-recommends \
bzip2 \
g++ \
make \
ninja-build \
file \
curl \
ca-certificates \
python3 \
git \
cmake \
sudo \
gdb \
llvm-19-tools \
llvm-19-dev \
libedit-dev \
libssl-dev \
pkg-config \
zlib1g-dev \
xz-utils \
nodejs \
mingw-w64 \
# libgccjit dependencies
flex \
libmpfr-dev \
libgmp-dev \
libmpc3 \
libmpc-dev \
&& rm -rf /var/lib/apt/lists/*
COPY scripts/sccache.sh /scripts/
RUN sh /scripts/sccache.sh
# We are disabling CI LLVM since this builder is intentionally using a host
# LLVM, rather than the typical src/llvm-project LLVM.
ENV NO_DOWNLOAD_CI_LLVM 1
ENV EXTERNAL_LLVM 1
# Using llvm-link-shared due to libffi issues -- see #34486
ENV RUST_CONFIGURE_ARGS \
--build=aarch64-unknown-linux-gnu \
--llvm-root=/usr/lib/llvm-19 \
--enable-llvm-link-shared \
--set rust.randomize-layout=true \
--set rust.thin-lto-import-instr-limit=10
COPY scripts/shared.sh /scripts/
ARG SCRIPT_ARG
COPY scripts/stage_2_test_set1.sh /tmp/
COPY scripts/stage_2_test_set2.sh /tmp/
ENV SCRIPT "/tmp/${SCRIPT_ARG}"

View file

@ -28,6 +28,7 @@ RUN /scripts/android-sdk.sh
ENV PATH=$PATH:/android/sdk/emulator
ENV PATH=$PATH:/android/sdk/tools
ENV PATH=$PATH:/android/sdk/platform-tools
ENV PATH=$PATH:/android/ndk/toolchains/llvm/prebuilt/linux-x86_64/bin
ENV TARGETS=arm-linux-androideabi

View file

@ -22,6 +22,8 @@ ENV RUST_CONFIGURE_ARGS \
--android-ndk=/android/ndk/ \
--disable-docs
ENV PATH=$PATH:/android/ndk/toolchains/llvm/prebuilt/linux-x86_64/bin
ENV SCRIPT python3 ../x.py dist --host='' --target $TARGETS
COPY scripts/sccache.sh /scripts/

View file

@ -0,0 +1,35 @@
FROM ghcr.io/rust-lang/ubuntu:22.04
COPY scripts/cross-apt-packages.sh /scripts/
RUN sh /scripts/cross-apt-packages.sh
COPY scripts/crosstool-ng.sh /scripts/
RUN sh /scripts/crosstool-ng.sh
WORKDIR /build
COPY scripts/rustbuild-setup.sh /scripts/
RUN sh /scripts/rustbuild-setup.sh
WORKDIR /tmp
COPY scripts/crosstool-ng-build.sh /scripts/
COPY host-x86_64/dist-arm-linux-gnueabi/arm-linux-gnueabi.defconfig /tmp/crosstool.defconfig
RUN /scripts/crosstool-ng-build.sh
COPY scripts/sccache.sh /scripts/
RUN sh /scripts/sccache.sh
ENV PATH=$PATH:/x-tools/arm-unknown-linux-gnueabi/bin
ENV CC_arm_unknown_linux_gnueabi=arm-unknown-linux-gnueabi-gcc \
AR_arm_unknown_linux_gnueabi=arm-unknown-linux-gnueabi-ar \
CXX_arm_unknown_linux_gnueabi=arm-unknown-linux-gnueabi-g++
ENV HOSTS=arm-unknown-linux-gnueabi
ENV RUST_CONFIGURE_ARGS \
--enable-full-tools \
--disable-docs \
--enable-sanitizers \
--enable-profiler
ENV SCRIPT python3 ../x.py dist --host $HOSTS --target $HOSTS

View file

@ -19,19 +19,13 @@ RUN sh /scripts/rustbuild-setup.sh
WORKDIR /tmp
COPY scripts/crosstool-ng-build.sh /scripts/
COPY host-x86_64/dist-arm-linux/arm-linux-gnueabi.defconfig /tmp/crosstool.defconfig
COPY host-x86_64/dist-arm-linux-musl/arm-linux-musl.defconfig /tmp/crosstool.defconfig
RUN /scripts/crosstool-ng-build.sh
COPY scripts/sccache.sh /scripts/
RUN sh /scripts/sccache.sh
ENV PATH=$PATH:/x-tools/arm-unknown-linux-gnueabi/bin
ENV CC_arm_unknown_linux_gnueabi=arm-unknown-linux-gnueabi-gcc \
AR_arm_unknown_linux_gnueabi=arm-unknown-linux-gnueabi-ar \
CXX_arm_unknown_linux_gnueabi=arm-unknown-linux-gnueabi-g++
ENV HOSTS=arm-unknown-linux-gnueabi,aarch64-unknown-linux-musl
ENV HOSTS=aarch64-unknown-linux-musl
ENV RUST_CONFIGURE_ARGS \
--enable-full-tools \

View file

@ -0,0 +1,13 @@
CT_CONFIG_VERSION="4"
CT_PREFIX_DIR="/x-tools/${CT_TARGET}"
CT_USE_MIRROR=y
CT_MIRROR_BASE_URL="https://ci-mirrors.rust-lang.org/rustc"
CT_ARCH_ARM=y
CT_ARCH_ARCH="armv6"
CT_ARCH_FLOAT_SW=y
CT_KERNEL_LINUX=y
CT_LINUX_V_3_2=y
CT_BINUTILS_V_2_32=y
CT_GLIBC_V_2_17=y
CT_GCC_V_8=y
CT_CC_LANG_CXX=y

View file

@ -0,0 +1,41 @@
FROM ubuntu:22.04
COPY scripts/cross-apt-packages.sh /scripts/
RUN sh /scripts/cross-apt-packages.sh
COPY scripts/crosstool-ng.sh /scripts/
RUN sh /scripts/crosstool-ng.sh
COPY scripts/rustbuild-setup.sh /scripts/
RUN sh /scripts/rustbuild-setup.sh
WORKDIR /tmp
COPY scripts/crosstool-ng-build.sh /scripts/
COPY host-x86_64/dist-powerpc64le-linux-gnu/powerpc64le-unknown-linux-gnu.defconfig /tmp/crosstool.defconfig
RUN /scripts/crosstool-ng-build.sh
WORKDIR /build
RUN apt-get install -y --no-install-recommends rpm2cpio cpio
COPY scripts/shared.sh scripts/build-powerpc64le-toolchain.sh /build/
RUN ./build-powerpc64le-toolchain.sh
COPY scripts/sccache.sh /scripts/
RUN sh /scripts/sccache.sh
ENV \
AR_powerpc64le_unknown_linux_gnu=powerpc64le-linux-gnu-ar \
CC_powerpc64le_unknown_linux_gnu=powerpc64le-linux-gnu-gcc \
CXX_powerpc64le_unknown_linux_gnu=powerpc64le-linux-gnu-g++
ENV HOSTS=powerpc64le-unknown-linux-gnu
ENV RUST_CONFIGURE_ARGS \
--enable-extended \
--enable-full-tools \
--enable-profiler \
--enable-sanitizers \
--disable-docs
ENV SCRIPT python3 ../x.py dist --host $HOSTS --target $HOSTS

View file

@ -0,0 +1,14 @@
CT_CONFIG_VERSION="4"
CT_EXPERIMENTAL=y
CT_PREFIX_DIR="/x-tools/${CT_TARGET}"
CT_USE_MIRROR=y
CT_MIRROR_BASE_URL="https://ci-mirrors.rust-lang.org/rustc"
CT_ARCH_POWERPC=y
CT_ARCH_LE=y
CT_ARCH_64=y
# CT_DEMULTILIB is not set
CT_ARCH_ARCH="powerpc64le"
CT_KERNEL_LINUX=y
CT_LINUX_V_4_19=y
CT_CC_LANG_CXX=y
CT_GETTEXT_NEEDED=y

View file

@ -12,13 +12,13 @@ RUN sh /scripts/rustbuild-setup.sh
WORKDIR /tmp
COPY scripts/crosstool-ng-build.sh /scripts/
COPY host-x86_64/dist-powerpc64le-linux/powerpc64le-unknown-linux-musl.defconfig /tmp/crosstool.defconfig
COPY host-x86_64/dist-powerpc64le-linux-musl/powerpc64le-unknown-linux-musl.defconfig /tmp/crosstool.defconfig
RUN /scripts/crosstool-ng-build.sh
WORKDIR /build
RUN apt-get install -y --no-install-recommends rpm2cpio cpio
COPY scripts/shared.sh host-x86_64/dist-powerpc64le-linux/build-powerpc64le-toolchain.sh /build/
COPY scripts/shared.sh scripts/build-powerpc64le-toolchain.sh /build/
RUN ./build-powerpc64le-toolchain.sh
COPY scripts/sccache.sh /scripts/
@ -27,14 +27,11 @@ RUN sh /scripts/sccache.sh
ENV PATH=$PATH:/x-tools/powerpc64le-unknown-linux-musl/bin
ENV \
AR_powerpc64le_unknown_linux_gnu=powerpc64le-linux-gnu-ar \
CC_powerpc64le_unknown_linux_gnu=powerpc64le-linux-gnu-gcc \
CXX_powerpc64le_unknown_linux_gnu=powerpc64le-linux-gnu-g++ \
AR_powerpc64le_unknown_linux_musl=powerpc64le-unknown-linux-musl-ar \
CC_powerpc64le_unknown_linux_musl=powerpc64le-unknown-linux-musl-gcc \
CXX_powerpc64le_unknown_linux_musl=powerpc64le-unknown-linux-musl-g++
ENV HOSTS=powerpc64le-unknown-linux-gnu,powerpc64le-unknown-linux-musl
ENV HOSTS=powerpc64le-unknown-linux-musl
ENV RUST_CONFIGURE_ARGS \
--enable-extended \

View file

@ -0,0 +1,36 @@
FROM ubuntu:22.04
COPY scripts/cross-apt-packages.sh /tmp/
RUN bash /tmp/cross-apt-packages.sh
# Required gcc dependencies.
RUN apt-get update && \
apt-get install -y --no-install-recommends \
libgmp-dev \
libmpfr-dev \
libmpc-dev \
&& rm -rf /var/lib/apt/lists/*
COPY scripts/shared.sh /tmp/
COPY scripts/solaris-toolchain.sh /tmp/
RUN bash /tmp/solaris-toolchain.sh sparcv9 sysroot
RUN bash /tmp/solaris-toolchain.sh sparcv9 binutils
RUN bash /tmp/solaris-toolchain.sh sparcv9 gcc
COPY scripts/sccache.sh /scripts/
RUN sh /scripts/sccache.sh
COPY scripts/cmake.sh /scripts/
RUN /scripts/cmake.sh
ENV \
AR_sparcv9_sun_solaris=sparcv9-solaris-ar \
RANLIB_sparcv9_sun_solaris=sparcv9-solaris-ranlib \
CC_sparcv9_sun_solaris=sparcv9-solaris-gcc \
CXX_sparcv9_sun_solaris=sparcv9-solaris-g++
ENV HOSTS=sparcv9-sun-solaris
ENV RUST_CONFIGURE_ARGS --enable-extended --disable-docs
ENV SCRIPT python3 ../x.py dist --host $HOSTS --target $HOSTS

View file

@ -43,12 +43,6 @@ ENV \
CXX_aarch64_unknown_fuchsia=aarch64-unknown-fuchsia-clang++ \
CXXFLAGS_aarch64_unknown_fuchsia="--target=aarch64-unknown-fuchsia --sysroot=/usr/local/core-linux-amd64-fuchsia-sdk/arch/arm64/sysroot -I/usr/local/core-linux-amd64-fuchsia-sdk/pkg/fdio/include" \
LDFLAGS_aarch64_unknown_fuchsia="--target=aarch64-unknown-fuchsia --sysroot=/usr/local/core-linux-amd64-fuchsia-sdk/arch/arm64/sysroot -L/usr/local/core-linux-amd64-fuchsia-sdk/arch/arm64/lib" \
AR_sparcv9_sun_solaris=sparcv9-sun-solaris2.10-ar \
CC_sparcv9_sun_solaris=sparcv9-sun-solaris2.10-gcc \
CXX_sparcv9_sun_solaris=sparcv9-sun-solaris2.10-g++ \
AR_x86_64_pc_solaris=x86_64-pc-solaris2.10-ar \
CC_x86_64_pc_solaris=x86_64-pc-solaris2.10-gcc \
CXX_x86_64_pc_solaris=x86_64-pc-solaris2.10-g++ \
CC_armv7_unknown_linux_gnueabi=arm-linux-gnueabi-gcc-9 \
CXX_armv7_unknown_linux_gnueabi=arm-linux-gnueabi-g++-9 \
AR_x86_64_fortanix_unknown_sgx=ar \
@ -84,9 +78,6 @@ WORKDIR /tmp
COPY scripts/shared.sh /tmp/
COPY scripts/build-fuchsia-toolchain.sh /tmp/
RUN /tmp/build-fuchsia-toolchain.sh
COPY host-x86_64/dist-various-2/build-solaris-toolchain.sh /tmp/
RUN /tmp/build-solaris-toolchain.sh x86_64 amd64 solaris-i386 pc
RUN /tmp/build-solaris-toolchain.sh sparcv9 sparcv9 solaris-sparc sun
COPY host-x86_64/dist-various-2/build-x86_64-fortanix-unknown-sgx-toolchain.sh /tmp/
RUN /tmp/build-x86_64-fortanix-unknown-sgx-toolchain.sh
@ -118,8 +109,6 @@ ENV TARGETS=$TARGETS,wasm32-wasip1
ENV TARGETS=$TARGETS,wasm32-wasip1-threads
ENV TARGETS=$TARGETS,wasm32-wasip2
ENV TARGETS=$TARGETS,wasm32v1-none
ENV TARGETS=$TARGETS,sparcv9-sun-solaris
ENV TARGETS=$TARGETS,x86_64-pc-solaris
ENV TARGETS=$TARGETS,x86_64-unknown-linux-gnux32
ENV TARGETS=$TARGETS,x86_64-fortanix-unknown-sgx
ENV TARGETS=$TARGETS,nvptx64-nvidia-cuda

View file

@ -1,111 +0,0 @@
#!/usr/bin/env bash
set -ex
source shared.sh
ARCH=$1
LIB_ARCH=$2
APT_ARCH=$3
MANUFACTURER=$4
BINUTILS=2.28.1
GCC=6.5.0
TARGET=${ARCH}-${MANUFACTURER}-solaris2.10
# First up, build binutils
mkdir binutils
cd binutils
curl https://ftp.gnu.org/gnu/binutils/binutils-$BINUTILS.tar.xz | tar xJf -
mkdir binutils-build
cd binutils-build
hide_output ../binutils-$BINUTILS/configure --target=$TARGET
hide_output make -j10
hide_output make install
cd ../..
rm -rf binutils
# Next, download and install the relevant solaris packages
mkdir solaris
cd solaris
dpkg --add-architecture $APT_ARCH
apt-get update
apt-get install -y --download-only \
libc:$APT_ARCH \
liblgrp:$APT_ARCH \
libm-dev:$APT_ARCH \
libpthread:$APT_ARCH \
libresolv:$APT_ARCH \
librt:$APT_ARCH \
libsendfile:$APT_ARCH \
libsocket:$APT_ARCH \
system-crt:$APT_ARCH \
system-header:$APT_ARCH
for deb in /var/cache/apt/archives/*$APT_ARCH.deb; do
dpkg -x $deb .
done
apt-get clean
# The -dev packages are not available from the apt repository we're using.
# However, those packages are just symlinks from *.so to *.so.<version>.
# This makes all those symlinks.
for lib in $(find -name '*.so.*'); do
target=${lib%.so.*}.so
ln -s ${lib##*/} $target || echo "warning: silenced error symlinking $lib"
done
# Remove Solaris 11 functions that are optionally used by libbacktrace.
# This is for Solaris 10 compatibility.
rm usr/include/link.h
patch -p0 << 'EOF'
--- usr/include/string.h
+++ usr/include/string10.h
@@ -93 +92,0 @@
-extern size_t strnlen(const char *, size_t);
EOF
mkdir /usr/local/$TARGET/usr
mv usr/include /usr/local/$TARGET/usr/include
mv usr/lib/$LIB_ARCH/* /usr/local/$TARGET/lib
mv lib/$LIB_ARCH/* /usr/local/$TARGET/lib
ln -s usr/include /usr/local/$TARGET/sys-include
ln -s usr/include /usr/local/$TARGET/include
cd ..
rm -rf solaris
# Finally, download and build gcc to target solaris
mkdir gcc
cd gcc
curl https://ftp.gnu.org/gnu/gcc/gcc-$GCC/gcc-$GCC.tar.xz | tar xJf -
cd gcc-$GCC
mkdir ../gcc-build
cd ../gcc-build
hide_output ../gcc-$GCC/configure \
--enable-languages=c,c++ \
--target=$TARGET \
--with-gnu-as \
--with-gnu-ld \
--disable-multilib \
--disable-nls \
--disable-libgomp \
--disable-libquadmath \
--disable-libssp \
--disable-libvtv \
--disable-libcilkrts \
--disable-libada \
--disable-libsanitizer \
--disable-libquadmath-support \
--disable-lto
hide_output make -j10
hide_output make install
cd ../..
rm -rf gcc

View file

@ -15,6 +15,7 @@ RUN apt-get update && \
python2.7 \
&& rm -rf /var/lib/apt/lists/*
COPY scripts/shared.sh /tmp/
COPY scripts/illumos-toolchain.sh /tmp/
RUN bash /tmp/illumos-toolchain.sh x86_64 sysroot

View file

@ -96,14 +96,13 @@ ENV RUST_CONFIGURE_ARGS \
--set rust.lto=thin \
--set rust.codegen-units=1
# Note that `rust.debug` is set to true *only* for `opt-dist`
ENV SCRIPT python3 ../x.py build --set rust.debug=true opt-dist && \
./build/$HOSTS/stage0-tools-bin/opt-dist linux-ci -- python3 ../x.py dist \
--host $HOSTS --target $HOSTS \
--include-default-paths \
build-manifest bootstrap && \
# Use GCC for building GCC, as it seems to behave badly when built with Clang
CC=/rustroot/bin/cc CXX=/rustroot/bin/c++ python3 ../x.py dist gcc
ARG SCRIPT_ARG
COPY host-x86_64/dist-x86_64-linux/dist.sh /scripts/
COPY host-x86_64/dist-x86_64-linux/dist-alt.sh /scripts/
ENV SCRIPT /scripts/${SCRIPT_ARG}
ENV CARGO_TARGET_X86_64_UNKNOWN_LINUX_GNU_LINKER=clang
# This is the only builder which will create source tarballs

View file

@ -0,0 +1,8 @@
#!/bin/bash
set -eux
python3 ../x.py dist \
--host $HOSTS --target $HOSTS \
--include-default-paths \
build-manifest bootstrap

View file

@ -0,0 +1,13 @@
#!/bin/bash
set -eux
python3 ../x.py build --set rust.debug=true opt-dist
./build/$HOSTS/stage0-tools-bin/opt-dist linux-ci -- python3 ../x.py dist \
--host $HOSTS --target $HOSTS \
--include-default-paths \
build-manifest bootstrap
# Use GCC for building GCC, as it seems to behave badly when built with Clang
CC=/rustroot/bin/cc CXX=/rustroot/bin/c++ python3 ../x.py dist gcc

View file

@ -0,0 +1,36 @@
FROM ubuntu:22.04
COPY scripts/cross-apt-packages.sh /tmp/
RUN bash /tmp/cross-apt-packages.sh
# Required gcc dependencies.
RUN apt-get update && \
apt-get install -y --no-install-recommends \
libgmp-dev \
libmpfr-dev \
libmpc-dev \
&& rm -rf /var/lib/apt/lists/*
COPY scripts/shared.sh /tmp/
COPY scripts/solaris-toolchain.sh /tmp/
RUN bash /tmp/solaris-toolchain.sh x86_64 sysroot
RUN bash /tmp/solaris-toolchain.sh x86_64 binutils
RUN bash /tmp/solaris-toolchain.sh x86_64 gcc
COPY scripts/sccache.sh /scripts/
RUN sh /scripts/sccache.sh
COPY scripts/cmake.sh /scripts/
RUN /scripts/cmake.sh
ENV \
AR_x86_64_pc_solaris=x86_64-solaris-ar \
RANLIB_x86_64_pc_solaris=x86_64-solaris-ranlib \
CC_x86_64_pc_solaris=x86_64-solaris-gcc \
CXX_x86_64_pc_solaris=x86_64-solaris-g++
ENV HOSTS=x86_64-pc-solaris
ENV RUST_CONFIGURE_ARGS --enable-extended --disable-docs
ENV SCRIPT python3 ../x.py dist --host $HOSTS --target $HOSTS

View file

@ -34,38 +34,27 @@ RUN npm install es-check@6.1.1 eslint@8.6.0 typescript@5.7.3 -g
COPY scripts/sccache.sh /scripts/
RUN sh /scripts/sccache.sh
COPY host-x86_64/mingw-check/reuse-requirements.txt /tmp/
COPY host-x86_64/mingw-check-1/reuse-requirements.txt /tmp/
RUN pip3 install --no-deps --no-cache-dir --require-hashes -r /tmp/reuse-requirements.txt
COPY host-x86_64/mingw-check/check-default-config-profiles.sh /scripts/
COPY host-x86_64/mingw-check/validate-toolstate.sh /scripts/
COPY host-x86_64/mingw-check/validate-error-codes.sh /scripts/
COPY host-x86_64/mingw-check-1/check-default-config-profiles.sh /scripts/
COPY host-x86_64/mingw-check-1/validate-toolstate.sh /scripts/
COPY host-x86_64/mingw-check-1/validate-error-codes.sh /scripts/
# Check library crates on all tier 1 targets.
# We disable optimized compiler built-ins because that requires a C toolchain for the target.
# We also skip the x86_64-unknown-linux-gnu target as it is well-tested by other jobs.
ENV SCRIPT \
python3 ../x.py check --stage 0 --set build.optimized-compiler-builtins=false core alloc std --target=aarch64-unknown-linux-gnu,i686-pc-windows-msvc,i686-unknown-linux-gnu,x86_64-apple-darwin,x86_64-pc-windows-gnu,x86_64-pc-windows-msvc && \
/scripts/check-default-config-profiles.sh && \
python3 ../x.py check compiletest --set build.compiletest-use-stage0-libtest=true && \
python3 ../x.py check --target=x86_64-pc-windows-gnu --host=x86_64-pc-windows-gnu && \
python3 ../x.py clippy ci && \
python3 ../x.py build --stage 0 src/tools/build-manifest && \
python3 ../x.py test --stage 0 src/tools/compiletest && \
python3 ../x.py test --stage 0 core alloc std test proc_macro && \
# Build both public and internal documentation.
RUSTDOCFLAGS=\"--document-private-items --document-hidden-items\" python3 ../x.py doc --stage 0 library && \
mkdir -p /checkout/obj/staging/doc && \
cp -r build/x86_64-unknown-linux-gnu/doc /checkout/obj/staging && \
RUSTDOCFLAGS=\"--document-private-items --document-hidden-items\" python3 ../x.py doc --stage 0 compiler && \
RUSTDOCFLAGS=\"--document-private-items --document-hidden-items\" python3 ../x.py doc --stage 0 library/test && \
python3 ../x.py check compiletest --set build.compiletest-use-stage0-libtest=true && \
python3 ../x.py check --stage 1 --target=i686-pc-windows-gnu --host=i686-pc-windows-gnu && \
python3 ../x.py check --stage 1 --set build.optimized-compiler-builtins=false core alloc std --target=aarch64-unknown-linux-gnu,i686-pc-windows-msvc,i686-unknown-linux-gnu,x86_64-apple-darwin,x86_64-pc-windows-gnu,x86_64-pc-windows-msvc && \
/scripts/validate-toolstate.sh && \
/scripts/validate-error-codes.sh && \
reuse --include-submodules lint && \
python3 ../x.py test collect-license-metadata && \
# Runs checks to ensure that there are no issues in our JS code.
es-check es2019 ../src/librustdoc/html/static/js/*.js && \
eslint -c ../src/librustdoc/html/static/.eslintrc.js ../src/librustdoc/html/static/js/*.js && \
eslint -c ../src/tools/rustdoc-js/.eslintrc.js ../src/tools/rustdoc-js/tester.js && \
eslint -c ../src/tools/rustdoc-gui/.eslintrc.js ../src/tools/rustdoc-gui/tester.js && \
tsc --project ../src/librustdoc/html/static/js/tsconfig.json

View file

@ -0,0 +1,39 @@
FROM ubuntu:22.04
ARG DEBIAN_FRONTEND=noninteractive
RUN apt-get update && apt-get install -y --no-install-recommends \
g++ \
make \
ninja-build \
file \
curl \
ca-certificates \
python3 \
python3-pip \
python3-pkg-resources \
git \
cmake \
sudo \
gdb \
xz-utils \
libssl-dev \
pkg-config \
mingw-w64 \
&& rm -rf /var/lib/apt/lists/*
ENV RUST_CONFIGURE_ARGS="--set rust.validate-mir-opts=3"
COPY scripts/sccache.sh /scripts/
RUN sh /scripts/sccache.sh
ENV SCRIPT \
python3 ../x.py check && \
python3 ../x.py clippy ci && \
python3 ../x.py test --stage 1 core alloc std test proc_macro && \
python3 ../x.py doc --stage 0 bootstrap && \
# Build both public and internal documentation.
RUSTDOCFLAGS=\"--document-private-items --document-hidden-items\" python3 ../x.py doc --stage 0 compiler && \
RUSTDOCFLAGS=\"--document-private-items --document-hidden-items\" python3 ../x.py doc --stage 1 library && \
mkdir -p /checkout/obj/staging/doc && \
cp -r build/x86_64-unknown-linux-gnu/doc /checkout/obj/staging && \
RUSTDOCFLAGS=\"--document-private-items --document-hidden-items\" python3 ../x.py doc --stage 1 library/test

View file

@ -24,17 +24,24 @@ RUN apt-get update && apt-get install -y --no-install-recommends \
mingw-w64 \
&& rm -rf /var/lib/apt/lists/*
COPY scripts/nodejs.sh /scripts/
RUN sh /scripts/nodejs.sh /node
ENV PATH="/node/bin:${PATH}"
# Install eslint
COPY host-x86_64/mingw-check-tidy/eslint.version /tmp/
COPY scripts/sccache.sh /scripts/
RUN sh /scripts/sccache.sh
COPY host-x86_64/mingw-check/reuse-requirements.txt /tmp/
COPY host-x86_64/mingw-check-1/reuse-requirements.txt /tmp/
RUN pip3 install --no-deps --no-cache-dir --require-hashes -r /tmp/reuse-requirements.txt \
&& pip3 install virtualenv
COPY host-x86_64/mingw-check/validate-toolstate.sh /scripts/
COPY host-x86_64/mingw-check/validate-error-codes.sh /scripts/
COPY host-x86_64/mingw-check-1/validate-toolstate.sh /scripts/
COPY host-x86_64/mingw-check-1/validate-error-codes.sh /scripts/
# NOTE: intentionally uses python2 for x.py so we can test it still works.
# validate-toolstate only runs in our CI, so it's ok for it to only support python3.
ENV SCRIPT TIDY_PRINT_DIFF=1 python2.7 ../x.py test \
--stage 0 src/tools/tidy tidyselftest --extra-checks=py,cpp
ENV SCRIPT TIDY_PRINT_DIFF=1 npm install eslint@$(head -n 1 /tmp/eslint.version) && \
python2.7 ../x.py test --stage 0 src/tools/tidy tidyselftest --extra-checks=py,cpp

View file

@ -0,0 +1 @@
8.6.0

View file

@ -1,3 +1,15 @@
# Runs `distcheck`, which is a collection of smoke tests:
#
# - Run `make check` from an unpacked dist tarball to make sure we can at the
# minimum run check steps from those sources.
# - Check that selected dist components at least have expected directory shape
# and crate manifests that cargo can generate a lockfile from.
#
# Refer to `src/bootstrap/src/core/build_steps/test.rs` `Distcheck::run` for
# specifics.
#
# FIXME(#136822): dist components are generally under-tested.
FROM ubuntu:22.04
ARG DEBIAN_FRONTEND=noninteractive

View file

@ -29,5 +29,5 @@ RUN echo "optimize = false" >> /config/nopt-std-config.toml
ENV RUST_CONFIGURE_ARGS --build=x86_64-unknown-linux-gnu \
--disable-optimize-tests \
--set rust.test-compare-mode
ENV SCRIPT python3 ../x.py test --stage 0 --config /config/nopt-std-config.toml library/std \
ENV SCRIPT python3 ../x.py test --stage 1 --config /config/nopt-std-config.toml library/std \
&& python3 ../x.py --stage 2 test

View file

@ -1,4 +1,4 @@
FROM ubuntu:22.04
FROM ghcr.io/rust-lang/ubuntu:22.04
ARG DEBIAN_FRONTEND=noninteractive
RUN apt-get update && apt-get install -y --no-install-recommends \

View file

@ -53,8 +53,8 @@ MIRIFLAGS="-Zmiri-force-intrinsic-fallback --cfg force_intrinsic_fallback -O -Zm
case $HOST_TARGET in
x86_64-unknown-linux-gnu)
# Only this branch runs in PR CI.
# Fully test all main OSes, including a 32bit target.
python3 "$X_PY" test --stage 2 src/tools/miri src/tools/miri/cargo-miri --target x86_64-apple-darwin
# Fully test all main OSes, and all main architectures.
python3 "$X_PY" test --stage 2 src/tools/miri src/tools/miri/cargo-miri --target aarch64-apple-darwin
python3 "$X_PY" test --stage 2 src/tools/miri src/tools/miri/cargo-miri --target i686-pc-windows-msvc
# Only run "pass" tests for the remaining targets, which is quite a bit faster.
python3 "$X_PY" test --stage 2 src/tools/miri --target x86_64-pc-windows-gnu --test-args pass
@ -69,7 +69,7 @@ case $HOST_TARGET in
#FIXME: Re-enable this once CI issues are fixed
# See <https://github.com/rust-lang/rust/issues/127883>
# For now, these tests are moved to `x86_64-msvc-ext2` in `src/ci/github-actions/jobs.yml`.
#python3 "$X_PY" test --stage 2 src/tools/miri --target aarch64-apple-darwin --test-args pass
#python3 "$X_PY" test --stage 2 src/tools/miri --target x86_64-apple-darwin --test-args pass
;;
*)
echo "FATAL: unexpected host $HOST_TARGET"

View file

@ -97,9 +97,8 @@ if [ -f "$docker_dir/$image/Dockerfile" ]; then
docker --version
REGISTRY=ghcr.io
# Hardcode username to reuse cache between auto and pr jobs
# FIXME: should be changed after move from rust-lang-ci
REGISTRY_USERNAME=rust-lang-ci
# Default to `rust-lang` to allow reusing the cache for local builds
REGISTRY_USERNAME=${GITHUB_REPOSITORY_OWNER:-rust-lang}
# Tag used to push the final Docker image, so that it can be pulled by e.g. rustup
IMAGE_TAG=${REGISTRY}/${REGISTRY_USERNAME}/rust-ci:${cksum}
# Tag used to cache the Docker build

View file

@ -4,6 +4,8 @@ set -o errexit
set -o pipefail
set -o xtrace
source /tmp/shared.sh
ARCH="$1"
PHASE="$2"
@ -59,52 +61,13 @@ BINUTILS_TAR="$BINUTILS_BASE.tar.bz2"
BINUTILS_URL="https://ftp.gnu.org/gnu/binutils/$BINUTILS_TAR"
download_file() {
local file="$1"
local url="$2"
local sum="$3"
while :; do
if [[ -f "$file" ]]; then
if ! h="$(sha256sum "$file" | awk '{ print $1 }')"; then
printf 'ERROR: reading hash\n' >&2
exit 1
fi
if [[ "$h" == "$sum" ]]; then
return 0
fi
printf 'WARNING: hash mismatch: %s != expected %s\n' \
"$h" "$sum" >&2
rm -f "$file"
fi
printf 'Downloading: %s\n' "$url"
if ! curl -f -L -o "$file" "$url"; then
rm -f "$file"
sleep 1
fi
done
}
case "$PHASE" in
sysroot)
download_file "/tmp/$SYSROOT_TAR" "$SYSROOT_URL" "$SYSROOT_SUM"
mkdir -p "$SYSROOT_DIR"
cd "$SYSROOT_DIR"
tar -xzf "/tmp/$SYSROOT_TAR"
rm -f "/tmp/$SYSROOT_TAR"
download_tar_and_extract_into_dir "$SYSROOT_URL" "$SYSROOT_SUM" "$SYSROOT_DIR"
;;
binutils)
download_file "/tmp/$BINUTILS_TAR" "$BINUTILS_URL" "$BINUTILS_SUM"
mkdir -p /ws/src/binutils
cd /ws/src/binutils
tar -xjf "/tmp/$BINUTILS_TAR"
rm -f "/tmp/$BINUTILS_TAR"
download_tar_and_extract_into_dir "$BINUTILS_URL" "$BINUTILS_SUM" /ws/src/binutils
mkdir -p /ws/build/binutils
cd /ws/build/binutils
"/ws/src/binutils/$BINUTILS_BASE/configure" \
@ -123,12 +86,7 @@ binutils)
;;
gcc)
download_file "/tmp/$GCC_TAR" "$GCC_URL" "$GCC_SUM"
mkdir -p /ws/src/gcc
cd /ws/src/gcc
tar -xJf "/tmp/$GCC_TAR"
rm -f "/tmp/$GCC_TAR"
download_tar_and_extract_into_dir "$GCC_URL" "$GCC_SUM" /ws/src/gcc
mkdir -p /ws/build/gcc
cd /ws/build/gcc
export CFLAGS='-fPIC'

View file

@ -2,7 +2,7 @@
set -euo pipefail
LINUX_VERSION=v6.15-rc4
LINUX_VERSION=v6.16-rc1
# Build rustc, rustdoc, cargo, clippy-driver and rustfmt
../x.py build --stage 2 library rustdoc clippy rustfmt

View file

@ -40,3 +40,37 @@ function retry {
}
done
}
download_tar_and_extract_into_dir() {
local url="$1"
local sum="$2"
local dir="$3"
local file=$(mktemp -u)
while :; do
if [[ -f "$file" ]]; then
if ! h="$(sha256sum "$file" | awk '{ print $1 }')"; then
printf 'ERROR: reading hash\n' >&2
exit 1
fi
if [[ "$h" == "$sum" ]]; then
break
fi
printf 'WARNING: hash mismatch: %s != expected %s\n' "$h" "$sum" >&2
rm -f "$file"
fi
printf 'Downloading: %s\n' "$url"
if ! curl -f -L -o "$file" "$url"; then
rm -f "$file"
sleep 1
fi
done
mkdir -p "$dir"
cd "$dir"
tar -xf "$file"
rm -f "$file"
}

View file

@ -0,0 +1,162 @@
#!/bin/bash
set -o errexit
set -o pipefail
set -o xtrace
source /tmp/shared.sh
ARCH="$1"
PHASE="$2"
JOBS="$(getconf _NPROCESSORS_ONLN)"
case "$ARCH" in
x86_64)
SYSROOT_MACH='i386'
;;
sparcv9)
SYSROOT_MACH='sparc'
;;
*)
printf 'ERROR: unknown architecture: %s\n' "$ARCH"
exit 1
esac
BUILD_TARGET="$ARCH-pc-solaris2.11"
#
# The illumos and the Solaris build both use the same GCC-level host triple,
# though different versions of GCC are used and with different configuration
# options. To ensure as little accidental cross-pollination as possible, we
# build the illumos toolchain in a specific directory tree and just symlink the
# expected tools into /usr/local/bin at the end. We omit /usr/local/bin from
# PATH here for similar reasons.
#
PREFIX="/opt/solaris/$ARCH"
export PATH="$PREFIX/bin:/usr/bin:/bin:/usr/sbin:/sbin"
#
# NOTE: The compiler version selected here is more specific than might appear.
# GCC 7.X releases do not appear to cross-compile correctly for Solaris
# targets, at least insofar as they refuse to enable TLS in libstdc++. When
# changing the GCC version in future, one must carefully verify that TLS is
# enabled in all of the static libraries we intend to include in output
# binaries.
#
GCC_VERSION='8.4.0'
GCC_SUM='e30a6e52d10e1f27ed55104ad233c30bd1e99cfb5ff98ab022dc941edd1b2dd4'
GCC_BASE="gcc-$GCC_VERSION"
GCC_TAR="gcc-$GCC_VERSION.tar.xz"
GCC_URL="https://ci-mirrors.rust-lang.org/rustc/$GCC_TAR"
SYSROOT_VER='2025-02-21'
if [ $ARCH = "x86_64" ]; then
SYSROOT_SUM='e82b78c14464cc2dc71f3cdab312df3dd63441d7c23eeeaf34d41d8b947688d3'
SYSROOT_TAR="solaris-11.4.42.111.0-i386-sysroot-v$SYSROOT_VER.tar.bz2"
SYSROOT_DIR="$PREFIX/sysroot-x86_64"
else
SYSROOT_SUM='e249a7ef781b9b3297419bd014fa0574800703981d84e113d6af3a897a8b4ffc'
SYSROOT_TAR="solaris-11.4.42.111.0-sparc-sysroot-v$SYSROOT_VER.tar.bz2"
SYSROOT_DIR="$PREFIX/sysroot-sparcv9"
fi
SYSROOT_URL="https://ci-mirrors.rust-lang.org/rustc/$SYSROOT_TAR"
BINUTILS_VERSION='2.44'
BINUTILS_SUM='ce2017e059d63e67ddb9240e9d4ec49c2893605035cd60e92ad53177f4377237'
BINUTILS_BASE="binutils-$BINUTILS_VERSION"
BINUTILS_TAR="$BINUTILS_BASE.tar.xz"
BINUTILS_URL="https://ci-mirrors.rust-lang.org/rustc/$BINUTILS_TAR"
case "$PHASE" in
sysroot)
download_tar_and_extract_into_dir "$SYSROOT_URL" "$SYSROOT_SUM" "$SYSROOT_DIR"
;;
binutils)
download_tar_and_extract_into_dir "$BINUTILS_URL" "$BINUTILS_SUM" /ws/src/binutils
cat > binutils.patch <<EOF
Workaround for: https://github.com/rust-lang/rust/issues/137997
--- binutils-2.44/bfd/elflink.c
+++ binutils-2.44/bfd/elflink.c
@@ -5150,7 +5150,7 @@
if it is not a function, because it might be the version
symbol itself. FIXME: What if it isn't? */
if ((iver.vs_vers & VERSYM_HIDDEN) != 0
- || (vernum > 1
+ || (vernum > 1 && strcmp(name, "logb") != 0
&& (!bfd_is_abs_section (sec)
|| bed->is_function_type (ELF_ST_TYPE (isym->st_info)))))
{
EOF
f=binutils-$BINUTILS_VERSION/bfd/elflink.c && expand -t 4 "$f" > "$f.exp"
mv binutils-$BINUTILS_VERSION/bfd/elflink.c.exp binutils-$BINUTILS_VERSION/bfd/elflink.c
patch binutils-$BINUTILS_VERSION/bfd/elflink.c < binutils.patch
rm binutils.patch
mkdir -p /ws/build/binutils
cd /ws/build/binutils
"/ws/src/binutils/$BINUTILS_BASE/configure" \
--prefix="$PREFIX" \
--target="$BUILD_TARGET" \
--program-prefix="$ARCH-solaris-" \
--with-sysroot="$SYSROOT_DIR"
make -j "$JOBS"
mkdir -p "$PREFIX"
make install
cd
rm -rf /ws/src/binutils /ws/build/binutils
;;
gcc)
download_tar_and_extract_into_dir "$GCC_URL" "$GCC_SUM" /ws/src/gcc
mkdir -p /ws/build/gcc
cd /ws/build/gcc
export CFLAGS='-fPIC'
export CXXFLAGS='-fPIC'
export CXXFLAGS_FOR_TARGET='-fPIC'
export CFLAGS_FOR_TARGET='-fPIC'
"/ws/src/gcc/$GCC_BASE/configure" \
--prefix="$PREFIX" \
--target="$BUILD_TARGET" \
--program-prefix="$ARCH-solaris-" \
--with-sysroot="$SYSROOT_DIR" \
--with-gnu-as \
--with-gnu-ld \
--disable-nls \
--disable-libgomp \
--disable-libquadmath \
--disable-libssp \
--disable-libvtv \
--disable-libcilkrts \
--disable-libada \
--disable-libsanitizer \
--disable-libquadmath-support \
--disable-shared \
--enable-tls
make -j "$JOBS"
mkdir -p "$PREFIX"
make install
#
# Link toolchain commands into /usr/local/bin so that cmake and others
# can find them:
#
(cd "$PREFIX/bin" && ls -U) | grep "^$ARCH-solaris-" |
xargs -t -I% ln -s "$PREFIX/bin/%" '/usr/local/bin/'
cd
rm -rf /ws/src/gcc /ws/build/gcc
;;
*)
printf 'ERROR: unknown phase "%s"\n' "$PHASE" >&2
exit 100
;;
esac

View file

@ -2,8 +2,8 @@
set -ex
# NOTE: intentionally uses all of `x.py`, `x`, and `x.ps1` to make sure they all work on Linux.
../x.py --stage 2 test --skip src/tools/tidy
# NOTE: intentionally uses `x`, and `x.ps1` to make sure they work on Linux.
# Make sure that `x.py` is tested elsewhere.
# Run the `mir-opt` tests again but this time for a 32-bit target.
# This enforces that tests using `// EMIT_MIR_FOR_EACH_BIT_WIDTH` have

View file

@ -10,7 +10,6 @@ runners:
free_disk: true
<<: *base-job
# Large runner used mainly for its bigger disk capacity
- &job-linux-4c-largedisk
os: ubuntu-24.04-4core-16gb
<<: *base-job
@ -35,8 +34,6 @@ runners:
os: windows-2022
<<: *base-job
# FIXME(#141022): Windows Server 2025 20250504.1.0 currently experiences
# insufficient disk space.
- &job-windows-25
os: windows-2025
<<: *base-job
@ -77,7 +74,6 @@ envs:
env-x86_64-apple-tests: &env-x86_64-apple-tests
SCRIPT: ./x.py check compiletest --set build.compiletest-use-stage0-libtest=true && ./x.py --stage 2 test --skip tests/ui --skip tests/rustdoc -- --exact
RUST_CONFIGURE_ARGS: --build=x86_64-apple-darwin --enable-sanitizers --enable-profiler --set rust.jemalloc
RUSTC_RETRY_LINKER_ON_SEGFAULT: 1
# Ensure that host tooling is tested on our minimum supported macOS version.
MACOSX_DEPLOYMENT_TARGET: 10.12
MACOSX_STD_DEPLOYMENT_TARGET: 10.12
@ -113,31 +109,54 @@ envs:
pr:
PR_CI_JOB: 1
jobs:
dist-x86_64-linux: &job-dist-x86_64-linux
name: dist-x86_64-linux
env:
CODEGEN_BACKENDS: llvm,cranelift
DOCKER_SCRIPT: dist.sh
<<: *job-linux-36c-codebuild
# Jobs that run on each push to a pull request (PR)
# These jobs automatically inherit envs.pr, to avoid repeating
# it in each job definition.
pr:
- name: mingw-check
- name: mingw-check-1
<<: *job-linux-4c
- name: mingw-check-2
<<: *job-linux-4c
- name: mingw-check-tidy
continue_on_error: true
free_disk: false
env:
# This submodule is expensive to checkout, and it should not be needed for
# tidy. This speeds up the PR CI job by ~1 minute.
SKIP_SUBMODULES: src/gcc
<<: *job-linux-4c
- name: x86_64-gnu-llvm-19
env:
ENABLE_GCC_CODEGEN: "1"
DOCKER_SCRIPT: x86_64-gnu-llvm.sh
<<: *job-linux-16c
<<: *job-linux-4c
- name: aarch64-gnu-llvm-19-1
env:
IMAGE: aarch64-gnu-llvm-19
DOCKER_SCRIPT: stage_2_test_set1.sh
<<: *job-aarch64-linux
- name: aarch64-gnu-llvm-19-2
env:
IMAGE: aarch64-gnu-llvm-19
DOCKER_SCRIPT: stage_2_test_set2.sh
<<: *job-aarch64-linux
- name: x86_64-gnu-tools
<<: *job-linux-16c
<<: *job-linux-36c-codebuild
# Jobs that run when you perform a try build (@bors try)
# These jobs automatically inherit envs.try, to avoid repeating
# it in each job definition.
try:
- name: dist-x86_64-linux
env:
CODEGEN_BACKENDS: llvm,cranelift
<<: *job-linux-16c
- <<: *job-dist-x86_64-linux
# Main CI jobs that have to be green to merge a commit into master
# These jobs automatically inherit envs.auto, to avoid repeating
@ -167,8 +186,11 @@ auto:
- name: dist-android
<<: *job-linux-4c
- name: dist-arm-linux
<<: *job-linux-8c-codebuild
- name: dist-arm-linux-gnueabi
<<: *job-linux-4c
- name: dist-arm-linux-musl
<<: *job-linux-4c
- name: dist-armhf-linux
<<: *job-linux-4c
@ -203,8 +225,11 @@ auto:
- name: dist-powerpc64-linux
<<: *job-linux-4c
- name: dist-powerpc64le-linux
<<: *job-linux-4c-largedisk
- name: dist-powerpc64le-linux-gnu
<<: *job-linux-4c
- name: dist-powerpc64le-linux-musl
<<: *job-linux-4c
- name: dist-riscv64-linux
<<: *job-linux-4c
@ -224,16 +249,14 @@ auto:
- name: dist-x86_64-illumos
<<: *job-linux-4c
- name: dist-x86_64-linux
env:
CODEGEN_BACKENDS: llvm,cranelift
<<: *job-linux-36c-codebuild
- <<: *job-dist-x86_64-linux
- name: dist-x86_64-linux-alt
env:
IMAGE: dist-x86_64-linux
CODEGEN_BACKENDS: llvm,cranelift
<<: *job-linux-16c
DOCKER_SCRIPT: dist-alt.sh
<<: *job-linux-4c-largedisk
- name: dist-x86_64-musl
env:
@ -243,6 +266,12 @@ auto:
- name: dist-x86_64-netbsd
<<: *job-linux-4c
- name: dist-x86_64-solaris
<<: *job-linux-4c
- name: dist-sparcv9-solaris
<<: *job-linux-4c
# The i686-gnu job is split into multiple jobs to run tests in parallel.
# i686-gnu-1 skips tests that run in i686-gnu-2.
- name: i686-gnu-1
@ -271,11 +300,18 @@ auto:
env:
IMAGE: i686-gnu-nopt
DOCKER_SCRIPT: >-
python3 ../x.py test --stage 0 --config /config/nopt-std-config.toml library/std &&
python3 ../x.py test --stage 1 --config /config/nopt-std-config.toml library/std &&
/scripts/stage_2_test_set2.sh
<<: *job-linux-4c
- name: mingw-check
- name: mingw-check-1
<<: *job-linux-4c
- name: mingw-check-2
<<: *job-linux-4c
- name: mingw-check-tidy
free_disk: false
<<: *job-linux-4c
- name: test-various
@ -323,7 +359,7 @@ auto:
<<: *job-linux-4c
- name: x86_64-gnu-distcheck
<<: *job-linux-8c
<<: *job-linux-4c
# The x86_64-gnu-llvm-20 job is split into multiple jobs to run tests in parallel.
# x86_64-gnu-llvm-20-1 skips tests that run in x86_64-gnu-llvm-20-{2,3}.
@ -331,7 +367,7 @@ auto:
env:
RUST_BACKTRACE: 1
IMAGE: x86_64-gnu-llvm-20
DOCKER_SCRIPT: stage_2_test_set1.sh
DOCKER_SCRIPT: stage_2_test_set2.sh
<<: *job-linux-4c
# Skip tests that run in x86_64-gnu-llvm-20-{1,3}
@ -356,7 +392,7 @@ auto:
env:
RUST_BACKTRACE: 1
IMAGE: x86_64-gnu-llvm-19
DOCKER_SCRIPT: stage_2_test_set1.sh
DOCKER_SCRIPT: stage_2_test_set2.sh
<<: *job-linux-4c
# Skip tests that run in x86_64-gnu-llvm-19-{1,3}
@ -391,7 +427,6 @@ auto:
env:
SCRIPT: ./x.py dist bootstrap --include-default-paths --host=x86_64-apple-darwin --target=x86_64-apple-darwin
RUST_CONFIGURE_ARGS: --enable-full-tools --enable-sanitizers --enable-profiler --set rust.jemalloc --set rust.lto=thin --set rust.codegen-units=1
RUSTC_RETRY_LINKER_ON_SEGFAULT: 1
# Ensure that host tooling is built to support our minimum support macOS version.
MACOSX_DEPLOYMENT_TARGET: 10.12
MACOSX_STD_DEPLOYMENT_TARGET: 10.12
@ -409,7 +444,6 @@ auto:
# Mac Catalyst cannot currently compile the sanitizer:
# https://github.com/rust-lang/rust/issues/129069
RUST_CONFIGURE_ARGS: --enable-sanitizers --enable-profiler --set rust.jemalloc --set target.aarch64-apple-ios-macabi.sanitizers=false --set target.x86_64-apple-ios-macabi.sanitizers=false
RUSTC_RETRY_LINKER_ON_SEGFAULT: 1
# Ensure that host tooling is built to support our minimum support macOS version.
# FIXME(madsmtm): This might be redundant, as we're not building host tooling here (?)
MACOSX_DEPLOYMENT_TARGET: 10.12
@ -442,7 +476,6 @@ auto:
--set llvm.ninja=false
--set rust.lto=thin
--set rust.codegen-units=1
RUSTC_RETRY_LINKER_ON_SEGFAULT: 1
SELECT_XCODE: /Applications/Xcode_15.4.app
USE_XCODE_CLANG: 1
# Aarch64 tooling only needs to support macOS 11.0 and up as nothing else
@ -458,12 +491,13 @@ auto:
- name: aarch64-apple
env:
SCRIPT: ./x.py --stage 2 test --host=aarch64-apple-darwin --target=aarch64-apple-darwin
SCRIPT: >
./x.py --stage 2 test --host=aarch64-apple-darwin --target=aarch64-apple-darwin &&
./x.py --stage 2 test --host=aarch64-apple-darwin --target=aarch64-apple-darwin src/tools/cargo
RUST_CONFIGURE_ARGS: >-
--enable-sanitizers
--enable-profiler
--set rust.jemalloc
RUSTC_RETRY_LINKER_ON_SEGFAULT: 1
SELECT_XCODE: /Applications/Xcode_15.4.app
USE_XCODE_CLANG: 1
# Aarch64 tooling only needs to support macOS 11.0 and up as nothing else
@ -484,17 +518,13 @@ auto:
env:
RUST_CONFIGURE_ARGS: --build=x86_64-pc-windows-msvc --enable-sanitizers --enable-profiler
SCRIPT: make ci-msvc-py
# FIXME(#141022): Windows Server 2025 20250504.1.0 currently experiences
# insufficient disk space.
<<: *job-windows
<<: *job-windows-25
- name: x86_64-msvc-2
env:
RUST_CONFIGURE_ARGS: --build=x86_64-pc-windows-msvc --enable-sanitizers --enable-profiler
SCRIPT: make ci-msvc-ps1
# FIXME(#141022): Windows Server 2025 20250504.1.0 currently experiences
# insufficient disk space.
<<: *job-windows
<<: *job-windows-25
# i686-msvc is split into two jobs to run tests in parallel.
- name: i686-msvc-1
@ -522,11 +552,13 @@ auto:
- name: x86_64-msvc-ext2
env:
SCRIPT: >
python x.py test --stage 2 src/tools/miri --target aarch64-apple-darwin --test-args pass &&
python x.py test --stage 2 src/tools/miri --target x86_64-apple-darwin --test-args pass &&
python x.py test --stage 2 src/tools/miri --target x86_64-pc-windows-gnu --test-args pass &&
python x.py miri --stage 2 library/core --test-args notest &&
python x.py miri --stage 2 library/alloc --test-args notest &&
python x.py miri --stage 2 library/std --test-args notest
# The last 3 lines smoke-test `x.py miri`. This doesn't run any actual tests (that would take
# too long), but it ensures that the crates build properly when tested with Miri.
RUST_CONFIGURE_ARGS: --build=x86_64-pc-windows-msvc --enable-lld
<<: *job-windows

View file

@ -55,7 +55,11 @@ for i in ${!modules[@]}; do
bg_pids[${i}]=$!
continue
else
# Submodule paths contained in SKIP_SUBMODULES (comma-separated list) will not be
# checked out.
if [ -z "${SKIP_SUBMODULES:-}" ] || [[ ! ",$SKIP_SUBMODULES," = *",$module,"* ]]; then
use_git="$use_git $module"
fi
fi
done
retry sh -c "git submodule deinit -f $use_git && \

View file

@ -15,7 +15,8 @@ fi
branch=$(git branch --show-current || echo)
if [ -n "$branch" ]; then
branch="${branch}-"
# Strip automation/bors/ prefix if present
branch="${branch#automation/bors/}-"
fi
if [ "${GITHUB_EVENT_NAME:=none}" = "pull_request" ]; then

View file

@ -10,8 +10,8 @@ IFS=$'\n\t'
source "$(cd "$(dirname "$0")" && pwd)/../shared.sh"
# Update both macOS's and Windows's tarballs when bumping the version here.
# Try to keep this in sync with src/ci/docker/host-x86_64/dist-x86_64-linux/build-clang.sh
LLVM_VERSION="18.1.4"
# Try to keep this in sync with src/ci/docker/scripts/build-clang.sh
LLVM_VERSION="20.1.3"
if isMacOS; then
# FIXME: This is the latest pre-built version of LLVM that's available for

@ -1 +1 @@
Subproject commit 230c68bc1e08f5f3228384a28cc228c81dfbd10d
Subproject commit 634724ea85ebb08a542970bf8871ac8b0f77fd15

@ -1 +1 @@
Subproject commit 1b1bb49babd65c732468cfa515b0c009bd1d26bc
Subproject commit aa6ce337c0adf7a63e33960d184270f2a45ab9ef

@ -1 +1 @@
Subproject commit 0b8219ac23a3e09464e4e0166c768cf1c4bba0d5
Subproject commit 10fa1e084365f23f24ad0000df541923385b73b6

@ -1 +1 @@
Subproject commit c76a20f0d987145dcedf05c5c073ce8d91f2e82a
Subproject commit 8b61acfaea822e9ac926190bc8f15791c33336e8

@ -1 +1 @@
Subproject commit acd0231ebc74849f6a8907b5e646ce86721aad76
Subproject commit 8e0f593a30f3b56ddb0908fb7ab9249974e08738

@ -1 +1 @@
Subproject commit c9d151f9147c4808c77f0375ba3fa5d54443cb9e
Subproject commit 21f4e32b8b40d36453fae16ec07ad4b857c445b6

Some files were not shown because too many files have changed in this diff Show more