Merge pull request #4532 from rust-lang/rustup-2025-08-20

Automatic Rustup
This commit is contained in:
Ralf Jung 2025-08-20 08:14:57 +00:00 committed by GitHub
commit 46765526e3
No known key found for this signature in database
GPG key ID: B5690EEEBB952194
609 changed files with 15878 additions and 9046 deletions

View file

@ -179,6 +179,16 @@ fn main() {
}
}
// Here we pass additional paths that essentially act as a sysroot.
// These are used to load rustc crates (e.g. `extern crate rustc_ast;`)
// for rustc_private tools, so that we do not have to copy them into the
// actual sysroot of the compiler that builds the tool.
if let Ok(dirs) = env::var("RUSTC_ADDITIONAL_SYSROOT_PATHS") {
for dir in dirs.split(",") {
cmd.arg(format!("-L{dir}"));
}
}
// Force all crates compiled by this compiler to (a) be unstable and (b)
// allow the `rustc_private` feature to link to other unstable crates
// also in the sysroot. We also do this for host crates, since those

View file

@ -1,5 +1,8 @@
//! Implementation of compiling the compiler and standard library, in "check"-based modes.
use std::fs;
use std::path::{Path, PathBuf};
use crate::core::build_steps::compile::{
add_to_sysroot, run_cargo, rustc_cargo, rustc_cargo_env, std_cargo, std_crates_for_run_make,
};
@ -9,11 +12,11 @@ use crate::core::build_steps::tool::{
prepare_tool_cargo,
};
use crate::core::builder::{
self, Alias, Builder, Kind, RunConfig, ShouldRun, Step, StepMetadata, crate_description,
self, Alias, Builder, Cargo, Kind, RunConfig, ShouldRun, Step, StepMetadata, crate_description,
};
use crate::core::config::TargetSelection;
use crate::utils::build_stamp::{self, BuildStamp};
use crate::{CodegenBackendKind, Compiler, Mode, Subcommand};
use crate::{CodegenBackendKind, Compiler, Mode, Subcommand, t};
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub struct Std {
@ -33,7 +36,7 @@ impl Std {
}
impl Step for Std {
type Output = ();
type Output = BuildStamp;
const DEFAULT: bool = true;
fn should_run(run: ShouldRun<'_>) -> ShouldRun<'_> {
@ -60,13 +63,14 @@ impl Step for Std {
let crates = std_crates_for_run_make(&run);
run.builder.ensure(Std {
build_compiler: prepare_compiler_for_check(run.builder, run.target, Mode::Std),
build_compiler: prepare_compiler_for_check(run.builder, run.target, Mode::Std)
.build_compiler(),
target: run.target,
crates,
});
}
fn run(self, builder: &Builder<'_>) {
fn run(self, builder: &Builder<'_>) -> Self::Output {
let build_compiler = self.build_compiler;
let target = self.target;
@ -93,18 +97,27 @@ impl Step for Std {
Kind::Check,
format_args!("library artifacts{}", crate_description(&self.crates)),
Mode::Std,
self.build_compiler,
build_compiler,
target,
);
let stamp = build_stamp::libstd_stamp(builder, build_compiler, target).with_prefix("check");
run_cargo(builder, cargo, builder.config.free_args.clone(), &stamp, vec![], true, false);
let check_stamp =
build_stamp::libstd_stamp(builder, build_compiler, target).with_prefix("check");
run_cargo(
builder,
cargo,
builder.config.free_args.clone(),
&check_stamp,
vec![],
true,
false,
);
drop(_guard);
// don't check test dependencies if we haven't built libtest
if !self.crates.iter().any(|krate| krate == "test") {
return;
return check_stamp;
}
// Then run cargo again, once we've put the rmeta files for the library
@ -137,10 +150,11 @@ impl Step for Std {
Kind::Check,
"library test/bench/example targets",
Mode::Std,
self.build_compiler,
build_compiler,
target,
);
run_cargo(builder, cargo, builder.config.free_args.clone(), &stamp, vec![], true, false);
check_stamp
}
fn metadata(&self) -> Option<StepMetadata> {
@ -148,12 +162,135 @@ impl Step for Std {
}
}
/// Checks rustc using `build_compiler` and copies the built
/// .rmeta files into the sysroot of `build_compiler`.
/// Represents a proof that rustc was **checked**.
/// Contains directories with .rmeta files generated by checking rustc for a specific
/// target.
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
struct RmetaSysroot {
host_dir: PathBuf,
target_dir: PathBuf,
}
impl RmetaSysroot {
/// Copy rmeta artifacts from the given `stamp` into a sysroot located at `directory`.
fn from_stamp(
builder: &Builder<'_>,
stamp: BuildStamp,
target: TargetSelection,
directory: &Path,
) -> Self {
let host_dir = directory.join("host");
let target_dir = directory.join(target);
let _ = fs::remove_dir_all(directory);
t!(fs::create_dir_all(directory));
add_to_sysroot(builder, &target_dir, &host_dir, &stamp);
Self { host_dir, target_dir }
}
/// Configure the given cargo invocation so that the compiled crate will be able to use
/// rustc .rmeta artifacts that were previously generated.
fn configure_cargo(&self, cargo: &mut Cargo) {
cargo.append_to_env(
"RUSTC_ADDITIONAL_SYSROOT_PATHS",
format!("{},{}", self.host_dir.to_str().unwrap(), self.target_dir.to_str().unwrap()),
",",
);
}
}
/// Checks rustc using the given `build_compiler` for the given `target`, and produces
/// a sysroot in the build directory that stores the generated .rmeta files.
///
/// This step exists so that we can store the generated .rmeta artifacts into a separate
/// directory, instead of copying them into the sysroot of `build_compiler`, which would
/// "pollute" it (that is especially problematic for the external stage0 rustc).
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
struct PrepareRustcRmetaSysroot {
build_compiler: CompilerForCheck,
target: TargetSelection,
}
impl PrepareRustcRmetaSysroot {
fn new(build_compiler: CompilerForCheck, target: TargetSelection) -> Self {
Self { build_compiler, target }
}
}
impl Step for PrepareRustcRmetaSysroot {
type Output = RmetaSysroot;
fn should_run(run: ShouldRun<'_>) -> ShouldRun<'_> {
run.never()
}
fn run(self, builder: &Builder<'_>) -> Self::Output {
// Check rustc
let stamp = builder.ensure(Rustc::from_build_compiler(
self.build_compiler.clone(),
self.target,
vec![],
));
let build_compiler = self.build_compiler.build_compiler();
// Copy the generated rmeta artifacts to a separate directory
let dir = builder
.out
.join(build_compiler.host)
.join(format!("stage{}-rustc-rmeta-artifacts", build_compiler.stage + 1));
RmetaSysroot::from_stamp(builder, stamp, self.target, &dir)
}
}
/// Checks std using the given `build_compiler` for the given `target`, and produces
/// a sysroot in the build directory that stores the generated .rmeta files.
///
/// This step exists so that we can store the generated .rmeta artifacts into a separate
/// directory, instead of copying them into the sysroot of `build_compiler`, which would
/// "pollute" it (that is especially problematic for the external stage0 rustc).
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
struct PrepareStdRmetaSysroot {
build_compiler: Compiler,
target: TargetSelection,
}
impl PrepareStdRmetaSysroot {
fn new(build_compiler: Compiler, target: TargetSelection) -> Self {
Self { build_compiler, target }
}
}
impl Step for PrepareStdRmetaSysroot {
type Output = RmetaSysroot;
fn should_run(run: ShouldRun<'_>) -> ShouldRun<'_> {
run.never()
}
fn run(self, builder: &Builder<'_>) -> Self::Output {
// Check std
let stamp = builder.ensure(Std {
build_compiler: self.build_compiler,
target: self.target,
crates: vec![],
});
// Copy the generated rmeta artifacts to a separate directory
let dir = builder
.out
.join(self.build_compiler.host)
.join(format!("stage{}-std-rmeta-artifacts", self.build_compiler.stage));
RmetaSysroot::from_stamp(builder, stamp, self.target, &dir)
}
}
/// Checks rustc using `build_compiler`.
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub struct Rustc {
/// Compiler that will check this rustc.
pub build_compiler: Compiler,
pub build_compiler: CompilerForCheck,
pub target: TargetSelection,
/// Whether to build only a subset of crates.
///
@ -166,12 +303,20 @@ pub struct Rustc {
impl Rustc {
pub fn new(builder: &Builder<'_>, target: TargetSelection, crates: Vec<String>) -> Self {
let build_compiler = prepare_compiler_for_check(builder, target, Mode::Rustc);
Self::from_build_compiler(build_compiler, target, crates)
}
fn from_build_compiler(
build_compiler: CompilerForCheck,
target: TargetSelection,
crates: Vec<String>,
) -> Self {
Self { build_compiler, target, crates }
}
}
impl Step for Rustc {
type Output = ();
type Output = BuildStamp;
const IS_HOST: bool = true;
const DEFAULT: bool = true;
@ -191,8 +336,8 @@ impl Step for Rustc {
/// created will also be linked into the sysroot directory.
///
/// If we check a stage 2 compiler, we will have to first build a stage 1 compiler to check it.
fn run(self, builder: &Builder<'_>) {
let build_compiler = self.build_compiler;
fn run(self, builder: &Builder<'_>) -> Self::Output {
let build_compiler = self.build_compiler.build_compiler;
let target = self.target;
let mut cargo = builder::Cargo::new(
@ -205,6 +350,7 @@ impl Step for Rustc {
);
rustc_cargo(builder, &mut cargo, target, &build_compiler, &self.crates);
self.build_compiler.configure_cargo(&mut cargo);
// Explicitly pass -p for all compiler crates -- this will force cargo
// to also check the tests/benches/examples for these crates, rather
@ -217,7 +363,7 @@ impl Step for Rustc {
Kind::Check,
format_args!("compiler artifacts{}", crate_description(&self.crates)),
Mode::Rustc,
self.build_compiler,
self.build_compiler.build_compiler(),
target,
);
@ -226,13 +372,12 @@ impl Step for Rustc {
run_cargo(builder, cargo, builder.config.free_args.clone(), &stamp, vec![], true, false);
let libdir = builder.sysroot_target_libdir(build_compiler, target);
let hostdir = builder.sysroot_target_libdir(build_compiler, build_compiler.host);
add_to_sysroot(builder, &libdir, &hostdir, &stamp);
stamp
}
fn metadata(&self) -> Option<StepMetadata> {
let metadata = StepMetadata::check("rustc", self.target).built_by(self.build_compiler);
let metadata = StepMetadata::check("rustc", self.target)
.built_by(self.build_compiler.build_compiler());
let metadata = if self.crates.is_empty() {
metadata
} else {
@ -242,45 +387,101 @@ impl Step for Rustc {
}
}
/// Represents a compiler that can check something.
///
/// If the compiler was created for `Mode::ToolRustc` or `Mode::Codegen`, it will also contain
/// .rmeta artifacts from rustc that was already checked using `build_compiler`.
///
/// All steps that use this struct in a "general way" (i.e. they don't know exactly what kind of
/// thing is being built) should call `configure_cargo` to ensure that the rmeta artifacts are
/// properly linked, if present.
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub struct CompilerForCheck {
build_compiler: Compiler,
rustc_rmeta_sysroot: Option<RmetaSysroot>,
std_rmeta_sysroot: Option<RmetaSysroot>,
}
impl CompilerForCheck {
pub fn build_compiler(&self) -> Compiler {
self.build_compiler
}
/// If there are any rustc rmeta artifacts available, configure the Cargo invocation
/// so that the artifact being built can find them.
pub fn configure_cargo(&self, cargo: &mut Cargo) {
if let Some(sysroot) = &self.rustc_rmeta_sysroot {
sysroot.configure_cargo(cargo);
}
if let Some(sysroot) = &self.std_rmeta_sysroot {
sysroot.configure_cargo(cargo);
}
}
}
/// Prepare the standard library for checking something (that requires stdlib) using
/// `build_compiler`.
fn prepare_std(
builder: &Builder<'_>,
build_compiler: Compiler,
target: TargetSelection,
) -> Option<RmetaSysroot> {
// We need to build the host stdlib even if we only check, to compile build scripts and proc
// macros
builder.std(build_compiler, builder.host_target);
// If we're cross-compiling, we generate the rmeta files for the given target
// This check has to be here, because if we generate both .so and .rmeta files, rustc will fail,
// as it will have multiple candidates for linking.
if builder.host_target != target {
Some(builder.ensure(PrepareStdRmetaSysroot::new(build_compiler, target)))
} else {
None
}
}
/// Prepares a compiler that will check something with the given `mode`.
pub fn prepare_compiler_for_check(
builder: &Builder<'_>,
target: TargetSelection,
mode: Mode,
) -> Compiler {
) -> CompilerForCheck {
let host = builder.host_target;
match mode {
let mut rustc_rmeta_sysroot = None;
let mut std_rmeta_sysroot = None;
let build_compiler = match mode {
Mode::ToolBootstrap => builder.compiler(0, host),
// We could also only check std here and use `prepare_std`, but `ToolTarget` is currently
// only used for running in-tree Clippy on bootstrap tools, so it does not seem worth it to
// optimize it. Therefore, here we build std for the target, instead of just checking it.
Mode::ToolTarget => get_tool_target_compiler(builder, ToolTargetBuildMode::Build(target)),
Mode::ToolStd => {
if builder.config.compile_time_deps {
// When --compile-time-deps is passed, we can't use any rustc
// other than the bootstrap compiler. Luckily build scripts and
// proc macros for tools are unlikely to need nightly.
return builder.compiler(0, host);
builder.compiler(0, host)
} else {
// These tools require the local standard library to be checked
let build_compiler = builder.compiler(builder.top_stage, host);
std_rmeta_sysroot = prepare_std(builder, build_compiler, target);
build_compiler
}
// These tools require the local standard library to be checked
let build_compiler = builder.compiler(builder.top_stage, host);
// We need to build the host stdlib to check the tool itself.
// We need to build the target stdlib so that the tool can link to it.
builder.std(build_compiler, host);
// We could only check this library in theory, but `check::Std` doesn't copy rmetas
// into `build_compiler`'s sysroot to avoid clashes with `.rlibs`, so we build it
// instead.
builder.std(build_compiler, target);
build_compiler
}
Mode::ToolRustc | Mode::Codegen => {
// Check Rustc to produce the required rmeta artifacts for rustc_private, and then
// return the build compiler that was used to check rustc.
// We do not need to check examples/tests/etc. of Rustc for rustc_private, so we pass
// an empty set of crates, which will avoid using `cargo -p`.
let check = Rustc::new(builder, target, vec![]);
let build_compiler = check.build_compiler;
builder.ensure(check);
let compiler_for_rustc = prepare_compiler_for_check(builder, target, Mode::Rustc);
rustc_rmeta_sysroot = Some(
builder.ensure(PrepareRustcRmetaSysroot::new(compiler_for_rustc.clone(), target)),
);
let build_compiler = compiler_for_rustc.build_compiler();
// To check a rustc_private tool, we also need to check std that it will link to
std_rmeta_sysroot = prepare_std(builder, build_compiler, target);
build_compiler
}
Mode::Rustc => {
@ -294,15 +495,8 @@ pub fn prepare_compiler_for_check(
let stage = if host == target { builder.top_stage - 1 } else { builder.top_stage };
let build_compiler = builder.compiler(stage, host);
// Build host std for compiling build scripts
builder.std(build_compiler, build_compiler.host);
// Build target std so that the checked rustc can link to it during the check
// FIXME: maybe we can a way to only do a check of std here?
// But for that we would have to copy the stdlib rmetas to the sysroot of the build
// compiler, which conflicts with std rlibs, if we also build std.
builder.std(build_compiler, target);
// To check rustc, we need to check std that it will link to
std_rmeta_sysroot = prepare_std(builder, build_compiler, target);
build_compiler
}
Mode::Std => {
@ -311,13 +505,14 @@ pub fn prepare_compiler_for_check(
// stage 0 stdlib is used to compile build scripts and proc macros.
builder.compiler(builder.top_stage, host)
}
}
};
CompilerForCheck { build_compiler, rustc_rmeta_sysroot, std_rmeta_sysroot }
}
/// Check the Cranelift codegen backend.
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub struct CraneliftCodegenBackend {
build_compiler: Compiler,
build_compiler: CompilerForCheck,
target: TargetSelection,
}
@ -332,12 +527,14 @@ impl Step for CraneliftCodegenBackend {
}
fn make_run(run: RunConfig<'_>) {
let build_compiler = prepare_compiler_for_check(run.builder, run.target, Mode::Codegen);
run.builder.ensure(CraneliftCodegenBackend { build_compiler, target: run.target });
run.builder.ensure(CraneliftCodegenBackend {
build_compiler: prepare_compiler_for_check(run.builder, run.target, Mode::Codegen),
target: run.target,
});
}
fn run(self, builder: &Builder<'_>) {
let build_compiler = self.build_compiler;
let build_compiler = self.build_compiler.build_compiler();
let target = self.target;
let mut cargo = builder::Cargo::new(
@ -353,12 +550,13 @@ impl Step for CraneliftCodegenBackend {
.arg("--manifest-path")
.arg(builder.src.join("compiler/rustc_codegen_cranelift/Cargo.toml"));
rustc_cargo_env(builder, &mut cargo, target);
self.build_compiler.configure_cargo(&mut cargo);
let _guard = builder.msg(
Kind::Check,
"rustc_codegen_cranelift",
Mode::Codegen,
self.build_compiler,
build_compiler,
target,
);
@ -376,7 +574,7 @@ impl Step for CraneliftCodegenBackend {
fn metadata(&self) -> Option<StepMetadata> {
Some(
StepMetadata::check("rustc_codegen_cranelift", self.target)
.built_by(self.build_compiler),
.built_by(self.build_compiler.build_compiler()),
)
}
}
@ -384,7 +582,7 @@ impl Step for CraneliftCodegenBackend {
/// Check the GCC codegen backend.
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub struct GccCodegenBackend {
build_compiler: Compiler,
build_compiler: CompilerForCheck,
target: TargetSelection,
}
@ -399,8 +597,10 @@ impl Step for GccCodegenBackend {
}
fn make_run(run: RunConfig<'_>) {
let build_compiler = prepare_compiler_for_check(run.builder, run.target, Mode::Codegen);
run.builder.ensure(GccCodegenBackend { build_compiler, target: run.target });
run.builder.ensure(GccCodegenBackend {
build_compiler: prepare_compiler_for_check(run.builder, run.target, Mode::Codegen),
target: run.target,
});
}
fn run(self, builder: &Builder<'_>) {
@ -410,7 +610,7 @@ impl Step for GccCodegenBackend {
return;
}
let build_compiler = self.build_compiler;
let build_compiler = self.build_compiler.build_compiler();
let target = self.target;
let mut cargo = builder::Cargo::new(
@ -424,14 +624,10 @@ impl Step for GccCodegenBackend {
cargo.arg("--manifest-path").arg(builder.src.join("compiler/rustc_codegen_gcc/Cargo.toml"));
rustc_cargo_env(builder, &mut cargo, target);
self.build_compiler.configure_cargo(&mut cargo);
let _guard = builder.msg(
Kind::Check,
"rustc_codegen_gcc",
Mode::Codegen,
self.build_compiler,
target,
);
let _guard =
builder.msg(Kind::Check, "rustc_codegen_gcc", Mode::Codegen, build_compiler, target);
let stamp = build_stamp::codegen_backend_stamp(
builder,
@ -445,7 +641,10 @@ impl Step for GccCodegenBackend {
}
fn metadata(&self) -> Option<StepMetadata> {
Some(StepMetadata::check("rustc_codegen_gcc", self.target).built_by(self.build_compiler))
Some(
StepMetadata::check("rustc_codegen_gcc", self.target)
.built_by(self.build_compiler.build_compiler()),
)
}
}
@ -467,8 +666,8 @@ macro_rules! tool_check_step {
) => {
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub struct $name {
pub build_compiler: Compiler,
pub target: TargetSelection,
compiler: CompilerForCheck,
target: TargetSelection,
}
impl Step for $name {
@ -486,7 +685,7 @@ macro_rules! tool_check_step {
let builder = run.builder;
let mode = $mode(builder);
let build_compiler = prepare_compiler_for_check(run.builder, target, mode);
let compiler = prepare_compiler_for_check(run.builder, target, mode);
// It doesn't make sense to cross-check bootstrap tools
if mode == Mode::ToolBootstrap && target != run.builder.host_target {
@ -494,11 +693,11 @@ macro_rules! tool_check_step {
return;
};
run.builder.ensure($name { target, build_compiler });
run.builder.ensure($name { target, compiler });
}
fn run(self, builder: &Builder<'_>) {
let Self { target, build_compiler } = self;
let Self { target, compiler } = self;
let allow_features = {
let mut _value = "";
$( _value = $allow_features; )?
@ -506,11 +705,11 @@ macro_rules! tool_check_step {
};
let extra_features: &[&str] = &[$($($enable_features),*)?];
let mode = $mode(builder);
run_tool_check_step(builder, build_compiler, target, $path, mode, allow_features, extra_features);
run_tool_check_step(builder, compiler, target, $path, mode, allow_features, extra_features);
}
fn metadata(&self) -> Option<StepMetadata> {
Some(StepMetadata::check(stringify!($name), self.target).built_by(self.build_compiler))
Some(StepMetadata::check(stringify!($name), self.target).built_by(self.compiler.build_compiler))
}
}
}
@ -519,7 +718,7 @@ macro_rules! tool_check_step {
/// Used by the implementation of `Step::run` in `tool_check_step!`.
fn run_tool_check_step(
builder: &Builder<'_>,
build_compiler: Compiler,
compiler: CompilerForCheck,
target: TargetSelection,
path: &str,
mode: Mode,
@ -528,6 +727,8 @@ fn run_tool_check_step(
) {
let display_name = path.rsplit('/').next().unwrap();
let build_compiler = compiler.build_compiler();
let extra_features = extra_features.iter().map(|f| f.to_string()).collect::<Vec<String>>();
let mut cargo = prepare_tool_cargo(
builder,
@ -544,6 +745,7 @@ fn run_tool_check_step(
&extra_features,
);
cargo.allow_features(allow_features);
compiler.configure_cargo(&mut cargo);
// FIXME: check bootstrap doesn't currently work when multiple targets are checked
// FIXME: rust-analyzer does not work with --all-targets

View file

@ -18,7 +18,7 @@ use build_helper::exit;
use super::compile::{run_cargo, rustc_cargo, std_cargo};
use super::tool::{SourceType, prepare_tool_cargo};
use crate::builder::{Builder, ShouldRun};
use crate::core::build_steps::check::prepare_compiler_for_check;
use crate::core::build_steps::check::{CompilerForCheck, prepare_compiler_for_check};
use crate::core::build_steps::compile::std_crates_for_run_make;
use crate::core::builder;
use crate::core::builder::{Alias, Kind, RunConfig, Step, StepMetadata, crate_description};
@ -231,7 +231,7 @@ impl Step for Std {
/// in-tree rustc.
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub struct Rustc {
build_compiler: Compiler,
build_compiler: CompilerForCheck,
target: TargetSelection,
config: LintConfig,
/// Whether to lint only a subset of crates.
@ -271,7 +271,7 @@ impl Step for Rustc {
}
fn run(self, builder: &Builder<'_>) {
let build_compiler = self.build_compiler;
let build_compiler = self.build_compiler.build_compiler();
let target = self.target;
let mut cargo = builder::Cargo::new(
@ -284,6 +284,7 @@ impl Step for Rustc {
);
rustc_cargo(builder, &mut cargo, target, &build_compiler, &self.crates);
self.build_compiler.configure_cargo(&mut cargo);
// Explicitly pass -p for all compiler crates -- this will force cargo
// to also lint the tests/benches/examples for these crates, rather
@ -312,13 +313,16 @@ impl Step for Rustc {
}
fn metadata(&self) -> Option<StepMetadata> {
Some(StepMetadata::clippy("rustc", self.target).built_by(self.build_compiler))
Some(
StepMetadata::clippy("rustc", self.target)
.built_by(self.build_compiler.build_compiler()),
)
}
}
#[derive(Debug, Clone, Hash, PartialEq, Eq)]
pub struct CodegenGcc {
build_compiler: Compiler,
build_compiler: CompilerForCheck,
target: TargetSelection,
config: LintConfig,
}
@ -347,10 +351,10 @@ impl Step for CodegenGcc {
}
fn run(self, builder: &Builder<'_>) -> Self::Output {
let build_compiler = self.build_compiler;
let build_compiler = self.build_compiler.build_compiler();
let target = self.target;
let cargo = prepare_tool_cargo(
let mut cargo = prepare_tool_cargo(
builder,
build_compiler,
Mode::Codegen,
@ -360,6 +364,7 @@ impl Step for CodegenGcc {
SourceType::InTree,
&[],
);
self.build_compiler.configure_cargo(&mut cargo);
let _guard =
builder.msg(Kind::Clippy, "rustc_codegen_gcc", Mode::ToolRustc, build_compiler, target);
@ -379,7 +384,10 @@ impl Step for CodegenGcc {
}
fn metadata(&self) -> Option<StepMetadata> {
Some(StepMetadata::clippy("rustc_codegen_gcc", self.target).built_by(self.build_compiler))
Some(
StepMetadata::clippy("rustc_codegen_gcc", self.target)
.built_by(self.build_compiler.build_compiler()),
)
}
}
@ -396,7 +404,7 @@ macro_rules! lint_any {
#[derive(Debug, Clone, Hash, PartialEq, Eq)]
pub struct $name {
build_compiler: Compiler,
build_compiler: CompilerForCheck,
target: TargetSelection,
config: LintConfig,
}
@ -419,9 +427,9 @@ macro_rules! lint_any {
}
fn run(self, builder: &Builder<'_>) -> Self::Output {
let build_compiler = self.build_compiler;
let build_compiler = self.build_compiler.build_compiler();
let target = self.target;
let cargo = prepare_tool_cargo(
let mut cargo = prepare_tool_cargo(
builder,
build_compiler,
$mode,
@ -431,6 +439,7 @@ macro_rules! lint_any {
SourceType::InTree,
&[],
);
self.build_compiler.configure_cargo(&mut cargo);
let _guard = builder.msg(
Kind::Clippy,
@ -456,7 +465,7 @@ macro_rules! lint_any {
}
fn metadata(&self) -> Option<StepMetadata> {
Some(StepMetadata::clippy($readable_name, self.target).built_by(self.build_compiler))
Some(StepMetadata::clippy($readable_name, self.target).built_by(self.build_compiler.build_compiler()))
}
}
)+

View file

@ -12,6 +12,7 @@ use std::ffi::OsStr;
use std::io::BufReader;
use std::io::prelude::*;
use std::path::{Path, PathBuf};
use std::time::SystemTime;
use std::{env, fs, str};
use serde_derive::Deserialize;
@ -38,7 +39,7 @@ use crate::{
};
/// Build a standard library for the given `target` using the given `build_compiler`.
#[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)]
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub struct Std {
pub target: TargetSelection,
/// Compiler that builds the standard library.
@ -933,13 +934,22 @@ fn cp_rustc_component_to_ci_sysroot(builder: &Builder<'_>, sysroot: &Path, conte
}
}
/// Represents information about a built rustc.
#[derive(Clone, Debug)]
pub struct BuiltRustc {
/// The compiler that actually built this *rustc*.
/// This can be different from the *build_compiler* passed to the `Rustc` step because of
/// uplifting.
pub build_compiler: Compiler,
}
/// Build rustc using the passed `build_compiler`.
///
/// - Makes sure that `build_compiler` has a standard library prepared for its host target,
/// so that it can compile build scripts and proc macros when building this `rustc`.
/// - Makes sure that `build_compiler` has a standard library prepared for `target`,
/// so that the built `rustc` can *link to it* and use it at runtime.
#[derive(Debug, PartialOrd, Ord, Clone, PartialEq, Eq, Hash)]
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub struct Rustc {
/// The target on which rustc will run (its host).
pub target: TargetSelection,
@ -960,7 +970,7 @@ impl Rustc {
}
impl Step for Rustc {
type Output = ();
type Output = BuiltRustc;
const IS_HOST: bool = true;
const DEFAULT: bool = false;
@ -1000,7 +1010,7 @@ impl Step for Rustc {
/// This will build the compiler for a particular stage of the build using
/// the `build_compiler` targeting the `target` architecture. The artifacts
/// created will also be linked into the sysroot directory.
fn run(self, builder: &Builder<'_>) {
fn run(self, builder: &Builder<'_>) -> Self::Output {
let build_compiler = self.build_compiler;
let target = self.target;
@ -1016,7 +1026,7 @@ impl Step for Rustc {
&sysroot,
builder.config.ci_rustc_dev_contents(),
);
return;
return BuiltRustc { build_compiler };
}
// Build a standard library for `target` using the `build_compiler`.
@ -1028,9 +1038,9 @@ impl Step for Rustc {
builder.info("WARNING: Using a potentially old librustc. This may not behave well.");
builder.info("WARNING: Use `--keep-stage-std` if you want to rebuild the compiler when it changes");
builder.ensure(RustcLink::from_rustc(self, build_compiler));
builder.ensure(RustcLink::from_rustc(self));
return;
return BuiltRustc { build_compiler };
}
// The stage of the compiler that we're building
@ -1042,21 +1052,35 @@ impl Step for Rustc {
&& !builder.config.full_bootstrap
&& (target == builder.host_target || builder.hosts.contains(&target))
{
// If we're cross-compiling, the earliest rustc that we could have is stage 2.
// If we're not cross-compiling, then we should have rustc stage 1.
let stage_to_uplift = if target == builder.host_target { 1 } else { 2 };
let rustc_to_uplift = builder.compiler(stage_to_uplift, target);
let msg = if rustc_to_uplift.host == target {
format!("Uplifting rustc (stage{} -> stage{stage})", rustc_to_uplift.stage,)
// Here we need to determine the **build compiler** that built the stage that we will
// be uplifting. We cannot uplift stage 1, as it has a different ABI than stage 2+,
// so we always uplift the stage2 compiler (compiled with stage 1).
let uplift_build_compiler = builder.compiler(1, build_compiler.host);
let msg = if uplift_build_compiler.host == target {
format!("Uplifting rustc (stage2 -> stage{stage})")
} else {
format!(
"Uplifting rustc (stage{}:{} -> stage{stage}:{target})",
rustc_to_uplift.stage, rustc_to_uplift.host,
"Uplifting rustc (stage2:{} -> stage{stage}:{target})",
uplift_build_compiler.host
)
};
builder.info(&msg);
builder.ensure(RustcLink::from_rustc(self, rustc_to_uplift));
return;
// Here the compiler that built the rlibs (`uplift_build_compiler`) can be different
// from the compiler whose sysroot should be modified in this step. So we need to copy
// the (previously built) rlibs into the correct sysroot.
builder.ensure(RustcLink::from_build_compiler_and_sysroot(
// This is the compiler that actually built the rustc rlibs
uplift_build_compiler,
// We copy the rlibs into the sysroot of `build_compiler`
build_compiler,
target,
self.crates,
));
// Here we have performed an uplift, so we return the actual build compiler that "built"
// this rustc.
return BuiltRustc { build_compiler: uplift_build_compiler };
}
// Build a standard library for the current host target using the `build_compiler`.
@ -1129,10 +1153,8 @@ impl Step for Rustc {
strip_debug(builder, target, &target_root_dir.join("rustc-main"));
}
builder.ensure(RustcLink::from_rustc(
self,
builder.compiler(build_compiler.stage, builder.config.host_target),
));
builder.ensure(RustcLink::from_rustc(self));
BuiltRustc { build_compiler }
}
fn metadata(&self) -> Option<StepMetadata> {
@ -1441,31 +1463,51 @@ fn rustc_llvm_env(builder: &Builder<'_>, cargo: &mut Cargo, target: TargetSelect
}
}
/// `RustcLink` copies all of the rlibs from the rustc build into the previous stage's sysroot.
/// `RustcLink` copies compiler rlibs from a rustc build into a compiler sysroot.
/// It works with (potentially up to) three compilers:
/// - `build_compiler` is a compiler that built rustc rlibs
/// - `sysroot_compiler` is a compiler into whose sysroot we will copy the rlibs
/// - In most situations, `build_compiler` == `sysroot_compiler`
/// - `target_compiler` is the compiler whose rlibs were built. It is not represented explicitly
/// in this step, rather we just read the rlibs from a rustc build stamp of `build_compiler`.
///
/// This is necessary for tools using `rustc_private`, where the previous compiler will build
/// a tool against the next compiler.
/// To build a tool against a compiler, the rlibs of that compiler that it links against
/// must be in the sysroot of the compiler that's doing the compiling.
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
struct RustcLink {
/// The compiler whose rlibs we are copying around.
pub compiler: Compiler,
/// This is the compiler into whose sysroot we want to copy the rlibs into.
pub previous_stage_compiler: Compiler,
pub target: TargetSelection,
/// This compiler **built** some rustc, whose rlibs we will copy into a sysroot.
build_compiler: Compiler,
/// This is the compiler into whose sysroot we want to copy the built rlibs.
/// In most cases, it will correspond to `build_compiler`.
sysroot_compiler: Compiler,
target: TargetSelection,
/// Not actually used; only present to make sure the cache invalidation is correct.
crates: Vec<String>,
}
impl RustcLink {
fn from_rustc(rustc: Rustc, host_compiler: Compiler) -> Self {
/// Copy rlibs from the build compiler that built this `rustc` into the sysroot of that
/// build compiler.
fn from_rustc(rustc: Rustc) -> Self {
Self {
compiler: host_compiler,
previous_stage_compiler: rustc.build_compiler,
build_compiler: rustc.build_compiler,
sysroot_compiler: rustc.build_compiler,
target: rustc.target,
crates: rustc.crates,
}
}
/// Copy rlibs **built** by `build_compiler` into the sysroot of `sysroot_compiler`.
fn from_build_compiler_and_sysroot(
build_compiler: Compiler,
sysroot_compiler: Compiler,
target: TargetSelection,
crates: Vec<String>,
) -> Self {
Self { build_compiler, sysroot_compiler, target, crates }
}
}
impl Step for RustcLink {
@ -1477,14 +1519,14 @@ impl Step for RustcLink {
/// Same as `std_link`, only for librustc
fn run(self, builder: &Builder<'_>) {
let compiler = self.compiler;
let previous_stage_compiler = self.previous_stage_compiler;
let build_compiler = self.build_compiler;
let sysroot_compiler = self.sysroot_compiler;
let target = self.target;
add_to_sysroot(
builder,
&builder.sysroot_target_libdir(previous_stage_compiler, target),
&builder.sysroot_target_libdir(previous_stage_compiler, compiler.host),
&build_stamp::librustc_stamp(builder, compiler, target),
&builder.sysroot_target_libdir(sysroot_compiler, target),
&builder.sysroot_target_libdir(sysroot_compiler, sysroot_compiler.host),
&build_stamp::librustc_stamp(builder, build_compiler, target),
);
}
}
@ -1918,7 +1960,7 @@ impl Step for Sysroot {
/// linker wrappers (LLD, LLVM bitcode linker, etc.).
///
/// This will assemble a compiler in `build/$target/stage$stage`.
#[derive(Debug, PartialOrd, Ord, Clone, PartialEq, Eq, Hash)]
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub struct Assemble {
/// The compiler which we will produce in this step. Assemble itself will
/// take care of ensuring that the necessary prerequisites to do so exist,
@ -2099,7 +2141,10 @@ impl Step for Assemble {
"target_compiler.host" = ?target_compiler.host,
"building compiler libraries to link to"
);
builder.ensure(Rustc::new(build_compiler, target_compiler.host));
// It is possible that an uplift has happened, so we override build_compiler here.
let BuiltRustc { build_compiler } =
builder.ensure(Rustc::new(build_compiler, target_compiler.host));
let stage = target_compiler.stage;
let host = target_compiler.host;
@ -2286,6 +2331,7 @@ impl Step for Assemble {
///
/// For a particular stage this will link the file listed in `stamp` into the
/// `sysroot_dst` provided.
#[track_caller]
pub fn add_to_sysroot(
builder: &Builder<'_>,
sysroot_dst: &Path,
@ -2568,7 +2614,17 @@ pub fn strip_debug(builder: &Builder<'_>, target: TargetSelection, path: &Path)
}
let previous_mtime = t!(t!(path.metadata()).modified());
command("strip").arg("--strip-debug").arg(path).run_capture(builder);
let stamp = BuildStamp::new(path.parent().unwrap())
.with_prefix(path.file_name().unwrap().to_str().unwrap())
.with_prefix("strip")
.add_stamp(previous_mtime.duration_since(SystemTime::UNIX_EPOCH).unwrap().as_nanos());
// Running strip can be relatively expensive (~1s on librustc_driver.so), so we don't rerun it
// if the file is unchanged.
if !stamp.is_up_to_date() {
command("strip").arg("--strip-debug").arg(path).run_capture(builder);
}
t!(stamp.write());
let file = t!(fs::File::open(path));

View file

@ -54,7 +54,7 @@ fn should_build_extended_tool(builder: &Builder<'_>, tool: &str) -> bool {
builder.config.tools.as_ref().is_none_or(|tools| tools.contains(tool))
}
#[derive(Debug, PartialOrd, Ord, Clone, Hash, PartialEq, Eq)]
#[derive(Debug, Clone, Hash, PartialEq, Eq)]
pub struct Docs {
pub host: TargetSelection,
}
@ -91,7 +91,7 @@ impl Step for Docs {
}
}
#[derive(Debug, PartialOrd, Ord, Clone, Hash, PartialEq, Eq)]
#[derive(Debug, Clone, Hash, PartialEq, Eq)]
pub struct JsonDocs {
build_compiler: Compiler,
target: TargetSelection,
@ -354,7 +354,7 @@ fn get_cc_search_dirs(
(bin_path, lib_path)
}
#[derive(Debug, PartialOrd, Ord, Clone, Hash, PartialEq, Eq)]
#[derive(Debug, Clone, Hash, PartialEq, Eq)]
pub struct Mingw {
pub host: TargetSelection,
}
@ -394,7 +394,7 @@ impl Step for Mingw {
}
}
#[derive(Debug, PartialOrd, Ord, Clone, Hash, PartialEq, Eq)]
#[derive(Debug, Clone, Hash, PartialEq, Eq)]
pub struct Rustc {
pub compiler: Compiler,
}
@ -730,7 +730,7 @@ fn copy_target_libs(
}
}
#[derive(Debug, PartialOrd, Ord, Clone, Hash, PartialEq, Eq)]
#[derive(Debug, Clone, Hash, PartialEq, Eq)]
pub struct Std {
pub compiler: Compiler,
pub target: TargetSelection,
@ -785,7 +785,7 @@ impl Step for Std {
/// `rust.download-rustc`.
///
/// (Don't confuse this with [`RustDev`], without the `c`!)
#[derive(Debug, PartialOrd, Ord, Clone, Hash, PartialEq, Eq)]
#[derive(Debug, Clone, Hash, PartialEq, Eq)]
pub struct RustcDev {
pub compiler: Compiler,
pub target: TargetSelection,
@ -916,6 +916,12 @@ fn copy_src_dirs(
exclude_dirs: &[&str],
dst_dir: &Path,
) {
// The src directories should be relative to `base`, we depend on them not being absolute
// paths below.
for src_dir in src_dirs {
assert!(Path::new(src_dir).is_relative());
}
// Iterating, filtering and copying a large number of directories can be quite slow.
// Avoid doing it in dry run (and thus also tests).
if builder.config.dry_run() {
@ -923,6 +929,7 @@ fn copy_src_dirs(
}
fn filter_fn(exclude_dirs: &[&str], dir: &str, path: &Path) -> bool {
// The paths are relative, e.g. `llvm-project/...`.
let spath = match path.to_str() {
Some(path) => path,
None => return false,
@ -930,65 +937,53 @@ fn copy_src_dirs(
if spath.ends_with('~') || spath.ends_with(".pyc") {
return false;
}
// Normalize slashes
let spath = spath.replace("\\", "/");
const LLVM_PROJECTS: &[&str] = &[
static LLVM_PROJECTS: &[&str] = &[
"llvm-project/clang",
"llvm-project\\clang",
"llvm-project/libunwind",
"llvm-project\\libunwind",
"llvm-project/lld",
"llvm-project\\lld",
"llvm-project/lldb",
"llvm-project\\lldb",
"llvm-project/llvm",
"llvm-project\\llvm",
"llvm-project/compiler-rt",
"llvm-project\\compiler-rt",
"llvm-project/cmake",
"llvm-project\\cmake",
"llvm-project/runtimes",
"llvm-project\\runtimes",
"llvm-project/third-party",
"llvm-project\\third-party",
];
if spath.contains("llvm-project")
&& !spath.ends_with("llvm-project")
&& !LLVM_PROJECTS.iter().any(|path| spath.contains(path))
{
return false;
}
if spath.starts_with("llvm-project") && spath != "llvm-project" {
if !LLVM_PROJECTS.iter().any(|path| spath.starts_with(path)) {
return false;
}
// Keep only these third party libraries
const LLVM_THIRD_PARTY: &[&str] =
&["llvm-project/third-party/siphash", "llvm-project\\third-party\\siphash"];
if (spath.starts_with("llvm-project/third-party")
|| spath.starts_with("llvm-project\\third-party"))
&& !(spath.ends_with("llvm-project/third-party")
|| spath.ends_with("llvm-project\\third-party"))
&& !LLVM_THIRD_PARTY.iter().any(|path| spath.contains(path))
{
return false;
}
// Keep siphash third-party dependency
if spath.starts_with("llvm-project/third-party")
&& spath != "llvm-project/third-party"
&& !spath.starts_with("llvm-project/third-party/siphash")
{
return false;
}
const LLVM_TEST: &[&str] = &["llvm-project/llvm/test", "llvm-project\\llvm\\test"];
if LLVM_TEST.iter().any(|path| spath.contains(path))
&& (spath.ends_with(".ll") || spath.ends_with(".td") || spath.ends_with(".s"))
{
return false;
if spath.starts_with("llvm-project/llvm/test")
&& (spath.ends_with(".ll") || spath.ends_with(".td") || spath.ends_with(".s"))
{
return false;
}
}
// Cargo tests use some files like `.gitignore` that we would otherwise exclude.
const CARGO_TESTS: &[&str] = &["tools/cargo/tests", "tools\\cargo\\tests"];
if CARGO_TESTS.iter().any(|path| spath.contains(path)) {
if spath.starts_with("tools/cargo/tests") {
return true;
}
let full_path = Path::new(dir).join(path);
if exclude_dirs.iter().any(|excl| full_path == Path::new(excl)) {
return false;
if !exclude_dirs.is_empty() {
let full_path = Path::new(dir).join(path);
if exclude_dirs.iter().any(|excl| full_path == Path::new(excl)) {
return false;
}
}
let excludes = [
static EXCLUDES: &[&str] = &[
"CVS",
"RCS",
"SCCS",
@ -1011,7 +1006,15 @@ fn copy_src_dirs(
".hgrags",
"_darcs",
];
!path.iter().map(|s| s.to_str().unwrap()).any(|s| excludes.contains(&s))
        // We want to check that no component of `path` matches any of the strings in `EXCLUDES`.
        // However, since we traverse directories top-down in `Builder::cp_link_filtered`,
        // it is enough to always check only the last component:
        // - If the path is a file, we will iterate to it and then check its filename.
        // - If the path is a dir whose name contains an excluded string, we will not even
        //   recurse into it.
let last_component = path.iter().next_back().map(|s| s.to_str().unwrap()).unwrap();
!EXCLUDES.contains(&last_component)
}
// Copy the directories using our filter
@ -1023,7 +1026,7 @@ fn copy_src_dirs(
}
}
#[derive(Debug, PartialOrd, Ord, Clone, Hash, PartialEq, Eq)]
#[derive(Debug, Clone, Hash, PartialEq, Eq)]
pub struct Src;
impl Step for Src {
@ -1084,7 +1087,7 @@ impl Step for Src {
}
}
#[derive(Debug, PartialOrd, Ord, Clone, Hash, PartialEq, Eq)]
#[derive(Debug, Clone, Hash, PartialEq, Eq)]
pub struct PlainSourceTarball;
impl Step for PlainSourceTarball {
@ -1230,7 +1233,7 @@ impl Step for PlainSourceTarball {
}
}
#[derive(Debug, PartialOrd, Ord, Clone, Hash, PartialEq, Eq)]
#[derive(Debug, Clone, Hash, PartialEq, Eq)]
pub struct Cargo {
pub build_compiler: Compiler,
pub target: TargetSelection,
@ -1284,7 +1287,7 @@ impl Step for Cargo {
}
}
#[derive(Debug, PartialOrd, Ord, Clone, Hash, PartialEq, Eq)]
#[derive(Debug, Clone, Hash, PartialEq, Eq)]
pub struct RustAnalyzer {
pub build_compiler: Compiler,
pub target: TargetSelection,
@ -1560,7 +1563,7 @@ impl Step for Rustfmt {
}
}
#[derive(Debug, PartialOrd, Ord, Clone, Hash, PartialEq, Eq)]
#[derive(Debug, Clone, Hash, PartialEq, Eq)]
pub struct Extended {
stage: u32,
host: TargetSelection,
@ -2401,7 +2404,7 @@ impl Step for LlvmTools {
/// Distributes the `llvm-bitcode-linker` tool so that it can be used by a compiler whose host
/// is `target`.
#[derive(Debug, PartialOrd, Ord, Clone, Hash, PartialEq, Eq)]
#[derive(Debug, Clone, Hash, PartialEq, Eq)]
pub struct LlvmBitcodeLinker {
/// The linker will be compiled by this compiler.
pub build_compiler: Compiler,

View file

@ -580,7 +580,7 @@ impl Step for SharedAssets {
}
/// Document the standard library using `build_compiler`.
#[derive(Debug, Clone, Hash, PartialEq, Eq, PartialOrd, Ord)]
#[derive(Debug, Clone, Hash, PartialEq, Eq)]
pub struct Std {
build_compiler: Compiler,
target: TargetSelection,
@ -715,7 +715,7 @@ impl Step for Std {
/// or remote link.
const STD_PUBLIC_CRATES: [&str; 5] = ["core", "alloc", "std", "proc_macro", "test"];
#[derive(Debug, Copy, Clone, Hash, PartialEq, Eq, PartialOrd, Ord)]
#[derive(Debug, Copy, Clone, Hash, PartialEq, Eq)]
pub enum DocumentationFormat {
Html,
Json,
@ -1230,7 +1230,7 @@ fn symlink_dir_force(config: &Config, original: &Path, link: &Path) {
}
/// Builds the Rust compiler book.
#[derive(Ord, PartialOrd, Debug, Clone, Hash, PartialEq, Eq)]
#[derive(Debug, Clone, Hash, PartialEq, Eq)]
pub struct RustcBook {
build_compiler: Compiler,
target: TargetSelection,
@ -1334,7 +1334,7 @@ impl Step for RustcBook {
/// Documents the reference.
/// It has to always be done using a stage 1+ compiler, because it references in-tree
/// compiler/stdlib concepts.
#[derive(Ord, PartialOrd, Debug, Clone, Hash, PartialEq, Eq)]
#[derive(Debug, Clone, Hash, PartialEq, Eq)]
pub struct Reference {
build_compiler: Compiler,
target: TargetSelection,

View file

@ -17,7 +17,7 @@ use crate::core::config::flags::get_completion;
use crate::utils::exec::command;
use crate::{Mode, t};
#[derive(Debug, PartialOrd, Ord, Clone, Hash, PartialEq, Eq)]
#[derive(Debug, Clone, Hash, PartialEq, Eq)]
pub struct BuildManifest;
impl Step for BuildManifest {
@ -56,7 +56,7 @@ impl Step for BuildManifest {
}
}
#[derive(Debug, PartialOrd, Ord, Clone, Hash, PartialEq, Eq)]
#[derive(Debug, Clone, Hash, PartialEq, Eq)]
pub struct BumpStage0;
impl Step for BumpStage0 {
@ -78,7 +78,7 @@ impl Step for BumpStage0 {
}
}
#[derive(Debug, PartialOrd, Ord, Clone, Hash, PartialEq, Eq)]
#[derive(Debug, Clone, Hash, PartialEq, Eq)]
pub struct ReplaceVersionPlaceholder;
impl Step for ReplaceVersionPlaceholder {
@ -169,7 +169,7 @@ impl Step for Miri {
}
}
#[derive(Debug, PartialOrd, Ord, Clone, Hash, PartialEq, Eq)]
#[derive(Debug, Clone, Hash, PartialEq, Eq)]
pub struct CollectLicenseMetadata;
impl Step for CollectLicenseMetadata {
@ -200,7 +200,7 @@ impl Step for CollectLicenseMetadata {
}
}
#[derive(Debug, PartialOrd, Ord, Clone, Hash, PartialEq, Eq)]
#[derive(Debug, Clone, Hash, PartialEq, Eq)]
pub struct GenerateCopyright;
impl Step for GenerateCopyright {
@ -264,7 +264,7 @@ impl Step for GenerateCopyright {
}
}
#[derive(Debug, PartialOrd, Ord, Clone, Hash, PartialEq, Eq)]
#[derive(Debug, Clone, Hash, PartialEq, Eq)]
pub struct GenerateWindowsSys;
impl Step for GenerateWindowsSys {
@ -326,7 +326,7 @@ impl Step for GenerateCompletions {
}
}
#[derive(Debug, PartialOrd, Ord, Clone, Hash, PartialEq, Eq)]
#[derive(Debug, Clone, Hash, PartialEq, Eq)]
pub struct UnicodeTableGenerator;
impl Step for UnicodeTableGenerator {
@ -348,7 +348,7 @@ impl Step for UnicodeTableGenerator {
}
}
#[derive(Debug, PartialOrd, Ord, Clone, Hash, PartialEq, Eq)]
#[derive(Debug, Clone, Hash, PartialEq, Eq)]
pub struct FeaturesStatusDump;
impl Step for FeaturesStatusDump {
@ -408,7 +408,7 @@ impl Step for CyclicStep {
///
/// The coverage-dump tool is an internal detail of coverage tests, so this run
/// step is only needed when testing coverage-dump manually.
#[derive(Debug, PartialOrd, Ord, Clone, Hash, PartialEq, Eq)]
#[derive(Debug, Clone, Hash, PartialEq, Eq)]
pub struct CoverageDump;
impl Step for CoverageDump {

View file

@ -2732,7 +2732,7 @@ fn prepare_cargo_test(
/// FIXME(Zalathar): Try to split this into two separate steps: a user-visible
/// step for testing standard library crates, and an internal step used for both
/// library crates and compiler crates.
#[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)]
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub struct Crate {
pub compiler: Compiler,
pub target: TargetSelection,
@ -3747,7 +3747,7 @@ impl Step for TestFloatParse {
/// Runs the tool `src/tools/collect-license-metadata` in `ONLY_CHECK=1` mode,
/// which verifies that `license-metadata.json` is up-to-date and therefore
/// running the tool normally would not update anything.
#[derive(Debug, PartialOrd, Ord, Clone, Hash, PartialEq, Eq)]
#[derive(Debug, Clone, Hash, PartialEq, Eq)]
pub struct CollectLicenseMetadata;
impl Step for CollectLicenseMetadata {

View file

@ -101,6 +101,7 @@ pub struct Cargo {
impl Cargo {
/// Calls [`Builder::cargo`] and [`Cargo::configure_linker`] to prepare an invocation of `cargo`
/// to be run.
#[track_caller]
pub fn new(
builder: &Builder<'_>,
compiler: Compiler,
@ -139,6 +140,7 @@ impl Cargo {
/// Same as [`Cargo::new`] except this one doesn't configure the linker with
/// [`Cargo::configure_linker`].
#[track_caller]
pub fn new_for_mir_opt_tests(
builder: &Builder<'_>,
compiler: Compiler,
@ -186,6 +188,32 @@ impl Cargo {
self
}
/// Append a value to an env var of the cargo command instance.
/// If the variable was unset previously, this is equivalent to [`Cargo::env`].
/// If the variable was already set, this will append `delimiter` and then `value` to it.
///
/// Note that this only considers the existence of the env. var. configured on this `Cargo`
/// instance. It does not look at the environment of this process.
pub fn append_to_env(
&mut self,
key: impl AsRef<OsStr>,
value: impl AsRef<OsStr>,
delimiter: impl AsRef<OsStr>,
) -> &mut Cargo {
assert_ne!(key.as_ref(), "RUSTFLAGS");
assert_ne!(key.as_ref(), "RUSTDOCFLAGS");
let key = key.as_ref();
if let Some((_, Some(previous_value))) = self.command.get_envs().find(|(k, _)| *k == key) {
let mut combined: OsString = previous_value.to_os_string();
combined.push(delimiter.as_ref());
combined.push(value.as_ref());
self.env(key, combined)
} else {
self.env(key, value)
}
}
pub fn add_rustc_lib_path(&mut self, builder: &Builder<'_>) {
builder.add_rustc_lib_path(self.compiler, &mut self.command);
}
@ -396,6 +424,7 @@ impl From<Cargo> for BootstrapCommand {
impl Builder<'_> {
/// Like [`Builder::cargo`], but only passes flags that are valid for all commands.
#[track_caller]
pub fn bare_cargo(
&self,
compiler: Compiler,
@ -480,6 +509,7 @@ impl Builder<'_> {
/// scoped by `mode`'s output directory, it will pass the `--target` flag for the specified
/// `target`, and will be executing the Cargo command `cmd`. `cmd` can be `miri-cmd` for
/// commands to be run with Miri.
#[track_caller]
fn cargo(
&self,
compiler: Compiler,

View file

@ -1569,7 +1569,7 @@ mod snapshot {
[build] llvm <host>
[build] rustc 0 <host> -> rustc 1 <host>
[build] rustc 1 <host> -> std 1 <host>
[build] rustc 1 <host> -> std 1 <target1>
[check] rustc 1 <host> -> std 1 <target1>
[check] rustc 1 <host> -> rustc 2 <target1> (73 crates)
[check] rustc 1 <host> -> rustc 2 <target1>
[check] rustc 1 <host> -> Rustdoc 2 <target1>

View file

@ -324,7 +324,7 @@ impl FromStr for LlvmLibunwind {
}
}
#[derive(Debug, Default, Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)]
#[derive(Debug, Default, Copy, Clone, PartialEq, Eq, Hash)]
pub enum SplitDebuginfo {
Packed,
Unpacked,

View file

@ -14,7 +14,7 @@ pub struct TargetSelection {
}
/// Newtype over `Vec<TargetSelection>` so we can implement custom parsing logic
#[derive(Clone, Default, PartialEq, Eq, PartialOrd, Ord, Hash, Debug)]
#[derive(Clone, Default, PartialEq, Eq, Hash, Debug)]
pub struct TargetSelectionList(pub Vec<TargetSelection>);
pub fn target_selection_list(s: &str) -> Result<TargetSelectionList, String> {

View file

@ -279,7 +279,7 @@ pub enum DependencyType {
///
/// These entries currently correspond to the various output directories of the
/// build system, with each mod generating output in a different directory.
#[derive(Debug, Hash, Clone, Copy, PartialEq, Eq, PartialOrd, Ord)]
#[derive(Debug, Hash, Clone, Copy, PartialEq, Eq)]
pub enum Mode {
/// Build the standard library, placing output in the "stageN-std" directory.
Std,
@ -357,7 +357,7 @@ pub enum RemapScheme {
NonCompiler,
}
#[derive(Debug, Hash, Clone, Copy, PartialEq, Eq, PartialOrd, Ord)]
#[derive(Debug, Hash, Clone, Copy, PartialEq, Eq)]
pub enum CLang {
C,
Cxx,
@ -1743,6 +1743,7 @@ impl Build {
///
/// If `src` is a symlink, `src` will be resolved to the actual path
/// and copied to `dst` instead of the symlink itself.
#[track_caller]
pub fn resolve_symlink_and_copy(&self, src: &Path, dst: &Path) {
self.copy_link_internal(src, dst, true);
}
@ -1751,6 +1752,7 @@ impl Build {
/// Attempts to use hard links if possible, falling back to copying.
/// You can neither rely on this being a copy nor it being a link,
/// so do not write to dst.
#[track_caller]
pub fn copy_link(&self, src: &Path, dst: &Path, file_type: FileType) {
self.copy_link_internal(src, dst, false);
@ -1765,6 +1767,7 @@ impl Build {
}
}
#[track_caller]
fn copy_link_internal(&self, src: &Path, dst: &Path, dereference_symlinks: bool) {
if self.config.dry_run() {
return;
@ -1773,6 +1776,10 @@ impl Build {
if src == dst {
return;
}
#[cfg(feature = "tracing")]
let _span = trace_io!("file-copy-link", ?src, ?dst);
if let Err(e) = fs::remove_file(dst)
&& cfg!(windows)
&& e.kind() != io::ErrorKind::NotFound
@ -1815,6 +1822,7 @@ impl Build {
/// Links the `src` directory recursively to `dst`. Both are assumed to exist
/// when this function is called.
/// Will attempt to use hard links if possible and fall back to copying.
#[track_caller]
pub fn cp_link_r(&self, src: &Path, dst: &Path) {
if self.config.dry_run() {
return;
@ -1837,12 +1845,14 @@ impl Build {
/// Will attempt to use hard links if possible and fall back to copying.
/// Unwanted files or directories can be skipped
/// by returning `false` from the filter function.
#[track_caller]
pub fn cp_link_filtered(&self, src: &Path, dst: &Path, filter: &dyn Fn(&Path) -> bool) {
// Immediately recurse with an empty relative path
self.cp_link_filtered_recurse(src, dst, Path::new(""), filter)
}
// Inner function does the actual work
#[track_caller]
fn cp_link_filtered_recurse(
&self,
src: &Path,
@ -1862,7 +1872,6 @@ impl Build {
self.create_dir(&dst);
self.cp_link_filtered_recurse(&path, &dst, &relative, filter);
} else {
let _ = fs::remove_file(&dst);
self.copy_link(&path, &dst, FileType::Regular);
}
}
@ -1904,10 +1913,15 @@ impl Build {
t!(fs::read_to_string(path))
}
#[track_caller]
fn create_dir(&self, dir: &Path) {
if self.config.dry_run() {
return;
}
#[cfg(feature = "tracing")]
let _span = trace_io!("dir-create", ?dir);
t!(fs::create_dir_all(dir))
}
@ -1915,6 +1929,10 @@ impl Build {
if self.config.dry_run() {
return;
}
#[cfg(feature = "tracing")]
let _span = trace_io!("dir-remove", ?dir);
t!(fs::remove_dir_all(dir))
}

View file

@ -48,6 +48,29 @@ macro_rules! error {
}
}
#[cfg(feature = "tracing")]
pub const IO_SPAN_TARGET: &str = "IO";
/// Create a tracing span around an I/O operation, if tracing is enabled.
/// Note that at least one tracing value field has to be passed to this macro, otherwise it will not
/// compile.
#[macro_export]
macro_rules! trace_io {
($name:expr, $($args:tt)*) => {
::tracing::trace_span!(
target: $crate::utils::tracing::IO_SPAN_TARGET,
$name,
$($args)*,
location = $crate::utils::tracing::format_location(*::std::panic::Location::caller())
).entered()
}
}
#[cfg(feature = "tracing")]
pub fn format_location(location: std::panic::Location<'static>) -> String {
format!("{}:{}", location.file(), location.line())
}
#[cfg(feature = "tracing")]
const COMMAND_SPAN_TARGET: &str = "COMMAND";
@ -55,7 +78,7 @@ const COMMAND_SPAN_TARGET: &str = "COMMAND";
pub fn trace_cmd(command: &crate::BootstrapCommand) -> tracing::span::EnteredSpan {
let fingerprint = command.fingerprint();
let location = command.get_created_location();
let location = format!("{}:{}", location.file(), location.line());
let location = format_location(location);
tracing::span!(
target: COMMAND_SPAN_TARGET,
@ -84,6 +107,7 @@ mod inner {
use std::fmt::Debug;
use std::fs::File;
use std::io::Write;
use std::path::{Path, PathBuf};
use std::sync::atomic::Ordering;
use chrono::{DateTime, Utc};
@ -93,8 +117,8 @@ mod inner {
use tracing_subscriber::registry::{LookupSpan, SpanRef};
use tracing_subscriber::{EnvFilter, Layer};
use super::{COMMAND_SPAN_TARGET, IO_SPAN_TARGET};
use crate::STEP_SPAN_TARGET;
use crate::utils::tracing::COMMAND_SPAN_TARGET;
pub fn setup_tracing(env_name: &str) -> TracingGuard {
let filter = EnvFilter::from_env(env_name);
@ -291,6 +315,23 @@ mod inner {
Ok(())
}
// Write fields while treating the "location" field specially, and assuming that it
// contains the source file location relevant to the span.
let write_with_location = |writer: &mut W| -> std::io::Result<()> {
if let Some(values) = field_values {
write_fields(
writer,
values.fields.iter().filter(|(name, _)| *name != "location"),
)?;
let location =
&values.fields.iter().find(|(name, _)| *name == "location").unwrap().1;
let (filename, line) = location.rsplit_once(':').unwrap();
let filename = shorten_filename(filename);
                        write!(writer, " ({filename}:{line})",)?;
}
Ok(())
};
// We handle steps specially. We instrument them dynamically in `Builder::ensure`,
// and we want to have custom name for each step span. But tracing doesn't allow setting
// dynamic span names. So we detect step spans here and override their name.
@ -311,17 +352,11 @@ mod inner {
// Executed command
COMMAND_SPAN_TARGET => {
write!(writer, "{}", span.name())?;
if let Some(values) = field_values {
write_fields(
writer,
values.fields.iter().filter(|(name, _)| *name != "location"),
)?;
write!(
writer,
" ({})",
values.fields.iter().find(|(name, _)| *name == "location").unwrap().1
)?;
}
write_with_location(writer)?;
}
IO_SPAN_TARGET => {
write!(writer, "{}", span.name())?;
write_with_location(writer)?;
}
// Other span
_ => {
@ -342,21 +377,10 @@ mod inner {
writer: &mut W,
metadata: &'static tracing::Metadata<'static>,
) -> std::io::Result<()> {
use std::path::{Path, PathBuf};
if let Some(filename) = metadata.file() {
// Keep only the module name and file name to make it shorter
let filename: PathBuf = Path::new(filename)
.components()
// Take last two path components
.rev()
.take(2)
.collect::<Vec<_>>()
.into_iter()
.rev()
.collect();
let filename = shorten_filename(filename);
write!(writer, " ({}", filename.display())?;
            write!(writer, " ({filename}")?;
if let Some(line) = metadata.line() {
write!(writer, ":{line}")?;
}
@ -365,6 +389,21 @@ mod inner {
Ok(())
}
/// Keep only the module name and file name to make it shorter
fn shorten_filename(filename: &str) -> String {
Path::new(filename)
.components()
// Take last two path components
.rev()
.take(2)
.collect::<Vec<_>>()
.into_iter()
.rev()
.collect::<PathBuf>()
.display()
.to_string()
}
impl<S> Layer<S> for TracingPrinter
where
S: Subscriber,

View file

@ -3,6 +3,8 @@ use std::io::{BufRead, BufReader};
use std::path::Path;
use std::process::Command;
use crate::ci::CiEnv;
/// Invokes `build_helper::util::detail_exit` with `cfg!(test)`
///
/// This is a macro instead of a function so that it uses `cfg(test)` in the *calling* crate, not in build helper.
@ -20,6 +22,15 @@ pub fn detail_exit(code: i32, is_test: bool) -> ! {
if is_test {
panic!("status code: {code}");
} else {
// If we're in CI, print the current bootstrap invocation command, to make it easier to
// figure out what exactly has failed.
if CiEnv::is_ci() {
// Skip the first argument, as it will be some absolute path to the bootstrap binary.
let bootstrap_args =
std::env::args().skip(1).map(|a| a.to_string()).collect::<Vec<_>>().join(" ");
eprintln!("Bootstrap failed while executing `{bootstrap_args}`");
}
// otherwise, exit with provided status code
std::process::exit(code);
}

View file

@ -45,4 +45,4 @@ RUN bash -c 'npm install -g eslint@$(cat /tmp/eslint.version)'
# NOTE: intentionally uses python2 for x.py so we can test it still works.
# validate-toolstate only runs in our CI, so it's ok for it to only support python3.
ENV SCRIPT TIDY_PRINT_DIFF=1 python2.7 ../x.py test --stage 0 \
src/tools/tidy tidyselftest --extra-checks=py,cpp,js
src/tools/tidy tidyselftest --extra-checks=py,cpp,js,spellcheck

View file

@ -1 +1 @@
8.6.0
8.57.1

View file

@ -61,12 +61,27 @@ def read_pid_from_file() -> int:
) from e
def main() -> int:
pid = read_pid_from_file()
def wait_for_process(pid: int):
timeout_duration_seconds = 5 * 60
interval_seconds = 3
max_attempts = timeout_duration_seconds / interval_seconds
attempts = 0
# Poll until process exits
while is_process_running(pid):
time.sleep(3)
if attempts >= max_attempts:
print(
"::warning::Timeout expired while waiting for the disk cleanup process to finish."
)
break
time.sleep(interval_seconds)
attempts += 1
def main() -> int:
pid = read_pid_from_file()
wait_for_process(pid)
print_logs()

View file

@ -81,9 +81,11 @@ There are two orthogonal ways to control which kind of tracing logs you want:
- If you select a level, all events/spans with an equal or higher priority level will be shown.
2. You can also control the log **target**, e.g. `bootstrap` or `bootstrap::core::config` or a custom target like `CONFIG_HANDLING` or `STEP`.
- Custom targets are used to limit what kinds of spans you are interested in, as the `BOOTSTRAP_TRACING=trace` output can be quite verbose. Currently, you can use the following custom targets:
- `CONFIG_HANDLING`: show spans related to config handling
- `STEP`: show all executed steps. Note that executed commands have `info` event level.
- `COMMAND`: show all executed commands. Note that executed commands have `trace` event level.
- `CONFIG_HANDLING`: show spans related to config handling.
- `STEP`: show all executed steps. Executed commands have `info` event level.
- `COMMAND`: show all executed commands. Executed commands have `trace` event level.
  - `IO`: show performed I/O operations. I/O spans have `trace` event level.
    - Note that many I/O operations are currently not being traced.
You can of course combine them (custom target logs are typically gated behind `TRACE` log level additionally):

View file

@ -45,7 +45,7 @@ implementation:
[marked][sanitizer-attribute] with appropriate LLVM attribute:
`SanitizeAddress`, `SanitizeHWAddress`, `SanitizeMemory`, or
`SanitizeThread`. By default all functions are instrumented, but this
behaviour can be changed with `#[no_sanitize(...)]`.
behaviour can be changed with `#[sanitize(xyz = "on|off")]`.
* The decision whether to perform instrumentation or not is possible only at a
  function granularity. In the cases where those decisions differ between

View file

@ -375,12 +375,12 @@ linking time. It takes one of the following values:
* `y`, `yes`, `on`, `true`, `fat`, or no value: perform "fat" LTO which attempts to
perform optimizations across all crates within the dependency graph.
* `n`, `no`, `off`, `false`: disables LTO.
* `thin`: perform ["thin"
LTO](http://blog.llvm.org/2016/06/thinlto-scalable-and-incremental-lto.html).
This is similar to "fat", but takes substantially less time to run while
still achieving performance gains similar to "fat".
For larger projects like the Rust compiler, ThinLTO can even result in better performance than fat LTO.
* `n`, `no`, `off`, `false`: disables LTO.
If `-C lto` is not specified, then the compiler will attempt to perform "thin
local LTO" which performs "thin" LTO on the local crate only across its

View file

@ -16,7 +16,7 @@ system.
## Target maintainers
[@Amanieu](https://github.com/Amanieu)
[@lubinglun](https://github.com/lubinglun)
[@cceerczw](https://github.com/cceerczw)
## Requirements

View file

@ -534,10 +534,10 @@ tests, and will reject patches that fail to build or pass the testsuite on a
target. We hold tier 1 targets to our highest standard of requirements.
A proposed new tier 1 target must be reviewed and approved by the compiler team
based on these requirements. In addition, the release team must approve the
viability and value of supporting the target. For a tier 1 target, this will
based on these requirements. In addition, the infra team must approve the
viability of supporting the target. For a tier 1 target, this will
typically take place via a full RFC proposing the target, to be jointly
reviewed and approved by the compiler team and release team.
reviewed and approved by the compiler team and infra team.
In addition, the infrastructure team must approve the integration of the target
into Continuous Integration (CI), and the tier 1 CI-related requirements. This
@ -617,7 +617,7 @@ including the infrastructure team in the RFC proposing the target.
A tier 1 target may be demoted if it no longer meets these requirements but
still meets the requirements for a lower tier. Any proposal for demotion of a
tier 1 target requires a full RFC process, with approval by the compiler and
release teams. Any such proposal will be communicated widely to the Rust
infra teams. Any such proposal will be communicated widely to the Rust
community, both when initially proposed and before being dropped from a stable
release. A tier 1 target is highly unlikely to be directly removed without
first being demoted to tier 2 or tier 3. (The amount of time between such
@ -628,7 +628,7 @@ planned and scheduled action.)
Raising the baseline expectations of a tier 1 target (such as the minimum CPU
features or OS version required) requires the approval of the compiler and
release teams, and should be widely communicated as well, but does not
infra teams, and should be widely communicated as well, but does not
necessarily require a full RFC.
### Tier 1 with host tools
@ -638,11 +638,11 @@ host (such as `rustc` and `cargo`). This allows the target to be used as a
development platform, not just a compilation target.
A proposed new tier 1 target with host tools must be reviewed and approved by
the compiler team based on these requirements. In addition, the release team
must approve the viability and value of supporting host tools for the target.
the compiler team based on these requirements. In addition, the infra team
must approve the viability of supporting host tools for the target.
For a tier 1 target, this will typically take place via a full RFC proposing
the target, to be jointly reviewed and approved by the compiler team and
release team.
infra team.
In addition, the infrastructure team must approve the integration of the
target's host tools into Continuous Integration (CI), and the CI-related
@ -697,7 +697,7 @@ target with host tools may be demoted (including having its host tools dropped,
or being demoted to tier 2 with host tools) if it no longer meets these
requirements but still meets the requirements for a lower tier. Any proposal
for demotion of a tier 1 target (with or without host tools) requires a full
RFC process, with approval by the compiler and release teams. Any such proposal
RFC process, with approval by the compiler and infra teams. Any such proposal
will be communicated widely to the Rust community, both when initially proposed
and before being dropped from a stable release.

View file

@ -0,0 +1,19 @@
# `indirect-branch-cs-prefix`
The tracking issue for this feature is: https://github.com/rust-lang/rust/issues/116852.
------------------------
Option `-Zindirect-branch-cs-prefix` controls whether a `cs` prefix is added to
`call` and `jmp` to indirect thunks.
It is equivalent to [Clang]'s and [GCC]'s `-mindirect-branch-cs-prefix`. The
Linux kernel uses it for RETPOLINE builds. For details, see
[LLVM commit 6f867f910283] ("[X86] Support ``-mindirect-branch-cs-prefix`` for
call and jmp to indirect thunk") which introduces the feature.
Only x86 and x86_64 are supported.
[Clang]: https://clang.llvm.org/docs/ClangCommandLineReference.html#cmdoption-clang-mindirect-branch-cs-prefix
[GCC]: https://gcc.gnu.org/onlinedocs/gcc/x86-Options.html#index-mindirect-branch-cs-prefix
[LLVM commit 6f867f910283]: https://github.com/llvm/llvm-project/commit/6f867f9102838ebe314c1f3661fdf95700386e5a

View file

@ -1,29 +0,0 @@
# `no_sanitize`
The tracking issue for this feature is: [#39699]
[#39699]: https://github.com/rust-lang/rust/issues/39699
------------------------
The `no_sanitize` attribute can be used to selectively disable sanitizer
instrumentation in an annotated function. This might be useful to: avoid
instrumentation overhead in a performance critical function, or avoid
instrumenting code that contains constructs unsupported by given sanitizer.
The precise effect of this annotation depends on particular sanitizer in use.
For example, with `no_sanitize(thread)`, the thread sanitizer will no longer
instrument non-atomic store / load operations, but it will instrument atomic
operations to avoid reporting false positives and provide meaningful stack
traces.
## Examples
``` rust
#![feature(no_sanitize)]
#[no_sanitize(address)]
fn foo() {
// ...
}
```

View file

@ -0,0 +1,73 @@
# `sanitize`
The tracking issue for this feature is: [#39699]
[#39699]: https://github.com/rust-lang/rust/issues/39699
------------------------
The `sanitize` attribute can be used to selectively disable or enable sanitizer
instrumentation in an annotated function. This might be useful to: avoid
instrumentation overhead in a performance critical function, or avoid
instrumenting code that contains constructs unsupported by given sanitizer.
The precise effect of this annotation depends on particular sanitizer in use.
For example, with `sanitize(thread = "off")`, the thread sanitizer will no
longer instrument non-atomic store / load operations, but it will instrument
atomic operations to avoid reporting false positives and provide meaningful
stack traces.
This attribute was previously named `no_sanitize`.
## Examples
``` rust
#![feature(sanitize)]
#[sanitize(address = "off")]
fn foo() {
// ...
}
```
It is also possible to disable sanitizers for entire modules and enable them
for single items or functions.
```rust
#![feature(sanitize)]
#[sanitize(address = "off")]
mod foo {
fn unsanitized() {
// ...
}
#[sanitize(address = "on")]
fn sanitized() {
// ...
}
}
```
It's also applicable to impl blocks.
```rust
#![feature(sanitize)]
trait MyTrait {
fn foo(&self);
fn bar(&self);
}
#[sanitize(address = "off")]
impl MyTrait for () {
fn foo(&self) {
// ...
}
#[sanitize(address = "on")]
fn bar(&self) {
// ...
}
}
```

View file

@ -15,6 +15,7 @@ import os.path
import re
import shlex
from collections import namedtuple
from pathlib import Path
try:
from html.parser import HTMLParser
@ -242,6 +243,11 @@ class CachedFiles(object):
return self.last_path
def get_absolute_path(self, path):
    """Resolve `path` against the check root, expanding a glob if present.

    A path containing `*` is treated as a glob pattern relative to
    `self.root`; it must match exactly one file, otherwise a FailedCheck
    is raised.
    """
    if "*" not in path:
        # Plain path: join directly with the root.
        return os.path.join(self.root, path)
    matches = list(Path(self.root).glob(path))
    if len(matches) != 1:
        raise FailedCheck("glob path does not resolve to one file")
    return os.path.join(self.root, str(matches[0]))
def get_file(self, path):

View file

@ -21,6 +21,7 @@ rustdoc-json-types = { path = "../rustdoc-json-types" }
serde = { version = "1.0", features = ["derive"] }
serde_json = "1.0"
smallvec = "1.8.1"
stringdex = { version = "0.0.1-alpha4" }
tempfile = "3"
threadpool = "1.8.1"
tracing = "0.1"

View file

@ -10,6 +10,7 @@ fn main() {
"static/css/normalize.css",
"static/js/main.js",
"static/js/search.js",
"static/js/stringdex.js",
"static/js/settings.js",
"static/js/src-script.js",
"static/js/storage.js",

View file

@ -1552,10 +1552,10 @@ impl Type {
matches!(self, Type::Path { path: Path { res: Res::Def(DefKind::TyAlias, _), .. } })
}
/// Check if two types are "the same" for documentation purposes.
/// Check if this type is a subtype of another type for documentation purposes.
///
/// This is different from `Eq`, because it knows that things like
/// `Placeholder` are possible matches for everything.
/// `Infer` and generics have special subtyping rules.
///
/// This relation is not commutative when generics are involved:
///
@ -1566,8 +1566,8 @@ impl Type {
/// let cache = Cache::new(false);
/// let generic = Type::Generic(rustc_span::symbol::sym::Any);
/// let unit = Type::Primitive(PrimitiveType::Unit);
/// assert!(!generic.is_same(&unit, &cache));
/// assert!(unit.is_same(&generic, &cache));
/// assert!(!generic.is_doc_subtype_of(&unit, &cache));
/// assert!(unit.is_doc_subtype_of(&generic, &cache));
/// ```
///
/// An owned type is also the same as its borrowed variants (this is commutative),

View file

@ -1,6 +1,5 @@
use std::mem;
use rustc_ast::join_path_syms;
use rustc_data_structures::fx::{FxHashMap, FxHashSet, FxIndexMap, FxIndexSet};
use rustc_hir::StabilityLevel;
use rustc_hir::def_id::{CrateNum, DefId, DefIdMap, DefIdSet};
@ -48,7 +47,7 @@ pub(crate) struct Cache {
/// Similar to `paths`, but only holds external paths. This is only used for
/// generating explicit hyperlinks to other crates.
pub(crate) external_paths: FxHashMap<DefId, (Vec<Symbol>, ItemType)>,
pub(crate) external_paths: FxIndexMap<DefId, (Vec<Symbol>, ItemType)>,
/// Maps local `DefId`s of exported types to fully qualified paths.
/// Unlike 'paths', this mapping ignores any renames that occur
@ -574,7 +573,6 @@ fn add_item_to_search_index(tcx: TyCtxt<'_>, cache: &mut Cache, item: &clean::It
clean::ItemKind::ImportItem(import) => import.source.did.unwrap_or(item_def_id),
_ => item_def_id,
};
let path = join_path_syms(parent_path);
let impl_id = if let Some(ParentStackItem::Impl { item_id, .. }) = cache.parent_stack.last() {
item_id.as_def_id()
} else {
@ -593,11 +591,11 @@ fn add_item_to_search_index(tcx: TyCtxt<'_>, cache: &mut Cache, item: &clean::It
ty: item.type_(),
defid: Some(defid),
name,
path,
module_path: parent_path.to_vec(),
desc,
parent: parent_did,
parent_idx: None,
exact_path: None,
exact_module_path: None,
impl_id,
search_type,
aliases,

View file

@ -4,7 +4,7 @@ use std::fmt;
use rustc_hir::def::{CtorOf, DefKind, MacroKinds};
use rustc_span::hygiene::MacroKind;
use serde::{Serialize, Serializer};
use serde::{Deserialize, Deserializer, Serialize, Serializer, de};
use crate::clean;
@ -68,6 +68,52 @@ impl Serialize for ItemType {
}
}
impl<'de> Deserialize<'de> for ItemType {
    /// Deserializes an `ItemType` from the integer discriminant used in the
    /// search index — the inverse of the `Serialize` impl above.
    fn deserialize<D>(deserializer: D) -> Result<ItemType, D::Error>
    where
        D: Deserializer<'de>,
    {
        struct ItemTypeVisitor;

        impl<'de> de::Visitor<'de> for ItemTypeVisitor {
            type Value = ItemType;

            fn expecting(&self, formatter: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
                write!(formatter, "an integer between 0 and 25")
            }

            fn visit_u64<E: de::Error>(self, v: u64) -> Result<ItemType, E> {
                Ok(match v {
                    0 => ItemType::Keyword,
                    1 => ItemType::Primitive,
                    2 => ItemType::Module,
                    3 => ItemType::ExternCrate,
                    4 => ItemType::Import,
                    5 => ItemType::Struct,
                    6 => ItemType::Enum,
                    7 => ItemType::Function,
                    8 => ItemType::TypeAlias,
                    9 => ItemType::Static,
                    10 => ItemType::Trait,
                    11 => ItemType::Impl,
                    12 => ItemType::TyMethod,
                    13 => ItemType::Method,
                    14 => ItemType::StructField,
                    15 => ItemType::Variant,
                    16 => ItemType::Macro,
                    17 => ItemType::AssocType,
                    18 => ItemType::Constant,
                    19 => ItemType::AssocConst,
                    20 => ItemType::Union,
                    21 => ItemType::ForeignType,
                    // NOTE: 22 is intentionally unmapped — no variant uses
                    // that discriminant in this encoding (TODO: confirm
                    // against the `Serialize` impl / search.js).
                    23 => ItemType::ProcAttribute,
                    24 => ItemType::ProcDerive,
                    25 => ItemType::TraitAlias,
                    // `invalid_value`, not `missing_field`: the number was
                    // present but out of range, and `missing_field` would
                    // render a misleading "missing field `unknown number`"
                    // error message.
                    _ => return Err(E::invalid_value(de::Unexpected::Unsigned(v), &self)),
                })
            }
        }

        deserializer.deserialize_any(ItemTypeVisitor)
    }
}
impl<'a> From<&'a clean::Item> for ItemType {
fn from(item: &'a clean::Item) -> ItemType {
let kind = match &item.kind {
@ -198,6 +244,10 @@ impl ItemType {
/// Returns `true` for algebraic data types: structs, unions, and enums.
pub(crate) fn is_adt(&self) -> bool {
matches!(self, ItemType::Struct | ItemType::Union | ItemType::Enum)
}
/// Returns `true` for item kinds treated as "function-like" by search:
/// free functions, inherent/trait methods, and required trait methods.
///
/// Keep this the same as isFnLikeTy in search.js
pub(crate) fn is_fn_like(&self) -> bool {
matches!(self, ItemType::Function | ItemType::Method | ItemType::TyMethod)
}
}
impl fmt::Display for ItemType {

View file

@ -27,6 +27,7 @@ pub(crate) struct Layout {
pub(crate) struct Page<'a> {
pub(crate) title: &'a str,
pub(crate) short_title: &'a str,
pub(crate) css_class: &'a str,
pub(crate) root_path: &'a str,
pub(crate) static_root_path: Option<&'a str>,

View file

@ -204,6 +204,18 @@ impl<'tcx> Context<'tcx> {
if !is_module {
title.push_str(it.name.unwrap().as_str());
}
let short_title;
let short_title = if is_module {
let module_name = self.current.last().unwrap();
short_title = if it.is_crate() {
format!("Crate {module_name}")
} else {
format!("Module {module_name}")
};
&short_title[..]
} else {
it.name.as_ref().unwrap().as_str()
};
if !it.is_primitive() && !it.is_keyword() {
if !is_module {
title.push_str(" in ");
@ -240,6 +252,7 @@ impl<'tcx> Context<'tcx> {
root_path: &self.root_path(),
static_root_path: self.shared.static_root_path.as_deref(),
title: &title,
short_title,
description: &desc,
resource_suffix: &self.shared.resource_suffix,
rust_logo: has_doc_flag(self.tcx(), LOCAL_CRATE.as_def_id(), sym::rust_logo),
@ -617,6 +630,7 @@ impl<'tcx> FormatRenderer<'tcx> for Context<'tcx> {
let shared = &self.shared;
let mut page = layout::Page {
title: "List of all items in this crate",
short_title: "All",
css_class: "mod sys",
root_path: "../",
static_root_path: shared.static_root_path.as_deref(),

View file

@ -130,11 +130,11 @@ pub(crate) struct IndexItem {
pub(crate) ty: ItemType,
pub(crate) defid: Option<DefId>,
pub(crate) name: Symbol,
pub(crate) path: String,
pub(crate) module_path: Vec<Symbol>,
pub(crate) desc: String,
pub(crate) parent: Option<DefId>,
pub(crate) parent_idx: Option<isize>,
pub(crate) exact_path: Option<String>,
pub(crate) parent_idx: Option<usize>,
pub(crate) exact_module_path: Option<Vec<Symbol>>,
pub(crate) impl_id: Option<DefId>,
pub(crate) search_type: Option<IndexItemFunctionType>,
pub(crate) aliases: Box<[Symbol]>,
@ -150,6 +150,19 @@ struct RenderType {
}
impl RenderType {
/// Counts the nodes in this type tree: one for the type itself, one per
/// associated-type binding key, plus the sizes of all nested generic
/// arguments and binding constraints.
fn size(&self) -> usize {
    let mut total = 1;
    for generic in self.generics.iter().flatten() {
        total += generic.size();
    }
    for (_, constraints) in self.bindings.iter().flatten() {
        // Each binding key counts as one node on top of its constraints.
        total += 1 + constraints.iter().map(RenderType::size).sum::<usize>();
    }
    total
}
// Types are rendered as lists of lists, because that's pretty compact.
// The contents of the lists are always integers in self-terminating hex
// form, handled by `RenderTypeId::write_to_string`, so no commas are
@ -191,6 +204,62 @@ impl RenderType {
write_optional_id(self.id, string);
}
}
/// Parses one `RenderType` from the self-terminating byte encoding produced
/// by `write_to_string` above, returning the parsed type and the number of
/// bytes consumed.
///
/// Wire layout: either a bare `RenderTypeId`, or a composite
/// `{ id [ { generics... } ] [ { {key{constraints...}}... } ] }` where the
/// generics and bindings groups are each optional.
///
/// Panics (via indexing/asserts) on truncated or malformed input; callers
/// are expected to feed back data this module itself wrote.
fn read_from_bytes(string: &[u8]) -> (RenderType, usize) {
let mut i = 0;
if string[i] == b'{' {
// Composite form: consume the opening brace, then the type id.
i += 1;
let (id, offset) = RenderTypeId::read_from_bytes(&string[i..]);
i += offset;
// Optional generics group: `{` ty* `}`.
let generics = if string[i] == b'{' {
i += 1;
let mut generics = Vec::new();
while string[i] != b'}' {
let (ty, offset) = RenderType::read_from_bytes(&string[i..]);
i += offset;
generics.push(ty);
}
assert!(string[i] == b'}');
i += 1;
Some(generics)
} else {
None
};
// Optional bindings group: `{` binding* `}`, where each binding is
// `{key{constraints...}}`.
let bindings = if string[i] == b'{' {
i += 1;
let mut bindings = Vec::new();
while string[i] == b'{' {
i += 1;
let (binding, boffset) = RenderTypeId::read_from_bytes(&string[i..]);
i += boffset;
let mut bconstraints = Vec::new();
assert!(string[i] == b'{');
i += 1;
while string[i] != b'}' {
let (constraint, coffset) = RenderType::read_from_bytes(&string[i..]);
i += coffset;
bconstraints.push(constraint);
}
// End of this binding's constraints list.
assert!(string[i] == b'}');
i += 1;
// A written binding always carries a key id, so `None` here would
// mean a malformed index — panic via unwrap.
bindings.push((binding.unwrap(), bconstraints));
assert!(string[i] == b'}');
i += 1;
}
// End of the bindings group.
assert!(string[i] == b'}');
i += 1;
Some(bindings)
} else {
None
};
// Closing brace of the composite form.
assert!(string[i] == b'}');
i += 1;
(RenderType { id, generics, bindings }, i)
} else {
// Bare form: just a type id, no generics or bindings.
let (id, offset) = RenderTypeId::read_from_bytes(string);
i += offset;
(RenderType { id, generics: None, bindings: None }, i)
}
}
}
#[derive(Clone, Copy, Debug, Eq, PartialEq)]
@ -212,7 +281,20 @@ impl RenderTypeId {
RenderTypeId::Index(idx) => (*idx).try_into().unwrap(),
_ => panic!("must convert render types to indexes before serializing"),
};
search_index::encode::write_vlqhex_to_string(id, string);
search_index::encode::write_signed_vlqhex_to_string(id, string);
}
/// Decodes one optional `RenderTypeId` from a signed VLQ-hex number,
/// returning the id and the number of bytes consumed (0 if no number
/// could be read).
///
/// Decoded-value mapping: negative values become `Index(value)` as-is,
/// 0 means "no id" (`None`), and positive values become `Index(value - 1)`
/// — presumably the writer shifts non-negative indexes up by one so that
/// 0 stays free to mean `None`; confirm against the encoding side.
fn read_from_bytes(string: &[u8]) -> (Option<RenderTypeId>, usize) {
let Some((value, offset)) = search_index::encode::read_signed_vlqhex_from_string(string)
else {
return (None, 0);
};
let value = isize::try_from(value).unwrap();
let ty = match value {
..0 => Some(RenderTypeId::Index(value)),
0 => None,
1.. => Some(RenderTypeId::Index(value - 1)),
};
(ty, offset)
}
}
@ -226,12 +308,64 @@ pub(crate) struct IndexItemFunctionType {
}
impl IndexItemFunctionType {
fn write_to_string<'a>(
&'a self,
string: &mut String,
backref_queue: &mut VecDeque<&'a IndexItemFunctionType>,
) {
assert!(backref_queue.len() <= 16);
/// Total node count of this signature: the sizes of every input type,
/// every output type, and every type in every where-clause constraint
/// list, summed together.
fn size(&self) -> usize {
    let inputs: usize = self.inputs.iter().map(RenderType::size).sum();
    let outputs: usize = self.output.iter().map(RenderType::size).sum();
    let wheres: usize = self
        .where_clause
        .iter()
        .flat_map(|constraints| constraints.iter())
        .map(RenderType::size)
        .sum();
    inputs + outputs + wheres
}
/// Parses a function signature from its serialized search-index form,
/// returning the signature (with `param_names` left empty — they are
/// stored separately) and the number of bytes consumed.
///
/// Wire layout: either a single backtick (meaning "no type information"),
/// or `{ inputs outputs where-clause* }` where each argument list is a
/// single id, `}`-terminated nothing, or a braced list of types.
fn read_from_string_without_param_names(string: &[u8]) -> (IndexItemFunctionType, usize) {
let mut i = 0;
// A backtick encodes a signature with no recoverable type info.
if string[i] == b'`' {
return (
IndexItemFunctionType {
inputs: Vec::new(),
output: Vec::new(),
where_clause: Vec::new(),
param_names: Vec::new(),
},
1,
);
}
assert_eq!(b'{', string[i]);
i += 1;
// Parses one argument list: `{ty*}` for several types, a bare id for a
// single type, or nothing (next byte is `}`) for an empty list.
fn read_args_from_string(string: &[u8]) -> (Vec<RenderType>, usize) {
let mut i = 0;
let mut params = Vec::new();
if string[i] == b'{' {
// multiple params
i += 1;
while string[i] != b'}' {
let (ty, offset) = RenderType::read_from_bytes(&string[i..]);
i += offset;
params.push(ty);
}
i += 1;
} else if string[i] != b'}' {
// Single bare id: a type with no generics or bindings.
let (tyid, offset) = RenderTypeId::read_from_bytes(&string[i..]);
params.push(RenderType { id: tyid, generics: None, bindings: None });
i += offset;
}
(params, i)
}
let (inputs, offset) = read_args_from_string(&string[i..]);
i += offset;
let (output, offset) = read_args_from_string(&string[i..]);
i += offset;
// Any remaining argument lists before the closing brace are where-clause
// constraint lists, one per generic parameter.
let mut where_clause = Vec::new();
while string[i] != b'}' {
let (constraint, offset) = read_args_from_string(&string[i..]);
i += offset;
where_clause.push(constraint);
}
assert_eq!(b'}', string[i], "{} {}", String::from_utf8_lossy(&string), i);
i += 1;
(IndexItemFunctionType { inputs, output, where_clause, param_names: Vec::new() }, i)
}
fn write_to_string_without_param_names<'a>(&'a self, string: &mut String) {
// If we couldn't figure out a type, just write 0,
// which is encoded as `` ` `` (see RenderTypeId::write_to_string).
let has_missing = self
@ -241,18 +375,7 @@ impl IndexItemFunctionType {
.any(|i| i.id.is_none() && i.generics.is_none());
if has_missing {
string.push('`');
} else if let Some(idx) = backref_queue.iter().position(|other| *other == self) {
// The backref queue has 16 items, so backrefs use
// a single hexit, disjoint from the ones used for numbers.
string.push(
char::try_from('0' as u32 + u32::try_from(idx).unwrap())
.expect("last possible value is '?'"),
);
} else {
backref_queue.push_front(self);
if backref_queue.len() > 16 {
backref_queue.pop_back();
}
string.push('{');
match &self.inputs[..] {
[one] if one.generics.is_none() && one.bindings.is_none() => {

View file

@ -35,6 +35,7 @@ use crate::html::format::{
visibility_print_with_space,
};
use crate::html::markdown::{HeadingOffset, MarkdownSummaryLine};
use crate::html::render::sidebar::filters;
use crate::html::render::{document_full, document_item_info};
use crate::html::url_parts_builder::UrlPartsBuilder;

File diff suppressed because it is too large Load diff

View file

@ -1,6 +1,4 @@
use base64::prelude::*;
pub(crate) fn write_vlqhex_to_string(n: i32, string: &mut String) {
pub(crate) fn write_signed_vlqhex_to_string(n: i32, string: &mut String) {
let (sign, magnitude): (bool, u32) =
if n >= 0 { (false, n.try_into().unwrap()) } else { (true, (-n).try_into().unwrap()) };
// zig-zag encoding
@ -37,206 +35,66 @@ pub(crate) fn write_vlqhex_to_string(n: i32, string: &mut String) {
}
}
// Used during bitmap encoding
enum Container {
/// number of ones, bits
Bits(Box<[u64; 1024]>),
/// list of entries
Array(Vec<u16>),
/// list of (start, len-1)
Run(Vec<(u16, u16)>),
/// Decodes one signed VLQ-hex number from the front of `string`.
///
/// Each byte contributes its low 4 bits to the accumulator; a byte with
/// value >= 96 (ASCII `` ` `` through `o`) marks the final hexit. The
/// accumulated value is zig-zag encoded (low bit = sign flag). Returns the
/// decoded value and the number of bytes consumed, or `None` if the input
/// ends before a terminating byte is seen.
pub fn read_signed_vlqhex_from_string(string: &[u8]) -> Option<(i32, usize)> {
    let mut acc = 0i32;
    for (idx, &byte) in string.iter().enumerate() {
        acc = (acc << 4) | i32::from(byte & 0xF);
        if byte >= 96 {
            // Undo the zig-zag encoding.
            let magnitude = acc >> 1;
            let decoded = if acc & 1 == 0 { magnitude } else { -magnitude };
            return Some((decoded, idx + 1));
        }
    }
    None
}
impl Container {
fn popcount(&self) -> u32 {
match self {
Container::Bits(bits) => bits.iter().copied().map(|x| x.count_ones()).sum(),
Container::Array(array) => {
array.len().try_into().expect("array can't be bigger than 2**32")
}
Container::Run(runs) => {
runs.iter().copied().map(|(_, lenm1)| u32::from(lenm1) + 1).sum()
}
pub fn write_postings_to_string(postings: &[Vec<u32>], buf: &mut Vec<u8>) {
for list in postings {
if list.is_empty() {
buf.push(0);
continue;
}
}
fn push(&mut self, value: u16) {
match self {
Container::Bits(bits) => bits[value as usize >> 6] |= 1 << (value & 0x3F),
Container::Array(array) => {
array.push(value);
if array.len() >= 4096 {
let array = std::mem::take(array);
*self = Container::Bits(Box::new([0; 1024]));
for value in array {
self.push(value);
}
}
let len_before = buf.len();
stringdex::internals::encode::write_bitmap_to_bytes(&list, &mut *buf).unwrap();
let len_after = buf.len();
if len_after - len_before > 1 + (4 * list.len()) && list.len() < 0x3a {
buf.truncate(len_before);
buf.push(list.len() as u8);
for &item in list {
buf.push(item as u8);
buf.push((item >> 8) as u8);
buf.push((item >> 16) as u8);
buf.push((item >> 24) as u8);
}
Container::Run(runs) => {
if let Some(r) = runs.last_mut()
&& r.0 + r.1 + 1 == value
{
r.1 += 1;
} else {
runs.push((value, 0));
}
}
}
}
fn try_make_run(&mut self) -> bool {
match self {
Container::Bits(bits) => {
let mut r: u64 = 0;
for (i, chunk) in bits.iter().copied().enumerate() {
let next_chunk =
i.checked_add(1).and_then(|i| bits.get(i)).copied().unwrap_or(0);
r += !chunk & u64::from((chunk << 1).count_ones());
r += !next_chunk & u64::from((chunk >> 63).count_ones());
}
if (2 + 4 * r) >= 8192 {
return false;
}
let bits = std::mem::replace(bits, Box::new([0; 1024]));
*self = Container::Run(Vec::new());
for (i, bits) in bits.iter().copied().enumerate() {
if bits == 0 {
continue;
}
for j in 0..64 {
let value = (u16::try_from(i).unwrap() << 6) | j;
if bits & (1 << j) != 0 {
self.push(value);
}
}
}
true
}
Container::Array(array) if array.len() <= 5 => false,
Container::Array(array) => {
let mut r = 0;
let mut prev = None;
for value in array.iter().copied() {
if value.checked_sub(1) != prev {
r += 1;
}
prev = Some(value);
}
if 2 + 4 * r >= 2 * array.len() + 2 {
return false;
}
let array = std::mem::take(array);
*self = Container::Run(Vec::new());
for value in array {
self.push(value);
}
true
}
Container::Run(_) => true,
}
}
}
// checked against roaring-rs in
// https://gitlab.com/notriddle/roaring-test
pub(crate) fn write_bitmap_to_bytes(
domain: &[u32],
mut out: impl std::io::Write,
) -> std::io::Result<()> {
// https://arxiv.org/pdf/1603.06549.pdf
let mut keys = Vec::<u16>::new();
let mut containers = Vec::<Container>::new();
let mut key: u16;
let mut domain_iter = domain.iter().copied().peekable();
let mut has_run = false;
while let Some(entry) = domain_iter.next() {
key = (entry >> 16).try_into().expect("shifted off the top 16 bits, so it should fit");
let value: u16 = (entry & 0x00_00_FF_FF).try_into().expect("AND 16 bits, so it should fit");
let mut container = Container::Array(vec![value]);
while let Some(entry) = domain_iter.peek().copied() {
let entry_key: u16 =
(entry >> 16).try_into().expect("shifted off the top 16 bits, so it should fit");
if entry_key != key {
break;
pub fn read_postings_from_string(postings: &mut Vec<Vec<u32>>, mut buf: &[u8]) {
use stringdex::internals::decode::RoaringBitmap;
while let Some(&c) = buf.get(0) {
if c < 0x3a {
buf = &buf[1..];
let mut slot = Vec::new();
for _ in 0..c {
slot.push(
(buf[0] as u32)
| ((buf[1] as u32) << 8)
| ((buf[2] as u32) << 16)
| ((buf[3] as u32) << 24),
);
buf = &buf[4..];
}
domain_iter.next().expect("peeking just succeeded");
container
.push((entry & 0x00_00_FF_FF).try_into().expect("AND 16 bits, so it should fit"));
}
keys.push(key);
has_run = container.try_make_run() || has_run;
containers.push(container);
}
// https://github.com/RoaringBitmap/RoaringFormatSpec
const SERIAL_COOKIE_NO_RUNCONTAINER: u32 = 12346;
const SERIAL_COOKIE: u32 = 12347;
const NO_OFFSET_THRESHOLD: u32 = 4;
let size: u32 = containers.len().try_into().unwrap();
let start_offset = if has_run {
out.write_all(&u32::to_le_bytes(SERIAL_COOKIE | ((size - 1) << 16)))?;
for set in containers.chunks(8) {
let mut b = 0;
for (i, container) in set.iter().enumerate() {
if matches!(container, &Container::Run(..)) {
b |= 1 << i;
}
}
out.write_all(&[b])?;
}
if size < NO_OFFSET_THRESHOLD {
4 + 4 * size + size.div_ceil(8)
postings.push(slot);
} else {
4 + 8 * size + size.div_ceil(8)
}
} else {
out.write_all(&u32::to_le_bytes(SERIAL_COOKIE_NO_RUNCONTAINER))?;
out.write_all(&u32::to_le_bytes(containers.len().try_into().unwrap()))?;
4 + 4 + 4 * size + 4 * size
};
for (&key, container) in keys.iter().zip(&containers) {
// descriptive header
let key: u32 = key.into();
let count: u32 = container.popcount() - 1;
out.write_all(&u32::to_le_bytes((count << 16) | key))?;
}
if !has_run || size >= NO_OFFSET_THRESHOLD {
// offset header
let mut starting_offset = start_offset;
for container in &containers {
out.write_all(&u32::to_le_bytes(starting_offset))?;
starting_offset += match container {
Container::Bits(_) => 8192u32,
Container::Array(array) => u32::try_from(array.len()).unwrap() * 2,
Container::Run(runs) => 2 + u32::try_from(runs.len()).unwrap() * 4,
};
let (bitmap, consumed_bytes_len) =
RoaringBitmap::from_bytes(buf).unwrap_or_else(|| (RoaringBitmap::default(), 0));
assert_ne!(consumed_bytes_len, 0);
postings.push(bitmap.to_vec());
buf = &buf[consumed_bytes_len..];
}
}
for container in &containers {
match container {
Container::Bits(bits) => {
for chunk in bits.iter() {
out.write_all(&u64::to_le_bytes(*chunk))?;
}
}
Container::Array(array) => {
for value in array.iter() {
out.write_all(&u16::to_le_bytes(*value))?;
}
}
Container::Run(runs) => {
out.write_all(&u16::to_le_bytes(runs.len().try_into().unwrap()))?;
for (start, lenm1) in runs.iter().copied() {
out.write_all(&u16::to_le_bytes(start))?;
out.write_all(&u16::to_le_bytes(lenm1))?;
}
}
}
}
Ok(())
}
pub(crate) fn bitmap_to_string(domain: &[u32]) -> String {
let mut buf = Vec::new();
let mut strbuf = String::new();
write_bitmap_to_bytes(domain, &mut buf).unwrap();
BASE64_STANDARD.encode_string(&buf, &mut strbuf);
strbuf
}

View file

@ -65,17 +65,17 @@ pub(crate) fn write_shared(
// Write shared runs within a flock; disable thread dispatching of IO temporarily.
let _lock = try_err!(flock::Lock::new(&lock_file, true, true, true), &lock_file);
let SerializedSearchIndex { index, desc } = build_index(krate, &mut cx.shared.cache, tcx);
write_search_desc(cx, krate, &desc)?; // does not need to be merged
let search_index =
build_index(krate, &mut cx.shared.cache, tcx, &cx.dst, &cx.shared.resource_suffix)?;
let crate_name = krate.name(cx.tcx());
let crate_name = crate_name.as_str(); // rand
let crate_name_json = OrderedJson::serialize(crate_name).unwrap(); // "rand"
let external_crates = hack_get_external_crate_names(&cx.dst, &cx.shared.resource_suffix)?;
let info = CrateInfo {
version: CrateInfoVersion::V1,
version: CrateInfoVersion::V2,
src_files_js: SourcesPart::get(cx, &crate_name_json)?,
search_index_js: SearchIndexPart::get(index, &cx.shared.resource_suffix)?,
search_index,
all_crates: AllCratesPart::get(crate_name_json.clone(), &cx.shared.resource_suffix)?,
crates_index: CratesIndexPart::get(crate_name, &external_crates)?,
trait_impl: TraitAliasPart::get(cx, &crate_name_json)?,
@ -141,7 +141,7 @@ pub(crate) fn write_not_crate_specific(
resource_suffix: &str,
include_sources: bool,
) -> Result<(), Error> {
write_rendered_cross_crate_info(crates, dst, opt, include_sources)?;
write_rendered_cross_crate_info(crates, dst, opt, include_sources, resource_suffix)?;
write_static_files(dst, opt, style_files, css_file_extension, resource_suffix)?;
Ok(())
}
@ -151,13 +151,18 @@ fn write_rendered_cross_crate_info(
dst: &Path,
opt: &RenderOptions,
include_sources: bool,
resource_suffix: &str,
) -> Result<(), Error> {
let m = &opt.should_merge;
if opt.should_emit_crate() {
if include_sources {
write_rendered_cci::<SourcesPart, _>(SourcesPart::blank, dst, crates, m)?;
}
write_rendered_cci::<SearchIndexPart, _>(SearchIndexPart::blank, dst, crates, m)?;
crates
.iter()
.fold(SerializedSearchIndex::default(), |a, b| a.union(&b.search_index))
.sort()
.write_to(dst, resource_suffix)?;
write_rendered_cci::<AllCratesPart, _>(AllCratesPart::blank, dst, crates, m)?;
}
write_rendered_cci::<TraitAliasPart, _>(TraitAliasPart::blank, dst, crates, m)?;
@ -215,38 +220,12 @@ fn write_static_files(
Ok(())
}
/// Write the search description shards to disk
fn write_search_desc(
cx: &mut Context<'_>,
krate: &Crate,
search_desc: &[(usize, String)],
) -> Result<(), Error> {
let crate_name = krate.name(cx.tcx()).to_string();
let encoded_crate_name = OrderedJson::serialize(&crate_name).unwrap();
let path = PathBuf::from_iter([&cx.dst, Path::new("search.desc"), Path::new(&crate_name)]);
if path.exists() {
try_err!(fs::remove_dir_all(&path), &path);
}
for (i, (_, part)) in search_desc.iter().enumerate() {
let filename = static_files::suffix_path(
&format!("{crate_name}-desc-{i}-.js"),
&cx.shared.resource_suffix,
);
let path = path.join(filename);
let part = OrderedJson::serialize(part).unwrap();
let part = format!("searchState.loadedDescShard({encoded_crate_name}, {i}, {part})");
create_parents(&path)?;
try_err!(fs::write(&path, part), &path);
}
Ok(())
}
/// Contains pre-rendered contents to insert into the CCI template
#[derive(Serialize, Deserialize, Clone, Debug)]
pub(crate) struct CrateInfo {
version: CrateInfoVersion,
src_files_js: PartsAndLocations<SourcesPart>,
search_index_js: PartsAndLocations<SearchIndexPart>,
search_index: SerializedSearchIndex,
all_crates: PartsAndLocations<AllCratesPart>,
crates_index: PartsAndLocations<CratesIndexPart>,
trait_impl: PartsAndLocations<TraitAliasPart>,
@ -277,7 +256,7 @@ impl CrateInfo {
/// to provide better diagnostics about including an invalid file.
#[derive(Serialize, Deserialize, Clone, Debug)]
enum CrateInfoVersion {
V1,
V2,
}
/// Paths (relative to the doc root) and their pre-merge contents
@ -331,36 +310,6 @@ trait CciPart: Sized + fmt::Display + DeserializeOwned + 'static {
fn from_crate_info(crate_info: &CrateInfo) -> &PartsAndLocations<Self>;
}
#[derive(Serialize, Deserialize, Clone, Default, Debug)]
struct SearchIndex;
type SearchIndexPart = Part<SearchIndex, EscapedJson>;
impl CciPart for SearchIndexPart {
type FileFormat = sorted_template::Js;
fn from_crate_info(crate_info: &CrateInfo) -> &PartsAndLocations<Self> {
&crate_info.search_index_js
}
}
impl SearchIndexPart {
fn blank() -> SortedTemplate<<Self as CciPart>::FileFormat> {
SortedTemplate::from_before_after(
r"var searchIndex = new Map(JSON.parse('[",
r"]'));
if (typeof exports !== 'undefined') exports.searchIndex = searchIndex;
else if (window.initSearch) window.initSearch(searchIndex);",
)
}
fn get(
search_index: OrderedJson,
resource_suffix: &str,
) -> Result<PartsAndLocations<Self>, Error> {
let path = suffix_path("search-index.js", resource_suffix);
let search_index = EscapedJson::from(search_index);
Ok(PartsAndLocations::with(path, search_index))
}
}
#[derive(Serialize, Deserialize, Clone, Default, Debug)]
struct AllCrates;
type AllCratesPart = Part<AllCrates, OrderedJson>;
@ -426,6 +375,7 @@ impl CratesIndexPart {
fn blank(cx: &Context<'_>) -> SortedTemplate<<Self as CciPart>::FileFormat> {
let page = layout::Page {
title: "Index of crates",
short_title: "Crates",
css_class: "mod sys",
root_path: "./",
static_root_path: cx.shared.static_root_path.as_deref(),

View file

@ -29,14 +29,6 @@ fn sources_template() {
assert_eq!(but_last_line(&template.to_string()), r#"createSrcSidebar('["u","v"]');"#);
}
#[test]
fn sources_parts() {
let parts =
SearchIndexPart::get(OrderedJson::serialize(["foo", "bar"]).unwrap(), "suffix").unwrap();
assert_eq!(&parts.parts[0].0, Path::new("search-indexsuffix.js"));
assert_eq!(&parts.parts[0].1.to_string(), r#"["foo","bar"]"#);
}
#[test]
fn all_crates_template() {
let mut template = AllCratesPart::blank();
@ -54,31 +46,6 @@ fn all_crates_parts() {
assert_eq!(&parts.parts[0].1.to_string(), r#""crate""#);
}
#[test]
fn search_index_template() {
let mut template = SearchIndexPart::blank();
assert_eq!(
but_last_line(&template.to_string()),
r"var searchIndex = new Map(JSON.parse('[]'));
if (typeof exports !== 'undefined') exports.searchIndex = searchIndex;
else if (window.initSearch) window.initSearch(searchIndex);"
);
template.append(EscapedJson::from(OrderedJson::serialize([1, 2]).unwrap()).to_string());
assert_eq!(
but_last_line(&template.to_string()),
r"var searchIndex = new Map(JSON.parse('[[1,2]]'));
if (typeof exports !== 'undefined') exports.searchIndex = searchIndex;
else if (window.initSearch) window.initSearch(searchIndex);"
);
template.append(EscapedJson::from(OrderedJson::serialize([4, 3]).unwrap()).to_string());
assert_eq!(
but_last_line(&template.to_string()),
r"var searchIndex = new Map(JSON.parse('[[1,2],[4,3]]'));
if (typeof exports !== 'undefined') exports.searchIndex = searchIndex;
else if (window.initSearch) window.initSearch(searchIndex);"
);
}
#[test]
fn crates_index_part() {
let external_crates = ["bar".to_string(), "baz".to_string()];

View file

@ -230,6 +230,7 @@ impl SourceCollector<'_, '_> {
);
let page = layout::Page {
title: &title,
short_title: &src_fname.to_string_lossy(),
css_class: "src",
root_path: &root_path,
static_root_path: shared.static_root_path.as_deref(),

View file

@ -258,6 +258,17 @@ h1, h2, h3, h4 {
padding-bottom: 6px;
margin-bottom: 15px;
}
.search-results-main-heading {
grid-template-areas:
"main-heading-breadcrumbs main-heading-placeholder"
"main-heading-breadcrumbs main-heading-toolbar "
"main-heading-h1 main-heading-toolbar ";
}
.search-results-main-heading nav.sub {
grid-area: main-heading-h1;
align-items: end;
margin: 4px 0 8px 0;
}
.rustdoc-breadcrumbs {
grid-area: main-heading-breadcrumbs;
line-height: 1.25;
@ -265,6 +276,16 @@ h1, h2, h3, h4 {
position: relative;
z-index: 1;
}
.search-switcher {
grid-area: main-heading-breadcrumbs;
line-height: 1.5;
display: flex;
color: var(--main-color);
align-items: baseline;
white-space: nowrap;
padding-top: 8px;
min-height: 34px;
}
.rustdoc-breadcrumbs a {
padding: 5px 0 7px;
}
@ -305,7 +326,7 @@ h4.code-header {
#crate-search,
h1, h2, h3, h4, h5, h6,
.sidebar,
.mobile-topbar,
rustdoc-topbar,
.search-input,
.search-results .result-name,
.item-table dt > a,
@ -317,6 +338,7 @@ rustdoc-toolbar,
summary.hideme,
.scraped-example-list,
.rustdoc-breadcrumbs,
.search-switcher,
/* This selector is for the items listed in the "all items" page. */
ul.all-items {
font-family: "Fira Sans", Arial, NanumBarunGothic, sans-serif;
@ -329,7 +351,7 @@ a.anchor,
.rust a,
.sidebar h2 a,
.sidebar h3 a,
.mobile-topbar h2 a,
rustdoc-topbar h2 a,
h1 a,
.search-results a,
.search-results li,
@ -616,7 +638,7 @@ img {
color: var(--sidebar-resizer-active);
}
.sidebar, .mobile-topbar, .sidebar-menu-toggle,
.sidebar, rustdoc-topbar, .sidebar-menu-toggle,
#src-sidebar {
background-color: var(--sidebar-background-color);
}
@ -857,7 +879,7 @@ ul.block, .block li, .block ul {
margin-bottom: 1rem;
}
.mobile-topbar {
rustdoc-topbar {
display: none;
}
@ -1098,16 +1120,15 @@ div.where {
nav.sub {
flex-grow: 1;
flex-flow: row nowrap;
margin: 4px 0 0 0;
display: flex;
align-items: center;
align-items: start;
margin-top: 4px;
}
.search-form {
position: relative;
display: flex;
height: 34px;
flex-grow: 1;
margin-bottom: 4px;
}
.src nav.sub {
margin: 0 0 -10px 0;
@ -1208,27 +1229,14 @@ table,
margin-left: 0;
}
.search-results-title {
margin-top: 0;
white-space: nowrap;
/* flex layout allows shrinking the <select> appropriately if it becomes too large */
display: flex;
/* make things look like in a line, despite the fact that we're using a layout
with boxes (i.e. from the flex layout) */
align-items: baseline;
}
.search-results-title + .sub-heading {
color: var(--main-color);
display: flex;
align-items: baseline;
white-space: nowrap;
}
#crate-search-div {
/* ensures that 100% in properties of #crate-search-div:after
are relative to the size of this div */
position: relative;
/* allows this div (and with it the <select>-element "#crate-search") to be shrunk */
min-width: 0;
/* keep label text for switcher from moving down when this appears */
margin-top: -1px;
}
#crate-search {
padding: 0 23px 0 4px;
@ -1294,6 +1302,7 @@ so that we can apply CSS-filters to change the arrow color in themes */
flex-grow: 1;
background-color: var(--button-background-color);
color: var(--search-color);
max-width: 100%;
}
.search-input:focus {
border-color: var(--search-input-focused-border-color);
@ -1459,14 +1468,14 @@ so that we can apply CSS-filters to change the arrow color in themes */
}
#settings.popover {
--popover-arrow-offset: 202px;
--popover-arrow-offset: 196px;
top: calc(100% - 16px);
}
/* use larger max-width for help popover, but not for help.html */
#help.popover {
max-width: 600px;
--popover-arrow-offset: 118px;
--popover-arrow-offset: 115px;
top: calc(100% - 16px);
}
@ -1929,10 +1938,12 @@ a.tooltip:hover::after {
color: inherit;
}
#search-tabs button:not(.selected) {
--search-tab-button-background: var(--search-tab-button-not-selected-background);
background-color: var(--search-tab-button-not-selected-background);
border-top-color: var(--search-tab-button-not-selected-border-top-color);
}
#search-tabs button:hover, #search-tabs button.selected {
--search-tab-button-background: var(--search-tab-button-selected-background);
background-color: var(--search-tab-button-selected-background);
border-top-color: var(--search-tab-button-selected-border-top-color);
}
@ -1941,6 +1952,73 @@ a.tooltip:hover::after {
font-size: 1rem;
font-variant-numeric: tabular-nums;
color: var(--search-tab-title-count-color);
position: relative;
}
#search-tabs .count.loading {
color: transparent;
}
.search-form.loading {
--search-tab-button-background: var(--button-background-color);
}
#search-tabs .count.loading::before,
.search-form.loading::before
{
width: 16px;
height: 16px;
border-radius: 16px;
background: radial-gradient(
var(--search-tab-button-background) 0 50%,
transparent 50% 100%
), conic-gradient(
var(--code-highlight-kw-color) 0deg 30deg,
var(--code-highlight-prelude-color) 30deg 60deg,
var(--code-highlight-number-color) 90deg 120deg,
var(--code-highlight-lifetime-color ) 120deg 150deg,
var(--code-highlight-comment-color) 150deg 180deg,
var(--code-highlight-self-color) 180deg 210deg,
var(--code-highlight-attribute-color) 210deg 240deg,
var(--code-highlight-literal-color) 210deg 240deg,
var(--code-highlight-macro-color) 240deg 270deg,
var(--code-highlight-question-mark-color) 270deg 300deg,
var(--code-highlight-prelude-val-color) 300deg 330deg,
var(--code-highlight-doc-comment-color) 330deg 360deg
);
content: "";
position: absolute;
left: 2px;
top: 2px;
animation: rotating 1.25s linear infinite;
}
#search-tabs .count.loading::after,
.search-form.loading::after
{
width: 18px;
height: 18px;
border-radius: 18px;
background: conic-gradient(
var(--search-tab-button-background) 0deg 180deg,
transparent 270deg 360deg
);
content: "";
position: absolute;
left: 1px;
top: 1px;
animation: rotating 0.66s linear infinite;
}
.search-form.loading::before {
left: auto;
right: 9px;
top: 8px;
}
.search-form.loading::after {
left: auto;
right: 8px;
top: 8px;
}
#search .error code {
@ -1974,7 +2052,7 @@ a.tooltip:hover::after {
border-bottom: 1px solid var(--border-color);
}
#settings-menu, #help-button, button#toggle-all-docs {
#search-button, .settings-menu, .help-menu, button#toggle-all-docs {
margin-left: var(--button-left-margin);
display: flex;
line-height: 1.25;
@ -1989,69 +2067,100 @@ a.tooltip:hover::after {
display: flex;
margin-right: 4px;
position: fixed;
margin-top: 25px;
left: 6px;
height: 34px;
width: 34px;
z-index: calc(var(--desktop-sidebar-z-index) + 1);
}
.hide-sidebar #sidebar-button {
left: 6px;
background-color: var(--main-background-color);
z-index: 1;
}
.src #sidebar-button {
margin-top: 0;
top: 8px;
left: 8px;
z-index: calc(var(--desktop-sidebar-z-index) + 1);
border-color: var(--border-color);
}
.hide-sidebar .src #sidebar-button {
position: static;
}
#settings-menu > a, #help-button > a, #sidebar-button > a, button#toggle-all-docs {
#search-button > a,
.settings-menu > a,
.help-menu > a,
#sidebar-button > a,
button#toggle-all-docs {
display: flex;
align-items: center;
justify-content: center;
flex-direction: column;
}
#settings-menu > a, #help-button > a, button#toggle-all-docs {
#search-button > a,
.settings-menu > a,
.help-menu > a,
button#toggle-all-docs {
border: 1px solid transparent;
border-radius: var(--button-border-radius);
color: var(--main-color);
}
#settings-menu > a, #help-button > a, button#toggle-all-docs {
#search-button > a, .settings-menu > a, .help-menu > a, button#toggle-all-docs {
width: 80px;
border-radius: var(--toolbar-button-border-radius);
}
#settings-menu > a, #help-button > a {
#search-button > a, .settings-menu > a, .help-menu > a {
min-width: 0;
}
#sidebar-button > a {
background-color: var(--sidebar-background-color);
border: solid 1px transparent;
border-radius: var(--button-border-radius);
background-color: var(--button-background-color);
width: 33px;
}
#sidebar-button > a:hover, #sidebar-button > a:focus-visible {
background-color: var(--main-background-color);
.src #sidebar-button > a {
background-color: var(--sidebar-background-color);
border-color: var(--border-color);
}
#settings-menu > a:hover, #settings-menu > a:focus-visible,
#help-button > a:hover, #help-button > a:focus-visible,
#search-button > a:hover, #search-button > a:focus-visible,
.settings-menu > a:hover, .settings-menu > a:focus-visible,
.help-menu > a:hover, #help-menu > a:focus-visible,
#sidebar-button > a:hover, #sidebar-button > a:focus-visible,
#copy-path:hover, #copy-path:focus-visible,
button#toggle-all-docs:hover, button#toggle-all-docs:focus-visible {
border-color: var(--settings-button-border-focus);
text-decoration: none;
}
#settings-menu > a::before {
#search-button > a::before {
/* Magnifying glass */
content: url('data:image/svg+xml,<svg xmlns="http://www.w3.org/2000/svg" \
width="18" height="18" viewBox="0 0 16 16">\
<circle r="5" cy="7" cx="7" style="fill:none;stroke:black;stroke-width:3"/><path \
d="M14.5,14.5 12,12" style="fill:none;stroke:black;stroke-width:3;stroke-linecap:round">\
</path><desc>Search</desc>\
</svg>');
width: 18px;
height: 18px;
filter: var(--settings-menu-filter);
}
.settings-menu > a::before {
/* Wheel <https://www.svgrepo.com/svg/384069/settings-cog-gear> */
content: url('data:image/svg+xml,<svg width="18" height="18" viewBox="0 0 12 12" \
enable-background="new 0 0 12 12" xmlns="http://www.w3.org/2000/svg">\
<path d="M10.25,6c0-0.1243286-0.0261841-0.241333-0.0366211-0.362915l1.6077881-1.5545654l\
-1.25-2.1650391 c0,0-1.2674561,0.3625488-2.1323853,0.6099854c-0.2034912-0.1431885-0.421875\
-0.2639771-0.6494751-0.3701782L7.25,0h-2.5 c0,0-0.3214111,1.2857666-0.5393066,2.1572876\
C3.9830933,2.2634888,3.7647095,2.3842773,3.5612183,2.5274658L1.428833,1.9174805 \
l-1.25,2.1650391c0,0,0.9641113,0.9321899,1.6077881,1.5545654C1.7761841,5.758667,\
1.75,5.8756714,1.75,6 s0.0261841,0.241333,0.0366211,0.362915L0.178833,7.9174805l1.25,\
2.1650391l2.1323853-0.6099854 c0.2034912,0.1432495,0.421875,0.2639771,0.6494751,0.3701782\
L4.75,12h2.5l0.5393066-2.1572876 c0.2276001-0.1062012,0.4459839-0.2269287,0.6494751\
-0.3701782l2.1323853,0.6099854l1.25-2.1650391L10.2133789,6.362915 C10.2238159,6.241333,\
10.25,6.1243286,10.25,6z M6,7.5C5.1715698,7.5,4.5,6.8284302,4.5,6S5.1715698,4.5,6,4.5S7.5\
,5.1715698,7.5,6 S6.8284302,7.5,6,7.5z" fill="black"/></svg>');
<path d="m4.75 0s-0.32117 1.286-0.53906 2.1576c-0.2276 0.1062-0.44625 \
0.2266-0.64974 0.36979l-2.1328-0.60938-1.25 2.1641s0.9644 0.93231 1.6081 1.5547c-0.010437 \
0.12158-0.036458 0.23895-0.036458 0.36328s0.026021 0.2417 0.036458 0.36328l-1.6081 \
1.5547 1.25 2.1641 2.1328-0.60937c0.20349 0.14325 0.42214 0.26359 0.64974 0.36979l0.53906 \
2.1576h2.5l0.53906-2.1576c0.2276-0.1062 0.44625-0.22654 0.64974-0.36979l2.1328 0.60937 \
1.25-2.1641-1.6081-1.5547c0.010437-0.12158 0.036458-0.23895 \
0.036458-0.36328s-0.02602-0.2417-0.03646-0.36328l1.6081-1.5547-1.25-2.1641s-1.2679 \
0.36194-2.1328 0.60938c-0.20349-0.14319-0.42214-0.26359-0.64974-0.36979l-0.53906-2.1576\
zm1.25 2.5495c1.9058-2.877e-4 3.4508 1.5447 3.4505 3.4505 2.877e-4 1.9058-1.5447 3.4508-3.4505 \
3.4505-1.9058 2.877e-4 -3.4508-1.5447-3.4505-3.4505-2.877e-4 -1.9058 1.5447-3.4508 \
3.4505-3.4505z" fill="black"/>\
<circle cx="6" cy="6" r="1.75" fill="none" stroke="black" stroke-width="1"/></svg>');
width: 18px;
height: 18px;
filter: var(--settings-menu-filter);
@ -2067,36 +2176,51 @@ button#toggle-all-docs::before {
filter: var(--settings-menu-filter);
}
button#toggle-all-docs.will-expand::before {
/* Custom arrow icon */
content: url('data:image/svg+xml,<svg width="18" height="18" viewBox="0 0 12 12" \
enable-background="new 0 0 12 12" xmlns="http://www.w3.org/2000/svg">\
<path d="M2,5l4,-4l4,4M2,7l4,4l4,-4" stroke="black" fill="none" stroke-width="2px"/></svg>');
}
#help-button > a::before {
/* Question mark with circle */
content: url('data:image/svg+xml,<svg width="18" height="18" viewBox="0 0 12 12" \
enable-background="new 0 0 12 12" xmlns="http://www.w3.org/2000/svg" fill="none">\
<circle r="5.25" cx="6" cy="6" stroke-width="1.25" stroke="black"/>\
<text x="6" y="7" style="font:8px sans-serif;font-weight:1000" text-anchor="middle" \
dominant-baseline="middle" fill="black">?</text></svg>');
.help-menu > a::before {
/* Question mark with "circle" */
content: url('data:image/svg+xml,\
<svg width="18" height="18" enable-background="new 0 0 12 12" fill="none" \
version="1.1" viewBox="0 0 12 12" xmlns="http://www.w3.org/2000/svg"> \
<path d="m6.007 0.6931c2.515 0 5.074 1.908 5.074 5.335 0 3.55-2.567 5.278-5.088 \
5.278-2.477 0-5.001-1.742-5.001-5.3 0-3.38 2.527-5.314 5.014-5.314z" stroke="black" \
stroke-width="1.5"/>\
<path d="m5.999 7.932c0.3111 0 0.7062 0.2915 0.7062 0.7257 0 0.5458-0.3951 \
0.8099-0.7081 0.8099-0.2973 0-0.7023-0.266-0.7023-0.7668 0-0.4695 0.3834-0.7688 \
0.7042-0.7688z" fill="black"/>\
<path d="m4.281 3.946c0.0312-0.03057 0.06298-0.06029 0.09528-0.08916 0.4833-0.432 1.084-0.6722 \
1.634-0.6722 1.141 0 1.508 1.043 1.221 1.621-0.2753 0.5542-1.061 0.5065-1.273 \
1.595-0.05728 0.2939 0.0134 0.9812 0.0134 1.205" fill="none" stroke="black" \
stroke-width="1.25"/>\
</svg>');
width: 18px;
height: 18px;
filter: var(--settings-menu-filter);
}
/* design hack to cope with "Help" being far shorter than "Settings" etc */
.help-menu > a {
width: 74px;
}
.help-menu > a > .label {
padding-right: 1px;
}
#toggle-all-docs:not(.will-expand) > .label {
padding-left: 1px;
}
#search-button > a::before,
button#toggle-all-docs::before,
#help-button > a::before,
#settings-menu > a::before {
.help-menu > a::before,
.settings-menu > a::before {
filter: var(--settings-menu-filter);
margin: 8px;
}
@media not (pointer: coarse) {
#search-button > a:hover::before,
button#toggle-all-docs:hover::before,
#help-button > a:hover::before,
#settings-menu > a:hover::before {
.help-menu > a:hover::before,
.settings-menu > a:hover::before {
filter: var(--settings-menu-hover-filter);
}
}
@ -2122,9 +2246,9 @@ rustdoc-toolbar span.label {
/* sidebar resizer image */
content: url('data:image/svg+xml,<svg xmlns="http://www.w3.org/2000/svg" viewBox="0 0 22 22" \
fill="none" stroke="black">\
<rect x="1" y="1" width="20" height="20" ry="1.5" stroke-width="1.5" stroke="%23777"/>\
<circle cx="4.375" cy="4.375" r="1" stroke-width=".75"/>\
<path d="m7.6121 3v16 M5.375 7.625h-2 m2 3h-2 m2 3h-2" stroke-width="1.25"/></svg>');
<rect x="1" y="2" width="20" height="18" ry="1.5" stroke-width="1.5" stroke="%23777"/>\
<circle cx="4.375" cy="5.375" r="1" stroke-width=".75"/>\
<path d="m7.6121 4v14 M5.375 8.625h-2 m2 3h-2 m2 3h-2" stroke-width="1.25"/></svg>');
width: 22px;
height: 22px;
}
@ -2137,7 +2261,8 @@ rustdoc-toolbar span.label {
margin-left: 10px;
padding: 0;
padding-left: 2px;
border: 0;
border: solid 1px transparent;
border-radius: var(--button-border-radius);
font-size: 0;
}
#copy-path::before {
@ -2159,7 +2284,7 @@ rustdoc-toolbar span.label {
transform: rotate(360deg);
}
}
#settings-menu.rotate > a img {
.settings-menu.rotate > a img {
animation: rotating 2s linear infinite;
}
@ -2402,6 +2527,9 @@ However, it's not needed with smaller screen width because the doc/code block is
opacity: 0.75;
filter: var(--mobile-sidebar-menu-filter);
}
.src #sidebar-button > a:hover {
background: var(--main-background-color);
}
.sidebar-menu-toggle:hover::before,
.sidebar-menu-toggle:active::before,
.sidebar-menu-toggle:focus::before {
@ -2410,8 +2538,8 @@ However, it's not needed with smaller screen width because the doc/code block is
/* Media Queries */
/* Make sure all the buttons line wrap at the same time */
@media (max-width: 850px) {
/* Make sure all the buttons line wrap at the same time */
#search-tabs .count {
display: block;
}
@ -2421,6 +2549,81 @@ However, it's not needed with smaller screen width because the doc/code block is
.side-by-side > div {
width: auto;
}
/* Text label takes up too much space at this size. */
.main-heading {
grid-template-areas:
"main-heading-breadcrumbs main-heading-toolbar"
"main-heading-h1 main-heading-toolbar"
"main-heading-sub-heading main-heading-toolbar";
}
.search-results-main-heading {
display: grid;
grid-template-areas:
"main-heading-breadcrumbs main-heading-toolbar"
"main-heading-breadcrumbs main-heading-toolbar"
"main-heading-h1 main-heading-toolbar";
}
rustdoc-toolbar {
margin-top: -10px;
display: grid;
grid-template-areas:
"x settings help"
"search summary summary";
grid-template-rows: 35px 1fr;
}
.search-results-main-heading rustdoc-toolbar {
display: grid;
grid-template-areas:
"settings help"
"search search";
}
.search-results-main-heading #toggle-all-docs {
display: none;
}
rustdoc-toolbar .settings-menu span.label,
rustdoc-toolbar .help-menu span.label
{
display: none;
}
rustdoc-toolbar .settings-menu {
grid-area: settings;
}
rustdoc-toolbar .help-menu {
grid-area: help;
}
rustdoc-toolbar .settings-menu {
grid-area: settings;
}
rustdoc-toolbar #search-button {
grid-area: search;
}
rustdoc-toolbar #toggle-all-docs {
grid-area: summary;
}
rustdoc-toolbar .settings-menu,
rustdoc-toolbar .help-menu {
height: 35px;
}
rustdoc-toolbar .settings-menu > a,
rustdoc-toolbar .help-menu > a {
border-radius: 2px;
text-align: center;
width: 34px;
padding: 5px 0;
}
rustdoc-toolbar .settings-menu > a:before,
rustdoc-toolbar .help-menu > a:before {
margin: 0 4px;
}
#settings.popover {
top: 16px;
--popover-arrow-offset: 58px;
}
#help.popover {
top: 16px;
--popover-arrow-offset: 16px;
}
}
/*
@ -2435,7 +2638,7 @@ in src-script.js and main.js
/* When linking to an item with an `id` (for instance, by clicking a link in the sidebar,
or visiting a URL with a fragment like `#method.new`, we don't want the item to be obscured
by the topbar. Anything with an `id` gets scroll-margin-top equal to .mobile-topbar's size.
by the topbar. Anything with an `id` gets scroll-margin-top equal to rustdoc-topbar's size.
*/
*[id] {
scroll-margin-top: 45px;
@ -2451,18 +2654,32 @@ in src-script.js and main.js
visibility: hidden;
}
/* Text label takes up too much space at this size. */
rustdoc-toolbar span.label {
/* Pull settings and help up into the top bar. */
rustdoc-topbar span.label,
html:not(.hide-sidebar) .rustdoc:not(.src) rustdoc-toolbar .settings-menu > a,
html:not(.hide-sidebar) .rustdoc:not(.src) rustdoc-toolbar .help-menu > a
{
display: none;
}
#settings-menu > a, #help-button > a, button#toggle-all-docs {
rustdoc-topbar .settings-menu > a,
rustdoc-topbar .help-menu > a {
width: 33px;
line-height: 0;
}
rustdoc-topbar .settings-menu > a:hover,
rustdoc-topbar .help-menu > a:hover {
border: none;
background: var(--main-background-color);
border-radius: 0;
}
#settings.popover {
--popover-arrow-offset: 86px;
top: 32px;
--popover-arrow-offset: 48px;
}
#help.popover {
--popover-arrow-offset: 48px;
top: 32px;
--popover-arrow-offset: 12px;
}
.rustdoc {
@ -2471,13 +2688,13 @@ in src-script.js and main.js
display: block;
}
main {
html:not(.hide-sidebar) main {
padding-left: 15px;
padding-top: 0px;
}
/* Hide the logo and item name from the sidebar. Those are displayed
in the mobile-topbar instead. */
in the rustdoc-topbar instead. */
.sidebar .logo-container,
.sidebar .location,
.sidebar-resizer {
@ -2510,6 +2727,9 @@ in src-script.js and main.js
height: 100vh;
border: 0;
}
html .src main {
padding: 18px 0;
}
.src .search-form {
margin-left: 40px;
}
@ -2529,9 +2749,9 @@ in src-script.js and main.js
left: 0;
}
.mobile-topbar h2 {
rustdoc-topbar > h2 {
padding-bottom: 0;
margin: auto 0.5em auto auto;
margin: auto;
overflow: hidden;
/* Rare exception to specifying font sizes in rem. Since the topbar
height is specified in pixels, this also has to be specified in
@ -2540,32 +2760,34 @@ in src-script.js and main.js
font-size: 24px;
white-space: nowrap;
text-overflow: ellipsis;
text-align: center;
}
.mobile-topbar .logo-container > img {
rustdoc-topbar .logo-container > img {
max-width: 35px;
max-height: 35px;
margin: 5px 0 5px 20px;
}
.mobile-topbar {
rustdoc-topbar {
display: flex;
flex-direction: row;
position: sticky;
z-index: 10;
font-size: 2rem;
height: 45px;
width: 100%;
left: 0;
top: 0;
}
.hide-sidebar .mobile-topbar {
.hide-sidebar rustdoc-topbar {
display: none;
}
.sidebar-menu-toggle {
width: 45px;
/* prevent flexbox shrinking */
width: 41px;
min-width: 41px;
border: none;
line-height: 0;
}
@ -2591,9 +2813,13 @@ in src-script.js and main.js
#sidebar-button > a::before {
content: url('data:image/svg+xml,<svg xmlns="http://www.w3.org/2000/svg" \
viewBox="0 0 22 22" fill="none" stroke="black">\
<rect x="1" y="1" width="20" height="20" ry="1.5" stroke-width="1.5" stroke="%23777"/>\
<circle cx="4.375" cy="4.375" r="1" stroke-width=".75"/>\
<path d="m3 7.375h16m0-3h-4" stroke-width="1.25"/></svg>');
<rect x="1" y="2" width="20" height="18" ry="1.5" stroke-width="1.5" stroke="%23777"/>\
<g fill="black" stroke="none">\
<circle cx="4.375" cy="5.375" r="1" stroke-width=".75"/>\
<circle cx="17.375" cy="5.375" r="1" stroke-width=".75"/>\
<circle cx="14.375" cy="5.375" r="1" stroke-width=".75"/>\
</g>\
<path d="m3 8.375h16" stroke-width="1.25"/></svg>');
width: 22px;
height: 22px;
}
@ -3283,7 +3509,7 @@ Original by Dempfi (https://github.com/dempfi/ayu)
border-bottom: 1px solid rgba(242, 151, 24, 0.3);
}
:root[data-theme="ayu"] #settings-menu > a img,
:root[data-theme="ayu"] .settings-menu > a img,
:root[data-theme="ayu"] #sidebar-button > a::before {
filter: invert(100);
}

View file

@ -54,23 +54,6 @@ function showMain() {
window.rootPath = getVar("root-path");
window.currentCrate = getVar("current-crate");
function setMobileTopbar() {
// FIXME: It would be nicer to generate this text content directly in HTML,
// but with the current code it's hard to get the right information in the right place.
const mobileTopbar = document.querySelector(".mobile-topbar");
const locationTitle = document.querySelector(".sidebar h2.location");
if (mobileTopbar) {
const mobileTitle = document.createElement("h2");
mobileTitle.className = "location";
if (hasClass(document.querySelector(".rustdoc"), "crate")) {
mobileTitle.innerHTML = `Crate <a href="#">${window.currentCrate}</a>`;
} else if (locationTitle) {
mobileTitle.innerHTML = locationTitle.innerHTML;
}
mobileTopbar.appendChild(mobileTitle);
}
}
/**
* Gets the human-readable string for the virtual-key code of the
* given KeyboardEvent, ev.
@ -84,6 +67,7 @@ function setMobileTopbar() {
* So I guess you could say things are getting pretty interoperable.
*
* @param {KeyboardEvent} ev
* @returns {string}
*/
function getVirtualKey(ev) {
if ("key" in ev && typeof ev.key !== "undefined") {
@ -98,18 +82,8 @@ function getVirtualKey(ev) {
}
const MAIN_ID = "main-content";
const SETTINGS_BUTTON_ID = "settings-menu";
const ALTERNATIVE_DISPLAY_ID = "alternative-display";
const NOT_DISPLAYED_ID = "not-displayed";
const HELP_BUTTON_ID = "help-button";
function getSettingsButton() {
return document.getElementById(SETTINGS_BUTTON_ID);
}
function getHelpButton() {
return document.getElementById(HELP_BUTTON_ID);
}
// Returns the current URL without any query parameter or hash.
function getNakedUrl() {
@ -174,7 +148,7 @@ function getNotDisplayedElem() {
* contains the displayed element (there can be only one at the same time!). So basically, we switch
* elements between the two `<section>` elements.
*
* @param {HTMLElement|null} elemToDisplay
* @param {Element|null} elemToDisplay
*/
function switchDisplayedElement(elemToDisplay) {
const el = getAlternativeDisplayElem();
@ -239,14 +213,14 @@ function preLoadCss(cssUrl) {
document.head.append(script);
}
const settingsButton = getSettingsButton();
if (settingsButton) {
settingsButton.onclick = event => {
onEachLazy(document.querySelectorAll(".settings-menu"), settingsMenu => {
/** @param {MouseEvent} event */
settingsMenu.querySelector("a").onclick = event => {
if (event.ctrlKey || event.altKey || event.metaKey) {
return;
}
window.hideAllModals(false);
addClass(getSettingsButton(), "rotate");
addClass(settingsMenu, "rotate");
event.preventDefault();
// Sending request for the CSS and the JS files at the same time so it will
// hopefully be loaded when the JS will generate the settings content.
@ -268,15 +242,42 @@ function preLoadCss(cssUrl) {
}
}, 0);
};
}
});
window.searchState = {
rustdocToolbar: document.querySelector("rustdoc-toolbar"),
loadingText: "Loading search results...",
// This will always be an HTMLInputElement, but tsc can't see that
// @ts-expect-error
input: document.getElementsByClassName("search-input")[0],
outputElement: () => {
inputElement: () => {
let el = document.getElementsByClassName("search-input")[0];
if (!el) {
const out = nonnull(nonnull(window.searchState.outputElement()).parentElement);
const hdr = document.createElement("div");
hdr.className = "main-heading search-results-main-heading";
const params = window.searchState.getQueryStringParams();
const autofocusParam = params.search === "" ? "autofocus" : "";
hdr.innerHTML = `<nav class="sub">
<form class="search-form loading">
<span></span> <!-- This empty span is a hacky fix for Safari: see #93184 -->
<input
${autofocusParam}
class="search-input"
name="search"
aria-label="Run search in the documentation"
autocomplete="off"
spellcheck="false"
placeholder="Type S or / to search, ? for more options…"
type="search">
</form>
</nav><div class="search-switcher"></div>`;
out.insertBefore(hdr, window.searchState.outputElement());
el = document.getElementsByClassName("search-input")[0];
}
if (el instanceof HTMLInputElement) {
return el;
}
return null;
},
containerElement: () => {
let el = document.getElementById("search");
if (!el) {
el = document.createElement("section");
@ -285,6 +286,19 @@ function preLoadCss(cssUrl) {
}
return el;
},
outputElement: () => {
const container = window.searchState.containerElement();
if (!container) {
return null;
}
let el = container.querySelector(".search-out");
if (!el) {
el = document.createElement("div");
el.className = "search-out";
container.appendChild(el);
}
return el;
},
title: document.title,
titleBeforeSearch: document.title,
timeout: null,
@ -303,25 +317,52 @@ function preLoadCss(cssUrl) {
}
},
isDisplayed: () => {
const outputElement = window.searchState.outputElement();
return !!outputElement &&
!!outputElement.parentElement &&
outputElement.parentElement.id === ALTERNATIVE_DISPLAY_ID;
const container = window.searchState.containerElement();
if (!container) {
return false;
}
return !!container.parentElement && container.parentElement.id ===
ALTERNATIVE_DISPLAY_ID;
},
// Sets the focus on the search bar at the top of the page
focus: () => {
window.searchState.input && window.searchState.input.focus();
const inputElement = window.searchState.inputElement();
window.searchState.showResults();
if (inputElement) {
inputElement.focus();
// Avoid glitch if something focuses the search button after clicking.
requestAnimationFrame(() => inputElement.focus());
}
},
// Removes the focus from the search bar.
defocus: () => {
window.searchState.input && window.searchState.input.blur();
nonnull(window.searchState.inputElement()).blur();
},
showResults: search => {
if (search === null || typeof search === "undefined") {
search = window.searchState.outputElement();
toggle: () => {
if (window.searchState.isDisplayed()) {
window.searchState.defocus();
window.searchState.hideResults();
} else {
window.searchState.focus();
}
switchDisplayedElement(search);
},
showResults: () => {
document.title = window.searchState.title;
if (window.searchState.isDisplayed()) {
return;
}
const search = window.searchState.containerElement();
switchDisplayedElement(search);
const btn = document.querySelector("#search-button a");
if (browserSupportsHistoryApi() && btn instanceof HTMLAnchorElement &&
window.searchState.getQueryStringParams().search === undefined
) {
history.pushState(null, "", btn.href);
}
const btnLabel = document.querySelector("#search-button a span.label");
if (btnLabel) {
btnLabel.innerHTML = "Exit";
}
},
removeQueryParameters: () => {
// We change the document title.
@ -334,6 +375,10 @@ function preLoadCss(cssUrl) {
switchDisplayedElement(null);
// We also remove the query parameter from the URL.
window.searchState.removeQueryParameters();
const btnLabel = document.querySelector("#search-button a span.label");
if (btnLabel) {
btnLabel.innerHTML = "Search";
}
},
getQueryStringParams: () => {
/** @type {Object.<any, string>} */
@ -348,11 +393,11 @@ function preLoadCss(cssUrl) {
return params;
},
setup: () => {
const search_input = window.searchState.input;
let searchLoaded = false;
const search_input = window.searchState.inputElement();
if (!search_input) {
return;
}
let searchLoaded = false;
// If you're browsing the nightly docs, the page might need to be refreshed for the
// search to work because the hash of the JS scripts might have changed.
function sendSearchForm() {
@ -363,21 +408,102 @@ function preLoadCss(cssUrl) {
if (!searchLoaded) {
searchLoaded = true;
// @ts-expect-error
loadScript(getVar("static-root-path") + getVar("search-js"), sendSearchForm);
loadScript(resourcePath("search-index", ".js"), sendSearchForm);
window.rr_ = data => {
// @ts-expect-error
window.searchIndex = data;
};
if (!window.StringdexOnload) {
window.StringdexOnload = [];
}
window.StringdexOnload.push(() => {
loadScript(
// @ts-expect-error
getVar("static-root-path") + getVar("search-js"),
sendSearchForm,
);
});
// @ts-expect-error
loadScript(getVar("static-root-path") + getVar("stringdex-js"), sendSearchForm);
loadScript(resourcePath("search.index/root", ".js"), sendSearchForm);
}
}
search_input.addEventListener("focus", () => {
window.searchState.origPlaceholder = search_input.placeholder;
search_input.placeholder = "Type your search here.";
loadSearch();
});
if (search_input.value !== "") {
loadSearch();
const btn = document.getElementById("search-button");
if (btn) {
btn.onclick = event => {
if (event.ctrlKey || event.altKey || event.metaKey) {
return;
}
event.preventDefault();
window.searchState.toggle();
loadSearch();
};
}
// Push and pop states are used to add search results to the browser
// history.
if (browserSupportsHistoryApi()) {
// Store the previous <title> so we can revert back to it later.
const previousTitle = document.title;
window.addEventListener("popstate", e => {
const params = window.searchState.getQueryStringParams();
// Revert to the previous title manually since the History
// API ignores the title parameter.
document.title = previousTitle;
// Synchronize search bar with query string state and
// perform the search. This will empty the bar if there's
// nothing there, which lets you really go back to a
// previous state with nothing in the bar.
const inputElement = window.searchState.inputElement();
if (params.search !== undefined && inputElement !== null) {
loadSearch();
inputElement.value = params.search;
// Some browsers fire "onpopstate" for every page load
// (Chrome), while others fire the event only when actually
// popping a state (Firefox), which is why search() is
// called both here and at the end of the startSearch()
// function.
e.preventDefault();
window.searchState.showResults();
if (params.search === "") {
window.searchState.focus();
}
} else {
// When browsing back from search results the main page
// visibility must be reset.
window.searchState.hideResults();
}
});
}
// This is required in firefox to avoid this problem: Navigating to a search result
// with the keyboard, hitting enter, and then hitting back would take you back to
// the doc page, rather than the search that should overlay it.
// This was an interaction between the back-forward cache and our handlers
// that try to sync state between the URL and the search input. To work around it,
// do a small amount of re-init on page show.
window.onpageshow = () => {
const inputElement = window.searchState.inputElement();
const qSearch = window.searchState.getQueryStringParams().search;
if (qSearch !== undefined && inputElement !== null) {
if (inputElement.value === "") {
inputElement.value = qSearch;
}
window.searchState.showResults();
if (qSearch === "") {
loadSearch();
window.searchState.focus();
}
} else {
window.searchState.hideResults();
}
};
const params = window.searchState.getQueryStringParams();
if (params.search !== undefined) {
window.searchState.setLoadingSearch();
@ -386,13 +512,9 @@ function preLoadCss(cssUrl) {
},
setLoadingSearch: () => {
const search = window.searchState.outputElement();
if (!search) {
return;
}
search.innerHTML = "<h3 class=\"search-loading\">" +
window.searchState.loadingText +
"</h3>";
window.searchState.showResults(search);
nonnull(search).innerHTML = "<h3 class=\"search-loading\">" +
window.searchState.loadingText + "</h3>";
window.searchState.showResults();
},
descShards: new Map(),
loadDesc: async function({descShard, descIndex}) {
@ -1500,15 +1622,13 @@ function preLoadCss(cssUrl) {
// @ts-expect-error
function helpBlurHandler(event) {
// @ts-expect-error
if (!getHelpButton().contains(document.activeElement) &&
// @ts-expect-error
!getHelpButton().contains(event.relatedTarget) &&
// @ts-expect-error
!getSettingsButton().contains(document.activeElement) &&
// @ts-expect-error
!getSettingsButton().contains(event.relatedTarget)
) {
const isInPopover = onEachLazy(
document.querySelectorAll(".settings-menu, .help-menu"),
menu => {
return menu.contains(document.activeElement) || menu.contains(event.relatedTarget);
},
);
if (!isInPopover) {
window.hidePopoverMenus();
}
}
@ -1529,7 +1649,7 @@ function preLoadCss(cssUrl) {
["&#9166;", "Go to active search result"],
["+", "Expand all sections"],
["-", "Collapse all sections"],
// for the sake of brevity, we don't say "inherint impl blocks",
        // for the sake of brevity, we don't say "inherent impl blocks",
// although that would be more correct,
// since trait impl blocks are collapsed by -
["_", "Collapse all sections, including impl blocks"],
@ -1571,10 +1691,9 @@ function preLoadCss(cssUrl) {
const container = document.createElement("div");
if (!isHelpPage) {
container.className = "popover";
container.className = "popover content";
}
container.id = "help";
container.style.display = "none";
const side_by_side = document.createElement("div");
side_by_side.className = "side-by-side";
@ -1590,17 +1709,16 @@ function preLoadCss(cssUrl) {
help_section.appendChild(container);
// @ts-expect-error
document.getElementById("main-content").appendChild(help_section);
container.style.display = "block";
} else {
const help_button = getHelpButton();
// @ts-expect-error
help_button.appendChild(container);
container.onblur = helpBlurHandler;
// @ts-expect-error
help_button.onblur = helpBlurHandler;
// @ts-expect-error
help_button.children[0].onblur = helpBlurHandler;
onEachLazy(document.getElementsByClassName("help-menu"), menu => {
if (menu.offsetWidth !== 0) {
menu.appendChild(container);
container.onblur = helpBlurHandler;
menu.onblur = helpBlurHandler;
menu.children[0].onblur = helpBlurHandler;
return true;
}
});
}
return container;
@ -1621,80 +1739,57 @@ function preLoadCss(cssUrl) {
* Hide all the popover menus.
*/
window.hidePopoverMenus = () => {
onEachLazy(document.querySelectorAll("rustdoc-toolbar .popover"), elem => {
onEachLazy(document.querySelectorAll(".settings-menu .popover"), elem => {
elem.style.display = "none";
});
const button = getHelpButton();
if (button) {
removeClass(button, "help-open");
}
onEachLazy(document.querySelectorAll(".help-menu .popover"), elem => {
elem.parentElement.removeChild(elem);
});
};
/**
* Returns the help menu element (not the button).
*
* @param {boolean} buildNeeded - If this argument is `false`, the help menu element won't be
* built if it doesn't exist.
*
* @return {HTMLElement}
*/
function getHelpMenu(buildNeeded) {
// @ts-expect-error
let menu = getHelpButton().querySelector(".popover");
if (!menu && buildNeeded) {
menu = buildHelpMenu();
}
// @ts-expect-error
return menu;
}
/**
* Show the help popup menu.
*/
function showHelp() {
window.hideAllModals(false);
// Prevent `blur` events from being dispatched as a result of closing
// other modals.
const button = getHelpButton();
addClass(button, "help-open");
// @ts-expect-error
button.querySelector("a").focus();
const menu = getHelpMenu(true);
if (menu.style.display === "none") {
// @ts-expect-error
window.hideAllModals();
menu.style.display = "";
}
onEachLazy(document.querySelectorAll(".help-menu a"), menu => {
if (menu.offsetWidth !== 0) {
menu.focus();
return true;
}
});
buildHelpMenu();
}
const helpLink = document.querySelector(`#${HELP_BUTTON_ID} > a`);
if (isHelpPage) {
buildHelpMenu();
} else if (helpLink) {
helpLink.addEventListener("click", event => {
// By default, have help button open docs in a popover.
            // If user clicks with a modifier key, though, use default browser behavior,
// probably opening in a new window or tab.
if (!helpLink.contains(helpLink) ||
// @ts-expect-error
event.ctrlKey ||
// @ts-expect-error
event.altKey ||
// @ts-expect-error
event.metaKey) {
return;
}
event.preventDefault();
const menu = getHelpMenu(true);
const shouldShowHelp = menu.style.display === "none";
if (shouldShowHelp) {
showHelp();
} else {
window.hidePopoverMenus();
}
} else {
onEachLazy(document.querySelectorAll(".help-menu > a"), helpLink => {
helpLink.addEventListener(
"click",
/** @param {MouseEvent} event */
event => {
// By default, have help button open docs in a popover.
                    // If user clicks with a modifier key, though, use default browser behavior,
// probably opening in a new window or tab.
if (event.ctrlKey ||
event.altKey ||
event.metaKey) {
return;
}
event.preventDefault();
if (document.getElementById("help")) {
window.hidePopoverMenus();
} else {
showHelp();
}
},
);
});
}
setMobileTopbar();
addSidebarItems();
addSidebarCrates();
onHashChange(null);
@ -1746,7 +1841,15 @@ function preLoadCss(cssUrl) {
// On larger, "desktop-sized" viewports (though that includes many
// tablets), it's fixed-position, appears in the left side margin,
// and it can be activated by resizing the sidebar into nothing.
const sidebarButton = document.getElementById("sidebar-button");
let sidebarButton = document.getElementById("sidebar-button");
const body = document.querySelector(".main-heading");
if (!sidebarButton && body) {
sidebarButton = document.createElement("div");
sidebarButton.id = "sidebar-button";
const path = `${window.rootPath}${window.currentCrate}/all.html`;
sidebarButton.innerHTML = `<a href="${path}" title="show sidebar"></a>`;
body.insertBefore(sidebarButton, body.firstChild);
}
if (sidebarButton) {
sidebarButton.addEventListener("click", e => {
removeClass(document.documentElement, "hide-sidebar");

View file

@ -2,6 +2,8 @@
// not put into the JavaScript we include as part of the documentation. It is used for
// type checking. See README.md in this directory for more info.
import { RoaringBitmap } from "./stringdex";
/* eslint-disable */
declare global {
/** Search engine data used by main.js and search.js */
@ -10,6 +12,17 @@ declare global {
declare function nonnull(x: T|null, msg: string|undefined);
/** Defined and documented in `storage.js` */
declare function nonundef(x: T|undefined, msg: string|undefined);
interface PromiseConstructor {
/**
* Polyfill
* @template T
*/
withResolvers: function(): {
"promise": Promise<T>,
"resolve": (function(T): void),
"reject": (function(any): void)
};
}
interface Window {
/** Make the current theme easy to find */
currentTheme: HTMLLinkElement|null;
@ -95,29 +108,28 @@ declare namespace rustdoc {
interface SearchState {
rustdocToolbar: HTMLElement|null;
loadingText: string;
input: HTMLInputElement|null;
inputElement: function(): HTMLInputElement|null;
containerElement: function(): Element|null;
title: string;
titleBeforeSearch: string;
timeout: number|null;
timeout: ReturnType<typeof setTimeout>|null;
currentTab: number;
focusedByTab: [number|null, number|null, number|null];
focusedByTab: [Element|null, Element|null, Element|null];
clearInputTimeout: function;
outputElement(): HTMLElement|null;
focus();
defocus();
// note: an optional param is not the same as
// a nullable/undef-able param.
showResults(elem?: HTMLElement|null);
removeQueryParameters();
hideResults();
getQueryStringParams(): Object.<any, string>;
origPlaceholder: string;
outputElement: function(): Element|null;
focus: function();
defocus: function();
toggle: function();
showResults: function();
removeQueryParameters: function();
hideResults: function();
getQueryStringParams: function(): Object.<any, string>;
setup: function();
setLoadingSearch();
descShards: Map<string, SearchDescShard[]>;
loadDesc: function({descShard: SearchDescShard, descIndex: number}): Promise<string|null>;
loadedDescShard(string, number, string);
isDisplayed(): boolean,
loadedDescShard: function(string, number, string);
isDisplayed: function(): boolean;
}
interface SearchDescShard {
@ -131,12 +143,13 @@ declare namespace rustdoc {
* A single parsed "atom" in a search query. For example,
*
* std::fmt::Formatter, Write -> Result<()>
*
* QueryElement {
* name: Result
* generics: [
* QueryElement
* name: ()
*
* QueryElement {
* name: Result
* generics: [
* QueryElement {
* name: ()
* }
* ]
* }
* QueryElement {
@ -156,14 +169,14 @@ declare namespace rustdoc {
normalizedPathLast: string,
generics: Array<QueryElement>,
bindings: Map<number, Array<QueryElement>>,
typeFilter: number|null,
typeFilter: number,
}
/**
* Same as QueryElement, but bindings and typeFilter support strings
*/
interface ParserQueryElement {
name: string|null,
name: string,
id: number|null,
fullPath: Array<string>,
pathWithoutLast: Array<string>,
@ -172,7 +185,7 @@ declare namespace rustdoc {
generics: Array<ParserQueryElement>,
bindings: Map<string, Array<ParserQueryElement>>,
bindingName: {name: string|null, generics: ParserQueryElement[]}|null,
typeFilter: number|string|null,
typeFilter: string|null,
}
/**
@ -215,35 +228,74 @@ declare namespace rustdoc {
/**
* An entry in the search index database.
*/
interface EntryData {
krate: number,
ty: ItemType,
modulePath: number?,
exactModulePath: number?,
parent: number?,
deprecated: boolean,
associatedItemDisambiguator: string?,
}
/**
* A path in the search index database
*/
interface PathData {
ty: ItemType,
modulePath: string,
exactModulePath: string?,
}
/**
* A function signature in the search index database
*
     * Note that some non-function items (e.g. constants, struct fields) have a function signature so they can appear in type-based search.
*/
interface FunctionData {
functionSignature: FunctionSearchType|null,
paramNames: string[],
elemCount: number,
}
/**
* A function signature in the search index database
*/
interface TypeData {
searchUnbox: boolean,
invertedFunctionSignatureIndex: RoaringBitmap[],
}
/**
* A search entry of some sort.
*/
interface Row {
crate: string,
descShard: SearchDescShard,
id: number,
// This is the name of the item. For doc aliases, if you want the name of the aliased
// item, take a look at `Row.original.name`.
crate: string,
ty: ItemType,
name: string,
normalizedName: string,
word: string,
paramNames: string[],
parent: ({ty: number, name: string, path: string, exactPath: string}|null|undefined),
path: string,
ty: number,
type: FunctionSearchType | null,
descIndex: number,
bitIndex: number,
implDisambiguator: String | null,
is_alias?: boolean,
original?: Row,
modulePath: string,
exactModulePath: string,
entry: EntryData?,
path: PathData?,
type: FunctionData?,
deprecated: boolean,
parent: { path: PathData, name: string}?,
}
type ItemType = 0 | 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9 | 10 |
11 | 12 | 13 | 14 | 15 | 16 | 17 | 18 | 19 | 20 |
21 | 22 | 23 | 24 | 25 | 26;
/**
* The viewmodel for the search engine results page.
*/
interface ResultsTable {
in_args: Array<ResultObject>,
returned: Array<ResultObject>,
others: Array<ResultObject>,
query: ParsedQuery,
in_args: AsyncGenerator<ResultObject>,
returned: AsyncGenerator<ResultObject>,
others: AsyncGenerator<ResultObject>,
query: ParsedQuery<rustdoc.ParserQueryElement>,
}
type Results = { max_dist?: number } & Map<number, ResultObject>
@ -252,25 +304,41 @@ declare namespace rustdoc {
* An annotated `Row`, used in the viewmodel.
*/
interface ResultObject {
desc: string,
desc: Promise<string|null>,
displayPath: string,
fullPath: string,
href: string,
id: number,
dist: number,
path_dist: number,
name: string,
normalizedName: string,
word: string,
index: number,
parent: (Object|undefined),
path: string,
ty: number,
parent: ({
path: string,
exactPath: string,
name: string,
ty: number,
}|undefined),
type?: FunctionSearchType,
paramNames?: string[],
displayTypeSignature: Promise<rustdoc.DisplayTypeSignature> | null,
item: Row,
dontValidate?: boolean,
is_alias: boolean,
alias?: string,
}
/**
* An annotated `Row`, used in the viewmodel.
*/
interface PlainResultObject {
id: number,
dist: number,
path_dist: number,
index: number,
elems: rustdoc.QueryElement[],
returned: rustdoc.QueryElement[],
is_alias: boolean,
alias?: string,
original?: rustdoc.Rlow,
}
/**
@ -364,7 +432,19 @@ declare namespace rustdoc {
* Numeric IDs are *ONE-indexed* into the paths array (`p`). Zero is used as a sentinel for `null`
* because `null` is four bytes while `0` is one byte.
*/
type RawFunctionType = number | [number, Array<RawFunctionType>];
type RawFunctionType = number | [number, Array<RawFunctionType>] | [number, Array<RawFunctionType>, Array<[RawFunctionType, RawFunctionType[]]>];
/**
* Utility typedef for deserializing compact JSON.
*
* R is the required part, O is the optional part, which goes afterward.
* For example, `ArrayWithOptionals<[A, B], [C, D]>` matches
* `[A, B] | [A, B, C] | [A, B, C, D]`.
*/
type ArrayWithOptionals<R extends any[], O extends any[]> =
O extends [infer First, ...infer Rest] ?
R | ArrayWithOptionals<[...R, First], Rest> :
R;
/**
* The type signature entry in the decoded search index.
@ -382,8 +462,8 @@ declare namespace rustdoc {
*/
interface FunctionType {
id: null|number,
ty: number|null,
name?: string,
ty: ItemType,
name: string|null,
path: string|null,
exactPath: string|null,
unboxFlag: boolean,
@ -403,70 +483,6 @@ declare namespace rustdoc {
bindings: Map<number, FingerprintableType[]>;
};
/**
* The raw search data for a given crate. `n`, `t`, `d`, `i`, and `f`
* are arrays with the same length. `q`, `a`, and `c` use a sparse
* representation for compactness.
*
* `n[i]` contains the name of an item.
*
* `t[i]` contains the type of that item
* (as a string of characters that represent an offset in `itemTypes`).
*
* `d[i]` contains the description of that item.
*
* `q` contains the full paths of the items. For compactness, it is a set of
* (index, path) pairs used to create a map. If a given index `i` is
* not present, this indicates "same as the last index present".
*
* `i[i]` contains an item's parent, usually a module. For compactness,
* it is a set of indexes into the `p` array.
*
* `f` contains function signatures, or `0` if the item isn't a function.
* More information on how they're encoded can be found in rustc-dev-guide
*
* Functions are themselves encoded as arrays. The first item is a list of
* types representing the function's inputs, and the second list item is a list
* of types representing the function's output. Tuples are flattened.
* Types are also represented as arrays; the first item is an index into the `p`
* array, while the second is a list of types representing any generic parameters.
*
* b[i] contains an item's impl disambiguator. This is only present if an item
* is defined in an impl block and, the impl block's type has more than one associated
* item with the same name.
*
* `a` defines aliases with an Array of pairs: [name, offset], where `offset`
* points into the n/t/d/q/i/f arrays.
*
* `doc` contains the description of the crate.
*
* `p` is a list of path/type pairs. It is used for parents and function parameters.
* The first item is the type, the second is the name, the third is the visible path (if any) and
* the fourth is the canonical path used for deduplication (if any).
*
* `r` is the canonical path used for deduplication of re-exported items.
* It is not used for associated items like methods (that's the fourth element
* of `p`) but is used for modules items like free functions.
*
* `c` is an array of item indices that are deprecated.
*/
type RawSearchIndexCrate = {
doc: string,
a: { [key: string]: number[] },
n: Array<string>,
t: string,
D: string,
e: string,
q: Array<[number, string]>,
i: string,
f: string,
p: Array<[number, string] | [number, string, number] | [number, string, number, number] | [number, string, number, number, string]>,
b: Array<[number, String]>,
c: string,
r: Array<[number, number]>,
P: Array<[number, string]>,
};
type VlqData = VlqData[] | number;
/**

File diff suppressed because it is too large Load diff

View file

@ -1,25 +1,13 @@
// Local js definitions:
/* global getSettingValue, updateLocalStorage, updateTheme */
/* global addClass, removeClass, onEach, onEachLazy */
/* global MAIN_ID, getVar, getSettingsButton, getHelpButton, nonnull */
/* global MAIN_ID, getVar, nonnull */
"use strict";
(function() {
const isSettingsPage = window.location.pathname.endsWith("/settings.html");
/**
* @param {Element} elem
* @param {EventTarget|null} target
*/
function elemContainsTarget(elem, target) {
if (target instanceof Node) {
return elem.contains(target);
} else {
return false;
}
}
/**
* @overload {"theme"|"preferred-dark-theme"|"preferred-light-theme"}
* @param {string} settingName
@ -305,10 +293,12 @@
}
} else {
el.setAttribute("tabindex", "-1");
const settingsBtn = getSettingsButton();
if (settingsBtn !== null) {
settingsBtn.appendChild(el);
}
onEachLazy(document.querySelectorAll(".settings-menu"), menu => {
if (menu.offsetWidth !== 0) {
menu.appendChild(el);
return true;
}
});
}
return el;
}
@ -317,6 +307,15 @@
function displaySettings() {
settingsMenu.style.display = "";
onEachLazy(document.querySelectorAll(".settings-menu"), menu => {
if (menu.offsetWidth !== 0) {
if (!menu.contains(settingsMenu) && settingsMenu.parentElement) {
settingsMenu.parentElement.removeChild(settingsMenu);
menu.appendChild(settingsMenu);
}
return true;
}
});
onEachLazy(settingsMenu.querySelectorAll("input[type='checkbox']"), el => {
const val = getSettingValue(el.id);
const checked = val === "true";
@ -330,40 +329,37 @@
* @param {FocusEvent} event
*/
function settingsBlurHandler(event) {
const helpBtn = getHelpButton();
const settingsBtn = getSettingsButton();
const helpUnfocused = helpBtn === null ||
(!helpBtn.contains(document.activeElement) &&
!elemContainsTarget(helpBtn, event.relatedTarget));
const settingsUnfocused = settingsBtn === null ||
(!settingsBtn.contains(document.activeElement) &&
!elemContainsTarget(settingsBtn, event.relatedTarget));
if (helpUnfocused && settingsUnfocused) {
const isInPopover = onEachLazy(
document.querySelectorAll(".settings-menu, .help-menu"),
menu => {
return menu.contains(document.activeElement) || menu.contains(event.relatedTarget);
},
);
if (!isInPopover) {
window.hidePopoverMenus();
}
}
if (!isSettingsPage) {
// We replace the existing "onclick" callback.
// These elements must exist, as (outside of the settings page)
// `settings.js` is only loaded after the settings button is clicked.
const settingsButton = nonnull(getSettingsButton());
const settingsMenu = nonnull(document.getElementById("settings"));
settingsButton.onclick = event => {
if (elemContainsTarget(settingsMenu, event.target)) {
return;
}
event.preventDefault();
const shouldDisplaySettings = settingsMenu.style.display === "none";
onEachLazy(document.querySelectorAll(".settings-menu"), settingsButton => {
/** @param {MouseEvent} event */
settingsButton.querySelector("a").onclick = event => {
if (!(event.target instanceof Element) || settingsMenu.contains(event.target)) {
return;
}
event.preventDefault();
const shouldDisplaySettings = settingsMenu.style.display === "none";
window.hideAllModals(false);
if (shouldDisplaySettings) {
displaySettings();
}
};
settingsButton.onblur = settingsBlurHandler;
// the settings button should always have a link in it
nonnull(settingsButton.querySelector("a")).onblur = settingsBlurHandler;
window.hideAllModals(false);
if (shouldDisplaySettings) {
displaySettings();
}
};
settingsButton.onblur = settingsBlurHandler;
settingsButton.querySelector("a").onblur = settingsBlurHandler;
});
onEachLazy(settingsMenu.querySelectorAll("input"), el => {
el.onblur = settingsBlurHandler;
});
@ -377,6 +373,8 @@
if (!isSettingsPage) {
displaySettings();
}
removeClass(getSettingsButton(), "rotate");
onEachLazy(document.querySelectorAll(".settings-menu"), settingsButton => {
removeClass(settingsButton, "rotate");
});
}, 0);
})();

View file

@ -7,6 +7,7 @@
/**
* @import * as rustdoc from "./rustdoc.d.ts";
* @import * as stringdex from "./stringdex.d.ts";
*/
const builtinThemes = ["light", "dark", "ayu"];
@ -172,7 +173,7 @@ function updateLocalStorage(name, value) {
} else {
window.localStorage.setItem("rustdoc-" + name, value);
}
} catch (e) {
} catch {
// localStorage is not accessible, do nothing
}
}
@ -189,7 +190,7 @@ function updateLocalStorage(name, value) {
function getCurrentValue(name) {
try {
return window.localStorage.getItem("rustdoc-" + name);
} catch (e) {
} catch {
return null;
}
}
@ -375,32 +376,6 @@ window.addEventListener("pageshow", ev => {
// That's also why this is in storage.js and not main.js.
//
// [parser]: https://html.spec.whatwg.org/multipage/parsing.html
class RustdocSearchElement extends HTMLElement {
constructor() {
super();
}
connectedCallback() {
const rootPath = getVar("root-path");
const currentCrate = getVar("current-crate");
this.innerHTML = `<nav class="sub">
<form class="search-form">
<span></span> <!-- This empty span is a hacky fix for Safari - See #93184 -->
<div id="sidebar-button" tabindex="-1">
<a href="${rootPath}${currentCrate}/all.html" title="show sidebar"></a>
</div>
<input
class="search-input"
name="search"
aria-label="Run search in the documentation"
autocomplete="off"
spellcheck="false"
placeholder="Type S or / to search, ? for more options…"
type="search">
</form>
</nav>`;
}
}
window.customElements.define("rustdoc-search", RustdocSearchElement);
class RustdocToolbarElement extends HTMLElement {
constructor() {
super();
@ -411,11 +386,15 @@ class RustdocToolbarElement extends HTMLElement {
return;
}
const rootPath = getVar("root-path");
const currentUrl = window.location.href.split("?")[0].split("#")[0];
this.innerHTML = `
<div id="settings-menu" tabindex="-1">
<div id="search-button" tabindex="-1">
<a href="${currentUrl}?search="><span class="label">Search</span></a>
</div>
<div class="settings-menu" tabindex="-1">
<a href="${rootPath}settings.html"><span class="label">Settings</span></a>
</div>
<div id="help-button" tabindex="-1">
<div class="help-menu" tabindex="-1">
<a href="${rootPath}help.html"><span class="label">Help</span></a>
</div>
<button id="toggle-all-docs"
@ -424,3 +403,31 @@ class="label">Summary</span></button>`;
}
}
window.customElements.define("rustdoc-toolbar", RustdocToolbarElement);
class RustdocTopBarElement extends HTMLElement {
constructor() {
super();
}
connectedCallback() {
const rootPath = getVar("root-path");
const tmplt = document.createElement("template");
tmplt.innerHTML = `
<slot name="sidebar-menu-toggle"></slot>
<slot></slot>
<slot name="settings-menu"></slot>
<slot name="help-menu"></slot>
`;
const shadow = this.attachShadow({ mode: "open" });
shadow.appendChild(tmplt.content.cloneNode(true));
this.innerHTML += `
<button class="sidebar-menu-toggle" slot="sidebar-menu-toggle" title="show sidebar">
</button>
<div class="settings-menu" slot="settings-menu" tabindex="-1">
<a href="${rootPath}settings.html"><span class="label">Settings</span></a>
</div>
<div class="help-menu" slot="help-menu" tabindex="-1">
<a href="${rootPath}help.html"><span class="label">Help</span></a>
</div>
`;
}
}
window.customElements.define("rustdoc-topbar", RustdocTopBarElement);

View file

@ -0,0 +1,165 @@
export = stringdex;
declare namespace stringdex {
/**
* The client interface to Stringdex.
*/
interface Database {
getIndex(colname: string): SearchTree|undefined;
getData(colname: string): DataColumn|undefined;
}
/**
* A search index file.
*/
interface SearchTree {
trie(): Trie;
search(name: Uint8Array|string): Promise<Trie?>;
searchLev(name: Uint8Array|string): AsyncGenerator<Trie>;
}
/**
* A compressed node in the search tree.
*
* This object logically addresses two interleaved trees:
* a "prefix tree", and a "suffix tree". If you ask for
* generic matches, you get both, but if you ask for one
* that excludes suffix-only entries, you'll get prefixes
* alone.
*/
interface Trie {
matches(): RoaringBitmap;
substringMatches(): AsyncGenerator<RoaringBitmap>;
prefixMatches(): AsyncGenerator<RoaringBitmap>;
keys(): Uint8Array;
keysExcludeSuffixOnly(): Uint8Array;
children(): [number, Promise<Trie>][];
childrenExcludeSuffixOnly(): [number, Promise<Trie>][];
child(id: number): Promise<Trie>?;
}
/**
* The client interface to Stringdex.
*/
interface DataColumn {
isEmpty(id: number): boolean;
at(id: number): Promise<Uint8Array|undefined>;
length: number,
}
/**
* Callbacks for a host application and VFS backend.
*
     * These functions are called with mostly-raw data,
* except the JSONP wrapper is removed. For example,
* a file with the contents `rr_('{"A":"B"}')` should,
* after being pulled in, result in the `rr_` callback
* being invoked.
*
* The success callbacks don't need to supply the name of
* the file that succeeded, but, if you want successful error
* reporting, you'll need to remember which files are
* in flight and report the filename as the first parameter.
*/
interface Callbacks {
/**
* Load the root of the search database
* @param {string} dataString
*/
rr_: function(string);
err_rr_: function(any);
/**
* Load a nodefile in the search tree.
* A node file may contain multiple nodes;
* each node has five fields, separated by newlines.
* @param {string} inputBase64
*/
rn_: function(string);
err_rn_: function(string, any);
/**
* Load a database column partition from a string
* @param {string} dataString
*/
rd_: function(string);
err_rd_: function(string, any);
/**
* Load a database column partition from base64
* @param {string} dataString
*/
rb_: function(string);
err_rb_: function(string, any);
};
/**
* Hooks that a VFS layer must provide for stringdex to load data.
*
* When the root is loaded, the Callbacks object is provided. These
* functions should result in callback functions being called with
* the contents of the file, or in error callbacks being invoked with
* the failed-to-load filename.
*/
interface Hooks {
/**
* The first function invoked as part of loading a search database.
* This function must, eventually, invoke `rr_` with the string
* representation of the root file (the function call wrapper,
* `rr_('` and `')`, must be removed).
*
* The supplied callbacks object is used to feed search data back
* to the search engine core. You have to store it, so that
* loadTreeByHash and loadDataByNameAndHash can use it.
*
* If this fails, either throw an exception, or call `err_rr_`
* with the error object.
*/
loadRoot: function(Callbacks);
/**
* Load a subtree file from the search index.
*
* If this function succeeds, call `rn_` on the callbacks
* object. If it fails, call `err_rn_(hashHex, error)`.
*
* @param {string} hashHex
*/
loadTreeByHash: function(string);
/**
* Load a column partition from the search database.
*
* If this function succeeds, call `rd_` or `rb_` on the callbacks
* object. If it fails, call `err_rd_(hashHex, error)`. or `err_rb_`.
* To determine which one, the wrapping function call in the js file
* specifies it.
*
* @param {string} columnName
* @param {string} hashHex
*/
loadDataByNameAndHash: function(string, string);
};
class RoaringBitmap {
constructor(array: Uint8Array|null, start?: number);
static makeSingleton(number: number);
static everything(): RoaringBitmap;
static empty(): RoaringBitmap;
isEmpty(): boolean;
union(that: RoaringBitmap): RoaringBitmap;
intersection(that: RoaringBitmap): RoaringBitmap;
contains(number: number): boolean;
entries(): Generator<number>;
first(): number|null;
consumed_len_bytes: number;
};
type Stringdex = {
/**
* Initialize Stringdex with VFS hooks.
* Returns a database that you can use.
*/
loadDatabase: function(Hooks): Promise<Database>,
};
const Stringdex: Stringdex;
const RoaringBitmap: Class<stringdex.RoaringBitmap>;
}
declare global {
interface Window {
Stringdex: stringdex.Stringdex;
RoaringBitmap: Class<stringdex.RoaringBitmap>;
StringdexOnload: Array<function(stringdex.Stringdex): any>?;
};
}

File diff suppressed because it is too large Load diff

View file

@ -10,6 +10,6 @@
"skipLibCheck": true
},
"typeAcquisition": {
"include": ["./rustdoc.d.ts"]
"include": ["./rustdoc.d.ts", "./stringdex.d.ts"]
}
}

View file

@ -80,6 +80,7 @@ static_files! {
normalize_css => "static/css/normalize.css",
main_js => "static/js/main.js",
search_js => "static/js/search.js",
stringdex_js => "static/js/stringdex.js",
settings_js => "static/js/settings.js",
src_script_js => "static/js/src-script.js",
storage_js => "static/js/storage.js",

View file

@ -29,6 +29,7 @@
data-rustdoc-version="{{rustdoc_version}}" {#+ #}
data-channel="{{rust_channel}}" {#+ #}
data-search-js="{{files.search_js}}" {#+ #}
data-stringdex-js="{{files.stringdex_js}}" {#+ #}
data-settings-js="{{files.settings_js}}" {#+ #}
> {# #}
<script src="{{static_root_path|safe}}{{files.storage_js}}"></script>
@ -72,18 +73,9 @@
<![endif]-->
{{ layout.external_html.before_content|safe }}
{% if page.css_class != "src" %}
<nav class="mobile-topbar"> {# #}
<button class="sidebar-menu-toggle" title="show sidebar"></button>
{% if !layout.logo.is_empty() || page.rust_logo %}
<a class="logo-container" href="{{page.root_path|safe}}{{display_krate_with_trailing_slash|safe}}index.html">
{% if page.rust_logo %}
<img class="rust-logo" src="{{static_root_path|safe}}{{files.rust_logo_svg}}" alt="">
{% else if !layout.logo.is_empty() %}
<img src="{{layout.logo}}" alt="">
{% endif %}
</a>
{% endif %}
</nav>
<rustdoc-topbar> {# #}
<h2><a href="#">{{page.short_title}}</a></h2> {# #}
</rustdoc-topbar>
{% endif %}
<nav class="sidebar">
{% if page.css_class != "src" %}
@ -117,9 +109,6 @@
<div class="sidebar-resizer" title="Drag to resize sidebar"></div> {# #}
<main>
{% if page.css_class != "src" %}<div class="width-limiter">{% endif %}
{# defined in storage.js to avoid duplicating complex UI across every page #}
{# and because the search form only works if JS is enabled anyway #}
<rustdoc-search></rustdoc-search> {# #}
<section id="main-content" class="content">{{ content|safe }}</section>
{% if page.css_class != "src" %}</div>{% endif %}
</main>

View file

@ -12,8 +12,8 @@
<h1>
{{typ}}
<span{% if item_type != "mod" +%} class="{{item_type}}"{% endif %}>
{{name}}
</span> {# #}
{{name|wrapped|safe}}
</span>&nbsp;{# #}
<button id="copy-path" title="Copy item path to clipboard"> {# #}
Copy item path {# #}
</button> {# #}

@ -1 +1 @@
Subproject commit 840b83a10fb0e039a83f4d70ad032892c287570a
Subproject commit 71eb84f21aef43c07580c6aed6f806a6299f5042

View file

@ -2151,7 +2151,7 @@ impl<'test> TestCx<'test> {
#[rustfmt::skip]
let tidy_args = [
"--new-blocklevel-tags", "rustdoc-search,rustdoc-toolbar",
"--new-blocklevel-tags", "rustdoc-search,rustdoc-toolbar,rustdoc-topbar",
"--indent", "yes",
"--indent-spaces", "2",
"--wrap", "0",

View file

@ -31,7 +31,7 @@ fn check_html_file(file: &Path) -> usize {
.arg("--mute-id") // this option is useful in case we want to mute more warnings
.arg("yes")
.arg("--new-blocklevel-tags")
.arg("rustdoc-search,rustdoc-toolbar") // custom elements
.arg("rustdoc-search,rustdoc-toolbar,rustdoc-topbar") // custom elements
.arg("--mute")
.arg(&to_mute_s)
.arg(file);

View file

@ -78,6 +78,7 @@ native-lib = ["dep:libffi", "dep:libloading", "dep:capstone", "dep:ipc-channel",
[lints.rust.unexpected_cfgs]
level = "warn"
check-cfg = ['cfg(bootstrap)']
# Be aware that this file is inside a workspace when used via the
# submodule in the rustc repo. That means there are many cargo features

View file

@ -1 +1 @@
425a9c0a0e365c0b8c6cfd00c2ded83a73bed9a0
f605b57042ffeb320d7ae44490113a827139b766

View file

@ -1,3 +1,4 @@
#![cfg_attr(bootstrap, feature(strict_overflow_ops))]
#![feature(abort_unwind)]
#![feature(cfg_select)]
#![feature(rustc_private)]

View file

@ -1111,6 +1111,7 @@ impl<'tcx> Machine<'tcx> for MiriMachine<'tcx> {
) -> InterpResult<'tcx, Option<(&'tcx mir::Body<'tcx>, ty::Instance<'tcx>)>> {
// For foreign items, try to see if we can emulate them.
if ecx.tcx.is_foreign_item(instance.def_id()) {
let _trace = enter_trace_span!("emulate_foreign_item");
// An external function call that does not have a MIR body. We either find MIR elsewhere
// or emulate its effect.
// This will be Ok(None) if we're emulating the intrinsic entirely within Miri (no need
@ -1123,6 +1124,7 @@ impl<'tcx> Machine<'tcx> for MiriMachine<'tcx> {
}
// Otherwise, load the MIR.
let _trace = enter_trace_span!("load_mir");
interp_ok(Some((ecx.load_mir(instance.def, None)?, instance)))
}

View file

@ -153,7 +153,7 @@ pub trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> {
return interp_ok(());
}
// Skip over items without an explicitly defined symbol name.
if !(attrs.export_name.is_some()
if !(attrs.symbol_name.is_some()
|| attrs.flags.contains(CodegenFnAttrFlags::NO_MANGLE)
|| attrs.flags.contains(CodegenFnAttrFlags::RUSTC_STD_INTERNAL_SYMBOL))
{

View file

@ -0,0 +1,34 @@
//@revisions: stack tree
//@[tree]compile-flags: -Zmiri-tree-borrows
// Validation forces more things into memory, which we can't have here.
//@compile-flags: -Zmiri-disable-validation
#![feature(custom_mir, core_intrinsics)]
use std::intrinsics::mir::*;
pub struct S(i32);
#[custom_mir(dialect = "runtime", phase = "optimized")]
fn main() {
mir! {
let _unit: ();
{
let staging = S(42); // This forces `staging` into memory...
let non_copy = staging; // ... so we move it to a non-inmemory local here.
// This specifically uses a type with scalar representation to tempt Miri to use the
// efficient way of storing local variables (outside adressable memory).
Call(_unit = callee(Move(non_copy), Move(non_copy)), ReturnTo(after_call), UnwindContinue())
//~[stack]^ ERROR: not granting access
//~[tree]| ERROR: /read access .* forbidden/
}
after_call = {
Return()
}
}
}
pub fn callee(x: S, mut y: S) {
// With the setup above, if `x` and `y` are both moved,
// then writing to `y` will change the value stored in `x`!
y.0 = 0;
assert_eq!(x.0, 42);
}

View file

@ -0,0 +1,25 @@
error: Undefined Behavior: not granting access to tag <TAG> because that would remove [Unique for <TAG>] which is strongly protected
--> tests/fail/function_calls/arg_inplace_locals_alias.rs:LL:CC
|
LL | Call(_unit = callee(Move(non_copy), Move(non_copy)), ReturnTo(after_call), UnwindContinue())
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ Undefined Behavior occurred here
|
= help: this indicates a potential bug in the program: it performed an invalid operation, but the Stacked Borrows rules it violated are still experimental
= help: see https://github.com/rust-lang/unsafe-code-guidelines/blob/master/wip/stacked-borrows.md for further information
help: <TAG> was created here, as the root tag for ALLOC
--> tests/fail/function_calls/arg_inplace_locals_alias.rs:LL:CC
|
LL | Call(_unit = callee(Move(non_copy), Move(non_copy)), ReturnTo(after_call), UnwindContinue())
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
help: <TAG> is this argument
--> tests/fail/function_calls/arg_inplace_locals_alias.rs:LL:CC
|
LL | y.0 = 0;
| ^^^^^^^
= note: BACKTRACE (of the first span):
= note: inside `main` at tests/fail/function_calls/arg_inplace_locals_alias.rs:LL:CC
note: some details are omitted, run with `MIRIFLAGS=-Zmiri-backtrace=full` for a verbose backtrace
error: aborting due to 1 previous error

View file

@ -0,0 +1,34 @@
error: Undefined Behavior: read access through <TAG> (root of the allocation) at ALLOC[0x0] is forbidden
--> tests/fail/function_calls/arg_inplace_locals_alias.rs:LL:CC
|
LL | Call(_unit = callee(Move(non_copy), Move(non_copy)), ReturnTo(after_call), UnwindContinue())
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ Undefined Behavior occurred here
|
= help: this indicates a potential bug in the program: it performed an invalid operation, but the Tree Borrows rules it violated are still experimental
= help: see https://github.com/rust-lang/unsafe-code-guidelines/blob/master/wip/tree-borrows.md for further information
= help: the accessed tag <TAG> (root of the allocation) is foreign to the protected tag <TAG> (i.e., it is not a child)
= help: this foreign read access would cause the protected tag <TAG> (currently Active) to become Disabled
= help: protected tags must never be Disabled
help: the accessed tag <TAG> was created here
--> tests/fail/function_calls/arg_inplace_locals_alias.rs:LL:CC
|
LL | Call(_unit = callee(Move(non_copy), Move(non_copy)), ReturnTo(after_call), UnwindContinue())
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
help: the protected tag <TAG> was created here, in the initial state Reserved
--> tests/fail/function_calls/arg_inplace_locals_alias.rs:LL:CC
|
LL | y.0 = 0;
| ^^^^^^^
help: the protected tag <TAG> later transitioned to Active due to a child write access at offsets [0x0..0x4]
--> tests/fail/function_calls/arg_inplace_locals_alias.rs:LL:CC
|
LL | y.0 = 0;
| ^^^^^^^
= help: this transition corresponds to the first write to a 2-phase borrowed mutable reference
= note: BACKTRACE (of the first span):
= note: inside `main` at tests/fail/function_calls/arg_inplace_locals_alias.rs:LL:CC
note: some details are omitted, run with `MIRIFLAGS=-Zmiri-backtrace=full` for a verbose backtrace
error: aborting due to 1 previous error

View file

@ -11,8 +11,8 @@ LL | unsafe { ptr.read() };
note: inside `main`
--> tests/fail/function_calls/return_pointer_aliasing_read.rs:LL:CC
|
LL | Call(*ptr = myfun(ptr), ReturnTo(after_call), UnwindContinue())
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
LL | Call(_x = myfun(ptr), ReturnTo(after_call), UnwindContinue())
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
Uninitialized memory occurred at ALLOC[0x0..0x4], in this allocation:
ALLOC (stack variable, size: 4, align: 4) {

View file

@ -10,11 +10,11 @@ use std::intrinsics::mir::*;
pub fn main() {
mir! {
{
let x = 0;
let ptr = &raw mut x;
let _x = 0;
let ptr = &raw mut _x;
// We arrange for `myfun` to have a pointer that aliases
// its return place. Even just reading from that pointer is UB.
Call(*ptr = myfun(ptr), ReturnTo(after_call), UnwindContinue())
Call(_x = myfun(ptr), ReturnTo(after_call), UnwindContinue())
}
after_call = {
@ -25,7 +25,7 @@ pub fn main() {
fn myfun(ptr: *mut i32) -> i32 {
unsafe { ptr.read() };
//~[stack]^ ERROR: not granting access
//~[stack]^ ERROR: does not exist in the borrow stack
//~[tree]| ERROR: /read access .* forbidden/
//~[none]| ERROR: uninitialized
// Without an aliasing model, reads are "fine" but at least they return uninit data.

View file

@ -1,8 +1,8 @@
error: Undefined Behavior: not granting access to tag <TAG> because that would remove [Unique for <TAG>] which is strongly protected
error: Undefined Behavior: attempting a read access using <TAG> at ALLOC[0x0], but that tag does not exist in the borrow stack for this location
--> tests/fail/function_calls/return_pointer_aliasing_read.rs:LL:CC
|
LL | unsafe { ptr.read() };
| ^^^^^^^^^^ Undefined Behavior occurred here
| ^^^^^^^^^^ this error occurs as part of an access at ALLOC[0x0..0x4]
|
= help: this indicates a potential bug in the program: it performed an invalid operation, but the Stacked Borrows rules it violated are still experimental
= help: see https://github.com/rust-lang/unsafe-code-guidelines/blob/master/wip/stacked-borrows.md for further information
@ -11,12 +11,12 @@ help: <TAG> was created by a SharedReadWrite retag at offsets [0x0..0x4]
|
LL | / mir! {
LL | | {
LL | | let x = 0;
LL | | let ptr = &raw mut x;
LL | | let _x = 0;
LL | | let ptr = &raw mut _x;
... |
LL | | }
| |_____^
help: <TAG> is this argument
help: <TAG> was later invalidated at offsets [0x0..0x4] by a Unique in-place function argument/return passing protection
--> tests/fail/function_calls/return_pointer_aliasing_read.rs:LL:CC
|
LL | unsafe { ptr.read() };
@ -26,8 +26,8 @@ LL | unsafe { ptr.read() };
note: inside `main`
--> tests/fail/function_calls/return_pointer_aliasing_read.rs:LL:CC
|
LL | Call(*ptr = myfun(ptr), ReturnTo(after_call), UnwindContinue())
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
LL | Call(_x = myfun(ptr), ReturnTo(after_call), UnwindContinue())
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
= note: this error originates in the macro `::core::intrinsics::mir::__internal_remove_let` which comes from the expansion of the macro `mir` (in Nightly builds, run with -Z macro-backtrace for more info)
note: some details are omitted, run with `MIRIFLAGS=-Zmiri-backtrace=full` for a verbose backtrace

View file

@ -14,8 +14,8 @@ help: the accessed tag <TAG> was created here
|
LL | / mir! {
LL | | {
LL | | let x = 0;
LL | | let ptr = &raw mut x;
LL | | let _x = 0;
LL | | let ptr = &raw mut _x;
... |
LL | | }
| |_____^
@ -35,8 +35,8 @@ LL | unsafe { ptr.read() };
note: inside `main`
--> tests/fail/function_calls/return_pointer_aliasing_read.rs:LL:CC
|
LL | Call(*ptr = myfun(ptr), ReturnTo(after_call), UnwindContinue())
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
LL | Call(_x = myfun(ptr), ReturnTo(after_call), UnwindContinue())
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
= note: this error originates in the macro `::core::intrinsics::mir::__internal_remove_let` which comes from the expansion of the macro `mir` (in Nightly builds, run with -Z macro-backtrace for more info)
note: some details are omitted, run with `MIRIFLAGS=-Zmiri-backtrace=full` for a verbose backtrace

View file

@ -14,7 +14,7 @@ pub fn main() {
let ptr = &raw mut _x;
// We arrange for `myfun` to have a pointer that aliases
// its return place. Writing to that pointer is UB.
Call(*ptr = myfun(ptr), ReturnTo(after_call), UnwindContinue())
Call(_x = myfun(ptr), ReturnTo(after_call), UnwindContinue())
}
after_call = {
@ -26,7 +26,7 @@ pub fn main() {
fn myfun(ptr: *mut i32) -> i32 {
// This overwrites the return place, which shouldn't be possible through another pointer.
unsafe { ptr.write(0) };
//~[stack]^ ERROR: strongly protected
//~[stack]^ ERROR: does not exist in the borrow stack
//~[tree]| ERROR: /write access .* forbidden/
13
}

View file

@ -1,8 +1,8 @@
error: Undefined Behavior: not granting access to tag <TAG> because that would remove [Unique for <TAG>] which is strongly protected
error: Undefined Behavior: attempting a write access using <TAG> at ALLOC[0x0], but that tag does not exist in the borrow stack for this location
--> tests/fail/function_calls/return_pointer_aliasing_write.rs:LL:CC
|
LL | unsafe { ptr.write(0) };
| ^^^^^^^^^^^^ Undefined Behavior occurred here
| ^^^^^^^^^^^^ this error occurs as part of an access at ALLOC[0x0..0x4]
|
= help: this indicates a potential bug in the program: it performed an invalid operation, but the Stacked Borrows rules it violated are still experimental
= help: see https://github.com/rust-lang/unsafe-code-guidelines/blob/master/wip/stacked-borrows.md for further information
@ -16,7 +16,7 @@ LL | | let ptr = &raw mut _x;
... |
LL | | }
| |_____^
help: <TAG> is this argument
help: <TAG> was later invalidated at offsets [0x0..0x4] by a Unique in-place function argument/return passing protection
--> tests/fail/function_calls/return_pointer_aliasing_write.rs:LL:CC
|
LL | unsafe { ptr.write(0) };
@ -26,8 +26,8 @@ LL | unsafe { ptr.write(0) };
note: inside `main`
--> tests/fail/function_calls/return_pointer_aliasing_write.rs:LL:CC
|
LL | Call(*ptr = myfun(ptr), ReturnTo(after_call), UnwindContinue())
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
LL | Call(_x = myfun(ptr), ReturnTo(after_call), UnwindContinue())
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
= note: this error originates in the macro `::core::intrinsics::mir::__internal_remove_let` which comes from the expansion of the macro `mir` (in Nightly builds, run with -Z macro-backtrace for more info)
note: some details are omitted, run with `MIRIFLAGS=-Zmiri-backtrace=full` for a verbose backtrace

View file

@ -35,8 +35,8 @@ LL | unsafe { ptr.write(0) };
note: inside `main`
--> tests/fail/function_calls/return_pointer_aliasing_write.rs:LL:CC
|
LL | Call(*ptr = myfun(ptr), ReturnTo(after_call), UnwindContinue())
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
LL | Call(_x = myfun(ptr), ReturnTo(after_call), UnwindContinue())
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
= note: this error originates in the macro `::core::intrinsics::mir::__internal_remove_let` which comes from the expansion of the macro `mir` (in Nightly builds, run with -Z macro-backtrace for more info)
note: some details are omitted, run with `MIRIFLAGS=-Zmiri-backtrace=full` for a verbose backtrace

View file

@ -16,7 +16,7 @@ pub fn main() {
let ptr = &raw mut _x;
// We arrange for `myfun` to have a pointer that aliases
// its return place. Writing to that pointer is UB.
Call(*ptr = myfun(ptr), ReturnTo(after_call), UnwindContinue())
Call(_x = myfun(ptr), ReturnTo(after_call), UnwindContinue())
}
after_call = {
@ -32,7 +32,7 @@ fn myfun(ptr: *mut i32) -> i32 {
fn myfun2(ptr: *mut i32) -> i32 {
// This overwrites the return place, which shouldn't be possible through another pointer.
unsafe { ptr.write(0) };
//~[stack]^ ERROR: strongly protected
//~[stack]^ ERROR: does not exist in the borrow stack
//~[tree]| ERROR: /write access .* forbidden/
13
}

View file

@ -1,8 +1,8 @@
error: Undefined Behavior: not granting access to tag <TAG> because that would remove [Unique for <TAG>] which is strongly protected
error: Undefined Behavior: attempting a write access using <TAG> at ALLOC[0x0], but that tag does not exist in the borrow stack for this location
--> tests/fail/function_calls/return_pointer_aliasing_write_tail_call.rs:LL:CC
|
LL | unsafe { ptr.write(0) };
| ^^^^^^^^^^^^ Undefined Behavior occurred here
| ^^^^^^^^^^^^ this error occurs as part of an access at ALLOC[0x0..0x4]
|
= help: this indicates a potential bug in the program: it performed an invalid operation, but the Stacked Borrows rules it violated are still experimental
= help: see https://github.com/rust-lang/unsafe-code-guidelines/blob/master/wip/stacked-borrows.md for further information
@ -16,18 +16,18 @@ LL | | let ptr = &raw mut _x;
... |
LL | | }
| |_____^
help: <TAG> is this argument
help: <TAG> was later invalidated at offsets [0x0..0x4] by a Unique in-place function argument/return passing protection
--> tests/fail/function_calls/return_pointer_aliasing_write_tail_call.rs:LL:CC
|
LL | unsafe { ptr.write(0) };
| ^^^^^^^^^^^^^^^^^^^^^^^
LL | become myfun2(ptr)
| ^^^^^^^^^^^^^^^^^^
= note: BACKTRACE (of the first span):
= note: inside `myfun2` at tests/fail/function_calls/return_pointer_aliasing_write_tail_call.rs:LL:CC
note: inside `main`
--> tests/fail/function_calls/return_pointer_aliasing_write_tail_call.rs:LL:CC
|
LL | Call(*ptr = myfun(ptr), ReturnTo(after_call), UnwindContinue())
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
LL | Call(_x = myfun(ptr), ReturnTo(after_call), UnwindContinue())
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
= note: this error originates in the macro `::core::intrinsics::mir::__internal_remove_let` which comes from the expansion of the macro `mir` (in Nightly builds, run with -Z macro-backtrace for more info)
note: some details are omitted, run with `MIRIFLAGS=-Zmiri-backtrace=full` for a verbose backtrace

View file

@ -35,8 +35,8 @@ LL | unsafe { ptr.write(0) };
note: inside `main`
--> tests/fail/function_calls/return_pointer_aliasing_write_tail_call.rs:LL:CC
|
LL | Call(*ptr = myfun(ptr), ReturnTo(after_call), UnwindContinue())
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
LL | Call(_x = myfun(ptr), ReturnTo(after_call), UnwindContinue())
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
= note: this error originates in the macro `::core::intrinsics::mir::__internal_remove_let` which comes from the expansion of the macro `mir` (in Nightly builds, run with -Z macro-backtrace for more info)
note: some details are omitted, run with `MIRIFLAGS=-Zmiri-backtrace=full` for a verbose backtrace

View file

@ -14,6 +14,7 @@ pkgs.mkShell {
packages = [
pkgs.git
pkgs.nix
pkgs.glibc.static
x
# Get the runtime deps of the x wrapper
] ++ lists.flatten (attrsets.attrValues env);

View file

@ -1,7 +1,8 @@
/* global globalThis */
const fs = require("fs");
const path = require("path");
const { isGeneratorObject } = require("util/types");
function arrayToCode(array) {
return array.map((value, index) => {
@ -45,23 +46,16 @@ function shouldIgnoreField(fieldName) {
}
function valueMapper(key, testOutput) {
const isAlias = testOutput["is_alias"];
let value = testOutput[key];
// To make our life easier, if there is a "parent" type, we add it to the path.
if (key === "path") {
if (testOutput["parent"] !== undefined) {
if (testOutput["parent"]) {
if (value.length > 0) {
value += "::" + testOutput["parent"]["name"];
} else {
value = testOutput["parent"]["name"];
}
} else if (testOutput["is_alias"]) {
value = valueMapper(key, testOutput["original"]);
}
} else if (isAlias && key === "alias") {
value = testOutput["name"];
} else if (isAlias && ["name"].includes(key)) {
value = testOutput["original"][key];
}
return value;
}
@ -237,7 +231,7 @@ async function runSearch(query, expected, doSearch, loadedFile, queryName) {
const ignore_order = loadedFile.ignore_order;
const exact_check = loadedFile.exact_check;
const results = await doSearch(query, loadedFile.FILTER_CRATE);
const { resultsTable } = await doSearch(query, loadedFile.FILTER_CRATE);
const error_text = [];
for (const key in expected) {
@ -247,37 +241,38 @@ async function runSearch(query, expected, doSearch, loadedFile, queryName) {
if (!Object.prototype.hasOwnProperty.call(expected, key)) {
continue;
}
if (!Object.prototype.hasOwnProperty.call(results, key)) {
if (!Object.prototype.hasOwnProperty.call(resultsTable, key)) {
error_text.push("==> Unknown key \"" + key + "\"");
break;
}
const entry = expected[key];
if (exact_check && entry.length !== results[key].length) {
if (exact_check && entry.length !== resultsTable[key].length) {
error_text.push(queryName + "==> Expected exactly " + entry.length +
" results but found " + results[key].length + " in '" + key + "'");
" results but found " + resultsTable[key].length + " in '" + key + "'");
}
let prev_pos = -1;
for (const [index, elem] of entry.entries()) {
const entry_pos = lookForEntry(elem, results[key]);
const entry_pos = lookForEntry(elem, resultsTable[key]);
if (entry_pos === -1) {
error_text.push(queryName + "==> Result not found in '" + key + "': '" +
JSON.stringify(elem) + "'");
// By default, we just compare the two first items.
let item_to_diff = 0;
if ((!ignore_order || exact_check) && index < results[key].length) {
if ((!ignore_order || exact_check) && index < resultsTable[key].length) {
item_to_diff = index;
}
error_text.push("Diff of first error:\n" +
betterLookingDiff(elem, results[key][item_to_diff]));
betterLookingDiff(elem, resultsTable[key][item_to_diff]));
} else if (exact_check === true && prev_pos + 1 !== entry_pos) {
error_text.push(queryName + "==> Exact check failed at position " + (prev_pos + 1) +
": expected '" + JSON.stringify(elem) + "' but found '" +
JSON.stringify(results[key][index]) + "'");
JSON.stringify(resultsTable[key][index]) + "'");
} else if (ignore_order === false && entry_pos < prev_pos) {
error_text.push(queryName + "==> '" + JSON.stringify(elem) + "' was supposed " +
"to be before '" + JSON.stringify(results[key][prev_pos]) + "'");
error_text.push(queryName + "==> '" +
JSON.stringify(elem) + "' was supposed to be before '" +
JSON.stringify(resultsTable[key][prev_pos]) + "'");
} else {
prev_pos = entry_pos;
}
@ -286,19 +281,20 @@ async function runSearch(query, expected, doSearch, loadedFile, queryName) {
return error_text;
}
async function runCorrections(query, corrections, getCorrections, loadedFile) {
const qc = await getCorrections(query, loadedFile.FILTER_CRATE);
async function runCorrections(query, corrections, doSearch, loadedFile) {
const { parsedQuery } = await doSearch(query, loadedFile.FILTER_CRATE);
const qc = parsedQuery.correction;
const error_text = [];
if (corrections === null) {
if (qc !== null) {
error_text.push(`==> expected = null, found = ${qc}`);
error_text.push(`==> [correction] expected = null, found = ${qc}`);
}
return error_text;
}
if (qc !== corrections.toLowerCase()) {
error_text.push(`==> expected = ${corrections}, found = ${qc}`);
if (qc.toLowerCase() !== corrections.toLowerCase()) {
error_text.push(`==> [correction] expected = ${corrections}, found = ${qc}`);
}
return error_text;
@ -320,7 +316,7 @@ function checkResult(error_text, loadedFile, displaySuccess) {
return 1;
}
async function runCheckInner(callback, loadedFile, entry, getCorrections, extra) {
async function runCheckInner(callback, loadedFile, entry, extra, doSearch) {
if (typeof entry.query !== "string") {
console.log("FAILED");
console.log("==> Missing `query` field");
@ -338,7 +334,7 @@ async function runCheckInner(callback, loadedFile, entry, getCorrections, extra)
error_text = await runCorrections(
entry.query,
entry.correction,
getCorrections,
doSearch,
loadedFile,
);
if (checkResult(error_text, loadedFile, false) !== 0) {
@ -348,16 +344,16 @@ async function runCheckInner(callback, loadedFile, entry, getCorrections, extra)
return true;
}
async function runCheck(loadedFile, key, getCorrections, callback) {
async function runCheck(loadedFile, key, doSearch, callback) {
const expected = loadedFile[key];
if (Array.isArray(expected)) {
for (const entry of expected) {
if (!await runCheckInner(callback, loadedFile, entry, getCorrections, true)) {
if (!await runCheckInner(callback, loadedFile, entry, true, doSearch)) {
return 1;
}
}
} else if (!await runCheckInner(callback, loadedFile, expected, getCorrections, false)) {
} else if (!await runCheckInner(callback, loadedFile, expected, false, doSearch)) {
return 1;
}
console.log("OK");
@ -368,7 +364,7 @@ function hasCheck(content, checkName) {
return content.startsWith(`const ${checkName}`) || content.includes(`\nconst ${checkName}`);
}
async function runChecks(testFile, doSearch, parseQuery, getCorrections) {
async function runChecks(testFile, doSearch, parseQuery) {
let checkExpected = false;
let checkParsed = false;
let testFileContent = readFile(testFile);
@ -397,12 +393,12 @@ async function runChecks(testFile, doSearch, parseQuery, getCorrections) {
let res = 0;
if (checkExpected) {
res += await runCheck(loadedFile, "EXPECTED", getCorrections, (query, expected, text) => {
res += await runCheck(loadedFile, "EXPECTED", doSearch, (query, expected, text) => {
return runSearch(query, expected, doSearch, loadedFile, text);
});
}
if (checkParsed) {
res += await runCheck(loadedFile, "PARSED", getCorrections, (query, expected, text) => {
res += await runCheck(loadedFile, "PARSED", doSearch, (query, expected, text) => {
return runParser(query, expected, parseQuery, text);
});
}
@ -416,71 +412,89 @@ async function runChecks(testFile, doSearch, parseQuery, getCorrections) {
* @param {string} resource_suffix - Version number between filename and .js, e.g. "1.59.0"
* @returns {Object} - Object containing keys: `doSearch`, which runs a search
* with the loaded index and returns a table of results; `parseQuery`, which is the
* `parseQuery` function exported from the search module; and `getCorrections`, which runs
* `parseQuery` function exported from the search module, which runs
* a search but returns type name corrections instead of results.
*/
function loadSearchJS(doc_folder, resource_suffix) {
const searchIndexJs = path.join(doc_folder, "search-index" + resource_suffix + ".js");
const searchIndex = require(searchIndexJs);
globalThis.searchState = {
descShards: new Map(),
loadDesc: async function({descShard, descIndex}) {
if (descShard.promise === null) {
descShard.promise = new Promise((resolve, reject) => {
descShard.resolve = resolve;
const ds = descShard;
const fname = `${ds.crate}-desc-${ds.shard}-${resource_suffix}.js`;
fs.readFile(
`${doc_folder}/search.desc/${descShard.crate}/${fname}`,
(err, data) => {
if (err) {
reject(err);
} else {
eval(data.toString("utf8"));
}
},
);
});
}
const list = await descShard.promise;
return list[descIndex];
},
loadedDescShard: function(crate, shard, data) {
this.descShards.get(crate)[shard].resolve(data.split("\n"));
},
};
async function loadSearchJS(doc_folder, resource_suffix) {
const staticFiles = path.join(doc_folder, "static.files");
const stringdexJs = fs.readdirSync(staticFiles).find(f => f.match(/stringdex.*\.js$/));
const stringdexModule = require(path.join(staticFiles, stringdexJs));
const searchJs = fs.readdirSync(staticFiles).find(f => f.match(/search.*\.js$/));
const searchModule = require(path.join(staticFiles, searchJs));
searchModule.initSearch(searchIndex.searchIndex);
const docSearch = searchModule.docSearch;
globalThis.nonnull = (x, msg) => {
if (x === null) {
throw (msg || "unexpected null value!");
} else {
return x;
}
};
const { docSearch, DocSearch } = await searchModule.initSearch(
stringdexModule.Stringdex,
stringdexModule.RoaringBitmap,
{
loadRoot: callbacks => {
for (const key in callbacks) {
if (Object.hasOwn(callbacks, key)) {
globalThis[key] = callbacks[key];
}
}
const rootJs = readFile(path.join(doc_folder, "search.index/root" +
resource_suffix + ".js"));
eval(rootJs);
},
loadTreeByHash: hashHex => {
const shardJs = readFile(path.join(doc_folder, "search.index/" + hashHex + ".js"));
eval(shardJs);
},
loadDataByNameAndHash: (name, hashHex) => {
const shardJs = readFile(path.join(doc_folder, "search.index/" + name + "/" +
hashHex + ".js"));
eval(shardJs);
},
},
);
return {
doSearch: async function(queryStr, filterCrate, currentCrate) {
const result = await docSearch.execQuery(searchModule.parseQuery(queryStr),
filterCrate, currentCrate);
const parsedQuery = DocSearch.parseQuery(queryStr);
const result = await docSearch.execQuery(parsedQuery, filterCrate, currentCrate);
const resultsTable = {};
for (const tab in result) {
if (!Object.prototype.hasOwnProperty.call(result, tab)) {
continue;
}
if (!(result[tab] instanceof Array)) {
if (!isGeneratorObject(result[tab])) {
continue;
}
for (const entry of result[tab]) {
resultsTable[tab] = [];
for await (const entry of result[tab]) {
const flatEntry = Object.assign({
crate: entry.item.crate,
name: entry.item.name,
path: entry.item.modulePath,
exactPath: entry.item.exactModulePath,
ty: entry.item.ty,
}, entry);
for (const key in entry) {
if (!Object.prototype.hasOwnProperty.call(entry, key)) {
continue;
}
if (key === "displayTypeSignature" && entry.displayTypeSignature !== null) {
const {type, mappedNames, whereClause} =
await entry.displayTypeSignature;
entry.displayType = arrayToCode(type);
entry.displayMappedNames = [...mappedNames.entries()]
if (key === "desc" && entry.desc !== null) {
flatEntry.desc = await entry.desc;
} else if (key === "displayTypeSignature" &&
entry.displayTypeSignature !== null
) {
flatEntry.displayTypeSignature = await entry.displayTypeSignature;
const {
type,
mappedNames,
whereClause,
} = flatEntry.displayTypeSignature;
flatEntry.displayType = arrayToCode(type);
flatEntry.displayMappedNames = [...mappedNames.entries()]
.map(([name, qname]) => {
return `${name} = ${qname}`;
}).join(", ");
entry.displayWhereClause = [...whereClause.entries()]
flatEntry.displayWhereClause = [...whereClause.entries()]
.flatMap(([name, value]) => {
if (value.length === 0) {
return [];
@ -489,16 +503,12 @@ function loadSearchJS(doc_folder, resource_suffix) {
}).join(", ");
}
}
resultsTable[tab].push(flatEntry);
}
}
return result;
return { resultsTable, parsedQuery };
},
getCorrections: function(queryStr, filterCrate, currentCrate) {
const parsedQuery = searchModule.parseQuery(queryStr);
docSearch.execQuery(parsedQuery, filterCrate, currentCrate);
return parsedQuery.correction;
},
parseQuery: searchModule.parseQuery,
parseQuery: DocSearch.parseQuery,
};
}
@ -570,7 +580,7 @@ async function main(argv) {
return 1;
}
const parseAndSearch = loadSearchJS(
const parseAndSearch = await loadSearchJS(
opts["doc_folder"],
opts["resource_suffix"],
);
@ -579,14 +589,11 @@ async function main(argv) {
const doSearch = function(queryStr, filterCrate) {
return parseAndSearch.doSearch(queryStr, filterCrate, opts["crate_name"]);
};
const getCorrections = function(queryStr, filterCrate) {
return parseAndSearch.getCorrections(queryStr, filterCrate, opts["crate_name"]);
};
if (opts["test_file"].length !== 0) {
for (const file of opts["test_file"]) {
process.stdout.write(`Testing ${file} ... `);
errors += await runChecks(file, doSearch, parseAndSearch.parseQuery, getCorrections);
errors += await runChecks(file, doSearch, parseAndSearch.parseQuery);
}
} else if (opts["test_folder"].length !== 0) {
for (const file of fs.readdirSync(opts["test_folder"])) {
@ -595,7 +602,7 @@ async function main(argv) {
}
process.stdout.write(`Testing ${file} ... `);
errors += await runChecks(path.join(opts["test_folder"], file), doSearch,
parseAndSearch.parseQuery, getCorrections);
parseAndSearch.parseQuery);
}
}
return errors > 0 ? 1 : 0;

View file

@ -41,7 +41,6 @@ const RUFF_CONFIG_PATH: &[&str] = &["src", "tools", "tidy", "config", "ruff.toml
const RUFF_CACHE_PATH: &[&str] = &["cache", "ruff_cache"];
const PIP_REQ_PATH: &[&str] = &["src", "tools", "tidy", "config", "requirements.txt"];
// this must be kept in sync with with .github/workflows/spellcheck.yml
const SPELLCHECK_DIRS: &[&str] = &["compiler", "library", "src/bootstrap", "src/librustdoc"];
pub fn check(
@ -51,6 +50,7 @@ pub fn check(
librustdoc_path: &Path,
tools_path: &Path,
npm: &Path,
cargo: &Path,
bless: bool,
extra_checks: Option<&str>,
pos_args: &[String],
@ -63,6 +63,7 @@ pub fn check(
librustdoc_path,
tools_path,
npm,
cargo,
bless,
extra_checks,
pos_args,
@ -78,6 +79,7 @@ fn check_impl(
librustdoc_path: &Path,
tools_path: &Path,
npm: &Path,
cargo: &Path,
bless: bool,
extra_checks: Option<&str>,
pos_args: &[String],
@ -293,7 +295,7 @@ fn check_impl(
} else {
eprintln!("spellcheck files");
}
spellcheck_runner(&args)?;
spellcheck_runner(root_path, &outdir, &cargo, &args)?;
}
if js_lint || js_typecheck {
@ -576,34 +578,25 @@ fn shellcheck_runner(args: &[&OsStr]) -> Result<(), Error> {
if status.success() { Ok(()) } else { Err(Error::FailedCheck("shellcheck")) }
}
/// Check that spellchecker is installed then run it at the given path
fn spellcheck_runner(args: &[&str]) -> Result<(), Error> {
// sync version with .github/workflows/spellcheck.yml
let expected_version = "typos-cli 1.34.0";
match Command::new("typos").arg("--version").output() {
Ok(o) => {
let stdout = String::from_utf8_lossy(&o.stdout);
if stdout.trim() != expected_version {
return Err(Error::Version {
program: "typos",
required: expected_version,
installed: stdout.trim().to_string(),
});
/// Ensure that spellchecker is installed then run it at the given path
fn spellcheck_runner(
src_root: &Path,
outdir: &Path,
cargo: &Path,
args: &[&str],
) -> Result<(), Error> {
let bin_path =
crate::ensure_version_or_cargo_install(outdir, cargo, "typos-cli", "typos", "1.34.0")?;
match Command::new(bin_path).current_dir(src_root).args(args).status() {
Ok(status) => {
if status.success() {
Ok(())
} else {
Err(Error::FailedCheck("typos"))
}
}
Err(e) if e.kind() == io::ErrorKind::NotFound => {
return Err(Error::MissingReq(
"typos",
"spellcheck file checks",
// sync version with .github/workflows/spellcheck.yml
Some("install tool via `cargo install typos-cli@1.34.0`".to_owned()),
));
}
Err(e) => return Err(e.into()),
Err(err) => Err(Error::Generic(format!("failed to run typos tool: {err:?}"))),
}
let status = Command::new("typos").args(args).status()?;
if status.success() { Ok(()) } else { Err(Error::FailedCheck("typos")) }
}
/// Check git for tracked files matching an extension

View file

@ -4,7 +4,9 @@
//! to be used by tools.
use std::ffi::OsStr;
use std::path::{Path, PathBuf};
use std::process::Command;
use std::{env, io};
use build_helper::ci::CiEnv;
use build_helper::git::{GitConfig, get_closest_upstream_commit};
@ -180,6 +182,70 @@ pub fn files_modified(ci_info: &CiInfo, pred: impl Fn(&str) -> bool) -> bool {
!v.is_empty()
}
/// If the given executable is installed with the given version, use that,
/// otherwise install via cargo into `build_dir` and return the path to the
/// freshly built binary.
///
/// `pkg_name` is the crates.io package to install, `bin_name` the binary it
/// provides (they can differ, e.g. `typos-cli` vs `typos`), and `version` the
/// exact version required.
///
/// # Errors
/// Returns an `io::Error` if spawning/waiting on cargo fails or if the
/// `cargo install` invocation exits with a non-zero status.
pub fn ensure_version_or_cargo_install(
    build_dir: &Path,
    cargo: &Path,
    pkg_name: &str,
    bin_name: &str,
    version: &str,
) -> io::Result<PathBuf> {
    // Probe for an already-installed binary of the right version.
    // We deliberately ignore the probe's exit code and any failure here
    // (program missing, non-UTF-8 output, unparseable version string) and
    // instead continue to the cargo-install fallback below.
    'ck: {
        // FIXME: rewrite as if-let chain once this crate is 2024 edition.
        let Ok(output) = Command::new(bin_name).arg("--version").output() else {
            break 'ck;
        };
        let Ok(s) = str::from_utf8(&output.stdout) else {
            break 'ck;
        };
        // The version is expected to be the last whitespace-separated token,
        // e.g. "typos-cli 1.34.0" -> "1.34.0".
        let Some(v) = s.trim().split_whitespace().last() else {
            break 'ck;
        };
        if v == version {
            return Ok(PathBuf::from(bin_name));
        }
    }

    let tool_root_dir = build_dir.join("misc-tools");
    let tool_bin_dir = tool_root_dir.join("bin");
    eprintln!("building external tool {bin_name} from package {pkg_name}@{version}");
    // use --force to ensure that if the required version is bumped, we update it.
    // use --target-dir to ensure we have a build cache so repeated invocations aren't slow.
    // modify PATH so that cargo doesn't print a warning telling the user to modify the path.
    let cargo_exit_code = Command::new(cargo)
        .args(["install", "--locked", "--force", "--quiet"])
        .arg("--root")
        .arg(&tool_root_dir)
        .arg("--target-dir")
        .arg(tool_root_dir.join("target"))
        .arg(format!("{pkg_name}@{version}"))
        .env(
            "PATH",
            env::join_paths(
                // `var_os` + `unwrap_or_default` instead of `var(...).unwrap()`:
                // does not panic if PATH is unset or contains non-UTF-8 bytes.
                env::split_paths(&env::var_os("PATH").unwrap_or_default())
                    .chain(std::iter::once(tool_bin_dir.clone())),
            )
            .expect("build dir contains invalid char"),
        )
        // No optimization: keeps the one-off tool build fast.
        .env("RUSTFLAGS", "-Copt-level=0")
        .spawn()?
        .wait()?;
    if !cargo_exit_code.success() {
        return Err(io::Error::other("cargo install failed"));
    }
    let bin_path = tool_bin_dir.join(bin_name);
    assert!(
        matches!(bin_path.try_exists(), Ok(true)),
        "cargo install did not produce the expected binary"
    );
    eprintln!("finished building tool {bin_name}");
    Ok(bin_path)
}
pub mod alphabetical;
pub mod bins;
pub mod debug_artifacts;

View file

@ -184,6 +184,7 @@ fn main() {
&librustdoc_path,
&tools_path,
&npm,
&cargo,
bless,
extra_checks,
pos_args