Merge pull request #2526 from rust-lang/rustc-pull

Rustc pull update
This commit is contained in:
Tshepang Mbambo 2025-08-02 00:40:01 +02:00 committed by GitHub
commit 171c2478c0
No known key found for this signature in database
GPG key ID: B5690EEEBB952194
790 changed files with 13871 additions and 8026 deletions

View file

@ -14,6 +14,8 @@ test-stage = 2
doc-stage = 2
# Contributors working on tools will probably expect compiler docs to be generated, so they can figure out how to use the API.
compiler-docs = true
# Contributors working on tools are the most likely to change non-rust programs.
tidy-extra-checks = "auto:js,auto:py,auto:cpp,auto:spellcheck"
[llvm]
# Will download LLVM from CI if available on your platform.

View file

@ -556,3 +556,9 @@ tool_check_step!(Compiletest {
allow_features: COMPILETEST_ALLOW_FEATURES,
default: false,
});
// Registers a `check::Linkchecker` step via the project-defined
// `tool_check_step!` macro, building the tool in bootstrap mode.
// `default: false` — only runs when explicitly requested.
tool_check_step!(Linkchecker {
path: "src/tools/linkchecker",
mode: |_builder| Mode::ToolBootstrap,
default: false
});

View file

@ -8,6 +8,9 @@ use std::ffi::{OsStr, OsString};
use std::path::{Path, PathBuf};
use std::{env, fs, iter};
#[cfg(feature = "tracing")]
use tracing::instrument;
use crate::core::build_steps::compile::{Std, run_cargo};
use crate::core::build_steps::doc::DocumentationFormat;
use crate::core::build_steps::gcc::{Gcc, add_cg_gcc_cargo_flags};
@ -30,7 +33,7 @@ use crate::utils::helpers::{
linker_flags, t, target_supports_cranelift_backend, up_to_date,
};
use crate::utils::render_tests::{add_flags_and_try_run_tests, try_run_tests};
use crate::{CLang, DocTests, GitRepo, Mode, PathSet, envify};
use crate::{CLang, DocTests, GitRepo, Mode, PathSet, debug, envify};
const ADB_TEST_DIR: &str = "/data/local/tmp/work";
@ -713,9 +716,23 @@ impl Step for CompiletestTest {
}
/// Runs `cargo test` for compiletest.
#[cfg_attr(
feature = "tracing",
instrument(level = "debug", name = "CompiletestTest::run", skip_all)
)]
fn run(self, builder: &Builder<'_>) {
let host = self.host;
if builder.top_stage == 0 && !builder.config.compiletest_allow_stage0 {
eprintln!("\
ERROR: `--stage 0` runs compiletest self-tests against the stage0 (precompiled) compiler, not the in-tree compiler, and will almost always cause tests to fail
NOTE: if you're sure you want to do this, please open an issue as to why. In the meantime, you can override this with `--set build.compiletest-allow-stage0=true`."
);
crate::exit!(1);
}
let compiler = builder.compiler(builder.top_stage, host);
debug!(?compiler);
// We need `ToolStd` for the locally-built sysroot because
// compiletest uses unstable features of the `test` crate.
@ -723,8 +740,8 @@ impl Step for CompiletestTest {
let mut cargo = tool::prepare_tool_cargo(
builder,
compiler,
// compiletest uses libtest internals; make it use the in-tree std to make sure it never breaks
// when std sources change.
// compiletest uses libtest internals; make it use the in-tree std to make sure it never
// breaks when std sources change.
Mode::ToolStd,
host,
Kind::Test,
@ -1612,12 +1629,11 @@ impl Step for Compiletest {
return;
}
if builder.top_stage == 0 && env::var("COMPILETEST_FORCE_STAGE0").is_err() {
if builder.top_stage == 0 && !builder.config.compiletest_allow_stage0 {
eprintln!("\
ERROR: `--stage 0` runs compiletest on the stage0 (precompiled) compiler, not your local changes, and will almost always cause tests to fail
HELP: to test the compiler, use `--stage 1` instead
HELP: to test the standard library, use `--stage 0 library/std` instead
NOTE: if you're sure you want to do this, please open an issue as to why. In the meantime, you can override this with `COMPILETEST_FORCE_STAGE0=1`."
HELP: to test the compiler or standard library, omit the stage or explicitly use `--stage 1` instead
NOTE: if you're sure you want to do this, please open an issue as to why. In the meantime, you can override this with `--set build.compiletest-allow-stage0=true`."
);
crate::exit!(1);
}

View file

@ -1033,6 +1033,7 @@ impl<'a> Builder<'a> {
check::Compiletest,
check::FeaturesStatusDump,
check::CoverageDump,
check::Linkchecker,
// This has special staging logic, it may run on stage 1 while others run on stage 0.
// It takes quite some time to build stage 1, so put this at the end.
//

View file

@ -45,7 +45,9 @@ use crate::core::config::{
DebuginfoLevel, DryRun, GccCiMode, LlvmLibunwind, Merge, ReplaceOpt, RustcLto, SplitDebuginfo,
StringOrBool, set, threads_from_config,
};
use crate::core::download::is_download_ci_available;
use crate::core::download::{
DownloadContext, download_beta_toolchain, is_download_ci_available, maybe_download_rustfmt,
};
use crate::utils::channel;
use crate::utils::exec::{ExecutionContext, command};
use crate::utils::helpers::{exe, get_host_target};
@ -296,8 +298,16 @@ pub struct Config {
/// Command for visual diff display, e.g. `diff-tool --color=always`.
pub compiletest_diff_tool: Option<String>,
/// Whether to allow running both `compiletest` self-tests and `compiletest`-managed test suites
/// against the stage 0 (rustc, std).
///
/// This is only intended to be used when the stage 0 compiler is actually built from in-tree
/// sources.
pub compiletest_allow_stage0: bool,
/// Whether to use the precompiled stage0 libtest with compiletest.
pub compiletest_use_stage0_libtest: bool,
/// Default value for `--extra-checks`
pub tidy_extra_checks: Option<String>,
pub is_running_on_ci: bool,
@ -747,6 +757,7 @@ impl Config {
optimized_compiler_builtins,
jobs,
compiletest_diff_tool,
compiletest_allow_stage0,
compiletest_use_stage0_libtest,
tidy_extra_checks,
ccache,
@ -795,13 +806,19 @@ impl Config {
);
}
config.patch_binaries_for_nix = patch_binaries_for_nix;
config.bootstrap_cache_path = bootstrap_cache_path;
config.llvm_assertions =
toml.llvm.as_ref().is_some_and(|llvm| llvm.assertions.unwrap_or(false));
config.initial_rustc = if let Some(rustc) = rustc {
if !flags_skip_stage0_validation {
config.check_stage0_version(&rustc, "rustc");
}
rustc
} else {
config.download_beta_toolchain();
let dwn_ctx = DownloadContext::from(&config);
download_beta_toolchain(dwn_ctx);
config
.out
.join(config.host_target)
@ -827,7 +844,8 @@ impl Config {
}
cargo
} else {
config.download_beta_toolchain();
let dwn_ctx = DownloadContext::from(&config);
download_beta_toolchain(dwn_ctx);
config.initial_sysroot.join("bin").join(exe("cargo", config.host_target))
};
@ -863,7 +881,6 @@ impl Config {
config.reuse = reuse.map(PathBuf::from);
config.submodules = submodules;
config.android_ndk = android_ndk;
config.bootstrap_cache_path = bootstrap_cache_path;
set(&mut config.low_priority, low_priority);
set(&mut config.compiler_docs, compiler_docs);
set(&mut config.library_docs_private_items, library_docs_private_items);
@ -882,7 +899,6 @@ impl Config {
set(&mut config.local_rebuild, local_rebuild);
set(&mut config.print_step_timings, print_step_timings);
set(&mut config.print_step_rusage, print_step_rusage);
config.patch_binaries_for_nix = patch_binaries_for_nix;
config.verbose = cmp::max(config.verbose, flags_verbose as usize);
@ -891,9 +907,6 @@ impl Config {
config.apply_install_config(toml.install);
config.llvm_assertions =
toml.llvm.as_ref().is_some_and(|llvm| llvm.assertions.unwrap_or(false));
let file_content = t!(fs::read_to_string(config.src.join("src/ci/channel")));
let ci_channel = file_content.trim_end();
@ -994,8 +1007,12 @@ impl Config {
config.apply_dist_config(toml.dist);
config.initial_rustfmt =
if let Some(r) = rustfmt { Some(r) } else { config.maybe_download_rustfmt() };
config.initial_rustfmt = if let Some(r) = rustfmt {
Some(r)
} else {
let dwn_ctx = DownloadContext::from(&config);
maybe_download_rustfmt(dwn_ctx)
};
if matches!(config.lld_mode, LldMode::SelfContained)
&& !config.lld_enabled
@ -1012,8 +1029,12 @@ impl Config {
config.optimized_compiler_builtins =
optimized_compiler_builtins.unwrap_or(config.channel != "dev");
config.compiletest_diff_tool = compiletest_diff_tool;
config.compiletest_allow_stage0 = compiletest_allow_stage0.unwrap_or(false);
config.compiletest_use_stage0_libtest = compiletest_use_stage0_libtest.unwrap_or(true);
config.tidy_extra_checks = tidy_extra_checks;
let download_rustc = config.download_rustc_commit.is_some();

View file

@ -68,6 +68,7 @@ define_config! {
optimized_compiler_builtins: Option<bool> = "optimized-compiler-builtins",
jobs: Option<u32> = "jobs",
compiletest_diff_tool: Option<String> = "compiletest-diff-tool",
compiletest_allow_stage0: Option<bool> = "compiletest-allow-stage0",
compiletest_use_stage0_libtest: Option<bool> = "compiletest-use-stage0-libtest",
tidy_extra_checks: Option<String> = "tidy-extra-checks",
ccache: Option<StringOrBool> = "ccache",

File diff suppressed because it is too large Load diff

View file

@ -486,4 +486,9 @@ pub const CONFIG_CHANGE_HISTORY: &[ChangeInfo] = &[
severity: ChangeSeverity::Warning,
summary: "Removed `rust.description` and `llvm.ccache` as it was deprecated in #137723 and #136941 long time ago.",
},
ChangeInfo {
change_id: 144675,
severity: ChangeSeverity::Warning,
summary: "Added `build.compiletest-allow-stage0` flag instead of `COMPILETEST_FORCE_STAGE0` env var, and reject running `compiletest` self tests against stage 0 rustc unless explicitly allowed.",
},
];

View file

@ -3,6 +3,7 @@
/// See <https://github.com/rust-lang/rust/issues/134863>
pub static CRATES: &[&str] = &[
// tidy-alphabetical-start
"allocator-api2",
"annotate-snippets",
"anstyle",
"askama_parser",
@ -16,13 +17,17 @@ pub static CRATES: &[&str] = &[
"darling_core",
"derive_builder_core",
"digest",
"equivalent",
"fluent-bundle",
"fluent-langneg",
"fluent-syntax",
"fnv",
"foldhash",
"generic-array",
"hashbrown",
"heck",
"ident_case",
"indexmap",
"intl-memoizer",
"intl_pluralrules",
"libc",

View file

@ -81,9 +81,9 @@ RUN /tmp/build-fuchsia-toolchain.sh
COPY host-x86_64/dist-various-2/build-x86_64-fortanix-unknown-sgx-toolchain.sh /tmp/
RUN /tmp/build-x86_64-fortanix-unknown-sgx-toolchain.sh
RUN curl -L https://github.com/WebAssembly/wasi-sdk/releases/download/wasi-sdk-25/wasi-sdk-25.0-x86_64-linux.tar.gz | \
RUN curl -L https://github.com/WebAssembly/wasi-sdk/releases/download/wasi-sdk-27/wasi-sdk-27.0-x86_64-linux.tar.gz | \
tar -xz
ENV WASI_SDK_PATH=/tmp/wasi-sdk-25.0-x86_64-linux
ENV WASI_SDK_PATH=/tmp/wasi-sdk-27.0-x86_64-linux
COPY scripts/freebsd-toolchain.sh /tmp/
RUN /tmp/freebsd-toolchain.sh i686

View file

@ -43,7 +43,6 @@ ENV SCRIPT \
python3 ../x.py check bootstrap && \
/scripts/check-default-config-profiles.sh && \
python3 ../x.py build src/tools/build-manifest && \
python3 ../x.py test --stage 0 src/tools/compiletest && \
python3 ../x.py check compiletest --set build.compiletest-use-stage0-libtest=true && \
python3 ../x.py check --target=i686-pc-windows-gnu --host=i686-pc-windows-gnu && \
python3 ../x.py check --set build.optimized-compiler-builtins=false core alloc std --target=aarch64-unknown-linux-gnu,i686-pc-windows-msvc,i686-unknown-linux-gnu,x86_64-apple-darwin,x86_64-pc-windows-gnu,x86_64-pc-windows-msvc && \

View file

@ -30,6 +30,7 @@ ENV SCRIPT \
python3 ../x.py check && \
python3 ../x.py clippy ci && \
python3 ../x.py test --stage 1 core alloc std test proc_macro && \
python3 ../x.py test --stage 1 src/tools/compiletest && \
python3 ../x.py doc --stage 0 bootstrap && \
# Build both public and internal documentation.
RUSTDOCFLAGS=\"--document-private-items --document-hidden-items\" python3 ../x.py doc --stage 0 compiler && \
@ -37,6 +38,6 @@ ENV SCRIPT \
mkdir -p /checkout/obj/staging/doc && \
cp -r build/x86_64-unknown-linux-gnu/doc /checkout/obj/staging && \
RUSTDOCFLAGS=\"--document-private-items --document-hidden-items\" python3 ../x.py doc --stage 1 library/test && \
# The BOOTSTRAP_TRACING flag is added to verify whether the
# The BOOTSTRAP_TRACING flag is added to verify whether the
# bootstrap process compiles successfully with this flag enabled.
BOOTSTRAP_TRACING=1 python3 ../x.py --help

View file

@ -40,9 +40,9 @@ WORKDIR /
COPY scripts/sccache.sh /scripts/
RUN sh /scripts/sccache.sh
RUN curl -L https://github.com/WebAssembly/wasi-sdk/releases/download/wasi-sdk-25/wasi-sdk-25.0-x86_64-linux.tar.gz | \
RUN curl -L https://github.com/WebAssembly/wasi-sdk/releases/download/wasi-sdk-27/wasi-sdk-27.0-x86_64-linux.tar.gz | \
tar -xz
ENV WASI_SDK_PATH=/wasi-sdk-25.0-x86_64-linux
ENV WASI_SDK_PATH=/wasi-sdk-27.0-x86_64-linux
ENV RUST_CONFIGURE_ARGS \
--musl-root-x86_64=/usr/local/x86_64-linux-musl \

View file

@ -31,20 +31,11 @@ runners:
<<: *base-job
- &job-windows
os: windows-2022
<<: *base-job
# NOTE: windows-2025 has less disk space available than windows-2022,
# because the D drive is missing.
- &job-windows-25
os: windows-2025
free_disk: true
<<: *base-job
- &job-windows-8c
os: windows-2022-8core-32gb
<<: *base-job
- &job-windows-25-8c
os: windows-2025-8core-32gb
<<: *base-job
@ -491,7 +482,7 @@ auto:
NO_LLVM_ASSERTIONS: 1
NO_DEBUG_ASSERTIONS: 1
NO_OVERFLOW_CHECKS: 1
<<: *job-macos
<<: *job-macos-m1
- name: x86_64-apple-1
env:
@ -668,7 +659,7 @@ auto:
SCRIPT: python x.py build --set rust.debug=true opt-dist && PGO_HOST=x86_64-pc-windows-msvc ./build/x86_64-pc-windows-msvc/stage0-tools-bin/opt-dist windows-ci -- python x.py dist bootstrap --include-default-paths
DIST_REQUIRE_ALL_TOOLS: 1
CODEGEN_BACKENDS: llvm,cranelift
<<: *job-windows-25-8c
<<: *job-windows-8c
- name: dist-i686-msvc
env:

View file

@ -0,0 +1,265 @@
#!/bin/bash
set -euo pipefail
# Free disk space on Linux GitHub action runners
# Script inspired by https://github.com/jlumbroso/free-disk-space
# Succeed (exit 0) iff the machine hardware name reported by `uname -m`
# is x86_64; fail (exit 1) otherwise.
isX86() {
    [ "$(uname -m)" = "x86_64" ]
}
# Check if we're on a GitHub hosted runner.
# In aws codebuild, the variable RUNNER_ENVIRONMENT is "self-hosted".
isGitHubRunner() {
    # `${VAR:-}` expands to the empty string when RUNNER_ENVIRONMENT is
    # unset, so this is safe under `set -u`.
    [[ "${RUNNER_ENVIRONMENT:-}" == "github-hosted" ]]
}
# Print a separator line: 80 repetitions of the given string, then a newline.
printSeparationLine() {
    local line=""
    local i
    for ((i = 0; i < 80; i++)); do
        line+="$1"
    done
    printf '%s\n' "$line"
}
# compute available space
# Sums the "available" column (field 4, in KB) of `df -a` over every
# mounted filesystem; NR > 1 skips the header line.
# REF: https://unix.stackexchange.com/a/42049/60849
# REF: https://stackoverflow.com/a/450821/408734
getAvailableSpace() {
    df -a | awk 'NR > 1 {avail+=$4} END {print avail}'
}
# make Kb human readable (assume the input is Kb)
# Appending "000" multiplies the KB input by 1000 to get bytes, which
# `numfmt` then renders with an IEC suffix (KiB/MiB/GiB...).
# REF: https://unix.stackexchange.com/a/44087/60849
formatByteCount() {
    numfmt --to=iec-i --suffix=B --padding=7 "${1}000"
}
# Report how much disk space was freed since a `before` snapshot.
#   $1 - available space (KB) measured before the operation
#   $2 - optional title for the report
printSavedSpace() {
    local space_before=${1}
    local label=${2:-}
    local space_after
    space_after=$(getAvailableSpace)

    local saved=$((space_after - space_before))
    # Clamp to zero: background activity can make the delta negative.
    if [ "$saved" -lt 0 ]; then
        echo "::warning::Saved space is negative: $saved. Using '0' as saved space."
        saved=0
    fi

    local formatted
    formatted=$(formatByteCount "$saved")

    echo ""
    printSeparationLine "*"
    if [ -n "${label}" ]; then
        echo "=> ${label}: Saved ${formatted}"
    else
        echo "=> Saved ${formatted}"
    fi
    printSeparationLine "*"
    echo ""
}
# Print `df -h` output framed by "=" separator lines, under a caption.
#   $1 - caption text printed above the listing
printDF() {
    local heading=${1}

    printSeparationLine "="
    echo "${heading}"
    echo ""
    echo "$ df -h"
    echo ""
    df -h
    printSeparationLine "="
}
# Delete preinstalled tools, SDKs and caches that this CI never uses.
# The path set differs between GitHub-hosted runners and AWS CodeBuild.
removeUnusedFilesAndDirs() {
    # Paths removed on every runner type.
    local to_remove=(
        "/usr/share/java"
    )

    if isGitHubRunner; then
        # Paths only present on GitHub-hosted runner images.
        to_remove+=(
            "/usr/local/aws-sam-cli"
            "/usr/local/doc/cmake"
            "/usr/local/julia"*
            "/usr/local/lib/android"
            "/usr/local/share/chromedriver-"*
            "/usr/local/share/chromium"
            "/usr/local/share/cmake-"*
            "/usr/local/share/edge_driver"
            "/usr/local/share/emacs"
            "/usr/local/share/gecko_driver"
            "/usr/local/share/icons"
            "/usr/local/share/powershell"
            "/usr/local/share/vcpkg"
            "/usr/local/share/vim"
            "/usr/share/apache-maven-"*
            "/usr/share/gradle-"*
            "/usr/share/kotlinc"
            "/usr/share/miniconda"
            "/usr/share/php"
            "/usr/share/ri"
            "/usr/share/swift"
            # binaries
            "/usr/local/bin/azcopy"
            "/usr/local/bin/bicep"
            "/usr/local/bin/ccmake"
            "/usr/local/bin/cmake-"*
            "/usr/local/bin/cmake"
            "/usr/local/bin/cpack"
            "/usr/local/bin/ctest"
            "/usr/local/bin/helm"
            "/usr/local/bin/kind"
            "/usr/local/bin/kustomize"
            "/usr/local/bin/minikube"
            "/usr/local/bin/packer"
            "/usr/local/bin/phpunit"
            "/usr/local/bin/pulumi-"*
            "/usr/local/bin/pulumi"
            "/usr/local/bin/stack"
            # Haskell runtime
            "/usr/local/.ghcup"
            # Azure
            "/opt/az"
            "/usr/share/az_"*
        )

        if [ -n "${AGENT_TOOLSDIRECTORY:-}" ]; then
            # Environment variable set by GitHub Actions
            to_remove+=(
                "${AGENT_TOOLSDIRECTORY}"
            )
        else
            echo "::warning::AGENT_TOOLSDIRECTORY is not set. Skipping removal."
        fi
    else
        # Remove folders and files present in AWS CodeBuild
        to_remove+=(
            # binaries
            "/usr/local/bin/ecs-cli"
            "/usr/local/bin/eksctl"
            "/usr/local/bin/kubectl"
            "${HOME}/.gradle"
            "${HOME}/.dotnet"
            "${HOME}/.goenv"
            "${HOME}/.phpenv"
        )
    fi

    # Warn-only pass: `rm -rf` below is a no-op for missing paths, so
    # nothing is actually skipped here — the warning is informational.
    for element in "${to_remove[@]}"; do
        if [ ! -e "$element" ]; then
            # The file or directory doesn't exist.
            # Maybe it was removed in a newer version of the runner or it's not present in a
            # specific architecture (e.g. ARM).
            echo "::warning::Directory or file $element does not exist, skipping."
        fi
    done

    # Remove all files and directories at once to save time.
    sudo rm -rf "${to_remove[@]}"
}
# Run a cleanup operation and report the disk space it freed.
#   $1 - operation to execute (expanded unquoted, as before)
#   $2 - title used in the savings report
execAndMeasureSpaceChange() {
    local op=${1}
    local report_title=${2}

    local space_before
    space_before=$(getAvailableSpace)

    $op
    printSavedSpace "$space_before" "$report_title"
}
# Remove large packages
# REF: https://github.com/apache/flink/blob/master/tools/azure-pipelines/free_disk_space.sh
cleanPackages() {
    # Patterns accepted by apt-get; removed on every runner type.
    local packages=(
        '^aspnetcore-.*'
        '^dotnet-.*'
        '^llvm-.*'
        '^mongodb-.*'
        'firefox'
        'libgl1-mesa-dri'
        'mono-devel'
        'php.*'
    )

    if isGitHubRunner; then
        packages+=(
            azure-cli
        )

        # These packages are only preinstalled on x86_64 GitHub images.
        if isX86; then
            packages+=(
                'google-chrome-stable'
                'google-cloud-cli'
                'google-cloud-sdk'
                'powershell'
            )
        fi
    else
        packages+=(
            'google-chrome-stable'
        )
    fi

    sudo apt-get -qq remove -y --fix-missing "${packages[@]}"

    # Best-effort cleanup; failures only emit CI warnings.
    sudo apt-get autoremove -y || echo "::warning::The command [sudo apt-get autoremove -y] failed"
    # FIX: warning message previously read "failed failed" (duplicated word).
    sudo apt-get clean || echo "::warning::The command [sudo apt-get clean] failed"
}
# Remove Docker images.
# Ubuntu 22 runners have docker images already installed.
# They aren't present in ubuntu 24 runners.
cleanDocker() {
    echo "=> Removing the following docker images:"
    sudo docker image ls
    echo "=> Removing docker images..."
    # `|| true`: don't abort the cleanup (script runs under `set -e`)
    # if docker is unavailable or the prune fails.
    sudo docker image prune --all --force || true
}
# Remove Swap storage
cleanSwap() {
    # Tolerate failure under `set -e`: swap may not be configured.
    sudo swapoff -a || true
    sudo rm -rf /mnt/swapfile || true
    # Show the resulting memory/swap situation.
    free -h
}
# Display initial disk space stats
AVAILABLE_INITIAL=$(getAvailableSpace)

printDF "BEFORE CLEAN-UP:"
echo ""

# Run each cleanup step, reporting per-step savings.
execAndMeasureSpaceChange cleanPackages "Unused packages"
execAndMeasureSpaceChange cleanDocker "Docker images"
execAndMeasureSpaceChange cleanSwap "Swap storage"
execAndMeasureSpaceChange removeUnusedFilesAndDirs "Unused files and directories"

# Output saved space statistic
echo ""
printDF "AFTER CLEAN-UP:"

echo ""
echo ""

printSavedSpace "$AVAILABLE_INITIAL" "Total saved"

View file

@ -0,0 +1,35 @@
# Free disk space on Windows GitHub action runners.
$ErrorActionPreference = 'Stop'

# Show the volume layout before cleanup and snapshot free bytes on C:.
Get-Volume | Out-String | Write-Output
$available = $(Get-Volume C).SizeRemaining

# Large preinstalled tool/app directories that this CI does not need.
$dirs = 'C:\Program Files\Microsoft Visual Studio\2022\Enterprise\VC\Tools\Llvm',
    'C:\rtools45', 'C:\ghcup', 'C:\Program Files (x86)\Android',
    'C:\Program Files\Google\Chrome', 'C:\Program Files (x86)\Microsoft\Edge',
    'C:\Program Files\Mozilla Firefox', 'C:\Program Files\MySQL', 'C:\Julia',
    'C:\Program Files\MongoDB', 'C:\Program Files\Azure Cosmos DB Emulator',
    'C:\Program Files\PostgreSQL', 'C:\Program Files\Unity Hub',
    'C:\Strawberry', 'C:\hostedtoolcache\windows\Java_Temurin-Hotspot_jdk'

# Delete each directory in its own thread job so removals run concurrently;
# `$input` inside the script block receives the piped -InputObject value.
foreach ($dir in $dirs) {
    Start-ThreadJob -InputObject $dir {
        Remove-Item -Recurse -Force -LiteralPath $input
    } | Out-Null
}

# Wait for every job and surface any errors as GitHub Actions warnings
# instead of failing the build.
foreach ($job in Get-Job) {
    Wait-Job $job | Out-Null
    if ($job.Error) {
        Write-Output "::warning file=$PSCommandPath::$($job.Error)"
    }
    Remove-Job $job
}

# Show the volume layout after cleanup and report GB reclaimed on C:.
Get-Volume | Out-String | Write-Output
$saved = ($(Get-Volume C).SizeRemaining - $available) / 1gb
$savedRounded = [math]::Round($saved, 3)
Write-Output "total space saved: $savedRounded GB"

View file

@ -1,266 +1,10 @@
#!/bin/bash
set -euo pipefail
# Free disk space on Linux GitHub action runners
# Script inspired by https://github.com/jlumbroso/free-disk-space
script_dir=$(dirname "$0")
isX86() {
local arch
arch=$(uname -m)
if [ "$arch" = "x86_64" ]; then
return 0
else
return 1
fi
}
# Check if we're on a GitHub hosted runner.
# In aws codebuild, the variable RUNNER_ENVIRONMENT is "self-hosted".
isGitHubRunner() {
# `:-` means "use the value of RUNNER_ENVIRONMENT if it exists, otherwise use an empty string".
if [[ "${RUNNER_ENVIRONMENT:-}" == "github-hosted" ]]; then
return 0
else
return 1
fi
}
# print a line of the specified character
printSeparationLine() {
for ((i = 0; i < 80; i++)); do
printf "%s" "$1"
done
printf "\n"
}
# compute available space
# REF: https://unix.stackexchange.com/a/42049/60849
# REF: https://stackoverflow.com/a/450821/408734
getAvailableSpace() {
df -a | awk 'NR > 1 {avail+=$4} END {print avail}'
}
# make Kb human readable (assume the input is Kb)
# REF: https://unix.stackexchange.com/a/44087/60849
formatByteCount() {
numfmt --to=iec-i --suffix=B --padding=7 "${1}000"
}
# macro to output saved space
printSavedSpace() {
# Disk space before the operation
local before=${1}
local title=${2:-}
local after
after=$(getAvailableSpace)
local saved=$((after - before))
if [ "$saved" -lt 0 ]; then
echo "::warning::Saved space is negative: $saved. Using '0' as saved space."
saved=0
fi
echo ""
printSeparationLine "*"
if [ -n "${title}" ]; then
echo "=> ${title}: Saved $(formatByteCount "$saved")"
else
echo "=> Saved $(formatByteCount "$saved")"
fi
printSeparationLine "*"
echo ""
}
# macro to print output of df with caption
printDF() {
local caption=${1}
printSeparationLine "="
echo "${caption}"
echo ""
echo "$ df -h"
echo ""
df -h
printSeparationLine "="
}
removeUnusedFilesAndDirs() {
local to_remove=(
"/usr/share/java"
)
if isGitHubRunner; then
to_remove+=(
"/usr/local/aws-sam-cli"
"/usr/local/doc/cmake"
"/usr/local/julia"*
"/usr/local/lib/android"
"/usr/local/share/chromedriver-"*
"/usr/local/share/chromium"
"/usr/local/share/cmake-"*
"/usr/local/share/edge_driver"
"/usr/local/share/emacs"
"/usr/local/share/gecko_driver"
"/usr/local/share/icons"
"/usr/local/share/powershell"
"/usr/local/share/vcpkg"
"/usr/local/share/vim"
"/usr/share/apache-maven-"*
"/usr/share/gradle-"*
"/usr/share/kotlinc"
"/usr/share/miniconda"
"/usr/share/php"
"/usr/share/ri"
"/usr/share/swift"
# binaries
"/usr/local/bin/azcopy"
"/usr/local/bin/bicep"
"/usr/local/bin/ccmake"
"/usr/local/bin/cmake-"*
"/usr/local/bin/cmake"
"/usr/local/bin/cpack"
"/usr/local/bin/ctest"
"/usr/local/bin/helm"
"/usr/local/bin/kind"
"/usr/local/bin/kustomize"
"/usr/local/bin/minikube"
"/usr/local/bin/packer"
"/usr/local/bin/phpunit"
"/usr/local/bin/pulumi-"*
"/usr/local/bin/pulumi"
"/usr/local/bin/stack"
# Haskell runtime
"/usr/local/.ghcup"
# Azure
"/opt/az"
"/usr/share/az_"*
)
if [ -n "${AGENT_TOOLSDIRECTORY:-}" ]; then
# Environment variable set by GitHub Actions
to_remove+=(
"${AGENT_TOOLSDIRECTORY}"
)
else
echo "::warning::AGENT_TOOLSDIRECTORY is not set. Skipping removal."
fi
else
# Remove folders and files present in AWS CodeBuild
to_remove+=(
# binaries
"/usr/local/bin/ecs-cli"
"/usr/local/bin/eksctl"
"/usr/local/bin/kubectl"
"${HOME}/.gradle"
"${HOME}/.dotnet"
"${HOME}/.goenv"
"${HOME}/.phpenv"
)
fi
for element in "${to_remove[@]}"; do
if [ ! -e "$element" ]; then
# The file or directory doesn't exist.
# Maybe it was removed in a newer version of the runner or it's not present in a
# specific architecture (e.g. ARM).
echo "::warning::Directory or file $element does not exist, skipping."
fi
done
# Remove all files and directories at once to save time.
sudo rm -rf "${to_remove[@]}"
}
execAndMeasureSpaceChange() {
local operation=${1} # Function to execute
local title=${2}
local before
before=$(getAvailableSpace)
$operation
printSavedSpace "$before" "$title"
}
# Remove large packages
# REF: https://github.com/apache/flink/blob/master/tools/azure-pipelines/free_disk_space.sh
cleanPackages() {
local packages=(
'^aspnetcore-.*'
'^dotnet-.*'
'^llvm-.*'
'^mongodb-.*'
'firefox'
'libgl1-mesa-dri'
'mono-devel'
'php.*'
)
if isGitHubRunner; then
packages+=(
azure-cli
)
if isX86; then
packages+=(
'google-chrome-stable'
'google-cloud-cli'
'google-cloud-sdk'
'powershell'
)
fi
else
packages+=(
'google-chrome-stable'
)
fi
sudo apt-get -qq remove -y --fix-missing "${packages[@]}"
sudo apt-get autoremove -y || echo "::warning::The command [sudo apt-get autoremove -y] failed"
sudo apt-get clean || echo "::warning::The command [sudo apt-get clean] failed failed"
}
# Remove Docker images.
# Ubuntu 22 runners have docker images already installed.
# They aren't present in ubuntu 24 runners.
cleanDocker() {
echo "=> Removing the following docker images:"
sudo docker image ls
echo "=> Removing docker images..."
sudo docker image prune --all --force || true
}
# Remove Swap storage
cleanSwap() {
sudo swapoff -a || true
sudo rm -rf /mnt/swapfile || true
free -h
}
# Display initial disk space stats
AVAILABLE_INITIAL=$(getAvailableSpace)
printDF "BEFORE CLEAN-UP:"
echo ""
execAndMeasureSpaceChange cleanPackages "Unused packages"
execAndMeasureSpaceChange cleanDocker "Docker images"
execAndMeasureSpaceChange cleanSwap "Swap storage"
execAndMeasureSpaceChange removeUnusedFilesAndDirs "Unused files and directories"
# Output saved space statistic
echo ""
printDF "AFTER CLEAN-UP:"
echo ""
echo ""
printSavedSpace "$AVAILABLE_INITIAL" "Total saved"
# Dispatch to the OS-specific cleanup script next to this one.
# FIX: quote "$script_dir" so a path containing spaces doesn't word-split.
if [[ "${RUNNER_OS:-}" == "Windows" ]]; then
    pwsh "$script_dir"/free-disk-space-windows.ps1
else
    "$script_dir"/free-disk-space-linux.sh
fi

View file

@ -1 +1 @@
2b5e239c6b86cde974b0ef0f8e23754fb08ff3c5
32e7a4b92b109c24e9822c862a7c74436b50e564

View file

@ -294,8 +294,6 @@ See [Pretty-printer](compiletest.md#pretty-printer-tests).
- `no-auto-check-cfg` — disable auto check-cfg (only for `--check-cfg` tests)
- [`revisions`](compiletest.md#revisions) — compile multiple times
- [`unused-revision-names`](compiletest.md#ignoring-unused-revision-names) -
suppress tidy checks for mentioning unknown revision names
- [`forbid-output`](compiletest.md#incremental-tests) — incremental cfail rejects
output pattern
- [`should-ice`](compiletest.md#incremental-tests) — incremental cfail should
@ -316,6 +314,17 @@ test suites that use those tools:
- `llvm-cov-flags` adds extra flags when running LLVM's `llvm-cov` tool.
- Used by [coverage tests](compiletest.md#coverage-tests) in `coverage-run` mode.
### Tidy specific directives
The following directives control how the [tidy script](../conventions.md#formatting)
verifies tests.
- `ignore-tidy-target-specific-tests` disables checking that the appropriate
LLVM component is required (via a `needs-llvm-components` directive) when a
test is compiled for a specific target (via the `--target` flag in a
`compile-flag` directive).
- [`unused-revision-names`](compiletest.md#ignoring-unused-revision-names) -
suppress tidy checks for mentioning unknown revision names.
## Substitutions

View file

@ -395,6 +395,12 @@ flags to control that behavior. When the `--extern-html-root-url` flag is given
one of your dependencies, rustdoc use that URL for those docs. Keep in mind that if those docs exist
in the output directory, those local docs will still override this flag.
The names in this flag are first matched against the names given in the `--extern name=` flags,
which allows selecting between multiple crates with the same name (e.g. multiple versions of
the same crate). For transitive dependencies that haven't been loaded via an `--extern` flag, matching
falls back to using crate names only, without the ability to distinguish between multiple crates with
the same name.
## `-Z force-unstable-if-unmarked`
Using this flag looks like this:

View file

@ -36,6 +36,7 @@ use std::mem;
use rustc_ast::token::{Token, TokenKind};
use rustc_ast::tokenstream::{TokenStream, TokenTree};
use rustc_attr_data_structures::{AttributeKind, find_attr};
use rustc_data_structures::fx::{FxHashMap, FxHashSet, FxIndexMap, FxIndexSet, IndexEntry};
use rustc_errors::codes::*;
use rustc_errors::{FatalError, struct_span_code_err};
@ -987,28 +988,17 @@ fn clean_proc_macro<'tcx>(
kind: MacroKind,
cx: &mut DocContext<'tcx>,
) -> ItemKind {
if kind != MacroKind::Derive {
return ProcMacroItem(ProcMacro { kind, helpers: vec![] });
}
let attrs = cx.tcx.hir_attrs(item.hir_id());
if kind == MacroKind::Derive
&& let Some(derive_name) =
hir_attr_lists(attrs, sym::proc_macro_derive).find_map(|mi| mi.ident())
{
*name = derive_name.name;
}
let Some((trait_name, helper_attrs)) = find_attr!(attrs, AttributeKind::ProcMacroDerive { trait_name, helper_attrs, ..} => (*trait_name, helper_attrs))
else {
return ProcMacroItem(ProcMacro { kind, helpers: vec![] });
};
*name = trait_name;
let helpers = helper_attrs.iter().copied().collect();
let mut helpers = Vec::new();
for mi in hir_attr_lists(attrs, sym::proc_macro_derive) {
if !mi.has_name(sym::attributes) {
continue;
}
if let Some(list) = mi.meta_item_list() {
for inner_mi in list {
if let Some(ident) = inner_mi.ident() {
helpers.push(ident.name);
}
}
}
}
ProcMacroItem(ProcMacro { kind, helpers })
}
@ -1021,17 +1011,16 @@ fn clean_fn_or_proc_macro<'tcx>(
cx: &mut DocContext<'tcx>,
) -> ItemKind {
let attrs = cx.tcx.hir_attrs(item.hir_id());
let macro_kind = attrs.iter().find_map(|a| {
if a.has_name(sym::proc_macro) {
Some(MacroKind::Bang)
} else if a.has_name(sym::proc_macro_derive) {
Some(MacroKind::Derive)
} else if a.has_name(sym::proc_macro_attribute) {
Some(MacroKind::Attr)
} else {
None
}
});
let macro_kind = if find_attr!(attrs, AttributeKind::ProcMacro(..)) {
Some(MacroKind::Bang)
} else if find_attr!(attrs, AttributeKind::ProcMacroDerive { .. }) {
Some(MacroKind::Derive)
} else if find_attr!(attrs, AttributeKind::ProcMacroAttribute(..)) {
Some(MacroKind::Attr)
} else {
None
};
match macro_kind {
Some(kind) => clean_proc_macro(item, name, kind, cx),
None => {

View file

@ -173,6 +173,9 @@ pub(crate) struct Options {
/// Arguments to be used when compiling doctests.
pub(crate) doctest_build_args: Vec<String>,
/// Target modifiers.
pub(crate) target_modifiers: BTreeMap<OptionsTargetModifiers, String>,
}
impl fmt::Debug for Options {
@ -377,7 +380,7 @@ impl Options {
early_dcx: &mut EarlyDiagCtxt,
matches: &getopts::Matches,
args: Vec<String>,
) -> Option<(InputMode, Options, RenderOptions)> {
) -> Option<(InputMode, Options, RenderOptions, Vec<PathBuf>)> {
// Check for unstable options.
nightly_options::check_nightly_options(early_dcx, matches, &opts());
@ -640,10 +643,13 @@ impl Options {
let extension_css = matches.opt_str("e").map(|s| PathBuf::from(&s));
if let Some(ref p) = extension_css
&& !p.is_file()
{
dcx.fatal("option --extend-css argument must be a file");
let mut loaded_paths = Vec::new();
if let Some(ref p) = extension_css {
loaded_paths.push(p.clone());
if !p.is_file() {
dcx.fatal("option --extend-css argument must be a file");
}
}
let mut themes = Vec::new();
@ -687,6 +693,7 @@ impl Options {
))
.emit();
}
loaded_paths.push(theme_file.clone());
themes.push(StylePath { path: theme_file });
}
}
@ -705,6 +712,7 @@ impl Options {
&mut id_map,
edition,
&None,
&mut loaded_paths,
) else {
dcx.fatal("`ExternalHtml::load` failed");
};
@ -796,7 +804,8 @@ impl Options {
let scrape_examples_options = ScrapeExamplesOptions::new(matches, dcx);
let with_examples = matches.opt_strs("with-examples");
let call_locations = crate::scrape_examples::load_call_locations(with_examples, dcx);
let call_locations =
crate::scrape_examples::load_call_locations(with_examples, dcx, &mut loaded_paths);
let doctest_build_args = matches.opt_strs("doctest-build-arg");
let unstable_features =
@ -846,6 +855,7 @@ impl Options {
unstable_features,
expanded_args: args,
doctest_build_args,
target_modifiers,
};
let render_options = RenderOptions {
output,
@ -881,7 +891,7 @@ impl Options {
parts_out_dir,
disable_minification,
};
Some((input, options, render_options))
Some((input, options, render_options, loaded_paths))
}
}

View file

@ -214,6 +214,7 @@ pub(crate) fn create_config(
scrape_examples_options,
expanded_args,
remap_path_prefix,
target_modifiers,
..
}: RustdocOptions,
render_options: &RenderOptions,
@ -277,6 +278,7 @@ pub(crate) fn create_config(
} else {
OutputTypes::new(&[])
},
target_modifiers,
..Options::default()
};

View file

@ -11,7 +11,8 @@ use std::path::{Path, PathBuf};
use std::process::{self, Command, Stdio};
use std::sync::atomic::{AtomicUsize, Ordering};
use std::sync::{Arc, Mutex};
use std::{panic, str};
use std::time::{Duration, Instant};
use std::{fmt, panic, str};
pub(crate) use make::{BuildDocTestBuilder, DocTestBuilder};
pub(crate) use markdown::test as test_markdown;
@ -36,6 +37,50 @@ use crate::config::{Options as RustdocOptions, OutputFormat};
use crate::html::markdown::{ErrorCodes, Ignore, LangString, MdRelLine};
use crate::lint::init_lints;
/// Type used to display times (compilation and total) information for merged doctests.
struct MergedDoctestTimes {
total_time: Instant,
/// Total time spent compiling all merged doctests.
compilation_time: Duration,
/// This field is used to keep track of how many merged doctests we (tried to) compile.
added_compilation_times: usize,
}
impl MergedDoctestTimes {
fn new() -> Self {
Self {
total_time: Instant::now(),
compilation_time: Duration::default(),
added_compilation_times: 0,
}
}
fn add_compilation_time(&mut self, duration: Duration) {
self.compilation_time += duration;
self.added_compilation_times += 1;
}
fn display_times(&self) {
// If no merged doctest was compiled, then there is nothing to display since the numbers
// displayed by `libtest` for standalone tests are already accurate (they include both
// compilation and runtime).
if self.added_compilation_times > 0 {
println!("{self}");
}
}
}
impl fmt::Display for MergedDoctestTimes {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
write!(
f,
"all doctests ran in {:.2}s; merged doctests compilation took {:.2}s",
self.total_time.elapsed().as_secs_f64(),
self.compilation_time.as_secs_f64(),
)
}
}
/// Options that apply to all doctests in a crate or Markdown file (for `rustdoc foo.md`).
#[derive(Clone)]
pub(crate) struct GlobalTestOptions {
@ -295,6 +340,7 @@ pub(crate) fn run_tests(
let mut nb_errors = 0;
let mut ran_edition_tests = 0;
let mut times = MergedDoctestTimes::new();
let target_str = rustdoc_options.target.to_string();
for (MergeableTestKey { edition, global_crate_attrs_hash }, mut doctests) in mergeable_tests {
@ -314,13 +360,15 @@ pub(crate) fn run_tests(
for (doctest, scraped_test) in &doctests {
tests_runner.add_test(doctest, scraped_test, &target_str);
}
if let Ok(success) = tests_runner.run_merged_tests(
let (duration, ret) = tests_runner.run_merged_tests(
rustdoc_test_options,
edition,
&opts,
&test_args,
rustdoc_options,
) {
);
times.add_compilation_time(duration);
if let Ok(success) = ret {
ran_edition_tests += 1;
if !success {
nb_errors += 1;
@ -354,11 +402,13 @@ pub(crate) fn run_tests(
test::test_main_with_exit_callback(&test_args, standalone_tests, None, || {
// We ensure temp dir destructor is called.
std::mem::drop(temp_dir.take());
times.display_times();
});
}
if nb_errors != 0 {
// We ensure temp dir destructor is called.
std::mem::drop(temp_dir);
times.display_times();
// libtest::ERROR_EXIT_CODE is not public but it's the same value.
std::process::exit(101);
}
@ -496,16 +546,19 @@ impl RunnableDocTest {
///
/// This is the function that calculates the compiler command line, invokes the compiler, then
/// invokes the test or tests in a separate executable (if applicable).
///
/// Returns a tuple containing the `Duration` of the compilation and the `Result` of the test.
fn run_test(
doctest: RunnableDocTest,
rustdoc_options: &RustdocOptions,
supports_color: bool,
report_unused_externs: impl Fn(UnusedExterns),
) -> Result<(), TestFailure> {
) -> (Duration, Result<(), TestFailure>) {
let langstr = &doctest.langstr;
// Make sure we emit well-formed executable names for our target.
let rust_out = add_exe_suffix("rust_out".to_owned(), &rustdoc_options.target);
let output_file = doctest.test_opts.outdir.path().join(rust_out);
let instant = Instant::now();
// Common arguments used for compiling the doctest runner.
// On merged doctests, the compiler is invoked twice: once for the test code itself,
@ -589,7 +642,7 @@ fn run_test(
if std::fs::write(&input_file, &doctest.full_test_code).is_err() {
// If we cannot write this file for any reason, we leave. All combined tests will be
// tested as standalone tests.
return Err(TestFailure::CompileError);
return (Duration::default(), Err(TestFailure::CompileError));
}
if !rustdoc_options.nocapture {
// If `nocapture` is disabled, then we don't display rustc's output when compiling
@ -660,7 +713,7 @@ fn run_test(
if std::fs::write(&runner_input_file, merged_test_code).is_err() {
// If we cannot write this file for any reason, we leave. All combined tests will be
// tested as standalone tests.
return Err(TestFailure::CompileError);
return (instant.elapsed(), Err(TestFailure::CompileError));
}
if !rustdoc_options.nocapture {
// If `nocapture` is disabled, then we don't display rustc's output when compiling
@ -713,7 +766,7 @@ fn run_test(
let _bomb = Bomb(&out);
match (output.status.success(), langstr.compile_fail) {
(true, true) => {
return Err(TestFailure::UnexpectedCompilePass);
return (instant.elapsed(), Err(TestFailure::UnexpectedCompilePass));
}
(true, false) => {}
(false, true) => {
@ -729,17 +782,18 @@ fn run_test(
.collect();
if !missing_codes.is_empty() {
return Err(TestFailure::MissingErrorCodes(missing_codes));
return (instant.elapsed(), Err(TestFailure::MissingErrorCodes(missing_codes)));
}
}
}
(false, false) => {
return Err(TestFailure::CompileError);
return (instant.elapsed(), Err(TestFailure::CompileError));
}
}
let duration = instant.elapsed();
if doctest.no_run {
return Ok(());
return (duration, Ok(()));
}
// Run the code!
@ -771,17 +825,17 @@ fn run_test(
cmd.output()
};
match result {
Err(e) => return Err(TestFailure::ExecutionError(e)),
Err(e) => return (duration, Err(TestFailure::ExecutionError(e))),
Ok(out) => {
if langstr.should_panic && out.status.success() {
return Err(TestFailure::UnexpectedRunPass);
return (duration, Err(TestFailure::UnexpectedRunPass));
} else if !langstr.should_panic && !out.status.success() {
return Err(TestFailure::ExecutionFailure(out));
return (duration, Err(TestFailure::ExecutionFailure(out)));
}
}
}
Ok(())
(duration, Ok(()))
}
/// Converts a path intended to use as a command to absolute if it is
@ -1071,7 +1125,7 @@ fn doctest_run_fn(
no_run: scraped_test.no_run(&rustdoc_options),
merged_test_code: None,
};
let res =
let (_, res) =
run_test(runnable_test, &rustdoc_options, doctest.supports_color, report_unused_externs);
if let Err(err) = res {

View file

@ -1,4 +1,5 @@
use std::fmt::Write;
use std::time::Duration;
use rustc_data_structures::fx::FxIndexSet;
use rustc_span::edition::Edition;
@ -67,6 +68,10 @@ impl DocTestRunner {
self.nb_tests += 1;
}
/// Returns a tuple containing the `Duration` of the compilation and the `Result` of the test.
///
/// If compilation failed, it will return `Err`, otherwise it will return `Ok` containing if
/// the test ran successfully.
pub(crate) fn run_merged_tests(
&mut self,
test_options: IndividualTestOptions,
@ -74,7 +79,7 @@ impl DocTestRunner {
opts: &GlobalTestOptions,
test_args: &[String],
rustdoc_options: &RustdocOptions,
) -> Result<bool, ()> {
) -> (Duration, Result<bool, ()>) {
let mut code = "\
#![allow(unused_extern_crates)]
#![allow(internal_features)]
@ -204,9 +209,9 @@ std::process::Termination::report(test::test_main(test_args, tests, None))
no_run: false,
merged_test_code: Some(code),
};
let ret =
let (duration, ret) =
run_test(runnable_test, rustdoc_options, self.supports_color, |_: UnusedExterns| {});
if let Err(TestFailure::CompileError) = ret { Err(()) } else { Ok(ret.is_ok()) }
(duration, if let Err(TestFailure::CompileError) = ret { Err(()) } else { Ok(ret.is_ok()) })
}
}

View file

@ -1,4 +1,4 @@
use std::path::Path;
use std::path::{Path, PathBuf};
use std::{fs, str};
use rustc_errors::DiagCtxtHandle;
@ -32,12 +32,13 @@ impl ExternalHtml {
id_map: &mut IdMap,
edition: Edition,
playground: &Option<Playground>,
loaded_paths: &mut Vec<PathBuf>,
) -> Option<ExternalHtml> {
let codes = ErrorCodes::from(nightly_build);
let ih = load_external_files(in_header, dcx)?;
let ih = load_external_files(in_header, dcx, loaded_paths)?;
let bc = {
let mut bc = load_external_files(before_content, dcx)?;
let m_bc = load_external_files(md_before_content, dcx)?;
let mut bc = load_external_files(before_content, dcx, loaded_paths)?;
let m_bc = load_external_files(md_before_content, dcx, loaded_paths)?;
Markdown {
content: &m_bc,
links: &[],
@ -52,8 +53,8 @@ impl ExternalHtml {
bc
};
let ac = {
let mut ac = load_external_files(after_content, dcx)?;
let m_ac = load_external_files(md_after_content, dcx)?;
let mut ac = load_external_files(after_content, dcx, loaded_paths)?;
let m_ac = load_external_files(md_after_content, dcx, loaded_paths)?;
Markdown {
content: &m_ac,
links: &[],
@ -79,8 +80,10 @@ pub(crate) enum LoadStringError {
pub(crate) fn load_string<P: AsRef<Path>>(
file_path: P,
dcx: DiagCtxtHandle<'_>,
loaded_paths: &mut Vec<PathBuf>,
) -> Result<String, LoadStringError> {
let file_path = file_path.as_ref();
loaded_paths.push(file_path.to_owned());
let contents = match fs::read(file_path) {
Ok(bytes) => bytes,
Err(e) => {
@ -101,10 +104,14 @@ pub(crate) fn load_string<P: AsRef<Path>>(
}
}
fn load_external_files(names: &[String], dcx: DiagCtxtHandle<'_>) -> Option<String> {
fn load_external_files(
names: &[String],
dcx: DiagCtxtHandle<'_>,
loaded_paths: &mut Vec<PathBuf>,
) -> Option<String> {
let mut out = String::new();
for name in names {
let Ok(s) = load_string(name, dcx) else { return None };
let Ok(s) = load_string(name, dcx, loaded_paths) else { return None };
out.push_str(&s);
out.push('\n');
}

View file

@ -4,6 +4,7 @@ use rustc_ast::join_path_syms;
use rustc_attr_data_structures::StabilityLevel;
use rustc_data_structures::fx::{FxHashMap, FxHashSet, FxIndexMap, FxIndexSet};
use rustc_hir::def_id::{CrateNum, DefId, DefIdMap, DefIdSet};
use rustc_metadata::creader::CStore;
use rustc_middle::ty::{self, TyCtxt};
use rustc_span::Symbol;
use tracing::debug;
@ -158,18 +159,33 @@ impl Cache {
assert!(cx.external_traits.is_empty());
cx.cache.traits = mem::take(&mut krate.external_traits);
let render_options = &cx.render_options;
let extern_url_takes_precedence = render_options.extern_html_root_takes_precedence;
let dst = &render_options.output;
// Make `--extern-html-root-url` support the same names as `--extern` whenever possible
let cstore = CStore::from_tcx(tcx);
for (name, extern_url) in &render_options.extern_html_root_urls {
if let Some(crate_num) = cstore.resolved_extern_crate(Symbol::intern(name)) {
let e = ExternalCrate { crate_num };
let location = e.location(Some(extern_url), extern_url_takes_precedence, dst, tcx);
cx.cache.extern_locations.insert(e.crate_num, location);
}
}
// Cache where all our extern crates are located
// FIXME: this part is specific to HTML so it'd be nice to remove it from the common code
// This is also used in the JSON output.
for &crate_num in tcx.crates(()) {
let e = ExternalCrate { crate_num };
let name = e.name(tcx);
let render_options = &cx.render_options;
let extern_url = render_options.extern_html_root_urls.get(name.as_str()).map(|u| &**u);
let extern_url_takes_precedence = render_options.extern_html_root_takes_precedence;
let dst = &render_options.output;
let location = e.location(extern_url, extern_url_takes_precedence, dst, tcx);
cx.cache.extern_locations.insert(e.crate_num, location);
cx.cache.extern_locations.entry(e.crate_num).or_insert_with(|| {
// falls back to matching by crates' own names, because
// transitive dependencies and injected crates may be loaded without `--extern`
let extern_url =
render_options.extern_html_root_urls.get(name.as_str()).map(|u| &**u);
e.location(extern_url, extern_url_takes_precedence, dst, tcx)
});
cx.cache.external_paths.insert(e.def_id(), (vec![name], ItemType::Module));
}

View file

@ -799,7 +799,7 @@ fn main_args(early_dcx: &mut EarlyDiagCtxt, at_args: &[String]) {
// Note that we discard any distinction between different non-zero exit
// codes from `from_matches` here.
let (input, options, render_options) =
let (input, options, render_options, loaded_paths) =
match config::Options::from_matches(early_dcx, &matches, args) {
Some(opts) => opts,
None => return,
@ -870,6 +870,12 @@ fn main_args(early_dcx: &mut EarlyDiagCtxt, at_args: &[String]) {
interface::run_compiler(config, |compiler| {
let sess = &compiler.sess;
// Register the loaded external files in the source map so they show up in depinfo.
// We can't load them via the source map because it gets created after we process the options.
for external_path in &loaded_paths {
let _ = sess.source_map().load_file(external_path);
}
if sess.opts.describe_lints {
rustc_driver::describe_lints(sess, registered_lints);
return;

View file

@ -333,9 +333,11 @@ pub(crate) fn run(
pub(crate) fn load_call_locations(
with_examples: Vec<String>,
dcx: DiagCtxtHandle<'_>,
loaded_paths: &mut Vec<PathBuf>,
) -> AllCallLocations {
let mut all_calls: AllCallLocations = FxIndexMap::default();
for path in with_examples {
loaded_paths.push(path.clone().into());
let bytes = match fs::read(&path) {
Ok(bytes) => bytes,
Err(e) => dcx.fatal(format!("failed to load examples: {e}")),

View file

@ -98,7 +98,7 @@ impl<'tcx> LateLintPass<'tcx> for AssigningClones {
// That is overly conservative - the lint should fire even if there was no initializer,
// but the variable has been initialized before `lhs` was evaluated.
&& path_to_local(lhs).is_none_or(|lhs| local_is_initialized(cx, lhs))
&& let Some(resolved_impl) = cx.tcx.impl_of_method(resolved_fn.def_id())
&& let Some(resolved_impl) = cx.tcx.impl_of_assoc(resolved_fn.def_id())
// Derived forms don't implement `clone_from`/`clone_into`.
// See https://github.com/rust-lang/rust/pull/98445#issuecomment-1190681305
&& !cx.tcx.is_builtin_derived(resolved_impl)

View file

@ -63,7 +63,7 @@ fn is_used_as_unaligned(cx: &LateContext<'_>, e: &Expr<'_>) -> bool {
ExprKind::MethodCall(name, self_arg, ..) if self_arg.hir_id == e.hir_id => {
if matches!(name.ident.name, sym::read_unaligned | sym::write_unaligned)
&& let Some(def_id) = cx.typeck_results().type_dependent_def_id(parent.hir_id)
&& let Some(def_id) = cx.tcx.impl_of_method(def_id)
&& let Some(def_id) = cx.tcx.impl_of_assoc(def_id)
&& cx.tcx.type_of(def_id).instantiate_identity().is_raw_ptr()
{
true

View file

@ -37,7 +37,7 @@ fn get_const_name_and_ty_name(
} else {
return None;
}
} else if let Some(impl_id) = cx.tcx.impl_of_method(method_def_id)
} else if let Some(impl_id) = cx.tcx.impl_of_assoc(method_def_id)
&& let Some(ty_name) = get_primitive_ty_name(cx.tcx.type_of(impl_id).instantiate_identity())
&& matches!(
method_name,

View file

@ -364,7 +364,7 @@ impl<'tcx> LateLintPass<'tcx> for Dereferencing<'tcx> {
// * `&self` methods on `&T` can have auto-borrow, but `&self` methods on `T` will take
// priority.
if let Some(fn_id) = typeck.type_dependent_def_id(hir_id)
&& let Some(trait_id) = cx.tcx.trait_of_item(fn_id)
&& let Some(trait_id) = cx.tcx.trait_of_assoc(fn_id)
&& let arg_ty = cx.tcx.erase_regions(adjusted_ty)
&& let ty::Ref(_, sub_ty, _) = *arg_ty.kind()
&& let args =

View file

@ -339,7 +339,7 @@ fn check_with_condition<'tcx>(
ExprKind::Path(QPath::TypeRelative(_, name)) => {
if name.ident.name == sym::MIN
&& let Some(const_id) = cx.typeck_results().type_dependent_def_id(cond_num_val.hir_id)
&& let Some(impl_id) = cx.tcx.impl_of_method(const_id)
&& let Some(impl_id) = cx.tcx.impl_of_assoc(const_id)
&& let None = cx.tcx.impl_trait_ref(impl_id) // An inherent impl
&& cx.tcx.type_of(impl_id).instantiate_identity().is_integral()
{
@ -350,7 +350,7 @@ fn check_with_condition<'tcx>(
if let ExprKind::Path(QPath::TypeRelative(_, name)) = func.kind
&& name.ident.name == sym::min_value
&& let Some(func_id) = cx.typeck_results().type_dependent_def_id(func.hir_id)
&& let Some(impl_id) = cx.tcx.impl_of_method(func_id)
&& let Some(impl_id) = cx.tcx.impl_of_assoc(func_id)
&& let None = cx.tcx.impl_trait_ref(impl_id) // An inherent impl
&& cx.tcx.type_of(impl_id).instantiate_identity().is_integral()
{

View file

@ -317,7 +317,7 @@ impl<'tcx> Visitor<'tcx> for VarVisitor<'_, 'tcx> {
.cx
.typeck_results()
.type_dependent_def_id(expr.hir_id)
.and_then(|def_id| self.cx.tcx.trait_of_item(def_id))
.and_then(|def_id| self.cx.tcx.trait_of_assoc(def_id))
&& ((meth.ident.name == sym::index && self.cx.tcx.lang_items().index_trait() == Some(trait_id))
|| (meth.ident.name == sym::index_mut && self.cx.tcx.lang_items().index_mut_trait() == Some(trait_id)))
&& !self.check(args_1, args_0, expr)

View file

@ -14,7 +14,7 @@ pub(super) fn check<'tcx>(
bytes_recv: &'tcx hir::Expr<'_>,
) {
if let Some(bytes_id) = cx.typeck_results().type_dependent_def_id(count_recv.hir_id)
&& let Some(impl_id) = cx.tcx.impl_of_method(bytes_id)
&& let Some(impl_id) = cx.tcx.impl_of_assoc(bytes_id)
&& cx.tcx.type_of(impl_id).instantiate_identity().is_str()
&& let ty = cx.typeck_results().expr_ty(bytes_recv).peel_refs()
&& (ty.is_str() || is_type_lang_item(cx, ty, hir::LangItem::String))

View file

@ -30,7 +30,7 @@ pub(super) fn check<'tcx>(
}
if let Some(method_id) = cx.typeck_results().type_dependent_def_id(expr.hir_id)
&& let Some(impl_id) = cx.tcx.impl_of_method(method_id)
&& let Some(impl_id) = cx.tcx.impl_of_assoc(method_id)
&& cx.tcx.type_of(impl_id).instantiate_identity().is_str()
&& let ExprKind::Lit(Spanned {
node: LitKind::Str(ext_literal, ..),

View file

@ -28,7 +28,7 @@ pub(super) fn check(
if cx
.typeck_results()
.type_dependent_def_id(expr.hir_id)
.and_then(|id| cx.tcx.trait_of_item(id))
.and_then(|id| cx.tcx.trait_of_assoc(id))
.zip(cx.tcx.lang_items().clone_trait())
.is_none_or(|(x, y)| x != y)
{

View file

@ -18,7 +18,7 @@ pub(super) fn check<'tcx>(
arg: &'tcx hir::Expr<'_>,
) {
if let Some(method_id) = cx.typeck_results().type_dependent_def_id(expr.hir_id)
&& let Some(impl_id) = cx.tcx.impl_of_method(method_id)
&& let Some(impl_id) = cx.tcx.impl_of_assoc(method_id)
&& let identity = cx.tcx.type_of(impl_id).instantiate_identity()
&& let hir::ExprKind::Lit(Spanned {
node: LitKind::Int(Pu128(0), _),

View file

@ -50,7 +50,7 @@ pub fn is_clone_like(cx: &LateContext<'_>, method_name: Symbol, method_def_id: h
sym::to_path_buf => is_diag_item_method(cx, method_def_id, sym::Path),
sym::to_vec => cx
.tcx
.impl_of_method(method_def_id)
.impl_of_assoc(method_def_id)
.filter(|&impl_did| {
cx.tcx.type_of(impl_did).instantiate_identity().is_slice() && cx.tcx.impl_trait_ref(impl_did).is_none()
})

View file

@ -44,9 +44,9 @@ pub(super) fn check<'tcx>(
let typeck = cx.typeck_results();
if let Some(iter_id) = cx.tcx.get_diagnostic_item(sym::Iterator)
&& let Some(method_id) = typeck.type_dependent_def_id(expr.hir_id)
&& cx.tcx.trait_of_item(method_id) == Some(iter_id)
&& cx.tcx.trait_of_assoc(method_id) == Some(iter_id)
&& let Some(method_id) = typeck.type_dependent_def_id(cloned_call.hir_id)
&& cx.tcx.trait_of_item(method_id) == Some(iter_id)
&& cx.tcx.trait_of_assoc(method_id) == Some(iter_id)
&& let cloned_recv_ty = typeck.expr_ty_adjusted(cloned_recv)
&& let Some(iter_assoc_ty) = cx.get_associated_type(cloned_recv_ty, iter_id, sym::Item)
&& matches!(*iter_assoc_ty.kind(), ty::Ref(_, ty, _) if !is_copy(cx, ty))

View file

@ -18,7 +18,7 @@ pub(super) fn check<'tcx>(
map_expr: &'tcx Expr<'_>,
) {
if let Some(method_id) = cx.typeck_results().type_dependent_def_id(expr.hir_id)
&& let Some(impl_id) = cx.tcx.impl_of_method(method_id)
&& let Some(impl_id) = cx.tcx.impl_of_assoc(method_id)
&& is_type_diagnostic_item(cx, cx.tcx.type_of(impl_id).instantiate_identity(), sym::Option)
&& let ExprKind::Call(err_path, [err_arg]) = or_expr.kind
&& is_res_lang_ctor(cx, path_res(cx, err_path), ResultErr)

View file

@ -59,7 +59,7 @@ pub(super) fn check(
&& is_type_lang_item(cx, cx.typeck_results().expr_ty(collect_expr), LangItem::String)
&& let Some(take_id) = cx.typeck_results().type_dependent_def_id(take_expr.hir_id)
&& let Some(iter_trait_id) = cx.tcx.get_diagnostic_item(sym::Iterator)
&& cx.tcx.trait_of_item(take_id) == Some(iter_trait_id)
&& cx.tcx.trait_of_assoc(take_id) == Some(iter_trait_id)
&& let Some(repeat_kind) = parse_repeat_arg(cx, repeat_arg)
&& let ctxt = collect_expr.span.ctxt()
&& ctxt == take_expr.span.ctxt()

View file

@ -23,7 +23,7 @@ fn should_run_lint(cx: &LateContext<'_>, e: &hir::Expr<'_>, method_id: DefId) ->
return true;
}
// We check if it's an `Option` or a `Result`.
if let Some(id) = cx.tcx.impl_of_method(method_id) {
if let Some(id) = cx.tcx.impl_of_assoc(method_id) {
let identity = cx.tcx.type_of(id).instantiate_identity();
if !is_type_diagnostic_item(cx, identity, sym::Option) && !is_type_diagnostic_item(cx, identity, sym::Result) {
return false;
@ -69,7 +69,7 @@ pub(super) fn check(cx: &LateContext<'_>, e: &hir::Expr<'_>, recv: &hir::Expr<'_
hir::ExprKind::MethodCall(method, obj, [], _) => {
if ident_eq(name, obj) && method.ident.name == sym::clone
&& let Some(fn_id) = cx.typeck_results().type_dependent_def_id(closure_expr.hir_id)
&& let Some(trait_id) = cx.tcx.trait_of_item(fn_id)
&& let Some(trait_id) = cx.tcx.trait_of_assoc(fn_id)
&& cx.tcx.lang_items().clone_trait() == Some(trait_id)
// no autoderefs
&& !cx.typeck_results().expr_adjustments(obj).iter()

View file

@ -8,7 +8,7 @@ use super::MAP_ERR_IGNORE;
pub(super) fn check(cx: &LateContext<'_>, e: &Expr<'_>, arg: &Expr<'_>) {
if let Some(method_id) = cx.typeck_results().type_dependent_def_id(e.hir_id)
&& let Some(impl_id) = cx.tcx.impl_of_method(method_id)
&& let Some(impl_id) = cx.tcx.impl_of_assoc(method_id)
&& is_type_diagnostic_item(cx, cx.tcx.type_of(impl_id).instantiate_identity(), sym::Result)
&& let ExprKind::Closure(&Closure {
capture_clause: CaptureBy::Ref,

View file

@ -13,7 +13,7 @@ pub(super) fn check<'tcx>(cx: &LateContext<'tcx>, ex: &'tcx Expr<'tcx>, recv: &'
&& let (_, ref_depth, Mutability::Mut) = peel_mid_ty_refs_is_mutable(cx.typeck_results().expr_ty(recv))
&& ref_depth >= 1
&& let Some(method_id) = cx.typeck_results().type_dependent_def_id(ex.hir_id)
&& let Some(impl_id) = cx.tcx.impl_of_method(method_id)
&& let Some(impl_id) = cx.tcx.impl_of_assoc(method_id)
&& is_type_diagnostic_item(cx, cx.tcx.type_of(impl_id).instantiate_identity(), sym::Mutex)
{
span_lint_and_sugg(

View file

@ -18,7 +18,7 @@ fn is_open_options(cx: &LateContext<'_>, ty: Ty<'_>) -> bool {
pub(super) fn check<'tcx>(cx: &LateContext<'tcx>, e: &'tcx Expr<'_>, recv: &'tcx Expr<'_>) {
if let Some(method_id) = cx.typeck_results().type_dependent_def_id(e.hir_id)
&& let Some(impl_id) = cx.tcx.impl_of_method(method_id)
&& let Some(impl_id) = cx.tcx.impl_of_assoc(method_id)
&& is_open_options(cx, cx.tcx.type_of(impl_id).instantiate_identity())
{
let mut options = Vec::new();
@ -111,7 +111,7 @@ fn get_open_options(
// This might be a user defined extension trait with a method like `truncate_write`
// which would be a false positive
if let Some(method_def_id) = cx.typeck_results().type_dependent_def_id(argument.hir_id)
&& cx.tcx.trait_of_item(method_def_id).is_some()
&& cx.tcx.trait_of_assoc(method_def_id).is_some()
{
return false;
}

View file

@ -11,7 +11,7 @@ use super::PATH_BUF_PUSH_OVERWRITE;
pub(super) fn check<'tcx>(cx: &LateContext<'tcx>, expr: &'tcx Expr<'_>, arg: &'tcx Expr<'_>) {
if let Some(method_id) = cx.typeck_results().type_dependent_def_id(expr.hir_id)
&& let Some(impl_id) = cx.tcx.impl_of_method(method_id)
&& let Some(impl_id) = cx.tcx.impl_of_assoc(method_id)
&& is_type_diagnostic_item(cx, cx.tcx.type_of(impl_id).instantiate_identity(), sym::PathBuf)
&& let ExprKind::Lit(lit) = arg.kind
&& let LitKind::Str(ref path_lit, _) = lit.node

View file

@ -9,7 +9,7 @@ use super::STABLE_SORT_PRIMITIVE;
pub(super) fn check<'tcx>(cx: &LateContext<'tcx>, e: &'tcx Expr<'_>, recv: &'tcx Expr<'_>) {
if let Some(method_id) = cx.typeck_results().type_dependent_def_id(e.hir_id)
&& let Some(impl_id) = cx.tcx.impl_of_method(method_id)
&& let Some(impl_id) = cx.tcx.impl_of_assoc(method_id)
&& cx.tcx.type_of(impl_id).instantiate_identity().is_slice()
&& let Some(slice_type) = is_slice_of_primitives(cx, recv)
{

View file

@ -286,7 +286,7 @@ fn parse_iter_usage<'tcx>(
let iter_id = cx.tcx.get_diagnostic_item(sym::Iterator)?;
match (name.ident.name, args) {
(sym::next, []) if cx.tcx.trait_of_item(did) == Some(iter_id) => (IterUsageKind::Nth(0), e.span),
(sym::next, []) if cx.tcx.trait_of_assoc(did) == Some(iter_id) => (IterUsageKind::Nth(0), e.span),
(sym::next_tuple, []) => {
return if paths::ITERTOOLS_NEXT_TUPLE.matches(cx, did)
&& let ty::Adt(adt_def, subs) = cx.typeck_results().expr_ty(e).kind()
@ -303,7 +303,7 @@ fn parse_iter_usage<'tcx>(
None
};
},
(sym::nth | sym::skip, [idx_expr]) if cx.tcx.trait_of_item(did) == Some(iter_id) => {
(sym::nth | sym::skip, [idx_expr]) if cx.tcx.trait_of_assoc(did) == Some(iter_id) => {
if let Some(Constant::Int(idx)) = ConstEvalCtxt::new(cx).eval(idx_expr) {
let span = if name.ident.as_str() == "nth" {
e.span
@ -312,7 +312,7 @@ fn parse_iter_usage<'tcx>(
&& next_name.ident.name == sym::next
&& next_expr.span.ctxt() == ctxt
&& let Some(next_id) = cx.typeck_results().type_dependent_def_id(next_expr.hir_id)
&& cx.tcx.trait_of_item(next_id) == Some(iter_id)
&& cx.tcx.trait_of_assoc(next_id) == Some(iter_id)
{
next_expr.span
} else {

View file

@ -10,7 +10,7 @@ use super::SUSPICIOUS_SPLITN;
pub(super) fn check(cx: &LateContext<'_>, method_name: Symbol, expr: &Expr<'_>, self_arg: &Expr<'_>, count: u128) {
if count <= 1
&& let Some(call_id) = cx.typeck_results().type_dependent_def_id(expr.hir_id)
&& let Some(impl_id) = cx.tcx.impl_of_method(call_id)
&& let Some(impl_id) = cx.tcx.impl_of_assoc(call_id)
&& cx.tcx.impl_trait_ref(impl_id).is_none()
&& let self_ty = cx.tcx.type_of(impl_id).instantiate_identity()
&& (self_ty.is_slice() || self_ty.is_str())

View file

@ -165,7 +165,7 @@ pub(super) fn check_method(cx: &LateContext<'_>, expr: &Expr<'_>) {
pub(super) fn check_function(cx: &LateContext<'_>, expr: &Expr<'_>, callee: &Expr<'_>) {
if let ExprKind::Path(ref qpath) = callee.kind
&& let Some(item_def_id) = cx.qpath_res(qpath, callee.hir_id).opt_def_id()
&& let Some(trait_def_id) = cx.tcx.trait_of_item(item_def_id)
&& let Some(trait_def_id) = cx.tcx.trait_of_assoc(item_def_id)
{
let qpath_spans = match qpath {
QPath::Resolved(_, path) => {

View file

@ -114,7 +114,7 @@ fn mirrored_exprs(a_expr: &Expr<'_>, a_ident: &Ident, b_expr: &Expr<'_>, b_ident
fn detect_lint(cx: &LateContext<'_>, expr: &Expr<'_>, recv: &Expr<'_>, arg: &Expr<'_>) -> Option<LintTrigger> {
if let Some(method_id) = cx.typeck_results().type_dependent_def_id(expr.hir_id)
&& let Some(impl_id) = cx.tcx.impl_of_method(method_id)
&& let Some(impl_id) = cx.tcx.impl_of_assoc(method_id)
&& cx.tcx.type_of(impl_id).instantiate_identity().is_slice()
&& let ExprKind::Closure(&Closure { body, .. }) = arg.kind
&& let closure_body = cx.tcx.hir_body(body)

View file

@ -694,7 +694,7 @@ fn check_if_applicable_to_argument<'tcx>(cx: &LateContext<'tcx>, arg: &Expr<'tcx
sym::to_string => cx.tcx.is_diagnostic_item(sym::to_string_method, method_def_id),
sym::to_vec => cx
.tcx
.impl_of_method(method_def_id)
.impl_of_assoc(method_def_id)
.filter(|&impl_did| cx.tcx.type_of(impl_did).instantiate_identity().is_slice())
.is_some(),
_ => false,
@ -734,7 +734,7 @@ fn check_if_applicable_to_argument<'tcx>(cx: &LateContext<'tcx>, arg: &Expr<'tcx
fn check_borrow_predicate<'tcx>(cx: &LateContext<'tcx>, expr: &Expr<'tcx>) {
if let ExprKind::MethodCall(_, caller, &[arg], _) = expr.kind
&& let Some(method_def_id) = cx.typeck_results().type_dependent_def_id(expr.hir_id)
&& cx.tcx.trait_of_item(method_def_id).is_none()
&& cx.tcx.trait_of_assoc(method_def_id).is_none()
&& let Some(borrow_id) = cx.tcx.get_diagnostic_item(sym::Borrow)
&& cx.tcx.predicates_of(method_def_id).predicates.iter().any(|(pred, _)| {
if let ClauseKind::Trait(trait_pred) = pred.kind().skip_binder()

View file

@ -79,7 +79,7 @@ pub(super) fn check(cx: &LateContext<'_>, expr: &hir::Expr<'_>, call_name: Symbo
applicability,
);
}
} else if let Some(impl_id) = cx.tcx.impl_of_method(def_id)
} else if let Some(impl_id) = cx.tcx.impl_of_assoc(def_id)
&& let Some(adt) = cx.tcx.type_of(impl_id).instantiate_identity().ty_adt_def()
&& matches!(cx.tcx.get_diagnostic_name(adt.did()), Some(sym::Option | sym::Result))
{
@ -131,7 +131,7 @@ fn is_calling_clone(cx: &LateContext<'_>, arg: &hir::Expr<'_>) -> bool {
hir::ExprKind::MethodCall(method, obj, [], _) => {
if method.ident.name == sym::clone
&& let Some(fn_id) = cx.typeck_results().type_dependent_def_id(closure_expr.hir_id)
&& let Some(trait_id) = cx.tcx.trait_of_item(fn_id)
&& let Some(trait_id) = cx.tcx.trait_of_assoc(fn_id)
// We check it's the `Clone` trait.
&& cx.tcx.lang_items().clone_trait().is_some_and(|id| id == trait_id)
// no autoderefs

View file

@ -18,7 +18,7 @@ pub(super) fn check<'tcx>(
name_span: Span,
) {
if let Some(method_id) = cx.typeck_results().type_dependent_def_id(expr.hir_id)
&& let Some(impl_id) = cx.tcx.impl_of_method(method_id)
&& let Some(impl_id) = cx.tcx.impl_of_assoc(method_id)
&& is_type_diagnostic_item(cx, cx.tcx.type_of(impl_id).instantiate_identity(), sym::Vec)
&& let ExprKind::Lit(Spanned {
node: LitKind::Int(Pu128(0), _),

View file

@ -312,9 +312,7 @@ impl<'tcx> LateLintPass<'tcx> for NeedlessPassByValue {
/// Functions marked with these attributes must have the exact signature.
pub(crate) fn requires_exact_signature(attrs: &[Attribute]) -> bool {
attrs.iter().any(|attr| {
[sym::proc_macro, sym::proc_macro_attribute, sym::proc_macro_derive]
.iter()
.any(|&allow| attr.has_name(allow))
attr.is_proc_macro_attr()
})
}

View file

@ -41,7 +41,7 @@ fn check_op(cx: &LateContext<'_>, expr: &Expr<'_>, other: &Expr<'_>, left: bool)
ExprKind::MethodCall(_, arg, [], _)
if typeck
.type_dependent_def_id(expr.hir_id)
.and_then(|id| cx.tcx.trait_of_item(id))
.and_then(|id| cx.tcx.trait_of_assoc(id))
.is_some_and(|id| matches!(cx.tcx.get_diagnostic_name(id), Some(sym::ToString | sym::ToOwned))) =>
{
(arg, arg.span)

View file

@ -179,7 +179,7 @@ fn in_impl<'tcx>(
bin_op: DefId,
) -> Option<(&'tcx rustc_hir::Ty<'tcx>, &'tcx rustc_hir::Ty<'tcx>)> {
if let Some(block) = get_enclosing_block(cx, e.hir_id)
&& let Some(impl_def_id) = cx.tcx.impl_of_method(block.hir_id.owner.to_def_id())
&& let Some(impl_def_id) = cx.tcx.impl_of_assoc(block.hir_id.owner.to_def_id())
&& let item = cx.tcx.hir_expect_item(impl_def_id.expect_local())
&& let ItemKind::Impl(item) = &item.kind
&& let Some(of_trait) = &item.of_trait

View file

@ -380,7 +380,7 @@ fn can_switch_ranges<'tcx>(
if let ExprKind::MethodCall(_, receiver, _, _) = parent_expr.kind
&& receiver.hir_id == use_ctxt.child_id
&& let Some(method_did) = cx.typeck_results().type_dependent_def_id(parent_expr.hir_id)
&& let Some(trait_did) = cx.tcx.trait_of_item(method_did)
&& let Some(trait_did) = cx.tcx.trait_of_assoc(method_did)
&& matches!(
cx.tcx.get_diagnostic_name(trait_did),
Some(sym::Iterator | sym::IntoIterator | sym::RangeBounds)

View file

@ -113,7 +113,7 @@ impl<'tcx> LateLintPass<'tcx> for ReturnSelfNotMustUse {
) {
if matches!(kind, FnKind::Method(_, _))
// We are only interested in methods, not in functions or associated functions.
&& let Some(impl_def) = cx.tcx.impl_of_method(fn_def.to_def_id())
&& let Some(impl_def) = cx.tcx.impl_of_assoc(fn_def.to_def_id())
// We don't want this method to be te implementation of a trait because the
// `#[must_use]` should be put on the trait definition directly.
&& cx.tcx.trait_id_of_impl(impl_def).is_none()

View file

@ -206,7 +206,7 @@ fn check_partial_eq(cx: &LateContext<'_>, method_span: Span, method_def_id: Loca
let arg_ty = cx.typeck_results().expr_ty_adjusted(arg);
if let Some(fn_id) = cx.typeck_results().type_dependent_def_id(expr.hir_id)
&& let Some(trait_id) = cx.tcx.trait_of_item(fn_id)
&& let Some(trait_id) = cx.tcx.trait_of_assoc(fn_id)
&& trait_id == trait_def_id
&& matches_ty(receiver_ty, arg_ty, self_arg, other_arg)
{
@ -250,7 +250,7 @@ fn check_to_string(cx: &LateContext<'_>, method_span: Span, method_def_id: Local
let is_bad = match expr.kind {
ExprKind::MethodCall(segment, _receiver, &[_arg], _) if segment.ident.name == name.name => {
if let Some(fn_id) = cx.typeck_results().type_dependent_def_id(expr.hir_id)
&& let Some(trait_id) = cx.tcx.trait_of_item(fn_id)
&& let Some(trait_id) = cx.tcx.trait_of_assoc(fn_id)
&& trait_id == trait_def_id
{
true
@ -318,7 +318,7 @@ where
&& let ExprKind::Path(qpath) = f.kind
&& is_default_method_on_current_ty(self.cx.tcx, qpath, self.implemented_ty_id)
&& let Some(method_def_id) = path_def_id(self.cx, f)
&& let Some(trait_def_id) = self.cx.tcx.trait_of_item(method_def_id)
&& let Some(trait_def_id) = self.cx.tcx.trait_of_assoc(method_def_id)
&& self.cx.tcx.is_diagnostic_item(sym::Default, trait_def_id)
{
span_error(self.cx, self.method_span, expr);
@ -426,7 +426,7 @@ fn check_from(cx: &LateContext<'_>, method_span: Span, method_def_id: LocalDefId
if let Some((fn_def_id, node_args)) = fn_def_id_with_node_args(cx, expr)
&& let [s1, s2] = **node_args
&& let (Some(s1), Some(s2)) = (s1.as_type(), s2.as_type())
&& let Some(trait_def_id) = cx.tcx.trait_of_item(fn_def_id)
&& let Some(trait_def_id) = cx.tcx.trait_of_assoc(fn_def_id)
&& cx.tcx.is_diagnostic_item(sym::Into, trait_def_id)
&& get_impl_trait_def_id(cx, method_def_id) == cx.tcx.get_diagnostic_item(sym::From)
&& s1 == sig.inputs()[0]

View file

@ -84,7 +84,7 @@ impl<'tcx> LateLintPass<'tcx> for UnusedIoAmount {
/// get desugared to match.
fn check_block(&mut self, cx: &LateContext<'tcx>, block: &'tcx hir::Block<'tcx>) {
let fn_def_id = block.hir_id.owner.to_def_id();
if let Some(impl_id) = cx.tcx.impl_of_method(fn_def_id)
if let Some(impl_id) = cx.tcx.impl_of_assoc(fn_def_id)
&& let Some(trait_id) = cx.tcx.trait_id_of_impl(impl_id)
{
// We don't want to lint inside io::Read or io::Write implementations, as the author has more
@ -300,7 +300,7 @@ fn check_io_mode(cx: &LateContext<'_>, call: &hir::Expr<'_>) -> Option<IoOp> {
};
if let Some(method_def_id) = cx.typeck_results().type_dependent_def_id(call.hir_id)
&& let Some(trait_def_id) = cx.tcx.trait_of_item(method_def_id)
&& let Some(trait_def_id) = cx.tcx.trait_of_assoc(method_def_id)
{
if let Some(diag_name) = cx.tcx.get_diagnostic_name(trait_def_id) {
match diag_name {

View file

@ -51,7 +51,7 @@ impl ops::BitOrAssign for EagernessSuggestion {
fn fn_eagerness(cx: &LateContext<'_>, fn_id: DefId, name: Symbol, have_one_arg: bool) -> EagernessSuggestion {
use EagernessSuggestion::{Eager, Lazy, NoChange};
let ty = match cx.tcx.impl_of_method(fn_id) {
let ty = match cx.tcx.impl_of_assoc(fn_id) {
Some(id) => cx.tcx.type_of(id).instantiate_identity(),
None => return Lazy,
};

View file

@ -349,7 +349,7 @@ pub fn is_ty_alias(qpath: &QPath<'_>) -> bool {
/// Checks if the given method call expression calls an inherent method.
pub fn is_inherent_method_call(cx: &LateContext<'_>, expr: &Expr<'_>) -> bool {
if let Some(method_id) = cx.typeck_results().type_dependent_def_id(expr.hir_id) {
cx.tcx.trait_of_item(method_id).is_none()
cx.tcx.trait_of_assoc(method_id).is_none()
} else {
false
}
@ -357,7 +357,7 @@ pub fn is_inherent_method_call(cx: &LateContext<'_>, expr: &Expr<'_>) -> bool {
/// Checks if a method is defined in an impl of a diagnostic item
pub fn is_diag_item_method(cx: &LateContext<'_>, def_id: DefId, diag_item: Symbol) -> bool {
if let Some(impl_did) = cx.tcx.impl_of_method(def_id)
if let Some(impl_did) = cx.tcx.impl_of_assoc(def_id)
&& let Some(adt) = cx.tcx.type_of(impl_did).instantiate_identity().ty_adt_def()
{
return cx.tcx.is_diagnostic_item(diag_item, adt.did());
@ -367,7 +367,7 @@ pub fn is_diag_item_method(cx: &LateContext<'_>, def_id: DefId, diag_item: Symbo
/// Checks if a method is in a diagnostic item trait
pub fn is_diag_trait_item(cx: &LateContext<'_>, def_id: DefId, diag_item: Symbol) -> bool {
if let Some(trait_did) = cx.tcx.trait_of_item(def_id) {
if let Some(trait_did) = cx.tcx.trait_of_assoc(def_id) {
return cx.tcx.is_diagnostic_item(diag_item, trait_did);
}
false
@ -620,7 +620,7 @@ fn is_default_equivalent_ctor(cx: &LateContext<'_>, def_id: DefId, path: &QPath<
if let QPath::TypeRelative(_, method) = path
&& method.ident.name == sym::new
&& let Some(impl_did) = cx.tcx.impl_of_method(def_id)
&& let Some(impl_did) = cx.tcx.impl_of_assoc(def_id)
&& let Some(adt) = cx.tcx.type_of(impl_did).instantiate_identity().ty_adt_def()
{
return std_types_symbols.iter().any(|&symbol| {

View file

@ -420,7 +420,7 @@ pub fn is_stable_const_fn(cx: &LateContext<'_>, def_id: DefId, msrv: Msrv) -> bo
.lookup_const_stability(def_id)
.or_else(|| {
cx.tcx
.trait_of_item(def_id)
.trait_of_assoc(def_id)
.and_then(|trait_def_id| cx.tcx.lookup_const_stability(trait_def_id))
})
.is_none_or(|const_stab| {

View file

@ -12,6 +12,9 @@ use tracing::*;
use crate::common::{CodegenBackend, Config, Debugger, FailMode, PassMode, RunFailMode, TestMode};
use crate::debuggers::{extract_cdb_version, extract_gdb_version};
use crate::directives::auxiliary::{AuxProps, parse_and_update_aux};
use crate::directives::directive_names::{
KNOWN_DIRECTIVE_NAMES, KNOWN_HTMLDOCCK_DIRECTIVE_NAMES, KNOWN_JSONDOCCK_DIRECTIVE_NAMES,
};
use crate::directives::needs::CachedNeedsConditions;
use crate::errors::ErrorKind;
use crate::executor::{CollectedTestDesc, ShouldPanic};
@ -20,6 +23,7 @@ use crate::util::static_regex;
pub(crate) mod auxiliary;
mod cfg;
mod directive_names;
mod needs;
#[cfg(test)]
mod tests;
@ -59,9 +63,9 @@ impl EarlyProps {
&mut poisoned,
testfile,
rdr,
&mut |DirectiveLine { raw_directive: ln, .. }| {
parse_and_update_aux(config, ln, &mut props.aux);
config.parse_and_update_revisions(testfile, ln, &mut props.revisions);
&mut |DirectiveLine { line_number, raw_directive: ln, .. }| {
parse_and_update_aux(config, ln, testfile, line_number, &mut props.aux);
config.parse_and_update_revisions(testfile, line_number, ln, &mut props.revisions);
},
);
@ -351,7 +355,7 @@ impl TestProps {
&mut poisoned,
testfile,
file,
&mut |directive @ DirectiveLine { raw_directive: ln, .. }| {
&mut |directive @ DirectiveLine { line_number, raw_directive: ln, .. }| {
if !directive.applies_to_test_revision(test_revision) {
return;
}
@ -361,17 +365,28 @@ impl TestProps {
config.push_name_value_directive(
ln,
ERROR_PATTERN,
testfile,
line_number,
&mut self.error_patterns,
|r| r,
);
config.push_name_value_directive(
ln,
REGEX_ERROR_PATTERN,
testfile,
line_number,
&mut self.regex_error_patterns,
|r| r,
);
config.push_name_value_directive(ln, DOC_FLAGS, &mut self.doc_flags, |r| r);
config.push_name_value_directive(
ln,
DOC_FLAGS,
testfile,
line_number,
&mut self.doc_flags,
|r| r,
);
fn split_flags(flags: &str) -> Vec<String> {
// Individual flags can be single-quoted to preserve spaces; see
@ -386,7 +401,9 @@ impl TestProps {
.collect::<Vec<_>>()
}
if let Some(flags) = config.parse_name_value_directive(ln, COMPILE_FLAGS) {
if let Some(flags) =
config.parse_name_value_directive(ln, COMPILE_FLAGS, testfile, line_number)
{
let flags = split_flags(&flags);
for flag in &flags {
if flag == "--edition" || flag.starts_with("--edition=") {
@ -395,25 +412,40 @@ impl TestProps {
}
self.compile_flags.extend(flags);
}
if config.parse_name_value_directive(ln, INCORRECT_COMPILER_FLAGS).is_some() {
if config
.parse_name_value_directive(
ln,
INCORRECT_COMPILER_FLAGS,
testfile,
line_number,
)
.is_some()
{
panic!("`compiler-flags` directive should be spelled `compile-flags`");
}
if let Some(edition) = config.parse_edition(ln) {
if let Some(edition) = config.parse_edition(ln, testfile, line_number) {
// The edition is added at the start, since flags from //@compile-flags must
// be passed to rustc last.
self.compile_flags.insert(0, format!("--edition={}", edition.trim()));
has_edition = true;
}
config.parse_and_update_revisions(testfile, ln, &mut self.revisions);
config.parse_and_update_revisions(
testfile,
line_number,
ln,
&mut self.revisions,
);
if let Some(flags) = config.parse_name_value_directive(ln, RUN_FLAGS) {
if let Some(flags) =
config.parse_name_value_directive(ln, RUN_FLAGS, testfile, line_number)
{
self.run_flags.extend(split_flags(&flags));
}
if self.pp_exact.is_none() {
self.pp_exact = config.parse_pp_exact(ln, testfile);
self.pp_exact = config.parse_pp_exact(ln, testfile, line_number);
}
config.set_name_directive(ln, SHOULD_ICE, &mut self.should_ice);
@ -435,7 +467,9 @@ impl TestProps {
);
config.set_name_directive(ln, NO_PREFER_DYNAMIC, &mut self.no_prefer_dynamic);
if let Some(m) = config.parse_name_value_directive(ln, PRETTY_MODE) {
if let Some(m) =
config.parse_name_value_directive(ln, PRETTY_MODE, testfile, line_number)
{
self.pretty_mode = m;
}
@ -446,35 +480,45 @@ impl TestProps {
);
// Call a helper method to deal with aux-related directives.
parse_and_update_aux(config, ln, &mut self.aux);
parse_and_update_aux(config, ln, testfile, line_number, &mut self.aux);
config.push_name_value_directive(
ln,
EXEC_ENV,
testfile,
line_number,
&mut self.exec_env,
Config::parse_env,
);
config.push_name_value_directive(
ln,
UNSET_EXEC_ENV,
testfile,
line_number,
&mut self.unset_exec_env,
|r| r.trim().to_owned(),
);
config.push_name_value_directive(
ln,
RUSTC_ENV,
testfile,
line_number,
&mut self.rustc_env,
Config::parse_env,
);
config.push_name_value_directive(
ln,
UNSET_RUSTC_ENV,
testfile,
line_number,
&mut self.unset_rustc_env,
|r| r.trim().to_owned(),
);
config.push_name_value_directive(
ln,
FORBID_OUTPUT,
testfile,
line_number,
&mut self.forbid_output,
|r| r,
);
@ -510,7 +554,7 @@ impl TestProps {
}
if let Some(code) = config
.parse_name_value_directive(ln, FAILURE_STATUS)
.parse_name_value_directive(ln, FAILURE_STATUS, testfile, line_number)
.and_then(|code| code.trim().parse::<i32>().ok())
{
self.failure_status = Some(code);
@ -531,6 +575,8 @@ impl TestProps {
config.set_name_value_directive(
ln,
ASSEMBLY_OUTPUT,
testfile,
line_number,
&mut self.assembly_output,
|r| r.trim().to_string(),
);
@ -543,7 +589,9 @@ impl TestProps {
// Unlike the other `name_value_directive`s this needs to be handled manually,
// because it sets a `bool` flag.
if let Some(known_bug) = config.parse_name_value_directive(ln, KNOWN_BUG) {
if let Some(known_bug) =
config.parse_name_value_directive(ln, KNOWN_BUG, testfile, line_number)
{
let known_bug = known_bug.trim();
if known_bug == "unknown"
|| known_bug.split(',').all(|issue_ref| {
@ -571,16 +619,25 @@ impl TestProps {
config.set_name_value_directive(
ln,
TEST_MIR_PASS,
testfile,
line_number,
&mut self.mir_unit_test,
|s| s.trim().to_string(),
);
config.set_name_directive(ln, REMAP_SRC_BASE, &mut self.remap_src_base);
if let Some(flags) = config.parse_name_value_directive(ln, LLVM_COV_FLAGS) {
if let Some(flags) =
config.parse_name_value_directive(ln, LLVM_COV_FLAGS, testfile, line_number)
{
self.llvm_cov_flags.extend(split_flags(&flags));
}
if let Some(flags) = config.parse_name_value_directive(ln, FILECHECK_FLAGS) {
if let Some(flags) = config.parse_name_value_directive(
ln,
FILECHECK_FLAGS,
testfile,
line_number,
) {
self.filecheck_flags.extend(split_flags(&flags));
}
@ -588,9 +645,12 @@ impl TestProps {
self.update_add_core_stubs(ln, config);
if let Some(err_kind) =
config.parse_name_value_directive(ln, DONT_REQUIRE_ANNOTATIONS)
{
if let Some(err_kind) = config.parse_name_value_directive(
ln,
DONT_REQUIRE_ANNOTATIONS,
testfile,
line_number,
) {
self.dont_require_annotations
.insert(ErrorKind::expect_from_user_str(err_kind.trim()));
}
@ -769,296 +829,6 @@ fn line_directive<'line>(
Some(DirectiveLine { line_number, revision, raw_directive })
}
/// This was originally generated by collecting directives from ui tests and then extracting their
/// directive names. This is **not** an exhaustive list of all possible directives. Instead, this is
/// a best-effort approximation for diagnostics. Add new directives to this list when needed.
const KNOWN_DIRECTIVE_NAMES: &[&str] = &[
// tidy-alphabetical-start
"add-core-stubs",
"assembly-output",
"aux-bin",
"aux-build",
"aux-codegen-backend",
"aux-crate",
"build-aux-docs",
"build-fail",
"build-pass",
"check-fail",
"check-pass",
"check-run-results",
"check-stdout",
"check-test-line-numbers-match",
"compile-flags",
"doc-flags",
"dont-check-compiler-stderr",
"dont-check-compiler-stdout",
"dont-check-failure-status",
"dont-require-annotations",
"edition",
"error-pattern",
"exact-llvm-major-version",
"exec-env",
"failure-status",
"filecheck-flags",
"forbid-output",
"force-host",
"ignore-16bit",
"ignore-32bit",
"ignore-64bit",
"ignore-aarch64",
"ignore-aarch64-pc-windows-msvc",
"ignore-aarch64-unknown-linux-gnu",
"ignore-aix",
"ignore-android",
"ignore-apple",
"ignore-arm",
"ignore-arm-unknown-linux-gnueabi",
"ignore-arm-unknown-linux-gnueabihf",
"ignore-arm-unknown-linux-musleabi",
"ignore-arm-unknown-linux-musleabihf",
"ignore-auxiliary",
"ignore-avr",
"ignore-backends",
"ignore-beta",
"ignore-cdb",
"ignore-compare-mode-next-solver",
"ignore-compare-mode-polonius",
"ignore-coverage-map",
"ignore-coverage-run",
"ignore-cross-compile",
"ignore-eabi",
"ignore-elf",
"ignore-emscripten",
"ignore-endian-big",
"ignore-enzyme",
"ignore-freebsd",
"ignore-fuchsia",
"ignore-gdb",
"ignore-gdb-version",
"ignore-gnu",
"ignore-haiku",
"ignore-horizon",
"ignore-i686-pc-windows-gnu",
"ignore-i686-pc-windows-msvc",
"ignore-illumos",
"ignore-ios",
"ignore-linux",
"ignore-lldb",
"ignore-llvm-version",
"ignore-loongarch32",
"ignore-loongarch64",
"ignore-macabi",
"ignore-macos",
"ignore-msp430",
"ignore-msvc",
"ignore-musl",
"ignore-netbsd",
"ignore-nightly",
"ignore-none",
"ignore-nto",
"ignore-nvptx64",
"ignore-nvptx64-nvidia-cuda",
"ignore-openbsd",
"ignore-pass",
"ignore-powerpc",
"ignore-powerpc64",
"ignore-remote",
"ignore-riscv64",
"ignore-rustc-debug-assertions",
"ignore-rustc_abi-x86-sse2",
"ignore-s390x",
"ignore-sgx",
"ignore-sparc64",
"ignore-spirv",
"ignore-stable",
"ignore-stage1",
"ignore-stage2",
"ignore-std-debug-assertions",
"ignore-test",
"ignore-thumb",
"ignore-thumbv8m.base-none-eabi",
"ignore-thumbv8m.main-none-eabi",
"ignore-tvos",
"ignore-unix",
"ignore-unknown",
"ignore-uwp",
"ignore-visionos",
"ignore-vxworks",
"ignore-wasi",
"ignore-wasm",
"ignore-wasm32",
"ignore-wasm32-bare",
"ignore-wasm64",
"ignore-watchos",
"ignore-windows",
"ignore-windows-gnu",
"ignore-windows-msvc",
"ignore-x32",
"ignore-x86",
"ignore-x86_64",
"ignore-x86_64-apple-darwin",
"ignore-x86_64-pc-windows-gnu",
"ignore-x86_64-unknown-linux-gnu",
"incremental",
"known-bug",
"llvm-cov-flags",
"max-llvm-major-version",
"min-cdb-version",
"min-gdb-version",
"min-lldb-version",
"min-llvm-version",
"min-system-llvm-version",
"needs-asm-support",
"needs-backends",
"needs-crate-type",
"needs-deterministic-layouts",
"needs-dlltool",
"needs-dynamic-linking",
"needs-enzyme",
"needs-force-clang-based-tests",
"needs-git-hash",
"needs-llvm-components",
"needs-llvm-zstd",
"needs-profiler-runtime",
"needs-relocation-model-pic",
"needs-run-enabled",
"needs-rust-lld",
"needs-rustc-debug-assertions",
"needs-sanitizer-address",
"needs-sanitizer-cfi",
"needs-sanitizer-dataflow",
"needs-sanitizer-hwaddress",
"needs-sanitizer-kcfi",
"needs-sanitizer-leak",
"needs-sanitizer-memory",
"needs-sanitizer-memtag",
"needs-sanitizer-safestack",
"needs-sanitizer-shadow-call-stack",
"needs-sanitizer-support",
"needs-sanitizer-thread",
"needs-std-debug-assertions",
"needs-subprocess",
"needs-symlink",
"needs-target-has-atomic",
"needs-target-std",
"needs-threads",
"needs-unwind",
"needs-wasmtime",
"needs-xray",
"no-auto-check-cfg",
"no-prefer-dynamic",
"normalize-stderr",
"normalize-stderr-32bit",
"normalize-stderr-64bit",
"normalize-stdout",
"only-16bit",
"only-32bit",
"only-64bit",
"only-aarch64",
"only-aarch64-apple-darwin",
"only-aarch64-unknown-linux-gnu",
"only-apple",
"only-arm",
"only-avr",
"only-beta",
"only-bpf",
"only-cdb",
"only-dist",
"only-elf",
"only-emscripten",
"only-gnu",
"only-i686-pc-windows-gnu",
"only-i686-pc-windows-msvc",
"only-i686-unknown-linux-gnu",
"only-ios",
"only-linux",
"only-loongarch32",
"only-loongarch64",
"only-loongarch64-unknown-linux-gnu",
"only-macos",
"only-mips",
"only-mips64",
"only-msp430",
"only-msvc",
"only-musl",
"only-nightly",
"only-nvptx64",
"only-powerpc",
"only-riscv64",
"only-rustc_abi-x86-sse2",
"only-s390x",
"only-sparc",
"only-sparc64",
"only-stable",
"only-thumb",
"only-tvos",
"only-uefi",
"only-unix",
"only-visionos",
"only-wasm32",
"only-wasm32-bare",
"only-wasm32-wasip1",
"only-watchos",
"only-windows",
"only-windows-gnu",
"only-windows-msvc",
"only-x86",
"only-x86_64",
"only-x86_64-apple-darwin",
"only-x86_64-fortanix-unknown-sgx",
"only-x86_64-pc-windows-gnu",
"only-x86_64-pc-windows-msvc",
"only-x86_64-unknown-linux-gnu",
"pp-exact",
"pretty-compare-only",
"pretty-mode",
"proc-macro",
"reference",
"regex-error-pattern",
"remap-src-base",
"revisions",
"run-crash",
"run-fail",
"run-fail-or-crash",
"run-flags",
"run-pass",
"run-rustfix",
"rustc-env",
"rustfix-only-machine-applicable",
"should-fail",
"should-ice",
"stderr-per-bitwidth",
"test-mir-pass",
"unique-doc-out-dir",
"unset-exec-env",
"unset-rustc-env",
// Used by the tidy check `unknown_revision`.
"unused-revision-names",
// tidy-alphabetical-end
];
const KNOWN_HTMLDOCCK_DIRECTIVE_NAMES: &[&str] = &[
"count",
"!count",
"files",
"!files",
"has",
"!has",
"has-dir",
"!has-dir",
"hasraw",
"!hasraw",
"matches",
"!matches",
"matchesraw",
"!matchesraw",
"snapshot",
"!snapshot",
];
const KNOWN_JSONDOCCK_DIRECTIVE_NAMES: &[&str] =
&["count", "!count", "has", "!has", "is", "!is", "ismany", "!ismany", "set", "!set"];
/// The (partly) broken-down contents of a line containing a test directive,
/// which [`iter_directives`] passes to its callback function.
///
@ -1206,6 +976,7 @@ impl Config {
fn parse_and_update_revisions(
&self,
testfile: &Utf8Path,
line_number: usize,
line: &str,
existing: &mut Vec<String>,
) {
@ -1219,7 +990,8 @@ impl Config {
const FILECHECK_FORBIDDEN_REVISION_NAMES: [&str; 9] =
["CHECK", "COM", "NEXT", "SAME", "EMPTY", "NOT", "COUNT", "DAG", "LABEL"];
if let Some(raw) = self.parse_name_value_directive(line, "revisions") {
if let Some(raw) = self.parse_name_value_directive(line, "revisions", testfile, line_number)
{
if self.mode == TestMode::RunMake {
panic!("`run-make` tests do not support revisions: {}", testfile);
}
@ -1264,8 +1036,13 @@ impl Config {
(name.to_owned(), value.to_owned())
}
fn parse_pp_exact(&self, line: &str, testfile: &Utf8Path) -> Option<Utf8PathBuf> {
if let Some(s) = self.parse_name_value_directive(line, "pp-exact") {
fn parse_pp_exact(
&self,
line: &str,
testfile: &Utf8Path,
line_number: usize,
) -> Option<Utf8PathBuf> {
if let Some(s) = self.parse_name_value_directive(line, "pp-exact", testfile, line_number) {
Some(Utf8PathBuf::from(&s))
} else if self.parse_name_directive(line, "pp-exact") {
testfile.file_name().map(Utf8PathBuf::from)
@ -1306,19 +1083,31 @@ impl Config {
line.starts_with("no-") && self.parse_name_directive(&line[3..], directive)
}
pub fn parse_name_value_directive(&self, line: &str, directive: &str) -> Option<String> {
pub fn parse_name_value_directive(
&self,
line: &str,
directive: &str,
testfile: &Utf8Path,
line_number: usize,
) -> Option<String> {
let colon = directive.len();
if line.starts_with(directive) && line.as_bytes().get(colon) == Some(&b':') {
let value = line[(colon + 1)..].to_owned();
debug!("{}: {}", directive, value);
Some(expand_variables(value, self))
let value = expand_variables(value, self);
if value.is_empty() {
error!("{testfile}:{line_number}: empty value for directive `{directive}`");
help!("expected syntax is: `{directive}: value`");
panic!("empty directive value detected");
}
Some(value)
} else {
None
}
}
fn parse_edition(&self, line: &str) -> Option<String> {
self.parse_name_value_directive(line, "edition")
fn parse_edition(&self, line: &str, testfile: &Utf8Path, line_number: usize) -> Option<String> {
self.parse_name_value_directive(line, "edition", testfile, line_number)
}
fn set_name_directive(&self, line: &str, directive: &str, value: &mut bool) {
@ -1340,11 +1129,14 @@ impl Config {
&self,
line: &str,
directive: &str,
testfile: &Utf8Path,
line_number: usize,
value: &mut Option<T>,
parse: impl FnOnce(String) -> T,
) {
if value.is_none() {
*value = self.parse_name_value_directive(line, directive).map(parse);
*value =
self.parse_name_value_directive(line, directive, testfile, line_number).map(parse);
}
}
@ -1352,10 +1144,14 @@ impl Config {
&self,
line: &str,
directive: &str,
testfile: &Utf8Path,
line_number: usize,
values: &mut Vec<T>,
parse: impl FnOnce(String) -> T,
) {
if let Some(value) = self.parse_name_value_directive(line, directive).map(parse) {
if let Some(value) =
self.parse_name_value_directive(line, directive, testfile, line_number).map(parse)
{
values.push(value);
}
}
@ -1672,9 +1468,9 @@ pub(crate) fn make_test_description<R: Read>(
decision!(cfg::handle_ignore(config, ln));
decision!(cfg::handle_only(config, ln));
decision!(needs::handle_needs(&cache.needs, config, ln));
decision!(ignore_llvm(config, path, ln));
decision!(ignore_backends(config, path, ln));
decision!(needs_backends(config, path, ln));
decision!(ignore_llvm(config, path, ln, line_number));
decision!(ignore_backends(config, path, ln, line_number));
decision!(needs_backends(config, path, ln, line_number));
decision!(ignore_cdb(config, ln));
decision!(ignore_gdb(config, ln));
decision!(ignore_lldb(config, ln));
@ -1801,8 +1597,15 @@ fn ignore_lldb(config: &Config, line: &str) -> IgnoreDecision {
IgnoreDecision::Continue
}
fn ignore_backends(config: &Config, path: &Utf8Path, line: &str) -> IgnoreDecision {
if let Some(backends_to_ignore) = config.parse_name_value_directive(line, "ignore-backends") {
fn ignore_backends(
config: &Config,
path: &Utf8Path,
line: &str,
line_number: usize,
) -> IgnoreDecision {
if let Some(backends_to_ignore) =
config.parse_name_value_directive(line, "ignore-backends", path, line_number)
{
for backend in backends_to_ignore.split_whitespace().map(|backend| {
match CodegenBackend::try_from(backend) {
Ok(backend) => backend,
@ -1821,8 +1624,15 @@ fn ignore_backends(config: &Config, path: &Utf8Path, line: &str) -> IgnoreDecisi
IgnoreDecision::Continue
}
fn needs_backends(config: &Config, path: &Utf8Path, line: &str) -> IgnoreDecision {
if let Some(needed_backends) = config.parse_name_value_directive(line, "needs-backends") {
fn needs_backends(
config: &Config,
path: &Utf8Path,
line: &str,
line_number: usize,
) -> IgnoreDecision {
if let Some(needed_backends) =
config.parse_name_value_directive(line, "needs-backends", path, line_number)
{
if !needed_backends
.split_whitespace()
.map(|backend| match CodegenBackend::try_from(backend) {
@ -1844,9 +1654,9 @@ fn needs_backends(config: &Config, path: &Utf8Path, line: &str) -> IgnoreDecisio
IgnoreDecision::Continue
}
fn ignore_llvm(config: &Config, path: &Utf8Path, line: &str) -> IgnoreDecision {
fn ignore_llvm(config: &Config, path: &Utf8Path, line: &str, line_number: usize) -> IgnoreDecision {
if let Some(needed_components) =
config.parse_name_value_directive(line, "needs-llvm-components")
config.parse_name_value_directive(line, "needs-llvm-components", path, line_number)
{
let components: HashSet<_> = config.llvm_components.split_whitespace().collect();
if let Some(missing_component) = needed_components
@ -1867,7 +1677,9 @@ fn ignore_llvm(config: &Config, path: &Utf8Path, line: &str) -> IgnoreDecision {
if let Some(actual_version) = &config.llvm_version {
// Note that these `min` versions will check for not just major versions.
if let Some(version_string) = config.parse_name_value_directive(line, "min-llvm-version") {
if let Some(version_string) =
config.parse_name_value_directive(line, "min-llvm-version", path, line_number)
{
let min_version = extract_llvm_version(&version_string);
// Ignore if actual version is smaller than the minimum required version.
if *actual_version < min_version {
@ -1878,7 +1690,7 @@ fn ignore_llvm(config: &Config, path: &Utf8Path, line: &str) -> IgnoreDecision {
};
}
} else if let Some(version_string) =
config.parse_name_value_directive(line, "max-llvm-major-version")
config.parse_name_value_directive(line, "max-llvm-major-version", path, line_number)
{
let max_version = extract_llvm_version(&version_string);
// Ignore if actual major version is larger than the maximum required major version.
@ -1892,7 +1704,7 @@ fn ignore_llvm(config: &Config, path: &Utf8Path, line: &str) -> IgnoreDecision {
};
}
} else if let Some(version_string) =
config.parse_name_value_directive(line, "min-system-llvm-version")
config.parse_name_value_directive(line, "min-system-llvm-version", path, line_number)
{
let min_version = extract_llvm_version(&version_string);
// Ignore if using system LLVM and actual version
@ -1905,7 +1717,7 @@ fn ignore_llvm(config: &Config, path: &Utf8Path, line: &str) -> IgnoreDecision {
};
}
} else if let Some(version_range) =
config.parse_name_value_directive(line, "ignore-llvm-version")
config.parse_name_value_directive(line, "ignore-llvm-version", path, line_number)
{
// Syntax is: "ignore-llvm-version: <version1> [- <version2>]"
let (v_min, v_max) =
@ -1931,7 +1743,7 @@ fn ignore_llvm(config: &Config, path: &Utf8Path, line: &str) -> IgnoreDecision {
}
}
} else if let Some(version_string) =
config.parse_name_value_directive(line, "exact-llvm-major-version")
config.parse_name_value_directive(line, "exact-llvm-major-version", path, line_number)
{
// Syntax is "exact-llvm-major-version: <version>"
let version = extract_llvm_version(&version_string);

View file

@ -3,6 +3,8 @@
use std::iter;
use camino::Utf8Path;
use super::directives::{AUX_BIN, AUX_BUILD, AUX_CODEGEN_BACKEND, AUX_CRATE, PROC_MACRO};
use crate::common::Config;
@ -41,17 +43,42 @@ impl AuxProps {
/// If the given test directive line contains an `aux-*` directive, parse it
/// and update [`AuxProps`] accordingly.
pub(super) fn parse_and_update_aux(config: &Config, ln: &str, aux: &mut AuxProps) {
pub(super) fn parse_and_update_aux(
config: &Config,
ln: &str,
testfile: &Utf8Path,
line_number: usize,
aux: &mut AuxProps,
) {
if !(ln.starts_with("aux-") || ln.starts_with("proc-macro")) {
return;
}
config.push_name_value_directive(ln, AUX_BUILD, &mut aux.builds, |r| r.trim().to_string());
config.push_name_value_directive(ln, AUX_BIN, &mut aux.bins, |r| r.trim().to_string());
config.push_name_value_directive(ln, AUX_CRATE, &mut aux.crates, parse_aux_crate);
config
.push_name_value_directive(ln, PROC_MACRO, &mut aux.proc_macros, |r| r.trim().to_string());
if let Some(r) = config.parse_name_value_directive(ln, AUX_CODEGEN_BACKEND) {
config.push_name_value_directive(ln, AUX_BUILD, testfile, line_number, &mut aux.builds, |r| {
r.trim().to_string()
});
config.push_name_value_directive(ln, AUX_BIN, testfile, line_number, &mut aux.bins, |r| {
r.trim().to_string()
});
config.push_name_value_directive(
ln,
AUX_CRATE,
testfile,
line_number,
&mut aux.crates,
parse_aux_crate,
);
config.push_name_value_directive(
ln,
PROC_MACRO,
testfile,
line_number,
&mut aux.proc_macros,
|r| r.trim().to_string(),
);
if let Some(r) =
config.parse_name_value_directive(ln, AUX_CODEGEN_BACKEND, testfile, line_number)
{
aux.codegen_backend = Some(r.trim().to_owned());
}
}

View file

@ -0,0 +1,289 @@
/// This was originally generated by collecting directives from ui tests and then extracting their
/// directive names. This is **not** an exhaustive list of all possible directives. Instead, this is
/// a best-effort approximation for diagnostics. Add new directives to this list when needed.
pub(crate) const KNOWN_DIRECTIVE_NAMES: &[&str] = &[
// tidy-alphabetical-start
"add-core-stubs",
"assembly-output",
"aux-bin",
"aux-build",
"aux-codegen-backend",
"aux-crate",
"build-aux-docs",
"build-fail",
"build-pass",
"check-fail",
"check-pass",
"check-run-results",
"check-stdout",
"check-test-line-numbers-match",
"compile-flags",
"doc-flags",
"dont-check-compiler-stderr",
"dont-check-compiler-stdout",
"dont-check-failure-status",
"dont-require-annotations",
"edition",
"error-pattern",
"exact-llvm-major-version",
"exec-env",
"failure-status",
"filecheck-flags",
"forbid-output",
"force-host",
"ignore-16bit",
"ignore-32bit",
"ignore-64bit",
"ignore-aarch64",
"ignore-aarch64-pc-windows-msvc",
"ignore-aarch64-unknown-linux-gnu",
"ignore-aix",
"ignore-android",
"ignore-apple",
"ignore-arm",
"ignore-arm-unknown-linux-gnueabi",
"ignore-arm-unknown-linux-gnueabihf",
"ignore-arm-unknown-linux-musleabi",
"ignore-arm-unknown-linux-musleabihf",
"ignore-auxiliary",
"ignore-avr",
"ignore-backends",
"ignore-beta",
"ignore-cdb",
"ignore-compare-mode-next-solver",
"ignore-compare-mode-polonius",
"ignore-coverage-map",
"ignore-coverage-run",
"ignore-cross-compile",
"ignore-eabi",
"ignore-elf",
"ignore-emscripten",
"ignore-endian-big",
"ignore-enzyme",
"ignore-freebsd",
"ignore-fuchsia",
"ignore-gdb",
"ignore-gdb-version",
"ignore-gnu",
"ignore-haiku",
"ignore-horizon",
"ignore-i686-pc-windows-gnu",
"ignore-i686-pc-windows-msvc",
"ignore-illumos",
"ignore-ios",
"ignore-linux",
"ignore-lldb",
"ignore-llvm-version",
"ignore-loongarch32",
"ignore-loongarch64",
"ignore-macabi",
"ignore-macos",
"ignore-msp430",
"ignore-msvc",
"ignore-musl",
"ignore-netbsd",
"ignore-nightly",
"ignore-none",
"ignore-nto",
"ignore-nvptx64",
"ignore-nvptx64-nvidia-cuda",
"ignore-openbsd",
"ignore-pass",
"ignore-powerpc",
"ignore-powerpc64",
"ignore-remote",
"ignore-riscv64",
"ignore-rustc-debug-assertions",
"ignore-rustc_abi-x86-sse2",
"ignore-s390x",
"ignore-sgx",
"ignore-sparc64",
"ignore-spirv",
"ignore-stable",
"ignore-stage1",
"ignore-stage2",
"ignore-std-debug-assertions",
"ignore-test",
"ignore-thumb",
"ignore-thumbv8m.base-none-eabi",
"ignore-thumbv8m.main-none-eabi",
"ignore-tvos",
"ignore-unix",
"ignore-unknown",
"ignore-uwp",
"ignore-visionos",
"ignore-vxworks",
"ignore-wasi",
"ignore-wasm",
"ignore-wasm32",
"ignore-wasm32-bare",
"ignore-wasm64",
"ignore-watchos",
"ignore-windows",
"ignore-windows-gnu",
"ignore-windows-msvc",
"ignore-x32",
"ignore-x86",
"ignore-x86_64",
"ignore-x86_64-apple-darwin",
"ignore-x86_64-pc-windows-gnu",
"ignore-x86_64-unknown-linux-gnu",
"incremental",
"known-bug",
"llvm-cov-flags",
"max-llvm-major-version",
"min-cdb-version",
"min-gdb-version",
"min-lldb-version",
"min-llvm-version",
"min-system-llvm-version",
"needs-asm-support",
"needs-backends",
"needs-crate-type",
"needs-deterministic-layouts",
"needs-dlltool",
"needs-dynamic-linking",
"needs-enzyme",
"needs-force-clang-based-tests",
"needs-git-hash",
"needs-llvm-components",
"needs-llvm-zstd",
"needs-profiler-runtime",
"needs-relocation-model-pic",
"needs-run-enabled",
"needs-rust-lld",
"needs-rustc-debug-assertions",
"needs-sanitizer-address",
"needs-sanitizer-cfi",
"needs-sanitizer-dataflow",
"needs-sanitizer-hwaddress",
"needs-sanitizer-kcfi",
"needs-sanitizer-leak",
"needs-sanitizer-memory",
"needs-sanitizer-memtag",
"needs-sanitizer-safestack",
"needs-sanitizer-shadow-call-stack",
"needs-sanitizer-support",
"needs-sanitizer-thread",
"needs-std-debug-assertions",
"needs-subprocess",
"needs-symlink",
"needs-target-has-atomic",
"needs-target-std",
"needs-threads",
"needs-unwind",
"needs-wasmtime",
"needs-xray",
"no-auto-check-cfg",
"no-prefer-dynamic",
"normalize-stderr",
"normalize-stderr-32bit",
"normalize-stderr-64bit",
"normalize-stdout",
"only-16bit",
"only-32bit",
"only-64bit",
"only-aarch64",
"only-aarch64-apple-darwin",
"only-aarch64-unknown-linux-gnu",
"only-apple",
"only-arm",
"only-avr",
"only-beta",
"only-bpf",
"only-cdb",
"only-dist",
"only-elf",
"only-emscripten",
"only-gnu",
"only-i686-pc-windows-gnu",
"only-i686-pc-windows-msvc",
"only-i686-unknown-linux-gnu",
"only-ios",
"only-linux",
"only-loongarch32",
"only-loongarch64",
"only-loongarch64-unknown-linux-gnu",
"only-macos",
"only-mips",
"only-mips64",
"only-msp430",
"only-msvc",
"only-musl",
"only-nightly",
"only-nvptx64",
"only-powerpc",
"only-riscv64",
"only-rustc_abi-x86-sse2",
"only-s390x",
"only-sparc",
"only-sparc64",
"only-stable",
"only-thumb",
"only-tvos",
"only-uefi",
"only-unix",
"only-visionos",
"only-wasm32",
"only-wasm32-bare",
"only-wasm32-wasip1",
"only-watchos",
"only-windows",
"only-windows-gnu",
"only-windows-msvc",
"only-x86",
"only-x86_64",
"only-x86_64-apple-darwin",
"only-x86_64-fortanix-unknown-sgx",
"only-x86_64-pc-windows-gnu",
"only-x86_64-pc-windows-msvc",
"only-x86_64-unknown-linux-gnu",
"pp-exact",
"pretty-compare-only",
"pretty-mode",
"proc-macro",
"reference",
"regex-error-pattern",
"remap-src-base",
"revisions",
"run-crash",
"run-fail",
"run-fail-or-crash",
"run-flags",
"run-pass",
"run-rustfix",
"rustc-env",
"rustfix-only-machine-applicable",
"should-fail",
"should-ice",
"stderr-per-bitwidth",
"test-mir-pass",
"unique-doc-out-dir",
"unset-exec-env",
"unset-rustc-env",
// Used by the tidy check `unknown_revision`.
"unused-revision-names",
// tidy-alphabetical-end
];
/// Directive names understood by `htmldocck` tests; each check also has a
/// negated form prefixed with `!`.
// NOTE(review): presumably consumed by a tidy/compiletest validation pass that
// rejects unknown directives — confirm against the code that reads this list.
pub(crate) const KNOWN_HTMLDOCCK_DIRECTIVE_NAMES: &[&str] = &[
    "count",
    "!count",
    "files",
    "!files",
    "has",
    "!has",
    "has-dir",
    "!has-dir",
    "hasraw",
    "!hasraw",
    "matches",
    "!matches",
    "matchesraw",
    "!matchesraw",
    "snapshot",
    "!snapshot",
];
/// Directive names understood by `jsondocck` tests; a leading `!` negates the
/// check.
// NOTE(review): presumably consumed by the same directive-validation pass as
// `KNOWN_HTMLDOCCK_DIRECTIVE_NAMES` — confirm against the consuming code.
pub(crate) const KNOWN_JSONDOCCK_DIRECTIVE_NAMES: &[&str] =
    &["count", "!count", "has", "!has", "is", "!is", "ismany", "!ismany", "set", "!set"];

View file

@ -47,10 +47,14 @@ impl DebuggerCommands {
continue;
};
if let Some(command) = config.parse_name_value_directive(&line, &command_directive) {
if let Some(command) =
config.parse_name_value_directive(&line, &command_directive, file, line_no)
{
commands.push(command);
}
if let Some(pattern) = config.parse_name_value_directive(&line, &check_directive) {
if let Some(pattern) =
config.parse_name_value_directive(&line, &check_directive, file, line_no)
{
check_lines.push((line_no, pattern));
}
}

View file

@ -9,7 +9,7 @@ description = "Produces a manifest of all the copyrighted materials in the Rust
[dependencies]
anyhow = "1.0.65"
askama = "0.14.0"
cargo_metadata = "0.18.1"
cargo_metadata = "0.21"
serde = { version = "1.0.147", features = ["derive"] }
serde_json = "1.0.85"
thiserror = "1"

View file

@ -92,7 +92,8 @@ pub fn get_metadata(
continue;
}
// otherwise it's an out-of-tree dependency
let package_id = Package { name: package.name, version: package.version.to_string() };
let package_id =
Package { name: package.name.to_string(), version: package.version.to_string() };
output.insert(
package_id,
PackageMetadata {

View file

@ -1,7 +1,7 @@
[package]
name = "linkchecker"
version = "0.1.0"
edition = "2021"
edition = "2024"
[[bin]]
name = "linkchecker"

View file

@ -17,12 +17,13 @@
//! should catch the majority of "broken link" cases.
use std::cell::{Cell, RefCell};
use std::collections::hash_map::Entry;
use std::collections::{HashMap, HashSet};
use std::io::ErrorKind;
use std::fs;
use std::iter::once;
use std::path::{Component, Path, PathBuf};
use std::rc::Rc;
use std::time::Instant;
use std::{env, fs};
use html5ever::tendril::ByteTendril;
use html5ever::tokenizer::{
@ -110,10 +111,25 @@ macro_rules! t {
};
}
/// Parsed command-line arguments for the link checker.
struct Cli {
    /// Root directory of the generated documentation to check
    /// (made absolute by `parse_cli`).
    docs: PathBuf,
    /// Additional base directories in which link targets may be resolved,
    /// populated from `--link-targets-dir` (also made absolute).
    link_targets_dirs: Vec<PathBuf>,
}
fn main() {
let docs = env::args_os().nth(1).expect("doc path should be first argument");
let docs = env::current_dir().unwrap().join(docs);
let mut checker = Checker { root: docs.clone(), cache: HashMap::new() };
let cli = match parse_cli() {
Ok(cli) => cli,
Err(err) => {
eprintln!("error: {err}");
usage_and_exit(1);
}
};
let mut checker = Checker {
root: cli.docs.clone(),
link_targets_dirs: cli.link_targets_dirs,
cache: HashMap::new(),
};
let mut report = Report {
errors: 0,
start: Instant::now(),
@ -125,7 +141,7 @@ fn main() {
intra_doc_exceptions: 0,
has_broken_urls: false,
};
checker.walk(&docs, &mut report);
checker.walk(&cli.docs, &mut report);
report.report();
if report.errors != 0 {
println!("found some broken links");
@ -133,8 +149,50 @@ fn main() {
}
}
/// Parses the linkchecker command line.
///
/// Recognized arguments:
/// - one required positional `PATH` (the docs directory to check),
/// - `--link-targets-dir PATH` / `--link-targets-dir=PATH`, repeatable,
/// - `-h` / `--help`, which prints usage and exits,
/// - `--`, after which everything is treated as positional.
///
/// All paths are converted to absolute form. Returns an error message for an
/// unknown flag, a missing value/positional, or extra positional arguments.
fn parse_cli() -> Result<Cli, String> {
    // Helper: absolutize `arg`, turning I/O failures into a readable message.
    fn to_absolute_path(arg: &str) -> Result<PathBuf, String> {
        std::path::absolute(arg).map_err(|e| format!("could not convert to absolute {arg}: {e}"))
    }

    // Set once `--` is seen; from then on nothing is interpreted as a flag.
    let mut verbatim = false;
    let mut docs = None;
    let mut link_targets_dirs = Vec::new();

    // Skip argv[0] (the program name itself).
    let mut args = std::env::args().skip(1);
    while let Some(arg) = args.next() {
        if !verbatim && arg == "--" {
            verbatim = true;
        } else if !verbatim && (arg == "-h" || arg == "--help") {
            usage_and_exit(0)
        } else if !verbatim && arg == "--link-targets-dir" {
            // Space-separated form: the value is the next argument.
            link_targets_dirs.push(to_absolute_path(
                &args.next().ok_or("missing value for --link-targets-dir")?,
            )?);
        } else if !verbatim && let Some(value) = arg.strip_prefix("--link-targets-dir=") {
            // `=`-separated form.
            link_targets_dirs.push(to_absolute_path(value)?);
        } else if !verbatim && arg.starts_with('-') {
            return Err(format!("unknown flag: {arg}"));
        } else if docs.is_none() {
            // First positional argument: the docs root.
            docs = Some(arg);
        } else {
            return Err("too many positional arguments".into());
        }
    }

    Ok(Cli {
        docs: to_absolute_path(&docs.ok_or("missing first positional argument")?)?,
        link_targets_dirs,
    })
}
/// Prints the usage string to stderr and terminates the process with the given
/// exit code (0 for `--help`, non-zero for usage errors). Never returns.
fn usage_and_exit(code: i32) -> ! {
    eprintln!("usage: linkchecker PATH [--link-targets-dir=PATH ...]");
    std::process::exit(code)
}
/// Link-checking state: the docs root being walked plus a cache of files
/// already loaded from disk.
struct Checker {
    /// Root of the generated documentation; checked paths are stored and
    /// reported relative to this directory.
    root: PathBuf,
    /// Extra base directories in which link targets may also be resolved
    /// (from the `--link-targets-dir` CLI flag).
    link_targets_dirs: Vec<PathBuf>,
    /// Cache of loaded files, keyed by the path relative to `root`.
    cache: Cache,
}
@ -420,37 +478,34 @@ impl Checker {
/// Load a file from disk, or from the cache if available.
fn load_file(&mut self, file: &Path, report: &mut Report) -> (String, &FileEntry) {
// https://docs.microsoft.com/en-us/windows/win32/debug/system-error-codes--0-499-
#[cfg(windows)]
const ERROR_INVALID_NAME: i32 = 123;
let pretty_path =
file.strip_prefix(&self.root).unwrap_or(file).to_str().unwrap().to_string();
let entry =
self.cache.entry(pretty_path.clone()).or_insert_with(|| match fs::metadata(file) {
for base in once(&self.root).chain(self.link_targets_dirs.iter()) {
let entry = self.cache.entry(pretty_path.clone());
if let Entry::Occupied(e) = &entry
&& !matches!(e.get(), FileEntry::Missing)
{
break;
}
let file = base.join(&pretty_path);
entry.insert_entry(match fs::metadata(&file) {
Ok(metadata) if metadata.is_dir() => FileEntry::Dir,
Ok(_) => {
if file.extension().and_then(|s| s.to_str()) != Some("html") {
FileEntry::OtherFile
} else {
report.html_files += 1;
load_html_file(file, report)
load_html_file(&file, report)
}
}
Err(e) if e.kind() == ErrorKind::NotFound => FileEntry::Missing,
Err(e) => {
// If a broken intra-doc link contains `::`, on windows, it will cause `ERROR_INVALID_NAME` rather than `NotFound`.
// Explicitly check for that so that the broken link can be allowed in `LINKCHECK_EXCEPTIONS`.
#[cfg(windows)]
if e.raw_os_error() == Some(ERROR_INVALID_NAME)
&& file.as_os_str().to_str().map_or(false, |s| s.contains("::"))
{
return FileEntry::Missing;
}
panic!("unexpected read error for {}: {}", file.display(), e);
}
Err(e) if is_not_found_error(&file, &e) => FileEntry::Missing,
Err(e) => panic!("unexpected read error for {}: {}", file.display(), e),
});
}
let entry = self.cache.get(&pretty_path).unwrap();
(pretty_path, entry)
}
}
@ -629,3 +684,16 @@ fn parse_ids(ids: &mut HashSet<String>, file: &str, source: &str, report: &mut R
ids.insert(encoded);
}
}
/// Returns `true` when `error` should be treated as "file not found" for `path`.
///
/// Besides the ordinary [`std::io::ErrorKind::NotFound`], Windows reports
/// `ERROR_INVALID_NAME` (123) instead of `NotFound` for paths containing `::`
/// (as produced by broken intra-doc links); treat that case as missing too so
/// such links can still be allowed via `LINKCHECK_EXCEPTIONS`.
fn is_not_found_error(path: &Path, error: &std::io::Error) -> bool {
    // https://docs.microsoft.com/en-us/windows/win32/debug/system-error-codes--0-499-
    const WINDOWS_ERROR_INVALID_NAME: i32 = 123;

    if error.kind() == std::io::ErrorKind::NotFound {
        return true;
    }
    // Windows-only fallback: `ERROR_INVALID_NAME` raised for a `::` in the path.
    let invalid_name_on_windows =
        cfg!(windows) && error.raw_os_error() == Some(WINDOWS_ERROR_INVALID_NAME);
    invalid_name_on_windows && path.to_str().map_or(false, |s| s.contains("::"))
}

View file

@ -45,11 +45,17 @@ jobs:
os: macos-latest
- host_target: i686-pc-windows-msvc
os: windows-latest
- host_target: aarch64-pc-windows-msvc
os: windows-11-arm
runs-on: ${{ matrix.os }}
env:
HOST_TARGET: ${{ matrix.host_target }}
steps:
- uses: actions/checkout@v4
- name: apt update
if: ${{ startsWith(matrix.os, 'ubuntu') }}
# The runners seem to have outdated apt repos sometimes
run: sudo apt update
- name: install qemu
if: ${{ matrix.qemu }}
run: sudo apt install qemu-user qemu-user-binfmt
@ -63,6 +69,12 @@ jobs:
sudo apt update
# Install needed packages
sudo apt install $(echo "libatomic1: zlib1g-dev:" | sed 's/:/:${{ matrix.multiarch }}/g')
- name: Install rustup on Windows ARM
if: ${{ matrix.os == 'windows-11-arm' }}
run: |
curl -LOs https://static.rust-lang.org/rustup/dist/aarch64-pc-windows-msvc/rustup-init.exe
./rustup-init.exe -y --no-modify-path
echo "$USERPROFILE/.cargo/bin" >> "$GITHUB_PATH"
- uses: ./.github/workflows/setup
with:
toolchain_flags: "--host ${{ matrix.host_target }}"
@ -147,35 +159,48 @@ jobs:
- uses: actions/checkout@v4
with:
fetch-depth: 256 # get a bit more of the history
- name: install josh-proxy
run: cargo +stable install josh-proxy --git https://github.com/josh-project/josh --tag r24.10.04
- name: install josh-sync
run: cargo +stable install --locked --git https://github.com/rust-lang/josh-sync
- name: setup bot git name and email
run: |
git config --global user.name 'The Miri Cronjob Bot'
git config --global user.email 'miri@cron.bot'
- name: Install nightly toolchain
run: rustup toolchain install nightly --profile minimal
- name: get changes from rustc
run: ./miri rustc-pull
- name: Install rustup-toolchain-install-master
run: cargo install -f rustup-toolchain-install-master
- name: format changes (if any)
run: |
./miri toolchain
./miri fmt --check || (./miri fmt && git commit -am "fmt")
- name: Push changes to a branch and create PR
run: |
# `git diff --exit-code` "succeeds" if the diff is empty.
if git diff --exit-code HEAD^; then echo "Nothing changed in rustc, skipping PR"; exit 0; fi
# The diff is non-empty, create a PR.
# Make it easier to see what happens.
set -x
# Temporarily disable early exit to examine the status code of rustc-josh-sync
set +e
rustc-josh-sync pull
exitcode=$?
set -e
# If there were no changes to pull, rustc-josh-sync returns status code 2.
# In that case, skip the rest of the job.
if [ $exitcode -eq 2 ]; then
echo "Nothing changed in rustc, skipping PR"
exit 0
elif [ $exitcode -ne 0 ]; then
# If return code was not 0 or 2, rustc-josh-sync actually failed
echo "error: rustc-josh-sync failed"
exit ${exitcode}
fi
# Format changes
./miri toolchain
./miri fmt --check || (./miri fmt && git commit -am "fmt")
# Create a PR
BRANCH="rustup-$(date -u +%Y-%m-%d)"
git switch -c $BRANCH
git push -u origin $BRANCH
gh pr create -B master --title 'Automatic Rustup' --body 'Please close and re-open this PR to trigger CI, then enable auto-merge.'
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
ZULIP_BOT_EMAIL: ${{ secrets.ZULIP_BOT_EMAIL }}
ZULIP_API_TOKEN: ${{ secrets.ZULIP_API_TOKEN }}
cron-fail-notify:
name: cronjob failure notification
@ -198,7 +223,7 @@ jobs:
It would appear that the [Miri cron job build]('"https://github.com/$GITHUB_REPOSITORY/actions/runs/$GITHUB_RUN_ID"') failed.
This likely means that rustc changed the miri directory and
we now need to do a [`./miri rustc-pull`](https://github.com/rust-lang/miri/blob/master/CONTRIBUTING.md#importing-changes-from-the-rustc-repo).
we now need to do a [`rustc-josh-sync pull`](https://github.com/rust-lang/miri/blob/master/CONTRIBUTING.md#importing-changes-from-the-rustc-repo).
Would you mind investigating this issue?

View file

@ -1,5 +1,4 @@
target
/doc
tex/*/out
*.dot
*.out

View file

@ -297,14 +297,14 @@ You can also directly run Miri on a Rust source file:
## Advanced topic: Syncing with the rustc repo
We use the [`josh` proxy](https://github.com/josh-project/josh) to transmit changes between the
We use the [`josh-sync`](https://github.com/rust-lang/josh-sync) tool to transmit changes between the
rustc and Miri repositories. You can install it as follows:
```sh
cargo +stable install josh-proxy --git https://github.com/josh-project/josh --tag r24.10.04
cargo install --locked --git https://github.com/rust-lang/josh-sync
```
Josh will automatically be started and stopped by `./miri`.
The commands below will automatically install and manage the [Josh](https://github.com/josh-project/josh) proxy that performs the actual work.
### Importing changes from the rustc repo
@ -312,10 +312,12 @@ Josh will automatically be started and stopped by `./miri`.
We assume we start on an up-to-date master branch in the Miri repo.
1) First, create a branch for the pull, e.g. `git checkout -b rustup`
2) Then run the following:
```sh
# Fetch and merge rustc side of the history. Takes ca 5 min the first time.
# This will also update the `rustc-version` file.
./miri rustc-pull
rustc-josh-sync pull
# Update local toolchain and apply formatting.
./miri toolchain && ./miri fmt
git commit -am "rustup"
@ -328,12 +330,12 @@ needed.
### Exporting changes to the rustc repo
We will use the josh proxy to push to your fork of rustc. Run the following in the Miri repo,
We will use the `josh-sync` tool to push to your fork of rustc. Run the following in the Miri repo,
assuming we are on an up-to-date master branch:
```sh
# Push the Miri changes to your rustc fork (substitute your github handle for YOUR_NAME).
./miri rustc-push YOUR_NAME miri
rustc-josh-sync push miri YOUR_NAME
```
This will create a new branch called `miri` in your fork, and the output should include a link that

View file

@ -166,10 +166,12 @@ dependencies = [
[[package]]
name = "cc"
version = "1.2.30"
version = "1.2.16"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "deec109607ca693028562ed836a5f1c4b8bd77755c4e132fc5ce11b0b6211ae7"
checksum = "be714c154be609ec7f5dad223a33bf1482fff90472de28f7362806e6d4832b8c"
dependencies = [
"jobserver",
"libc",
"shlex",
]
@ -214,6 +216,52 @@ dependencies = [
"inout",
]
[[package]]
name = "clap"
version = "4.5.41"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "be92d32e80243a54711e5d7ce823c35c41c9d929dc4ab58e1276f625841aadf9"
dependencies = [
"clap_builder",
]
[[package]]
name = "clap_builder"
version = "4.5.41"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "707eab41e9622f9139419d573eca0900137718000c517d47da73045f54331c3d"
dependencies = [
"anstyle",
"clap_lex",
"strsim",
]
[[package]]
name = "clap_lex"
version = "0.7.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b94f61472cee1439c0b966b47e3aca9ae07e45d070759512cd390ea2bebc6675"
[[package]]
name = "cmake"
version = "0.1.54"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e7caa3f9de89ddbe2c607f4101924c5abec803763ae9534e4f4d7d8f84aa81f0"
dependencies = [
"cc",
]
[[package]]
name = "codespan-reporting"
version = "0.12.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "fe6d2e5af09e8c8ad56c969f2157a3d4238cebc7c55f0a517728c38f7b200f81"
dependencies = [
"serde",
"termcolor",
"unicode-width 0.2.1",
]
[[package]]
name = "color-eyre"
version = "0.6.5"
@ -313,6 +361,68 @@ dependencies = [
"typenum",
]
[[package]]
name = "cxx"
version = "1.0.161"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a3523cc02ad831111491dd64b27ad999f1ae189986728e477604e61b81f828df"
dependencies = [
"cc",
"cxxbridge-cmd",
"cxxbridge-flags",
"cxxbridge-macro",
"foldhash",
"link-cplusplus",
]
[[package]]
name = "cxx-build"
version = "1.0.161"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "212b754247a6f07b10fa626628c157593f0abf640a3dd04cce2760eca970f909"
dependencies = [
"cc",
"codespan-reporting",
"indexmap",
"proc-macro2",
"quote",
"scratch",
"syn",
]
[[package]]
name = "cxxbridge-cmd"
version = "1.0.161"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f426a20413ec2e742520ba6837c9324b55ffac24ead47491a6e29f933c5b135a"
dependencies = [
"clap",
"codespan-reporting",
"indexmap",
"proc-macro2",
"quote",
"syn",
]
[[package]]
name = "cxxbridge-flags"
version = "1.0.161"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a258b6069020b4e5da6415df94a50ee4f586a6c38b037a180e940a43d06a070d"
[[package]]
name = "cxxbridge-macro"
version = "1.0.161"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e8dec184b52be5008d6eaf7e62fc1802caf1ad1227d11b3b7df2c409c7ffc3f4"
dependencies = [
"indexmap",
"proc-macro2",
"quote",
"rustversion",
"syn",
]
[[package]]
name = "directories"
version = "6.0.0"
@ -334,12 +444,29 @@ dependencies = [
"windows-sys 0.60.2",
]
[[package]]
name = "displaydoc"
version = "0.2.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "97369cbbc041bc366949bc74d34658d6cda5621039731c6310521892a3a20ae0"
dependencies = [
"proc-macro2",
"quote",
"syn",
]
[[package]]
name = "encode_unicode"
version = "1.0.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "34aa73646ffb006b8f5147f3dc182bd4bcb190227ce861fc4a4844bf8e3cb2c0"
[[package]]
name = "equivalent"
version = "1.0.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "877a4ace8713b0bcf2a4e7eec82529c029f1d0619886d18145fea96c3ffe5c0f"
[[package]]
name = "errno"
version = "0.3.13"
@ -372,6 +499,21 @@ version = "1.0.7"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3f9eec918d3f24069decb9af1554cad7c880e2da24a9afd88aca000531ab82c1"
[[package]]
name = "foldhash"
version = "0.1.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d9c4f5dac5e15c24eb999c26181a6ca40b39fe946cbe4c263c7209467bc83af2"
[[package]]
name = "form_urlencoded"
version = "1.2.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e13624c2627564efccf4934284bdd98cbaa14e79b0b5a141218e507b3a823456"
dependencies = [
"percent-encoding",
]
[[package]]
name = "generic-array"
version = "0.14.7"
@ -382,6 +524,17 @@ dependencies = [
"version_check",
]
[[package]]
name = "genmc-sys"
version = "0.1.0"
dependencies = [
"cc",
"cmake",
"cxx",
"cxx-build",
"git2",
]
[[package]]
name = "getrandom"
version = "0.2.16"
@ -411,12 +564,150 @@ version = "0.31.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "07e28edb80900c19c28f1072f2e8aeca7fa06b23cd4169cefe1af5aa3260783f"
[[package]]
name = "git2"
version = "0.20.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "2deb07a133b1520dc1a5690e9bd08950108873d7ed5de38dcc74d3b5ebffa110"
dependencies = [
"bitflags",
"libc",
"libgit2-sys",
"log",
"openssl-probe",
"openssl-sys",
"url",
]
[[package]]
name = "hashbrown"
version = "0.15.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5971ac85611da7067dbfcabef3c70ebb5606018acd9e2a3903a0da507521e0d5"
[[package]]
name = "icu_collections"
version = "2.0.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "200072f5d0e3614556f94a9930d5dc3e0662a652823904c3a75dc3b0af7fee47"
dependencies = [
"displaydoc",
"potential_utf",
"yoke",
"zerofrom",
"zerovec",
]
[[package]]
name = "icu_locale_core"
version = "2.0.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0cde2700ccaed3872079a65fb1a78f6c0a36c91570f28755dda67bc8f7d9f00a"
dependencies = [
"displaydoc",
"litemap",
"tinystr",
"writeable",
"zerovec",
]
[[package]]
name = "icu_normalizer"
version = "2.0.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "436880e8e18df4d7bbc06d58432329d6458cc84531f7ac5f024e93deadb37979"
dependencies = [
"displaydoc",
"icu_collections",
"icu_normalizer_data",
"icu_properties",
"icu_provider",
"smallvec",
"zerovec",
]
[[package]]
name = "icu_normalizer_data"
version = "2.0.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "00210d6893afc98edb752b664b8890f0ef174c8adbb8d0be9710fa66fbbf72d3"
[[package]]
name = "icu_properties"
version = "2.0.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "016c619c1eeb94efb86809b015c58f479963de65bdb6253345c1a1276f22e32b"
dependencies = [
"displaydoc",
"icu_collections",
"icu_locale_core",
"icu_properties_data",
"icu_provider",
"potential_utf",
"zerotrie",
"zerovec",
]
[[package]]
name = "icu_properties_data"
version = "2.0.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "298459143998310acd25ffe6810ed544932242d3f07083eee1084d83a71bd632"
[[package]]
name = "icu_provider"
version = "2.0.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "03c80da27b5f4187909049ee2d72f276f0d9f99a42c306bd0131ecfe04d8e5af"
dependencies = [
"displaydoc",
"icu_locale_core",
"stable_deref_trait",
"tinystr",
"writeable",
"yoke",
"zerofrom",
"zerotrie",
"zerovec",
]
[[package]]
name = "idna"
version = "1.0.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "686f825264d630750a544639377bae737628043f20d38bbc029e8f29ea968a7e"
dependencies = [
"idna_adapter",
"smallvec",
"utf8_iter",
]
[[package]]
name = "idna_adapter"
version = "1.2.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3acae9609540aa318d1bc588455225fb2085b9ed0c4f6bd0d9d5bcd86f1a0344"
dependencies = [
"icu_normalizer",
"icu_properties",
]
[[package]]
name = "indenter"
version = "0.3.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ce23b50ad8242c51a442f3ff322d56b02f08852c77e4c0b4d3fd684abc89c683"
[[package]]
name = "indexmap"
version = "2.10.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "fe4cd85333e22411419a0bcae1297d25e58c9443848b11dc6a86fefe8c78a661"
dependencies = [
"equivalent",
"hashbrown",
]
[[package]]
name = "indicatif"
version = "0.17.11"
@ -463,6 +754,16 @@ version = "1.0.15"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "4a5f13b858c8d314ee3e8f639011f7ccefe71f97f96e50151fb991f267928e2c"
[[package]]
name = "jobserver"
version = "0.1.33"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "38f262f097c174adebe41eb73d66ae9c06b2844fb0da69969647bbddd9b0538a"
dependencies = [
"getrandom 0.3.3",
"libc",
]
[[package]]
name = "js-sys"
version = "0.3.77"
@ -510,6 +811,19 @@ dependencies = [
"cc",
]
[[package]]
name = "libgit2-sys"
version = "0.18.2+1.9.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1c42fe03df2bd3c53a3a9c7317ad91d80c81cd1fb0caec8d7cc4cd2bfa10c222"
dependencies = [
"cc",
"libc",
"libz-sys",
"openssl-sys",
"pkg-config",
]
[[package]]
name = "libloading"
version = "0.8.8"
@ -530,12 +844,39 @@ dependencies = [
"libc",
]
[[package]]
name = "libz-sys"
version = "1.1.22"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8b70e7a7df205e92a1a4cd9aaae7898dac0aa555503cc0a649494d0d60e7651d"
dependencies = [
"cc",
"libc",
"pkg-config",
"vcpkg",
]
[[package]]
name = "link-cplusplus"
version = "1.0.10"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "4a6f6da007f968f9def0d65a05b187e2960183de70c160204ecfccf0ee330212"
dependencies = [
"cc",
]
[[package]]
name = "linux-raw-sys"
version = "0.9.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "cd945864f07fe9f5371a27ad7b52a172b4b499999f1d97574c9fa68373937e12"
[[package]]
name = "litemap"
version = "0.8.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "241eaef5fd12c88705a01fc1066c48c4b36e0dd4377dcdc7ec3942cea7a69956"
[[package]]
name = "lock_api"
version = "0.4.13"
@ -612,6 +953,7 @@ dependencies = [
"chrono-tz",
"colored 3.0.0",
"directories",
"genmc-sys",
"getrandom 0.3.3",
"ipc-channel",
"libc",
@ -672,6 +1014,24 @@ version = "1.21.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "42f5e15c9953c5e4ccceeb2e7382a716482c34515315f7b03532b8b4e8393d2d"
[[package]]
name = "openssl-probe"
version = "0.1.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d05e27ee213611ffe7d6348b942e8f942b37114c00cc03cec254295a4a17852e"
[[package]]
name = "openssl-sys"
version = "0.9.109"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "90096e2e47630d78b7d1c20952dc621f957103f8bc2c8359ec81290d75238571"
dependencies = [
"cc",
"libc",
"pkg-config",
"vcpkg",
]
[[package]]
name = "option-ext"
version = "0.2.0"
@ -722,6 +1082,12 @@ dependencies = [
"windows-targets 0.52.6",
]
[[package]]
name = "percent-encoding"
version = "2.3.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e3148f5046208a5d56bcfc03053e3ca6334e51da8dfb19b6cdc8b306fae3283e"
[[package]]
name = "perf-event-open-sys"
version = "3.0.0"
@ -755,12 +1121,27 @@ version = "0.2.16"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3b3cff922bd51709b605d9ead9aa71031d81447142d828eb4a6eba76fe619f9b"
[[package]]
name = "pkg-config"
version = "0.3.32"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7edddbd0b52d732b21ad9a5fab5c704c14cd949e5e9a1ec5929a24fded1b904c"
[[package]]
name = "portable-atomic"
version = "1.11.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f84267b20a16ea918e43c6a88433c2d54fa145c92a811b5b047ccbe153674483"
[[package]]
name = "potential_utf"
version = "0.1.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e5a7c30837279ca13e7c867e9e40053bc68740f988cb07f7ca6df43cc734b585"
dependencies = [
"zerovec",
]
[[package]]
name = "ppv-lite86"
version = "0.2.21"
@ -946,6 +1327,12 @@ version = "1.2.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "94143f37725109f92c262ed2cf5e59bce7498c01bcc1502d7b9afe439a4e9f49"
[[package]]
name = "scratch"
version = "1.0.8"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9f6280af86e5f559536da57a45ebc84948833b3bee313a7dd25232e09c878a52"
[[package]]
name = "semver"
version = "1.0.26"
@ -1025,6 +1412,18 @@ dependencies = [
"color-eyre",
]
[[package]]
name = "stable_deref_trait"
version = "1.2.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a8f112729512f8e442d81f95a8a7ddf2b7c6b8a1a6f509a95864142b30cab2d3"
[[package]]
name = "strsim"
version = "0.11.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7da8b5736845d9f2fcb837ea5d9e2628564b3b043a70948a3f0b778838c5fb4f"
[[package]]
name = "syn"
version = "2.0.104"
@ -1036,6 +1435,17 @@ dependencies = [
"unicode-ident",
]
[[package]]
name = "synstructure"
version = "0.13.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "728a70f3dbaf5bab7f0c4b1ac8d7ae5ea60a4b5549c8a5914361c99147a709d2"
dependencies = [
"proc-macro2",
"quote",
"syn",
]
[[package]]
name = "tempfile"
version = "3.20.0"
@ -1049,6 +1459,15 @@ dependencies = [
"windows-sys 0.59.0",
]
[[package]]
name = "termcolor"
version = "1.4.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "06794f8f6c5c898b3275aebefa6b8a1cb24cd2c6c79397ab15774837a0bc5755"
dependencies = [
"winapi-util",
]
[[package]]
name = "thiserror"
version = "1.0.69"
@ -1108,6 +1527,16 @@ dependencies = [
"libc",
]
[[package]]
name = "tinystr"
version = "0.8.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5d4f6d1145dcb577acf783d4e601bc1d76a13337bb54e6233add580b07344c8b"
dependencies = [
"displaydoc",
"zerovec",
]
[[package]]
name = "tracing"
version = "0.1.41"
@ -1199,6 +1628,23 @@ version = "0.2.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "4a1a07cc7db3810833284e8d372ccdc6da29741639ecc70c9ec107df0fa6154c"
[[package]]
name = "url"
version = "2.5.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "32f8b686cadd1473f4bd0117a5d28d36b1ade384ea9b5069a1c40aefed7fda60"
dependencies = [
"form_urlencoded",
"idna",
"percent-encoding",
]
[[package]]
name = "utf8_iter"
version = "1.0.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b6c140620e7ffbb22c2dee59cafe6084a59b5ffc27a8859a5f0d494b5d52b6be"
[[package]]
name = "uuid"
version = "1.17.0"
@ -1216,6 +1662,12 @@ version = "0.1.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ba73ea9cf16a25df0c8caa16c51acb937d5712a8429db78a3ee29d5dcacd3a65"
[[package]]
name = "vcpkg"
version = "0.2.15"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "accd4ea62f7bb7a82fe23066fb0957d48ef677f6eeb8215f372f52e48bb32426"
[[package]]
name = "version_check"
version = "0.9.5"
@ -1305,6 +1757,15 @@ dependencies = [
"wasm-bindgen",
]
[[package]]
name = "winapi-util"
version = "0.1.9"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "cf221c93e13a30d793f7645a0e7762c55d169dbb0a49671918a2319d289b10bb"
dependencies = [
"windows-sys 0.59.0",
]
[[package]]
name = "windows"
version = "0.58.0"
@ -1524,6 +1985,36 @@ dependencies = [
"bitflags",
]
[[package]]
name = "writeable"
version = "0.6.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ea2f10b9bb0928dfb1b42b65e1f9e36f7f54dbdf08457afefb38afcdec4fa2bb"
[[package]]
name = "yoke"
version = "0.8.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5f41bb01b8226ef4bfd589436a297c53d118f65921786300e427be8d487695cc"
dependencies = [
"serde",
"stable_deref_trait",
"yoke-derive",
"zerofrom",
]
[[package]]
name = "yoke-derive"
version = "0.8.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "38da3c9736e16c5d3c8c597a9aaa5d1fa565d0532ae05e27c24aa62fb32c0ab6"
dependencies = [
"proc-macro2",
"quote",
"syn",
"synstructure",
]
[[package]]
name = "zerocopy"
version = "0.8.26"
@ -1543,3 +2034,57 @@ dependencies = [
"quote",
"syn",
]
[[package]]
name = "zerofrom"
version = "0.1.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "50cc42e0333e05660c3587f3bf9d0478688e15d870fab3346451ce7f8c9fbea5"
dependencies = [
"zerofrom-derive",
]
[[package]]
name = "zerofrom-derive"
version = "0.1.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d71e5d6e06ab090c67b5e44993ec16b72dcbaabc526db883a360057678b48502"
dependencies = [
"proc-macro2",
"quote",
"syn",
"synstructure",
]
[[package]]
name = "zerotrie"
version = "0.2.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "36f0bbd478583f79edad978b407914f61b2972f5af6fa089686016be8f9af595"
dependencies = [
"displaydoc",
"yoke",
"zerofrom",
]
[[package]]
name = "zerovec"
version = "0.11.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "4a05eb080e015ba39cc9e23bbe5e7fb04d5fb040350f99f34e338d5fdd294428"
dependencies = [
"yoke",
"zerofrom",
"zerovec-derive",
]
[[package]]
name = "zerovec-derive"
version = "0.11.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5b96237efa0c878c64bd89c436f661be4e46b2f3eff1ebb976f7ef2321d2f58f"
dependencies = [
"proc-macro2",
"quote",
"syn",
]

View file

@ -48,6 +48,10 @@ nix = { version = "0.30.1", features = ["mman", "ptrace", "signal"], optional =
ipc-channel = { version = "0.20.0", optional = true }
capstone = { version = "0.13", optional = true }
# FIXME(genmc,macos): Add `target_os = "macos"` once https://github.com/dtolnay/cxx/issues/1535 is fixed.
[target.'cfg(all(target_os = "linux", target_pointer_width = "64", target_endian = "little"))'.dependencies]
genmc-sys = { path = "./genmc-sys/", version = "0.1.0", optional = true }
[dev-dependencies]
ui_test = "0.30.2"
colored = "3"
@ -66,7 +70,7 @@ harness = false
[features]
default = ["stack-cache", "native-lib"]
genmc = []
genmc = ["dep:genmc-sys"] # this enables a GPL dependency!
stack-cache = []
stack-cache-consistency-check = ["stack-cache"]
tracing = ["serde_json"]

View file

@ -1,9 +1,9 @@
//! Implements the various phases of `cargo miri run/test`.
use std::env;
use std::fs::{self, File};
use std::fs::File;
use std::io::BufReader;
use std::path::{Path, PathBuf};
use std::path::{self, Path, PathBuf};
use std::process::Command;
use rustc_version::VersionMeta;
@ -222,12 +222,12 @@ pub fn phase_cargo_miri(mut args: impl Iterator<Item = String>) {
// that to be the Miri driver, but acting as rustc, in host mode.
//
// In `main`, we need the value of `RUSTC` to distinguish RUSTC_WRAPPER invocations from rustdoc
// or TARGET_RUNNER invocations, so we canonicalize it here to make it exceedingly unlikely that
// or TARGET_RUNNER invocations, so we make it absolute to make it exceedingly unlikely that
// there would be a collision with other invocations of cargo-miri (as rustdoc or as runner). We
// explicitly do this even if RUSTC_STAGE is set, since for these builds we do *not* want the
// bootstrap `rustc` thing in our way! Instead, we have MIRI_HOST_SYSROOT to use for host
// builds.
cmd.env("RUSTC", fs::canonicalize(find_miri()).unwrap());
cmd.env("RUSTC", path::absolute(find_miri()).unwrap());
// In case we get invoked as RUSTC without the wrapper, let's be a host rustc. This makes no
// sense for cross-interpretation situations, but without the wrapper, this will use the host
// sysroot, so asking it to behave like a target build makes even less sense.

View file

@ -142,7 +142,6 @@ case $HOST_TARGET in
# Host
GC_STRESS=1 MIR_OPT=1 MANY_SEEDS=64 TEST_BENCH=1 CARGO_MIRI_ENV=1 run_tests
# Extra tier 1
MANY_SEEDS=64 TEST_TARGET=i686-unknown-linux-gnu run_tests
MANY_SEEDS=64 TEST_TARGET=x86_64-apple-darwin run_tests
MANY_SEEDS=64 TEST_TARGET=x86_64-pc-windows-gnu run_tests
;;
@ -161,8 +160,6 @@ case $HOST_TARGET in
aarch64-unknown-linux-gnu)
# Host
GC_STRESS=1 MIR_OPT=1 MANY_SEEDS=64 TEST_BENCH=1 CARGO_MIRI_ENV=1 run_tests
# Extra tier 1 candidate
MANY_SEEDS=64 TEST_TARGET=aarch64-pc-windows-msvc run_tests
# Extra tier 2
MANY_SEEDS=16 TEST_TARGET=arm-unknown-linux-gnueabi run_tests # 32bit ARM
MANY_SEEDS=16 TEST_TARGET=aarch64-pc-windows-gnullvm run_tests # gnullvm ABI
@ -189,13 +186,20 @@ case $HOST_TARGET in
;;
i686-pc-windows-msvc)
# Host
# Without GC_STRESS as this is the slowest runner.
# Without GC_STRESS as this is a very slow runner.
MIR_OPT=1 MANY_SEEDS=64 TEST_BENCH=1 run_tests
# Extra tier 1
# We really want to ensure a Linux target works on a Windows host,
# and a 64bit target works on a 32bit host.
TEST_TARGET=x86_64-unknown-linux-gnu run_tests
;;
aarch64-pc-windows-msvc)
# Host
# Without GC_STRESS as this is a very slow runner.
MIR_OPT=1 MANY_SEEDS=64 TEST_BENCH=1 CARGO_MIRI_ENV=1 run_tests
# Extra tier 1
MANY_SEEDS=64 TEST_TARGET=i686-unknown-linux-gnu run_tests
;;
*)
echo "FATAL: unknown host target: $HOST_TARGET"
exit 1

View file

@ -0,0 +1,62 @@
# **(WIP)** Documentation for Miri-GenMC
[GenMC](https://github.com/MPI-SWS/genmc) is a stateless model checker for exploring concurrent executions of a program.
Miri-GenMC integrates that model checker into Miri.
**NOTE: Currently, no actual GenMC functionality is part of Miri, this is still WIP.**
<!-- FIXME(genmc): add explanation. -->
## Usage
**IMPORTANT: The license of GenMC and thus the `genmc-sys` crate in the Miri repo is currently "GPL-3.0-or-later", so a binary produced with the `genmc` feature is subject to the requirements of the GPL. As long as that remains the case, the `genmc` feature of Miri is OFF-BY-DEFAULT and must be OFF for all Miri releases.**
For testing/developing Miri-GenMC (while keeping in mind the licensing issues):
- clone the Miri repo.
- build Miri-GenMC with `./miri build --features=genmc`.
- OR: install Miri-GenMC in the current system with `./miri install --features=genmc`
Basic usage:
```shell
MIRIFLAGS="-Zmiri-genmc" cargo miri run
```
<!-- FIXME(genmc): explain options. -->
<!-- FIXME(genmc): explain Miri-GenMC specific functions. -->
## Tips
<!-- FIXME(genmc): add tips for using Miri-GenMC more efficiently. -->
## Limitations
Some or all of these limitations might get removed in the future:
- Borrow tracking is currently incompatible (stacked/tree borrows).
- Only Linux is supported for now.
- No support for 32-bit or big-endian targets.
- No cross-target interpretation.
<!-- FIXME(genmc): document remaining limitations -->
## Development
GenMC is written in C++, which complicates development a bit.
The prerequisites for building Miri-GenMC are:
- A compiler with C++23 support.
- LLVM development headers and clang.
<!-- FIXME(genmc,llvm): remove once LLVM dependency is no longer required. -->
The actual code for GenMC is not contained in the Miri repo itself, but in a [separate GenMC repo](https://github.com/MPI-SWS/genmc) (with its own maintainers).
These sources need to be available to build Miri-GenMC.
The process for obtaining them is as follows:
- By default, a fixed commit of GenMC is downloaded to `genmc-sys/genmc-src` and built automatically.
(The commit is determined by `GENMC_COMMIT` in `genmc-sys/build.rs`.)
- If you want to override that, set the `GENMC_SRC_PATH` environment variable to a path that contains the GenMC sources.
If you place this directory inside the Miri folder, it is recommended to call it `genmc-src` as that tells `./miri fmt` to avoid
formatting the Rust files inside that folder.
<!-- FIXME(genmc): explain how submitting code to GenMC should be handled. -->
<!-- FIXME(genmc): explain development. -->

View file

@ -5,6 +5,7 @@ source = "discover"
linkedProjects = [
"Cargo.toml",
"cargo-miri/Cargo.toml",
"genmc-sys/Cargo.toml",
"miri-script/Cargo.toml",
]

View file

@ -3,6 +3,7 @@
"rust-analyzer.linkedProjects": [
"Cargo.toml",
"cargo-miri/Cargo.toml",
"genmc-sys/Cargo.toml",
"miri-script/Cargo.toml",
],
"rust-analyzer.check.invocationStrategy": "once",

1
src/tools/miri/genmc-sys/.gitignore vendored Normal file
View file

@ -0,0 +1 @@
genmc-src*/

View file

@ -0,0 +1,17 @@
[package]
authors = ["Miri Team"]
# The parts in this repo are MIT OR Apache-2.0, but we are linking in
# code from https://github.com/MPI-SWS/genmc which is GPL-3.0-or-later.
license = "(MIT OR Apache-2.0) AND GPL-3.0-or-later"
name = "genmc-sys"
version = "0.1.0"
edition = "2024"
[dependencies]
cxx = { version = "1.0.160", features = ["c++20"] }
[build-dependencies]
cc = "1.2.16"
cmake = "0.1.54"
git2 = { version = "0.20.2", default-features = false, features = ["https"] }
cxx-build = { version = "1.0.160", features = ["parallel"] }

View file

@ -0,0 +1,269 @@
use std::path::{Path, PathBuf};
use std::str::FromStr;
// Build script for running Miri with GenMC.
// Check out doc/genmc.md for more info.
/// Path where the downloaded GenMC repository will be stored (relative to the `genmc-sys` directory).
/// Note that this directory is *not* cleaned up automatically by `cargo clean`.
const GENMC_DOWNLOAD_PATH: &str = "./genmc-src/";
/// Name of the library of the GenMC model checker.
const GENMC_MODEL_CHECKER: &str = "genmc_lib";
/// Path where the `cxx_bridge!` macro is used to define the Rust-C++ interface.
const RUST_CXX_BRIDGE_FILE_PATH: &str = "src/lib.rs";
/// The profile with which to build GenMC.
const GENMC_CMAKE_PROFILE: &str = "RelWithDebInfo";
/// Logic for obtaining the pinned GenMC sources: clone/fetch the GenMC repository
/// with libgit2 and check out the commit pinned in [`GENMC_COMMIT`].
mod downloading {
    use std::path::PathBuf;
    use std::str::FromStr;

    use git2::{Commit, Oid, Remote, Repository, StatusOptions};

    use super::GENMC_DOWNLOAD_PATH;

    /// The GenMC repository that we get our commit from.
    pub(crate) const GENMC_GITHUB_URL: &str = "https://github.com/MPI-SWS/genmc.git";
    /// The GenMC commit we depend on. It must be available on the specified GenMC repository.
    pub(crate) const GENMC_COMMIT: &str = "3438dd2c1202cd4a47ed7881d099abf23e4167ab";

    /// Ensure an unmodified checkout of the pinned GenMC commit exists at
    /// `GENMC_DOWNLOAD_PATH`, cloning or fetching from `GENMC_GITHUB_URL` as needed,
    /// and return that path.
    ///
    /// Panics if cloning/fetching fails, if the commit cannot be found, or if an
    /// existing checkout has local modifications (we refuse to overwrite those).
    pub(crate) fn download_genmc() -> PathBuf {
        // Infallible: `<PathBuf as FromStr>::Err` is `Infallible`.
        let Ok(genmc_download_path) = PathBuf::from_str(GENMC_DOWNLOAD_PATH);
        let commit_oid = Oid::from_str(GENMC_COMMIT).expect("Commit should be valid.");

        match Repository::open(&genmc_download_path) {
            // A repository already exists at the download path: reuse it.
            Ok(repo) => {
                // Bail out if there are local changes, to avoid destroying them.
                assert_repo_unmodified(&repo);
                let commit = update_local_repo(&repo, commit_oid);
                checkout_commit(&repo, &commit);
            }
            // No repository yet (or it is unreadable): clone it fresh.
            Err(_) => {
                let repo = clone_remote_repo(&genmc_download_path);
                let Ok(commit) = repo.find_commit(commit_oid) else {
                    panic!(
                        "Cloned GenMC repository does not contain required commit '{GENMC_COMMIT}'"
                    );
                };
                checkout_commit(&repo, &commit);
            }
        };

        genmc_download_path
    }

    /// Return the `origin` remote, resetting its URL to `GENMC_GITHUB_URL` if it
    /// points somewhere else (e.g. after the upstream repo location changed).
    fn get_remote(repo: &Repository) -> Remote<'_> {
        let remote = repo.find_remote("origin").unwrap_or_else(|e| {
            panic!(
                "Could not load commit ({GENMC_COMMIT}) from remote repository '{GENMC_GITHUB_URL}'. Error: {e}"
            );
        });

        // Ensure that the correct remote URL is set.
        let remote_url = remote.url();
        if let Some(remote_url) = remote_url
            && remote_url == GENMC_GITHUB_URL
        {
            return remote;
        }

        // Update remote URL.
        println!(
            "cargo::warning=GenMC repository remote URL has changed from '{remote_url:?}' to '{GENMC_GITHUB_URL}'"
        );
        repo.remote_set_url("origin", GENMC_GITHUB_URL)
            .expect("cannot rename url of remote 'origin'");

        // Reacquire the `Remote`, since `remote_set_url` doesn't update Remote objects already in memory.
        repo.find_remote("origin").unwrap()
    }

    // Check if the required commit exists already, otherwise try fetching it.
    fn update_local_repo(repo: &Repository, commit_oid: Oid) -> Commit<'_> {
        repo.find_commit(commit_oid).unwrap_or_else(|_find_error| {
            println!("GenMC repository at path '{GENMC_DOWNLOAD_PATH}' does not contain commit '{GENMC_COMMIT}'.");
            // The commit is not in the checkout. Try `git fetch` and hope that we find the commit then.
            let mut remote = get_remote(repo);
            remote.fetch(&[GENMC_COMMIT], None, None).expect("Failed to fetch from remote.");

            repo.find_commit(commit_oid)
                .expect("Remote repository should contain expected commit")
        })
    }

    /// Clone the GenMC repository from `GENMC_GITHUB_URL` into the given path.
    fn clone_remote_repo(genmc_download_path: &PathBuf) -> Repository {
        Repository::clone(GENMC_GITHUB_URL, &genmc_download_path).unwrap_or_else(|e| {
            panic!("Cannot clone GenMC repo from '{GENMC_GITHUB_URL}': {e:?}");
        })
    }

    /// Set the state of the repo to a specific commit
    fn checkout_commit(repo: &Repository, commit: &Commit<'_>) {
        repo.checkout_tree(commit.as_object(), None).expect("Failed to checkout");
        repo.set_head_detached(commit.id()).expect("Failed to set HEAD");
        println!("Successfully set checked out commit {commit:?}");
    }

    /// Check that the downloaded repository is unmodified.
    /// If it is modified, explain that it shouldn't be, and hint at how to do local development with GenMC.
    /// We don't overwrite any changes made to the directory, to prevent data loss.
    fn assert_repo_unmodified(repo: &Repository) {
        let statuses = repo
            .statuses(Some(
                StatusOptions::new()
                    .include_untracked(true)
                    .include_ignored(false)
                    .include_unmodified(false),
            ))
            .expect("should be able to get repository status");
        if statuses.is_empty() {
            return;
        }

        panic!(
            "Downloaded GenMC repository at path '{GENMC_DOWNLOAD_PATH}' has been modified. Please undo any changes made, or delete the '{GENMC_DOWNLOAD_PATH}' directory to have it downloaded again.\n\
            HINT: For local development, set the environment variable 'GENMC_SRC_PATH' to the path of a GenMC repository."
        );
    }
}
// FIXME(genmc,llvm): Remove once the LLVM dependency of the GenMC model checker is removed.
/// The linked LLVM version is in the generated `config.h` file, which we parse and use to link to LLVM.
/// Returns c++ compiler definitions required for building with/including LLVM, and the include path for LLVM headers.
///
/// Panics if the config file cannot be read, if any of the expected `//@KEY: VALUE`
/// entries is missing, or if `llvm-config` cannot be run.
fn link_to_llvm(config_file: &Path) -> (String, String) {
    /// Search a string for a line matching `//@VARIABLE_NAME: VARIABLE CONTENT`
    fn extract_value<'a>(input: &'a str, name: &str) -> Option<&'a str> {
        input
            .lines()
            .find_map(|line| line.strip_prefix("//@")?.strip_prefix(name)?.strip_prefix(": "))
    }

    let file_content = std::fs::read_to_string(config_file).unwrap_or_else(|err| {
        panic!("GenMC config file ({}) should exist, but got error {err:?}", config_file.display())
    });
    let llvm_definitions = extract_value(&file_content, "LLVM_DEFINITIONS")
        .expect("Config file should contain LLVM_DEFINITIONS");
    let llvm_include_dirs = extract_value(&file_content, "LLVM_INCLUDE_DIRS")
        .expect("Config file should contain LLVM_INCLUDE_DIRS");
    let llvm_library_dir = extract_value(&file_content, "LLVM_LIBRARY_DIR")
        .expect("Config file should contain LLVM_LIBRARY_DIR");
    let llvm_config_path = extract_value(&file_content, "LLVM_CONFIG_PATH")
        .expect("Config file should contain LLVM_CONFIG_PATH");

    // Add linker search path.
    let lib_dir = PathBuf::from(llvm_library_dir);
    println!("cargo::rustc-link-search=native={}", lib_dir.display());

    // Add libraries to link.
    let output = std::process::Command::new(llvm_config_path)
        .arg("--libs") // Print the libraries to link to (space-separated list)
        .output()
        .expect("failed to execute llvm-config");
    let llvm_link_libs =
        String::try_from(output.stdout).expect("llvm-config output should be a valid string");
    // `llvm-config --libs` prints a whitespace-separated list of `-l<libname>` flags;
    // `split_whitespace` also handles the trailing newline and any repeated spaces.
    for link_lib in llvm_link_libs.split_whitespace() {
        let link_lib =
            link_lib.strip_prefix("-l").expect("Linker parameter should start with \"-l\"");
        println!("cargo::rustc-link-lib=dylib={link_lib}");
    }

    (llvm_definitions.to_string(), llvm_include_dirs.to_string())
}
/// Build the GenMC model checker library and the Rust-C++ interop library with cxx.rs
fn compile_cpp_dependencies(genmc_path: &Path) {
// Part 1:
// Compile the GenMC library using cmake.
let cmakelists_path = genmc_path.join("CMakeLists.txt");
// FIXME(genmc,cargo): Switch to using `CARGO_CFG_DEBUG_ASSERTIONS` once https://github.com/rust-lang/cargo/issues/15760 is completed.
// Enable/disable additional debug checks, prints and options for GenMC, based on the Rust profile (debug/release)
let enable_genmc_debug = matches!(std::env::var("PROFILE").as_deref().unwrap(), "debug");
let mut config = cmake::Config::new(cmakelists_path);
config.profile(GENMC_CMAKE_PROFILE);
config.define("GENMC_DEBUG", if enable_genmc_debug { "ON" } else { "OFF" });
// The actual compilation happens here:
let genmc_install_dir = config.build();
// Add the model checker library to be linked and tell rustc where to find it:
let cmake_lib_dir = genmc_install_dir.join("lib").join("genmc");
println!("cargo::rustc-link-search=native={}", cmake_lib_dir.display());
println!("cargo::rustc-link-lib=static={GENMC_MODEL_CHECKER}");
// FIXME(genmc,llvm): Remove once the LLVM dependency of the GenMC model checker is removed.
let config_file = genmc_install_dir.join("include").join("genmc").join("config.h");
let (llvm_definitions, llvm_include_dirs) = link_to_llvm(&config_file);
// Part 2:
// Compile the cxx_bridge (the link between the Rust and C++ code).
let genmc_include_dir = genmc_install_dir.join("include").join("genmc");
// FIXME(genmc,llvm): remove once LLVM dependency is removed.
// These definitions are parsed into a cmake list and then printed to the config.h file, so they are ';' separated.
let definitions = llvm_definitions.split(";");
let mut bridge = cxx_build::bridge("src/lib.rs");
// FIXME(genmc,cmake): Remove once the GenMC debug setting is available in the config.h file.
if enable_genmc_debug {
bridge.define("ENABLE_GENMC_DEBUG", None);
}
for definition in definitions {
bridge.flag(definition);
}
bridge
.opt_level(2)
.debug(true) // Same settings that GenMC uses (default for cmake `RelWithDebInfo`)
.warnings(false) // NOTE: enabling this produces a lot of warnings.
.std("c++23")
.include(genmc_include_dir)
.include(llvm_include_dirs)
.include("./src_cpp")
.file("./src_cpp/MiriInterface.hpp")
.file("./src_cpp/MiriInterface.cpp")
.compile("genmc_interop");
// Link the Rust-C++ interface library generated by cxx_build:
println!("cargo::rustc-link-lib=static=genmc_interop");
}
/// Build-script entry point: locate (or download) the GenMC sources, build the
/// GenMC model checker and the cxx bridge, and emit rerun-if-changed directives.
fn main() {
    // Make sure we don't accidentally distribute a binary with GPL code.
    if option_env!("RUSTC_STAGE").is_some() {
        panic!(
            "genmc should not be enabled in the rustc workspace since it includes a GPL dependency"
        );
    }

    // Select which path to use for the GenMC repo:
    // an explicitly provided local checkout (`GENMC_SRC_PATH`) takes precedence
    // over the automatically downloaded pinned commit.
    let genmc_path = if let Ok(genmc_src_path) = std::env::var("GENMC_SRC_PATH") {
        // `PathBuf::from` is infallible, so no error handling is needed here.
        let genmc_src_path = PathBuf::from(genmc_src_path);
        assert!(
            genmc_src_path.exists(),
            "GENMC_SRC_PATH={} does not exist!",
            genmc_src_path.display()
        );
        genmc_src_path
    } else {
        downloading::download_genmc()
    };

    // Build all required components:
    compile_cpp_dependencies(&genmc_path);

    // Only rebuild if anything changes:
    // Note that we don't add the downloaded GenMC repo, since that should never be modified
    // manually. Adding that path here would also trigger an unnecessary rebuild after the repo is
    // cloned (since cargo detects that as a file modification).
    println!("cargo::rerun-if-changed={RUST_CXX_BRIDGE_FILE_PATH}");
    println!("cargo::rerun-if-changed=./src");
    println!("cargo::rerun-if-changed=./src_cpp");
}

View file

@ -0,0 +1,30 @@
pub use self::ffi::*;
/// Defaults for the GenMC configuration: all optional features are disabled.
impl Default for GenmcParams {
    fn default() -> Self {
        Self {
            print_random_schedule_seed: false,
            do_symmetry_reduction: false,
            // FIXME(GenMC): Add defaults for remaining parameters
        }
    }
}
// The Rust <-> C++ interface, generated by the `cxx` crate.
// Everything declared in this module is visible to both languages; the C++ side
// of the bridge lives in `src_cpp/MiriInterface.{hpp,cpp}`.
#[cxx::bridge]
mod ffi {
    /// Parameters that will be given to GenMC for setting up the model checker.
    /// (The fields of this struct are visible to both Rust and C++)
    #[derive(Clone, Debug)]
    struct GenmcParams {
        pub print_random_schedule_seed: bool,
        pub do_symmetry_reduction: bool,
        // FIXME(GenMC): Add remaining parameters.
    }
    unsafe extern "C++" {
        // Header that declares the C++ functions exposed to Rust below.
        include!("MiriInterface.hpp");

        type MiriGenMCShim;

        // Create a new GenMC model-checker handle from the given parameters.
        fn createGenmcHandle(config: &GenmcParams) -> UniquePtr<MiriGenMCShim>;
    }
}

View file

@ -0,0 +1,50 @@
#include "MiriInterface.hpp"
#include "genmc-sys/src/lib.rs.h"
// Translate the Miri-side `GenmcParams` into a GenMC `Config` and construct a
// driver shim. Returned as a `unique_ptr` so ownership passes cleanly to Rust
// through the cxx bridge.
auto MiriGenMCShim::createHandle(const GenmcParams &config)
	-> std::unique_ptr<MiriGenMCShim>
{
	auto conf = std::make_shared<Config>();

	// Miri needs all threads to be replayed, even fully completed ones.
	conf->replayCompletedThreads = true;

	// We only support the RC11 memory model for Rust.
	conf->model = ModelType::RC11;

	conf->printRandomScheduleSeed = config.print_random_schedule_seed;

	// FIXME(genmc): disable any options we don't support currently:
	conf->ipr = false;
	conf->disableBAM = true;
	conf->instructionCaching = false;

	// Symmetry reduction is rejected up front rather than silently ignored.
	ERROR_ON(config.do_symmetry_reduction, "Symmetry reduction is currently unsupported in GenMC mode.");
	conf->symmetryReduction = config.do_symmetry_reduction;

	// FIXME(genmc): Should there be a way to change this option from Miri?
	conf->schedulePolicy = SchedulePolicy::WF;

	// FIXME(genmc): implement estimation mode:
	conf->estimate = false;
	conf->estimationMax = 1000;
	const auto mode = conf->estimate ? GenMCDriver::Mode(GenMCDriver::EstimationMode{})
					 : GenMCDriver::Mode(GenMCDriver::VerificationMode{});

	// Running Miri-GenMC without race detection is not supported.
	// Disabling this option also changes the behavior of the replay scheduler to only schedule at atomic operations, which is required with Miri.
	// This happens because Miri can generate multiple GenMC events for a single MIR terminator. Without this option,
	// the scheduler might incorrectly schedule an atomic MIR terminator because the first event it creates is a non-atomic (e.g., `StorageLive`).
	conf->disableRaceDetection = false;

	// Miri can already check for unfreed memory. Also, GenMC cannot distinguish between memory
	// that is allowed to leak and memory that is not.
	conf->warnUnfreedMemory = false;

	// FIXME(genmc): check config:
	// checkConfigOptions(*conf);

	auto driver = std::make_unique<MiriGenMCShim>(std::move(conf), mode);
	return driver;
}

View file

@ -0,0 +1,44 @@
#ifndef GENMC_MIRI_INTERFACE_HPP
#define GENMC_MIRI_INTERFACE_HPP
#include "rust/cxx.h"
#include "config.h"
#include "Config/Config.hpp"
#include "Verification/GenMCDriver.hpp"
#include <iostream>
/**** Types available to Miri ****/
// Config struct defined on the Rust side and translated to C++ by cxx.rs:
struct GenmcParams;
// Thin wrapper around `GenMCDriver` that gives Miri (via cxx) a constructible
// handle. Inherits privately so only the API we deliberately expose is visible.
struct MiriGenMCShim : private GenMCDriver
{

public:
	MiriGenMCShim(std::shared_ptr<const Config> conf, Mode mode /* = VerificationMode{} */)
		: GenMCDriver(std::move(conf), nullptr, mode)
	{
		// NOTE(review): debug print, presumably temporary while GenMC support is WIP.
		std::cerr << "C++: GenMC handle created!" << std::endl;
	}

	virtual ~MiriGenMCShim()
	{
		// NOTE(review): debug print, presumably temporary while GenMC support is WIP.
		std::cerr << "C++: GenMC handle destroyed!" << std::endl;
	}

	// Build a shim (and its GenMC `Config`) from the Miri-provided parameters.
	static std::unique_ptr<MiriGenMCShim> createHandle(const GenmcParams &config);
};
/**** Functions available to Miri ****/
// NOTE: CXX doesn't support exposing static methods to Rust currently, so we expose this function instead.
// Free-function wrapper around `MiriGenMCShim::createHandle`; this is the symbol
// declared in the `#[cxx::bridge]` module on the Rust side.
static inline auto createGenmcHandle(const GenmcParams &config) -> std::unique_ptr<MiriGenMCShim>
{
	return MiriGenMCShim::createHandle(config);
}
#endif /* GENMC_MIRI_INTERFACE_HPP */

View file

@ -0,0 +1,2 @@
repo = "miri"
filter = ":rev(75dd959a3a40eb5b4574f8d2e23aa6efbeb33573:prefix=src/tools/miri):/src/tools/miri"

View file

@ -116,27 +116,6 @@ version = "1.0.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b05b61dc5112cbb17e4b6cd61790d9845d13888356391624cbe7e41efeac1e75"
[[package]]
name = "directories"
version = "6.0.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "16f5094c54661b38d03bd7e50df373292118db60b585c08a411c6d840017fe7d"
dependencies = [
"dirs-sys",
]
[[package]]
name = "dirs-sys"
version = "0.5.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e01a3366d27ee9890022452ee61b2b63a67e6f13f58900b651ff5665f0bb1fab"
dependencies = [
"libc",
"option-ext",
"redox_users",
"windows-sys 0.60.2",
]
[[package]]
name = "dunce"
version = "1.0.5"
@ -165,17 +144,6 @@ version = "2.3.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "37909eebbb50d72f9059c3b6d82c0463f2ff062c9e95845c43a6c9c0355411be"
[[package]]
name = "getrandom"
version = "0.2.16"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "335ff9f135e4384c8150d6f27c6daed433577f86b4750418338c01a1a2528592"
dependencies = [
"cfg-if",
"libc",
"wasi 0.11.1+wasi-snapshot-preview1",
]
[[package]]
name = "getrandom"
version = "0.3.3"
@ -185,7 +153,7 @@ dependencies = [
"cfg-if",
"libc",
"r-efi",
"wasi 0.14.2+wasi-0.2.4",
"wasi",
]
[[package]]
@ -221,16 +189,6 @@ version = "0.2.174"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1171693293099992e19cddea4e8b849964e9846f4acee11b3948bcc337be8776"
[[package]]
name = "libredox"
version = "0.1.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1580801010e535496706ba011c15f8532df6b42297d2e471fec38ceadd8c0638"
dependencies = [
"bitflags",
"libc",
]
[[package]]
name = "linux-raw-sys"
version = "0.9.4"
@ -249,7 +207,6 @@ version = "0.1.0"
dependencies = [
"anyhow",
"clap",
"directories",
"dunce",
"itertools",
"path_macro",
@ -275,12 +232,6 @@ version = "1.70.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a4895175b425cb1f87721b59f0f286c2092bd4af812243672510e1ac53e2e0ad"
[[package]]
name = "option-ext"
version = "0.2.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "04744f49eae99ab78e0d5c0b603ab218f515ea8cfe5a456d7629ad883a3b6e7d"
[[package]]
name = "path_macro"
version = "1.0.0"
@ -311,17 +262,6 @@ version = "5.3.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "69cdb34c158ceb288df11e18b4bd39de994f6657d83847bdffdbd7f346754b0f"
[[package]]
name = "redox_users"
version = "0.5.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "dd6f9d3d47bdd2ad6945c5015a226ec6155d0bcdfd8f7cd29f86b71f8de99d2b"
dependencies = [
"getrandom 0.2.16",
"libredox",
"thiserror",
]
[[package]]
name = "rustc_version"
version = "0.4.1"
@ -427,32 +367,12 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e8a64e3985349f2441a1a9ef0b853f869006c3855f2cda6862a94d26ebb9d6a1"
dependencies = [
"fastrand",
"getrandom 0.3.3",
"getrandom",
"once_cell",
"rustix",
"windows-sys 0.59.0",
]
[[package]]
name = "thiserror"
version = "2.0.12"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "567b8a2dae586314f7be2a752ec7474332959c6460e02bde30d702a66d488708"
dependencies = [
"thiserror-impl",
]
[[package]]
name = "thiserror-impl"
version = "2.0.12"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7f7cf42b4507d8ea322120659672cf1b9dbb93f8f2d4ecfd6e51350ff5b17a1d"
dependencies = [
"proc-macro2",
"quote",
"syn",
]
[[package]]
name = "unicode-ident"
version = "1.0.18"
@ -475,12 +395,6 @@ dependencies = [
"winapi-util",
]
[[package]]
name = "wasi"
version = "0.11.1+wasi-snapshot-preview1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ccf3ec651a847eb01de73ccad15eb7d99f80485de043efb2f370cd654f4ea44b"
[[package]]
name = "wasi"
version = "0.14.2+wasi-0.2.4"

View file

@ -22,7 +22,6 @@ anyhow = "1.0"
xshell = "0.2.6"
rustc_version = "0.4"
dunce = "1.0.4"
directories = "6"
serde = "1"
serde_json = "1"
serde_derive = "1"

View file

@ -2,11 +2,9 @@ use std::collections::BTreeMap;
use std::ffi::{OsStr, OsString};
use std::fmt::Write as _;
use std::fs::{self, File};
use std::io::{self, BufRead, BufReader, BufWriter, Write as _};
use std::ops::Not;
use std::io::{self, BufRead, BufReader, BufWriter};
use std::path::PathBuf;
use std::time::Duration;
use std::{env, net, process};
use std::{env, process};
use anyhow::{Context, Result, anyhow, bail};
use path_macro::path;
@ -18,11 +16,6 @@ use xshell::{Shell, cmd};
use crate::Command;
use crate::util::*;
/// Used for rustc syncs.
const JOSH_FILTER: &str =
":rev(75dd959a3a40eb5b4574f8d2e23aa6efbeb33573:prefix=src/tools/miri):/src/tools/miri";
const JOSH_PORT: u16 = 42042;
impl MiriEnv {
/// Prepares the environment: builds miri and cargo-miri and a sysroot.
/// Returns the location of the sysroot.
@ -99,66 +92,6 @@ impl Command {
Ok(())
}
fn start_josh() -> Result<impl Drop> {
// Determine cache directory.
let local_dir = {
let user_dirs =
directories::ProjectDirs::from("org", "rust-lang", "miri-josh").unwrap();
user_dirs.cache_dir().to_owned()
};
// Start josh, silencing its output.
let mut cmd = process::Command::new("josh-proxy");
cmd.arg("--local").arg(local_dir);
cmd.arg("--remote").arg("https://github.com");
cmd.arg("--port").arg(JOSH_PORT.to_string());
cmd.arg("--no-background");
cmd.stdout(process::Stdio::null());
cmd.stderr(process::Stdio::null());
let josh = cmd.spawn().context("failed to start josh-proxy, make sure it is installed")?;
// Create a wrapper that stops it on drop.
struct Josh(process::Child);
impl Drop for Josh {
fn drop(&mut self) {
#[cfg(unix)]
{
// Try to gracefully shut it down.
process::Command::new("kill")
.args(["-s", "INT", &self.0.id().to_string()])
.output()
.expect("failed to SIGINT josh-proxy");
// Sadly there is no "wait with timeout"... so we just give it some time to finish.
std::thread::sleep(Duration::from_millis(100));
// Now hopefully it is gone.
if self.0.try_wait().expect("failed to wait for josh-proxy").is_some() {
return;
}
}
// If that didn't work (or we're not on Unix), kill it hard.
eprintln!(
"I have to kill josh-proxy the hard way, let's hope this does not break anything."
);
self.0.kill().expect("failed to SIGKILL josh-proxy");
}
}
// Wait until the port is open. We try every 10ms until 1s passed.
for _ in 0..100 {
// This will generally fail immediately when the port is still closed.
let josh_ready = net::TcpStream::connect_timeout(
&net::SocketAddr::from(([127, 0, 0, 1], JOSH_PORT)),
Duration::from_millis(1),
);
if josh_ready.is_ok() {
return Ok(Josh(josh));
}
// Not ready yet.
std::thread::sleep(Duration::from_millis(10));
}
bail!("Even after waiting for 1s, josh-proxy is still not available.")
}
pub fn exec(self) -> Result<()> {
// First, and crucially only once, run the auto-actions -- but not for all commands.
match &self {
@ -170,11 +103,7 @@ impl Command {
| Command::Fmt { .. }
| Command::Doc { .. }
| Command::Clippy { .. } => Self::auto_actions()?,
| Command::Toolchain { .. }
| Command::Bench { .. }
| Command::RustcPull { .. }
| Command::RustcPush { .. }
| Command::Squash => {}
| Command::Toolchain { .. } | Command::Bench { .. } | Command::Squash => {}
}
// Then run the actual command.
match self {
@ -191,8 +120,6 @@ impl Command {
Command::Bench { target, no_install, save_baseline, load_baseline, benches } =>
Self::bench(target, no_install, save_baseline, load_baseline, benches),
Command::Toolchain { flags } => Self::toolchain(flags),
Command::RustcPull { commit } => Self::rustc_pull(commit.clone()),
Command::RustcPush { github_user, branch } => Self::rustc_push(github_user, branch),
Command::Squash => Self::squash(),
}
}
@ -233,156 +160,6 @@ impl Command {
Ok(())
}
fn rustc_pull(commit: Option<String>) -> Result<()> {
let sh = Shell::new()?;
sh.change_dir(miri_dir()?);
let commit = commit.map(Result::Ok).unwrap_or_else(|| {
let rust_repo_head =
cmd!(sh, "git ls-remote https://github.com/rust-lang/rust/ HEAD").read()?;
rust_repo_head
.split_whitespace()
.next()
.map(|front| front.trim().to_owned())
.ok_or_else(|| anyhow!("Could not obtain Rust repo HEAD from remote."))
})?;
// Make sure the repo is clean.
if cmd!(sh, "git status --untracked-files=no --porcelain").read()?.is_empty().not() {
bail!("working directory must be clean before running `./miri rustc-pull`");
}
// Make sure josh is running.
let josh = Self::start_josh()?;
let josh_url =
format!("http://localhost:{JOSH_PORT}/rust-lang/rust.git@{commit}{JOSH_FILTER}.git");
// Update rust-version file. As a separate commit, since making it part of
// the merge has confused the heck out of josh in the past.
// We pass `--no-verify` to avoid running git hooks like `./miri fmt` that could in turn
// trigger auto-actions.
// We do this before the merge so that if there are merge conflicts, we have
// the right rust-version file while resolving them.
sh.write_file("rust-version", format!("{commit}\n"))?;
const PREPARING_COMMIT_MESSAGE: &str = "Preparing for merge from rustc";
cmd!(sh, "git commit rust-version --no-verify -m {PREPARING_COMMIT_MESSAGE}")
.run()
.context("FAILED to commit rust-version file, something went wrong")?;
// Fetch given rustc commit.
cmd!(sh, "git fetch {josh_url}")
.run()
.inspect_err(|_| {
// Try to un-do the previous `git commit`, to leave the repo in the state we found it.
cmd!(sh, "git reset --hard HEAD^")
.run()
.expect("FAILED to clean up again after failed `git fetch`, sorry for that");
})
.context("FAILED to fetch new commits, something went wrong (committing the rust-version file has been undone)")?;
// This should not add any new root commits. So count those before and after merging.
let num_roots = || -> Result<u32> {
Ok(cmd!(sh, "git rev-list HEAD --max-parents=0 --count")
.read()
.context("failed to determine the number of root commits")?
.parse::<u32>()?)
};
let num_roots_before = num_roots()?;
// Merge the fetched commit.
const MERGE_COMMIT_MESSAGE: &str = "Merge from rustc";
cmd!(sh, "git merge FETCH_HEAD --no-verify --no-ff -m {MERGE_COMMIT_MESSAGE}")
.run()
.context("FAILED to merge new commits, something went wrong")?;
// Check that the number of roots did not increase.
if num_roots()? != num_roots_before {
bail!("Josh created a new root commit. This is probably not the history you want.");
}
drop(josh);
Ok(())
}
fn rustc_push(github_user: String, branch: String) -> Result<()> {
let sh = Shell::new()?;
sh.change_dir(miri_dir()?);
let base = sh.read_file("rust-version")?.trim().to_owned();
// Make sure the repo is clean.
if cmd!(sh, "git status --untracked-files=no --porcelain").read()?.is_empty().not() {
bail!("working directory must be clean before running `./miri rustc-push`");
}
// Make sure josh is running.
let josh = Self::start_josh()?;
let josh_url =
format!("http://localhost:{JOSH_PORT}/{github_user}/rust.git{JOSH_FILTER}.git");
// Find a repo we can do our preparation in.
if let Ok(rustc_git) = env::var("RUSTC_GIT") {
// If rustc_git is `Some`, we'll use an existing fork for the branch updates.
sh.change_dir(rustc_git);
} else {
// Otherwise, do this in the local Miri repo.
println!(
"This will pull a copy of the rust-lang/rust history into this Miri checkout, growing it by about 1GB."
);
print!(
"To avoid that, abort now and set the `RUSTC_GIT` environment variable to an existing rustc checkout. Proceed? [y/N] "
);
std::io::stdout().flush()?;
let mut answer = String::new();
std::io::stdin().read_line(&mut answer)?;
if answer.trim().to_lowercase() != "y" {
std::process::exit(1);
}
};
// Prepare the branch. Pushing works much better if we use as base exactly
// the commit that we pulled from last time, so we use the `rust-version`
// file to find out which commit that would be.
println!("Preparing {github_user}/rust (base: {base})...");
if cmd!(sh, "git fetch https://github.com/{github_user}/rust {branch}")
.ignore_stderr()
.read()
.is_ok()
{
println!(
"The branch '{branch}' seems to already exist in 'https://github.com/{github_user}/rust'. Please delete it and try again."
);
std::process::exit(1);
}
cmd!(sh, "git fetch https://github.com/rust-lang/rust {base}").run()?;
cmd!(sh, "git push https://github.com/{github_user}/rust {base}:refs/heads/{branch}")
.ignore_stdout()
.ignore_stderr() // silence the "create GitHub PR" message
.run()?;
println!();
// Do the actual push.
sh.change_dir(miri_dir()?);
println!("Pushing miri changes...");
cmd!(sh, "git push {josh_url} HEAD:{branch}").run()?;
println!();
// Do a round-trip check to make sure the push worked as expected.
cmd!(sh, "git fetch {josh_url} {branch}").ignore_stderr().read()?;
let head = cmd!(sh, "git rev-parse HEAD").read()?;
let fetch_head = cmd!(sh, "git rev-parse FETCH_HEAD").read()?;
if head != fetch_head {
bail!(
"Josh created a non-roundtrip push! Do NOT merge this into rustc!\n\
Expected {head}, got {fetch_head}."
);
}
println!(
"Confirmed that the push round-trips back to Miri properly. Please create a rustc PR:"
);
println!(
// Open PR with `subtree update` title to silence the `no-merges` triagebot check
// See https://github.com/rust-lang/rust/pull/114157
" https://github.com/rust-lang/rust/compare/{github_user}:{branch}?quick_pull=1&title=Miri+subtree+update&body=r?+@ghost"
);
drop(josh);
Ok(())
}
fn squash() -> Result<()> {
let sh = Shell::new()?;
sh.change_dir(miri_dir()?);
@ -757,8 +534,8 @@ impl Command {
if ty.is_file() {
name.ends_with(".rs")
} else {
// dir or symlink. skip `target` and `.git`.
&name != "target" && &name != ".git"
// dir or symlink. skip `target`, `.git` and `genmc-src*`
&name != "target" && &name != ".git" && !name.starts_with("genmc-src")
}
})
.filter_ok(|item| item.file_type().is_file())

View file

@ -142,25 +142,6 @@ pub enum Command {
#[arg(trailing_var_arg = true, allow_hyphen_values = true)]
flags: Vec<String>,
},
/// Pull and merge Miri changes from the rustc repo.
///
/// The fetched commit is stored in the `rust-version` file, so the next `./miri toolchain` will
/// install the rustc that just got pulled.
RustcPull {
/// The commit to fetch (default: latest rustc commit).
commit: Option<String>,
},
/// Push Miri changes back to the rustc repo.
///
/// This will pull a copy of the rustc history into the Miri repo, unless you set the RUSTC_GIT
/// env var to an existing clone of the rustc repo.
RustcPush {
/// The Github user that owns the rustc fork to which we should push.
github_user: String,
/// The branch to push to.
#[arg(default_value = "miri-sync")]
branch: String,
},
/// Squash the commits of the current feature branch into one.
Squash,
}
@ -184,8 +165,7 @@ impl Command {
flags.extend(remainder);
Ok(())
}
Self::Bench { .. } | Self::RustcPull { .. } | Self::RustcPush { .. } | Self::Squash =>
bail!("unexpected \"--\" found in arguments"),
Self::Bench { .. } | Self::Squash => bail!("unexpected \"--\" found in arguments"),
}
}
}

View file

@ -1 +1 @@
6707bf0f59485cf054ac1095725df43220e4be20
733dab558992d902d6d17576de1da768094e2cf3

View file

@ -60,7 +60,7 @@ fn init_logger_once(early_dcx: &EarlyDiagCtxt) {
#[cfg(not(feature = "tracing"))]
{
crate::fatal_error!(
"fatal error: cannot enable MIRI_TRACING since Miri was not built with the \"tracing\" feature"
"Cannot enable MIRI_TRACING since Miri was not built with the \"tracing\" feature"
);
}

View file

@ -12,6 +12,7 @@
//! ```rust
//! tracing::info_span!("my_span", tracing_separate_thread = tracing::field::Empty, /* ... */)
//! ```
//! - use i64 instead of u64 for the "id" in [ChromeLayer::get_root_id] to be compatible with Perfetto
//!
//! Depending on the tracing-chrome crate from crates.io is unfortunately not possible, since it
//! depends on `tracing_core` which conflicts with rustc_private's `tracing_core` (meaning it would
@ -285,9 +286,9 @@ struct Callsite {
}
enum Message {
Enter(f64, Callsite, Option<u64>),
Enter(f64, Callsite, Option<i64>),
Event(f64, Callsite),
Exit(f64, Callsite, Option<u64>),
Exit(f64, Callsite, Option<i64>),
NewThread(usize, String),
Flush,
Drop,
@ -519,14 +520,17 @@ where
}
}
fn get_root_id(&self, span: SpanRef<S>) -> Option<u64> {
fn get_root_id(&self, span: SpanRef<S>) -> Option<i64> {
// Returns `Option<i64>` instead of `Option<u64>` because apparently Perfetto gives an
// error if an id does not fit in a 64-bit signed integer in 2's complement. We cast the
// span id from `u64` to `i64` with wraparound, since negative values are fine.
match self.trace_style {
TraceStyle::Threaded => {
if span.fields().field("tracing_separate_thread").is_some() {
// assign an independent "id" to spans with argument "tracing_separate_thread",
                    // so they appear as a separate trace line in trace visualization tools, see
// https://docs.google.com/document/d/1CvAClvFfyA5R-PhYUmn5OOQtYMH4h6I0nSsKchNAySU/preview#heading=h.jh64i9l3vwa1
Some(span.id().into_u64())
Some(span.id().into_u64().cast_signed()) // the comment above explains the cast
} else {
None
}
@ -539,6 +543,7 @@ where
.unwrap_or(span)
.id()
.into_u64()
.cast_signed() // the comment above explains the cast
),
}
}

View file

@ -67,8 +67,6 @@ use crate::log::setup::{deinit_loggers, init_early_loggers, init_late_loggers};
struct MiriCompilerCalls {
miri_config: Option<MiriConfig>,
many_seeds: Option<ManySeedsConfig>,
/// Settings for using GenMC with Miri.
genmc_config: Option<GenmcConfig>,
}
struct ManySeedsConfig {
@ -77,12 +75,8 @@ struct ManySeedsConfig {
}
impl MiriCompilerCalls {
fn new(
miri_config: MiriConfig,
many_seeds: Option<ManySeedsConfig>,
genmc_config: Option<GenmcConfig>,
) -> Self {
Self { miri_config: Some(miri_config), many_seeds, genmc_config }
fn new(miri_config: MiriConfig, many_seeds: Option<ManySeedsConfig>) -> Self {
Self { miri_config: Some(miri_config), many_seeds }
}
}
@ -192,8 +186,8 @@ impl rustc_driver::Callbacks for MiriCompilerCalls {
optimizations is usually marginal at best.");
}
if let Some(genmc_config) = &self.genmc_config {
let _genmc_ctx = Rc::new(GenmcCtx::new(&config, genmc_config));
if let Some(_genmc_config) = &config.genmc_config {
let _genmc_ctx = Rc::new(GenmcCtx::new(&config));
todo!("GenMC mode not yet implemented");
};
@ -487,7 +481,6 @@ fn main() {
let mut many_seeds_keep_going = false;
let mut miri_config = MiriConfig::default();
miri_config.env = env_snapshot;
let mut genmc_config = None;
let mut rustc_args = vec![];
let mut after_dashdash = false;
@ -603,9 +596,9 @@ fn main() {
} else if arg == "-Zmiri-many-seeds-keep-going" {
many_seeds_keep_going = true;
} else if let Some(trimmed_arg) = arg.strip_prefix("-Zmiri-genmc") {
// FIXME(GenMC): Currently, GenMC mode is incompatible with aliasing model checking.
miri_config.borrow_tracker = None;
GenmcConfig::parse_arg(&mut genmc_config, trimmed_arg);
if let Err(msg) = GenmcConfig::parse_arg(&mut miri_config.genmc_config, trimmed_arg) {
fatal_error!("{msg}");
}
} else if let Some(param) = arg.strip_prefix("-Zmiri-env-forward=") {
miri_config.forwarded_env_vars.push(param.to_owned());
} else if let Some(param) = arg.strip_prefix("-Zmiri-env-set=") {
@ -740,13 +733,18 @@ fn main() {
many_seeds.map(|seeds| ManySeedsConfig { seeds, keep_going: many_seeds_keep_going });
// Validate settings for data race detection and GenMC mode.
assert_eq!(genmc_config.is_some(), miri_config.genmc_mode);
if genmc_config.is_some() {
if miri_config.genmc_config.is_some() {
if !miri_config.data_race_detector {
fatal_error!("Cannot disable data race detection in GenMC mode (currently)");
} else if !miri_config.weak_memory_emulation {
fatal_error!("Cannot disable weak memory emulation in GenMC mode");
}
if miri_config.borrow_tracker.is_some() {
eprintln!(
"warning: borrow tracking has been disabled, it is not (yet) supported in GenMC mode."
);
miri_config.borrow_tracker = None;
}
} else if miri_config.weak_memory_emulation && !miri_config.data_race_detector {
fatal_error!(
"Weak memory emulation cannot be enabled when the data race detector is disabled"
@ -765,8 +763,5 @@ fn main() {
);
}
}
run_compiler_and_exit(
&rustc_args,
&mut MiriCompilerCalls::new(miri_config, many_seeds, genmc_config),
)
run_compiler_and_exit(&rustc_args, &mut MiriCompilerCalls::new(miri_config, many_seeds))
}

View file

@ -1,19 +1,35 @@
use crate::MiriConfig;
use super::GenmcParams;
/// Configuration for GenMC mode.
/// The `params` field is shared with the C++ side.
/// The remaining options are kept on the Rust side.
#[derive(Debug, Default, Clone)]
pub struct GenmcConfig {
// TODO: add fields
pub(super) params: GenmcParams,
do_estimation: bool,
// FIXME(GenMC): add remaining options.
}
impl GenmcConfig {
/// Function for parsing command line options for GenMC mode.
/// All GenMC arguments start with the string "-Zmiri-genmc".
///
    /// `trimmed_arg` should be the argument to be parsed, with the prefix "-Zmiri-genmc" removed
pub fn parse_arg(genmc_config: &mut Option<GenmcConfig>, trimmed_arg: &str) {
/// All GenMC arguments start with the string "-Zmiri-genmc".
/// Passing any GenMC argument will enable GenMC mode.
///
    /// `trimmed_arg` should be the argument to be parsed, with the prefix "-Zmiri-genmc" removed.
pub fn parse_arg(
genmc_config: &mut Option<GenmcConfig>,
trimmed_arg: &str,
) -> Result<(), String> {
// FIXME(genmc): Ensure host == target somewhere.
if genmc_config.is_none() {
*genmc_config = Some(Default::default());
}
todo!("implement parsing of GenMC options")
if trimmed_arg.is_empty() {
return Ok(()); // this corresponds to "-Zmiri-genmc"
}
// FIXME(GenMC): implement remaining parameters.
todo!();
}
}

Some files were not shown because too many files have changed in this diff Show more