Merge pull request #4560 from rust-lang/rustup-2025-09-03

Automatic Rustup
This commit is contained in:
Ben Kimock 2025-09-03 05:34:15 +00:00 committed by GitHub
commit 5f3197ffb1
No known key found for this signature in database
GPG key ID: B5690EEEBB952194
271 changed files with 2766 additions and 2229 deletions

View file

@ -158,11 +158,11 @@ checksum = "b0674a1ddeecb70197781e945de4b3b8ffb61fa939a5597bcf48503737663100"
[[package]]
name = "ar_archive_writer"
version = "0.4.2"
version = "0.5.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "01667f6f40216b9a0b2945e05fed5f1ad0ab6470e69cb9378001e37b1c0668e4"
checksum = "7eb93bbb63b9c227414f6eb3a0adfddca591a8ce1e9b60661bb08969b87e340b"
dependencies = [
"object 0.36.7",
"object 0.37.3",
]
[[package]]

View file

@ -59,33 +59,6 @@ exclude = [
"obj",
]
[workspace.dependencies]
# tidy-alphabetical-start
bitflags = "2.9.3"
derive-where = "1.6.0"
either = "1.15.0"
indexmap = "2.10.0"
itertools = "0.12.1"
# FIXME: Remove this pin once this rustix issue is resolved
# https://github.com/bytecodealliance/rustix/issues/1496
libc = "=0.2.174"
measureme = "12.0.3"
memchr = "2.7.5"
odht = { version = "0.3.1", features = ["nightly"] }
polonius-engine = "0.13.0"
proc-macro2 = "1.0.101"
quote = "1.0.40"
rustc-demangle = "0.1.26"
rustc-hash = "2.1.1"
rustc-literal-escaper = "0.0.5"
rustc_apfloat = "0.2.3"
scoped-tls = "1.0.1"
serde_json = "1.0.142"
tempfile = "3.20.0"
thin-vec = "0.2.14"
tracing = "0.1.37"
# tidy-alphabetical-end
[profile.release.package.rustc_thread_pool]
# The rustc fork of Rayon has deadlock detection code which intermittently
# causes overflows in the CI (see https://github.com/rust-lang/rust/issues/90227)

View file

@ -5,7 +5,7 @@ edition = "2024"
[dependencies]
# tidy-alphabetical-start
bitflags.workspace = true
bitflags = "2.4.1"
rand = { version = "0.9.0", default-features = false, optional = true }
rand_xoshiro = { version = "0.7.0", optional = true }
rustc_data_structures = { path = "../rustc_data_structures", optional = true }
@ -15,7 +15,7 @@ rustc_index = { path = "../rustc_index", default-features = false }
rustc_macros = { path = "../rustc_macros", optional = true }
rustc_serialize = { path = "../rustc_serialize", optional = true }
rustc_span = { path = "../rustc_span", optional = true }
tracing.workspace = true
tracing = "0.1"
# tidy-alphabetical-end
[features]

View file

@ -6,6 +6,8 @@ use std::hash::{Hash, Hasher};
use rustc_data_structures::stable_hasher::{HashStable, StableHasher, StableOrd};
#[cfg(feature = "nightly")]
use rustc_macros::{Decodable, Encodable};
#[cfg(feature = "nightly")]
use rustc_span::Symbol;
use crate::AbiFromStrErr;
@ -226,6 +228,13 @@ impl StableOrd for ExternAbi {
#[cfg(feature = "nightly")]
rustc_error_messages::into_diag_arg_using_display!(ExternAbi);
#[cfg(feature = "nightly")]
pub enum CVariadicStatus {
NotSupported,
Stable,
Unstable { feature: Symbol },
}
impl ExternAbi {
/// An ABI "like Rust"
///
@ -238,23 +247,33 @@ impl ExternAbi {
matches!(self, Rust | RustCall | RustCold)
}
pub fn supports_varargs(self) -> bool {
/// Returns whether the ABI supports C variadics. This only controls whether we allow *imports*
/// of such functions via `extern` blocks; there's a separate check during AST construction
/// guarding *definitions* of variadic functions.
#[cfg(feature = "nightly")]
pub fn supports_c_variadic(self) -> CVariadicStatus {
// * C and Cdecl obviously support varargs.
// * C can be based on Aapcs, SysV64 or Win64, so they must support varargs.
// * EfiApi is based on Win64 or C, so it also supports it.
// * System automatically falls back to C when used with variadics, therefore supports it.
//
// * Stdcall does not, because it would be impossible for the callee to clean
// up the arguments. (callee doesn't know how many arguments are there)
// * Same for Fastcall, Vectorcall and Thiscall.
// * Other calling conventions are related to hardware or the compiler itself.
//
// All of the supported ones must have a test in `tests/codegen/cffi/c-variadic-ffi.rs`.
match self {
Self::C { .. }
| Self::Cdecl { .. }
| Self::Aapcs { .. }
| Self::Win64 { .. }
| Self::SysV64 { .. }
| Self::EfiApi => true,
_ => false,
| Self::EfiApi => CVariadicStatus::Stable,
Self::System { .. } => {
CVariadicStatus::Unstable { feature: rustc_span::sym::extern_system_varargs }
}
_ => CVariadicStatus::NotSupported,
}
}
}

View file

@ -63,6 +63,8 @@ mod tests;
pub use callconv::{Heterogeneous, HomogeneousAggregate, Reg, RegKind};
pub use canon_abi::{ArmCall, CanonAbi, InterruptKind, X86Call};
#[cfg(feature = "nightly")]
pub use extern_abi::CVariadicStatus;
pub use extern_abi::{ExternAbi, all_names};
#[cfg(feature = "nightly")]
pub use layout::{FIRST_VARIANT, FieldIdx, Layout, TyAbiInterface, TyAndLayout, VariantIdx};

View file

@ -5,9 +5,9 @@ edition = "2024"
[dependencies]
# tidy-alphabetical-start
bitflags.workspace = true
memchr.workspace = true
rustc-literal-escaper.workspace = true
bitflags = "2.4.1"
memchr = "2.7.4"
rustc-literal-escaper = "0.0.5"
rustc_ast_ir = { path = "../rustc_ast_ir" }
rustc_data_structures = { path = "../rustc_data_structures" }
rustc_index = { path = "../rustc_index" }
@ -15,6 +15,6 @@ rustc_macros = { path = "../rustc_macros" }
rustc_serialize = { path = "../rustc_serialize" }
rustc_span = { path = "../rustc_span" }
smallvec = { version = "1.8.1", features = ["union", "may_dangle"] }
thin-vec.workspace = true
tracing.workspace = true
thin-vec = "0.2.12"
tracing = "0.1"
# tidy-alphabetical-end

View file

@ -937,7 +937,7 @@ pub enum PatKind {
#[derive(Clone, Copy, Encodable, Decodable, Debug, PartialEq, Walkable)]
pub enum PatFieldsRest {
/// `module::StructName { field, ..}`
Rest,
Rest(Span),
/// `module::StructName { field, syntax error }`
Recovered(ErrorGuaranteed),
/// `module::StructName { field }`
@ -4051,8 +4051,8 @@ mod size_asserts {
static_assert_size!(Local, 96);
static_assert_size!(MetaItemLit, 40);
static_assert_size!(Param, 40);
static_assert_size!(Pat, 72);
static_assert_size!(PatKind, 48);
static_assert_size!(Pat, 80);
static_assert_size!(PatKind, 56);
static_assert_size!(Path, 24);
static_assert_size!(PathSegment, 24);
static_assert_size!(Stmt, 32);

View file

@ -24,6 +24,6 @@ rustc_session = { path = "../rustc_session" }
rustc_span = { path = "../rustc_span" }
rustc_target = { path = "../rustc_target" }
smallvec = { version = "1.8.1", features = ["union", "may_dangle"] }
thin-vec.workspace = true
tracing.workspace = true
thin-vec = "0.2.12"
tracing = "0.1"
# tidy-alphabetical-end

View file

@ -1434,10 +1434,10 @@ impl<'hir> LoweringContext<'_, 'hir> {
self.dcx().emit_err(FunctionalRecordUpdateDestructuringAssignment {
span: e.span,
});
true
Some(self.lower_span(e.span))
}
StructRest::Rest(_) => true,
StructRest::None => false,
StructRest::Rest(span) => Some(self.lower_span(*span)),
StructRest::None => None,
};
let struct_pat = hir::PatKind::Struct(qpath, field_pats, fields_omitted);
return self.pat_without_dbm(lhs.span, struct_pat);

View file

@ -2028,7 +2028,7 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> {
(
hir::ParamName::Plain(self.lower_ident(param.ident)),
hir::GenericParamKind::Const { ty, default, synthetic: false },
hir::GenericParamKind::Const { ty, default },
)
}
}
@ -2508,7 +2508,7 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> {
fields: &'hir [hir::PatField<'hir>],
) -> &'hir hir::Pat<'hir> {
let qpath = hir::QPath::LangItem(lang_item, self.lower_span(span));
self.pat(span, hir::PatKind::Struct(qpath, fields, false))
self.pat(span, hir::PatKind::Struct(qpath, fields, None))
}
fn pat_ident(&mut self, span: Span, ident: Ident) -> (&'hir hir::Pat<'hir>, HirId) {

View file

@ -106,10 +106,11 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> {
break hir::PatKind::Struct(
qpath,
fs,
matches!(
etc,
ast::PatFieldsRest::Rest | ast::PatFieldsRest::Recovered(_)
),
match etc {
ast::PatFieldsRest::Rest(sp) => Some(self.lower_span(*sp)),
ast::PatFieldsRest::Recovered(_) => Some(Span::default()),
_ => None,
},
);
}
PatKind::Tuple(pats) => {

View file

@ -5,7 +5,7 @@ edition = "2024"
[dependencies]
# tidy-alphabetical-start
itertools.workspace = true
itertools = "0.12"
rustc_abi = { path = "../rustc_abi" }
rustc_ast = { path = "../rustc_ast" }
rustc_ast_pretty = { path = "../rustc_ast_pretty" }
@ -18,5 +18,5 @@ rustc_macros = { path = "../rustc_macros" }
rustc_session = { path = "../rustc_session" }
rustc_span = { path = "../rustc_span" }
rustc_target = { path = "../rustc_target" }
thin-vec.workspace = true
thin-vec = "0.2.12"
# tidy-alphabetical-end

View file

@ -57,7 +57,7 @@ ast_passes_auto_super_lifetime = auto traits cannot have super traits or lifetim
.label = {ast_passes_auto_super_lifetime}
.suggestion = remove the super traits or lifetime bounds
ast_passes_bad_c_variadic = only foreign, `unsafe extern "C"`, or `unsafe extern "C-unwind"` functions may have a C-variadic arg
ast_passes_bad_c_variadic = defining functions with C-variadic arguments is only allowed for free functions with the "C" or "C-unwind" calling convention
ast_passes_body_in_extern = incorrect `{$kind}` inside `extern` block
.cannot_have = cannot have a body

View file

@ -5,7 +5,7 @@ edition = "2024"
[dependencies]
# tidy-alphabetical-start
itertools.workspace = true
itertools = "0.12"
rustc_ast = { path = "../rustc_ast" }
rustc_lexer = { path = "../rustc_lexer" }
rustc_span = { path = "../rustc_span" }
@ -13,5 +13,5 @@ rustc_span = { path = "../rustc_span" }
[dev-dependencies]
# tidy-alphabetical-start
thin-vec.workspace = true
thin-vec = "0.2.12"
# tidy-alphabetical-end

View file

@ -1769,7 +1769,7 @@ impl<'a> State<'a> {
},
|f| f.pat.span,
);
if let ast::PatFieldsRest::Rest | ast::PatFieldsRest::Recovered(_) = etc {
if let ast::PatFieldsRest::Rest(_) | ast::PatFieldsRest::Recovered(_) = etc {
if !fields.is_empty() {
self.word_space(",");
}

View file

@ -18,5 +18,5 @@ rustc_parse = { path = "../rustc_parse" }
rustc_session = { path = "../rustc_session" }
rustc_span = { path = "../rustc_span" }
rustc_target = { path = "../rustc_target" }
thin-vec.workspace = true
thin-vec = "0.2.12"
# tidy-alphabetical-end

View file

@ -5,9 +5,9 @@ edition = "2024"
[dependencies]
# tidy-alphabetical-start
either.workspace = true
itertools.workspace = true
polonius-engine.workspace = true
either = "1.5.0"
itertools = "0.12"
polonius-engine = "0.13.0"
rustc_abi = { path = "../rustc_abi" }
rustc_data_structures = { path = "../rustc_data_structures" }
rustc_errors = { path = "../rustc_errors" }
@ -25,5 +25,5 @@ rustc_span = { path = "../rustc_span" }
rustc_trait_selection = { path = "../rustc_trait_selection" }
rustc_traits = { path = "../rustc_traits" }
smallvec = { version = "1.8.1", features = ["union", "may_dangle"] }
tracing.workspace = true
tracing = "0.1"
# tidy-alphabetical-end

View file

@ -8,8 +8,8 @@ use std::str::FromStr;
use polonius_engine::{Algorithm, AllFacts, Output};
use rustc_data_structures::frozen::Frozen;
use rustc_index::IndexSlice;
use rustc_middle::mir::pretty::{PrettyPrintMirOptions, dump_mir_with_options};
use rustc_middle::mir::{Body, PassWhere, Promoted, create_dump_file, dump_enabled, dump_mir};
use rustc_middle::mir::pretty::PrettyPrintMirOptions;
use rustc_middle::mir::{Body, MirDumper, PassWhere, Promoted};
use rustc_middle::ty::print::with_no_trimmed_paths;
use rustc_middle::ty::{self, TyCtxt};
use rustc_mir_dataflow::move_paths::MoveData;
@ -68,7 +68,9 @@ pub(crate) fn replace_regions_in_mir<'tcx>(
// Replace all remaining regions with fresh inference variables.
renumber::renumber_mir(infcx, body, promoted);
dump_mir(infcx.tcx, false, "renumber", &0, body, |_, _| Ok(()));
if let Some(dumper) = MirDumper::new(infcx.tcx, "renumber", body) {
dumper.dump_mir(body);
}
universal_regions
}
@ -207,9 +209,7 @@ pub(super) fn dump_nll_mir<'tcx>(
borrow_set: &BorrowSet<'tcx>,
) {
let tcx = infcx.tcx;
if !dump_enabled(tcx, "nll", body.source.def_id()) {
return;
}
let Some(dumper) = MirDumper::new(tcx, "nll", body) else { return };
// We want the NLL extra comments printed by default in NLL MIR dumps (they were removed in
// #112346). Specifying `-Z mir-include-spans` on the CLI still has priority: for example,
@ -220,27 +220,24 @@ pub(super) fn dump_nll_mir<'tcx>(
MirIncludeSpans::On | MirIncludeSpans::Nll
),
};
dump_mir_with_options(
tcx,
false,
"nll",
&0,
body,
|pass_where, out| {
emit_nll_mir(tcx, regioncx, closure_region_requirements, borrow_set, pass_where, out)
},
options,
);
let extra_data = &|pass_where, out: &mut dyn std::io::Write| {
emit_nll_mir(tcx, regioncx, closure_region_requirements, borrow_set, pass_where, out)
};
let dumper = dumper.set_extra_data(extra_data).set_options(options);
dumper.dump_mir(body);
// Also dump the region constraint graph as a graphviz file.
let _: io::Result<()> = try {
let mut file = create_dump_file(tcx, "regioncx.all.dot", false, "nll", &0, body)?;
let mut file = dumper.create_dump_file("regioncx.all.dot", body)?;
regioncx.dump_graphviz_raw_constraints(tcx, &mut file)?;
};
// Also dump the region constraint SCC graph as a graphviz file.
let _: io::Result<()> = try {
let mut file = create_dump_file(tcx, "regioncx.scc.dot", false, "nll", &0, body)?;
let mut file = dumper.create_dump_file("regioncx.scc.dot", body)?;
regioncx.dump_graphviz_scc_constraints(tcx, &mut file)?;
};
}

View file

@ -2,9 +2,7 @@ use std::io;
use rustc_data_structures::fx::{FxHashSet, FxIndexMap, FxIndexSet};
use rustc_index::IndexVec;
use rustc_middle::mir::pretty::{
PassWhere, PrettyPrintMirOptions, create_dump_file, dump_enabled, dump_mir_to_writer,
};
use rustc_middle::mir::pretty::{MirDumper, PassWhere, PrettyPrintMirOptions};
use rustc_middle::mir::{Body, Location};
use rustc_middle::ty::{RegionVid, TyCtxt};
use rustc_mir_dataflow::points::PointIndex;
@ -33,22 +31,41 @@ pub(crate) fn dump_polonius_mir<'tcx>(
return;
}
if !dump_enabled(tcx, "polonius", body.source.def_id()) {
return;
}
let Some(dumper) = MirDumper::new(tcx, "polonius", body) else { return };
let polonius_diagnostics =
polonius_diagnostics.expect("missing diagnostics context with `-Zpolonius=next`");
let _: io::Result<()> = try {
let mut file = create_dump_file(tcx, "html", false, "polonius", &0, body)?;
emit_polonius_dump(
let extra_data = &|pass_where, out: &mut dyn io::Write| {
emit_polonius_mir(
tcx,
regioncx,
closure_region_requirements,
borrow_set,
&polonius_diagnostics.localized_outlives_constraints,
pass_where,
out,
)
};
// We want the NLL extra comments printed by default in NLL MIR dumps. Specifying `-Z
// mir-include-spans` on the CLI still has priority.
let options = PrettyPrintMirOptions {
include_extra_comments: matches!(
tcx.sess.opts.unstable_opts.mir_include_spans,
MirIncludeSpans::On | MirIncludeSpans::Nll
),
};
let dumper = dumper.set_extra_data(extra_data).set_options(options);
let _: io::Result<()> = try {
let mut file = dumper.create_dump_file("html", body)?;
emit_polonius_dump(
&dumper,
body,
regioncx,
borrow_set,
&polonius_diagnostics.localized_outlives_constraints,
closure_region_requirements,
&mut file,
)?;
};
@ -61,12 +78,11 @@ pub(crate) fn dump_polonius_mir<'tcx>(
/// - a mermaid graph of the NLL regions and the constraints between them
/// - a mermaid graph of the NLL SCCs and the constraints between them
fn emit_polonius_dump<'tcx>(
tcx: TyCtxt<'tcx>,
dumper: &MirDumper<'_, '_, 'tcx>,
body: &Body<'tcx>,
regioncx: &RegionInferenceContext<'tcx>,
borrow_set: &BorrowSet<'tcx>,
localized_outlives_constraints: &LocalizedOutlivesConstraintSet,
closure_region_requirements: &Option<ClosureRegionRequirements<'tcx>>,
out: &mut dyn io::Write,
) -> io::Result<()> {
// Prepare the HTML dump file prologue.
@ -79,15 +95,7 @@ fn emit_polonius_dump<'tcx>(
writeln!(out, "<div>")?;
writeln!(out, "Raw MIR dump")?;
writeln!(out, "<pre><code>")?;
emit_html_mir(
tcx,
body,
regioncx,
borrow_set,
&localized_outlives_constraints,
closure_region_requirements,
out,
)?;
emit_html_mir(dumper, body, out)?;
writeln!(out, "</code></pre>")?;
writeln!(out, "</div>")?;
@ -116,7 +124,7 @@ fn emit_polonius_dump<'tcx>(
writeln!(out, "<div>")?;
writeln!(out, "NLL regions")?;
writeln!(out, "<pre class='mermaid'>")?;
emit_mermaid_nll_regions(tcx, regioncx, out)?;
emit_mermaid_nll_regions(dumper.tcx(), regioncx, out)?;
writeln!(out, "</pre>")?;
writeln!(out, "</div>")?;
@ -124,7 +132,7 @@ fn emit_polonius_dump<'tcx>(
writeln!(out, "<div>")?;
writeln!(out, "NLL SCCs")?;
writeln!(out, "<pre class='mermaid'>")?;
emit_mermaid_nll_sccs(tcx, regioncx, out)?;
emit_mermaid_nll_sccs(dumper.tcx(), regioncx, out)?;
writeln!(out, "</pre>")?;
writeln!(out, "</div>")?;
@ -149,45 +157,14 @@ fn emit_polonius_dump<'tcx>(
/// Emits the polonius MIR, as escaped HTML.
fn emit_html_mir<'tcx>(
tcx: TyCtxt<'tcx>,
dumper: &MirDumper<'_, '_, 'tcx>,
body: &Body<'tcx>,
regioncx: &RegionInferenceContext<'tcx>,
borrow_set: &BorrowSet<'tcx>,
localized_outlives_constraints: &LocalizedOutlivesConstraintSet,
closure_region_requirements: &Option<ClosureRegionRequirements<'tcx>>,
out: &mut dyn io::Write,
) -> io::Result<()> {
// Buffer the regular MIR dump to be able to escape it.
let mut buffer = Vec::new();
// We want the NLL extra comments printed by default in NLL MIR dumps. Specifying `-Z
// mir-include-spans` on the CLI still has priority.
let options = PrettyPrintMirOptions {
include_extra_comments: matches!(
tcx.sess.opts.unstable_opts.mir_include_spans,
MirIncludeSpans::On | MirIncludeSpans::Nll
),
};
dump_mir_to_writer(
tcx,
"polonius",
&0,
body,
&mut buffer,
|pass_where, out| {
emit_polonius_mir(
tcx,
regioncx,
closure_region_requirements,
borrow_set,
localized_outlives_constraints,
pass_where,
out,
)
},
options,
)?;
dumper.dump_mir_to_writer(body, &mut buffer)?;
// Escape the handful of characters that need it. We don't need to be particularly efficient:
// we're actually writing into a buffered writer already. Note that MIR dumps are valid UTF-8.

View file

@ -30,6 +30,6 @@ rustc_session = { path = "../rustc_session" }
rustc_span = { path = "../rustc_span" }
rustc_target = { path = "../rustc_target" }
smallvec = { version = "1.8.1", features = ["union", "may_dangle"] }
thin-vec.workspace = true
tracing.workspace = true
thin-vec = "0.2.12"
tracing = "0.1"
# tidy-alphabetical-end

View file

@ -44,9 +44,8 @@ pub(crate) fn codegen_fn<'tcx>(
let _mir_guard = crate::PrintOnPanic(|| {
let mut buf = Vec::new();
with_no_trimmed_paths!({
use rustc_middle::mir::pretty;
let options = pretty::PrettyPrintMirOptions::from_cli(tcx);
pretty::write_mir_fn(tcx, mir, &mut |_, _| Ok(()), &mut buf, options).unwrap();
let writer = pretty::MirWriter::new(tcx);
writer.write_mir_fn(mir, &mut buf).unwrap();
});
String::from_utf8_lossy(&buf).into_owned()
});

View file

@ -8,15 +8,15 @@ test = false
[dependencies]
# tidy-alphabetical-start
bitflags.workspace = true
bitflags = "2.4.1"
# To avoid duplicate dependencies, this should match the version of gimli used
# by `rustc_codegen_ssa` via its `thorin-dwp` dependency.
gimli = "0.31"
itertools.workspace = true
libc.workspace = true
measureme.workspace = true
itertools = "0.12"
libc = "0.2"
measureme = "12.0.1"
object = { version = "0.37.0", default-features = false, features = ["std", "read"] }
rustc-demangle.workspace = true
rustc-demangle = "0.1.21"
rustc_abi = { path = "../rustc_abi" }
rustc_ast = { path = "../rustc_ast" }
rustc_codegen_ssa = { path = "../rustc_codegen_ssa" }
@ -38,9 +38,9 @@ rustc_span = { path = "../rustc_span" }
rustc_symbol_mangling = { path = "../rustc_symbol_mangling" }
rustc_target = { path = "../rustc_target" }
serde = { version = "1", features = ["derive"] }
serde_json.workspace = true
serde_json = "1"
smallvec = { version = "1.8.1", features = ["union", "may_dangle"] }
tracing.workspace = true
tracing = "0.1"
# tidy-alphabetical-end
[features]

View file

@ -26,6 +26,7 @@ static LLVM_OBJECT_READER: ObjectReader = ObjectReader {
get_symbols: get_llvm_object_symbols,
is_64_bit_object_file: llvm_is_64_bit_object_file,
is_ec_object_file: llvm_is_ec_object_file,
is_any_arm64_coff: llvm_is_any_arm64_coff,
get_xcoff_member_alignment: DEFAULT_OBJECT_READER.get_xcoff_member_alignment,
};
@ -95,3 +96,7 @@ fn llvm_is_64_bit_object_file(buf: &[u8]) -> bool {
fn llvm_is_ec_object_file(buf: &[u8]) -> bool {
unsafe { llvm::LLVMRustIsECObject(buf.as_ptr(), buf.len()) }
}
fn llvm_is_any_arm64_coff(buf: &[u8]) -> bool {
unsafe { llvm::LLVMRustIsAnyArm64Coff(buf.as_ptr(), buf.len()) }
}

View file

@ -2686,6 +2686,8 @@ unsafe extern "C" {
pub(crate) fn LLVMRustIsECObject(buf_ptr: *const u8, buf_len: usize) -> bool;
pub(crate) fn LLVMRustIsAnyArm64Coff(buf_ptr: *const u8, buf_len: usize) -> bool;
pub(crate) fn LLVMRustSetNoSanitizeAddress(Global: &Value);
pub(crate) fn LLVMRustSetNoSanitizeHWAddress(Global: &Value);
}

View file

@ -28,9 +28,12 @@ fn round_pointer_up_to_alignment<'ll>(
align: Align,
ptr_ty: &'ll Type,
) -> &'ll Value {
let mut ptr_as_int = bx.ptrtoint(addr, bx.cx().type_isize());
ptr_as_int = round_up_to_alignment(bx, ptr_as_int, align);
bx.inttoptr(ptr_as_int, ptr_ty)
let ptr = bx.inbounds_ptradd(addr, bx.const_i32(align.bytes() as i32 - 1));
bx.call_intrinsic(
"llvm.ptrmask",
&[ptr_ty, bx.type_i32()],
&[ptr, bx.const_int(bx.isize_ty, -(align.bytes() as isize) as i64)],
)
}
fn emit_direct_ptr_va_arg<'ll, 'tcx>(

View file

@ -5,13 +5,13 @@ edition = "2024"
[dependencies]
# tidy-alphabetical-start
ar_archive_writer = "0.4.2"
bitflags.workspace = true
ar_archive_writer = "0.5"
bitflags = "2.4.1"
bstr = "1.11.3"
# `cc` updates often break things, so we pin it here. Cargo enforces "max 1 semver-compat version
# per crate", so if you change this, you need to also change it in `rustc_llvm`.
cc = "=1.2.16"
itertools.workspace = true
itertools = "0.12"
pathdiff = "0.2.0"
regex = "1.4"
rustc_abi = { path = "../rustc_abi" }
@ -37,18 +37,18 @@ rustc_span = { path = "../rustc_span" }
rustc_symbol_mangling = { path = "../rustc_symbol_mangling" }
rustc_target = { path = "../rustc_target" }
rustc_trait_selection = { path = "../rustc_trait_selection" }
serde_json.workspace = true
serde_json = "1.0.59"
smallvec = { version = "1.8.1", features = ["union", "may_dangle"] }
tempfile.workspace = true
thin-vec.workspace = true
tempfile = "3.2"
thin-vec = "0.2.12"
thorin-dwp = "0.9"
tracing.workspace = true
tracing = "0.1"
wasm-encoder = "0.219"
# tidy-alphabetical-end
[target.'cfg(unix)'.dependencies]
# tidy-alphabetical-start
libc.workspace = true
libc = "0.2.50"
# tidy-alphabetical-end
[dependencies.object]

View file

@ -40,16 +40,18 @@ pub struct ImportLibraryItem {
pub is_data: bool,
}
impl From<ImportLibraryItem> for COFFShortExport {
fn from(item: ImportLibraryItem) -> Self {
impl ImportLibraryItem {
fn into_coff_short_export(self, sess: &Session) -> COFFShortExport {
let import_name = (sess.target.arch == "arm64ec").then(|| self.name.clone());
COFFShortExport {
name: item.name,
name: self.name,
ext_name: None,
symbol_name: item.symbol_name,
alias_target: None,
ordinal: item.ordinal.unwrap_or(0),
noname: item.ordinal.is_some(),
data: item.is_data,
symbol_name: self.symbol_name,
import_name,
export_as: None,
ordinal: self.ordinal.unwrap_or(0),
noname: self.ordinal.is_some(),
data: self.is_data,
private: false,
constant: false,
}
@ -113,7 +115,8 @@ pub trait ArchiveBuilderBuilder {
.emit_fatal(ErrorCreatingImportLibrary { lib_name, error: error.to_string() }),
};
let exports = items.into_iter().map(Into::into).collect::<Vec<_>>();
let exports =
items.into_iter().map(|item| item.into_coff_short_export(sess)).collect::<Vec<_>>();
let machine = match &*sess.target.arch {
"x86_64" => MachineTypes::AMD64,
"x86" => MachineTypes::I386,
@ -134,6 +137,7 @@ pub trait ArchiveBuilderBuilder {
// when linking a rust staticlib using `/WHOLEARCHIVE`.
// See #129020
true,
&[],
) {
sess.dcx()
.emit_fatal(ErrorCreatingImportLibrary { lib_name, error: error.to_string() });
@ -527,7 +531,7 @@ impl<'a> ArArchiveBuilder<'a> {
&entries,
archive_kind,
false,
/* is_ec = */ self.sess.target.arch == "arm64ec",
/* is_ec = */ Some(self.sess.target.arch == "arm64ec"),
)?;
archive_tmpfile.flush()?;
drop(archive_tmpfile);

View file

@ -519,6 +519,9 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
match self.locals[mir::Local::from_usize(1 + va_list_arg_idx)] {
LocalRef::Place(va_list) => {
bx.va_end(va_list.val.llval);
// Explicitly end the lifetime of the `va_list`, this matters for LLVM.
bx.lifetime_end(va_list.val.llval, va_list.layout.size);
}
_ => bug!("C-variadic function must have a `VaList` place"),
}

View file

@ -5,9 +5,9 @@ edition = "2024"
[dependencies]
# tidy-alphabetical-start
either.workspace = true
either = "1"
rustc_abi = { path = "../rustc_abi" }
rustc_apfloat.workspace = true
rustc_apfloat = "0.2.0"
rustc_ast = { path = "../rustc_ast" }
rustc_data_structures = { path = "../rustc_data_structures" }
rustc_errors = { path = "../rustc_errors" }
@ -22,5 +22,5 @@ rustc_session = { path = "../rustc_session" }
rustc_span = { path = "../rustc_span" }
rustc_target = { path = "../rustc_target" }
rustc_trait_selection = { path = "../rustc_trait_selection" }
tracing.workspace = true
tracing = "0.1"
# tidy-alphabetical-end

View file

@ -6,15 +6,15 @@ edition = "2024"
[dependencies]
# tidy-alphabetical-start
arrayvec = { version = "0.7", default-features = false }
bitflags.workspace = true
either.workspace = true
bitflags = "2.4.1"
either = "1.0"
elsa = "1.11.0"
ena = "0.14.3"
indexmap.workspace = true
indexmap = "2.4.0"
jobserver_crate = { version = "0.1.28", package = "jobserver" }
measureme.workspace = true
measureme = "12.0.1"
parking_lot = "0.12"
rustc-hash.workspace = true
rustc-hash = "2.0.0"
rustc-stable-hash = { version = "0.1.0", features = ["nightly"] }
rustc_arena = { path = "../rustc_arena" }
rustc_graphviz = { path = "../rustc_graphviz" }
@ -25,9 +25,9 @@ rustc_serialize = { path = "../rustc_serialize" }
rustc_thread_pool = { path = "../rustc_thread_pool" }
smallvec = { version = "1.8.1", features = ["const_generics", "union", "may_dangle"] }
stacker = "0.1.17"
tempfile.workspace = true
thin-vec.workspace = true
tracing.workspace = true
tempfile = "3.2"
thin-vec = "0.2.12"
tracing = "0.1"
# tidy-alphabetical-end
[dependencies.hashbrown]
@ -47,7 +47,7 @@ features = [
[target.'cfg(unix)'.dependencies]
# tidy-alphabetical-start
libc.workspace = true
libc = "0.2"
# tidy-alphabetical-end
[target.'cfg(not(target_arch = "wasm32"))'.dependencies]

View file

@ -49,14 +49,14 @@ rustc_span = { path = "../rustc_span" }
rustc_target = { path = "../rustc_target" }
rustc_trait_selection = { path = "../rustc_trait_selection" }
rustc_ty_utils = { path = "../rustc_ty_utils" }
serde_json.workspace = true
serde_json = "1.0.59"
shlex = "1.0"
tracing.workspace = true
tracing = { version = "0.1.35" }
# tidy-alphabetical-end
[target.'cfg(all(unix, any(target_env = "gnu", target_os = "macos")))'.dependencies]
# tidy-alphabetical-start
libc.workspace = true
libc = "0.2"
# tidy-alphabetical-end
[target.'cfg(windows)'.dependencies.windows]

View file

@ -17,6 +17,6 @@ rustc_data_structures = { path = "../rustc_data_structures" }
rustc_macros = { path = "../rustc_macros" }
rustc_serialize = { path = "../rustc_serialize" }
rustc_span = { path = "../rustc_span" }
tracing.workspace = true
tracing = "0.1"
unic-langid = { version = "0.9.0", features = ["macros"] }
# tidy-alphabetical-end

View file

@ -21,10 +21,10 @@ rustc_macros = { path = "../rustc_macros" }
rustc_serialize = { path = "../rustc_serialize" }
rustc_span = { path = "../rustc_span" }
serde = { version = "1.0.125", features = ["derive"] }
serde_json.workspace = true
serde_json = "1.0.59"
termcolor = "1.2.0"
termize = "0.2"
tracing.workspace = true
tracing = "0.1"
# tidy-alphabetical-end
[target.'cfg(windows)'.dependencies.windows]

View file

@ -29,6 +29,6 @@ rustc_serialize = { path = "../rustc_serialize" }
rustc_session = { path = "../rustc_session" }
rustc_span = { path = "../rustc_span" }
smallvec = { version = "1.8.1", features = ["union", "may_dangle"] }
thin-vec.workspace = true
tracing.workspace = true
thin-vec = "0.2.12"
tracing = "0.1"
# tidy-alphabetical-end

View file

@ -556,7 +556,12 @@ fn metavar_expr_concat<'tx>(
};
match &named_matches[*curr_idx] {
// FIXME(c410-f3r) Nested repetitions are unimplemented
MatchedSeq(_) => unimplemented!(),
MatchedSeq(_) => {
return Err(dcx.struct_span_err(
ident.span,
"nested repetitions with `${concat(...)}` metavariable expressions are not yet supported",
));
}
MatchedSingle(pnr) => extract_symbol_from_pnr(dcx, pnr, ident.span)?,
}
}

View file

@ -9,5 +9,5 @@ rustc_data_structures = { path = "../rustc_data_structures" }
rustc_hir = { path = "../rustc_hir" }
rustc_span = { path = "../rustc_span" }
serde = { version = "1.0.125", features = ["derive"] }
serde_json.workspace = true
serde_json = "1.0.59"
# tidy-alphabetical-end

View file

@ -203,6 +203,9 @@ declare_features! (
(accepted, expr_fragment_specifier_2024, "1.83.0", Some(123742)),
/// Allows arbitrary expressions in key-value attributes at parse time.
(accepted, extended_key_value_attributes, "1.54.0", Some(78835)),
/// Allows using `aapcs`, `efiapi`, `sysv64` and `win64` as calling conventions
/// for functions with varargs.
(accepted, extended_varargs_abi_support, "CURRENT_RUSTC_VERSION", Some(100189)),
/// Allows resolving absolute paths as paths from other crates.
(accepted, extern_absolute_paths, "1.30.0", Some(44660)),
/// Allows `extern crate foo as bar;`. This puts `bar` into extern prelude.

View file

@ -492,9 +492,6 @@ declare_features! (
(incomplete, explicit_tail_calls, "1.72.0", Some(112788)),
/// Allows using `#[export_stable]` which indicates that an item is exportable.
(incomplete, export_stable, "1.88.0", Some(139939)),
/// Allows using `aapcs`, `efiapi`, `sysv64` and `win64` as calling conventions
/// for functions with varargs.
(unstable, extended_varargs_abi_support, "1.65.0", Some(100189)),
/// Allows using `system` as a calling convention with varargs.
(unstable, extern_system_varargs, "1.86.0", Some(136946)),
/// Allows defining `extern type`s.

View file

@ -11,8 +11,8 @@ proc-macro = true
annotate-snippets = "0.11"
fluent-bundle = "0.16"
fluent-syntax = "0.12"
proc-macro2.workspace = true
quote.workspace = true
proc-macro2 = "1"
quote = "1"
syn = { version = "2", features = ["full"] }
unic-langid = { version = "0.9.0", features = ["macros"] }
# tidy-alphabetical-end

View file

@ -5,5 +5,5 @@ edition = "2024"
[dependencies]
# tidy-alphabetical-start
tempfile.workspace = true
tempfile = "3.7.1"
# tidy-alphabetical-end

View file

@ -5,8 +5,8 @@ edition = "2024"
[dependencies]
# tidy-alphabetical-start
bitflags.workspace = true
odht.workspace = true
bitflags = "2.9.1"
odht = { version = "0.3.1", features = ["nightly"] }
rustc_abi = { path = "../rustc_abi" }
rustc_arena = { path = "../rustc_arena" }
rustc_ast = { path = "../rustc_ast" }
@ -21,6 +21,6 @@ rustc_serialize = { path = "../rustc_serialize" }
rustc_span = { path = "../rustc_span" }
rustc_target = { path = "../rustc_target" }
smallvec = { version = "1.8.1", features = ["union", "may_dangle"] }
thin-vec.workspace = true
tracing.workspace = true
thin-vec = "0.2.12"
tracing = "0.1"
# tidy-alphabetical-end

View file

@ -784,7 +784,6 @@ pub enum GenericParamKind<'hir> {
ty: &'hir Ty<'hir>,
/// Optional default value for the const generic param
default: Option<&'hir ConstArg<'hir>>,
synthetic: bool,
},
}
@ -1884,8 +1883,8 @@ pub enum PatKind<'hir> {
Binding(BindingMode, HirId, Ident, Option<&'hir Pat<'hir>>),
/// A struct or struct variant pattern (e.g., `Variant {x, y, ..}`).
/// The `bool` is `true` in the presence of a `..`.
Struct(QPath<'hir>, &'hir [PatField<'hir>], bool),
/// The `Option` contains the span of a possible `..`.
Struct(QPath<'hir>, &'hir [PatField<'hir>], Option<Span>),
/// A tuple struct/variant pattern `Variant(x, y, .., z)`.
/// If the `..` pattern fragment is present, then `DotDotPos` denotes its position.
@ -4979,8 +4978,8 @@ mod size_asserts {
static_assert_size!(ItemKind<'_>, 64);
static_assert_size!(LetStmt<'_>, 72);
static_assert_size!(Param<'_>, 32);
static_assert_size!(Pat<'_>, 72);
static_assert_size!(PatKind<'_>, 48);
static_assert_size!(Pat<'_>, 80);
static_assert_size!(PatKind<'_>, 56);
static_assert_size!(Path<'_>, 40);
static_assert_size!(PathSegment<'_>, 48);
static_assert_size!(QPath<'_>, 24);

View file

@ -1085,7 +1085,7 @@ pub fn walk_generic_param<'v, V: Visitor<'v>>(
GenericParamKind::Type { ref default, .. } => {
visit_opt!(visitor, visit_ty_unambig, default)
}
GenericParamKind::Const { ref ty, ref default, synthetic: _ } => {
GenericParamKind::Const { ref ty, ref default } => {
try_visit!(visitor.visit_ty_unambig(ty));
if let Some(default) = default {
try_visit!(visitor.visit_const_param_default(*hir_id, default));

View file

@ -9,7 +9,7 @@ doctest = false
[dependencies]
# tidy-alphabetical-start
itertools.workspace = true
itertools = "0.12"
rustc_abi = { path = "../rustc_abi" }
rustc_arena = { path = "../rustc_arena" }
rustc_ast = { path = "../rustc_ast" }
@ -29,5 +29,5 @@ rustc_span = { path = "../rustc_span" }
rustc_target = { path = "../rustc_target" }
rustc_trait_selection = { path = "../rustc_trait_selection" }
smallvec = { version = "1.8.1", features = ["union", "may_dangle"] }
tracing.workspace = true
tracing = "0.1"
# tidy-alphabetical-end

View file

@ -978,7 +978,7 @@ pub(crate) fn check_item_type(tcx: TyCtxt<'_>, def_id: LocalDefId) -> Result<(),
tcx.ensure_ok().fn_sig(def_id);
let item = tcx.hir_foreign_item(item);
let hir::ForeignItemKind::Fn(sig, ..) = item.kind else { bug!() };
require_c_abi_if_c_variadic(tcx, sig.decl, abi, item.span);
check_c_variadic_abi(tcx, sig.decl, abi, item.span);
}
DefKind::Static { .. } => {
tcx.ensure_ok().codegen_fn_attrs(def_id);

View file

@ -98,7 +98,7 @@ use tracing::debug;
use self::compare_impl_item::collect_return_position_impl_trait_in_trait_tys;
use self::region::region_scope_tree;
use crate::{errors, require_c_abi_if_c_variadic};
use crate::{check_c_variadic_abi, errors};
/// Adds query implementations to the [Providers] vtable, see [`rustc_middle::query`]
pub(super) fn provide(providers: &mut Providers) {

View file

@ -305,7 +305,7 @@ pub(super) fn generics_of(tcx: TyCtxt<'_>, def_id: LocalDefId) -> ty::Generics {
ty::GenericParamDefKind::Type { has_default: default.is_some(), synthetic }
}
GenericParamKind::Const { ty: _, default, synthetic } => {
GenericParamKind::Const { ty: _, default } => {
if default.is_some() {
match param_default_policy.expect("no policy for generic param default") {
ParamDefaultPolicy::Allowed => {}
@ -316,7 +316,7 @@ pub(super) fn generics_of(tcx: TyCtxt<'_>, def_id: LocalDefId) -> ty::Generics {
}
}
ty::GenericParamDefKind::Const { has_default: default.is_some(), synthetic }
ty::GenericParamDefKind::Const { has_default: default.is_some() }
}
};
Some(ty::GenericParamDef {
@ -523,7 +523,7 @@ impl<'v> Visitor<'v> for AnonConstInParamTyDetector {
type Result = ControlFlow<()>;
fn visit_generic_param(&mut self, p: &'v hir::GenericParam<'v>) -> Self::Result {
if let GenericParamKind::Const { ty, default: _, synthetic: _ } = p.kind {
if let GenericParamKind::Const { ty, default: _ } = p.kind {
let prev = self.in_param_ty;
self.in_param_ty = true;
let res = self.visit_ty_unambig(ty);

View file

@ -419,14 +419,7 @@ pub(crate) fn check_generic_arg_count(
.filter(|param| matches!(param.kind, ty::GenericParamDefKind::Type { synthetic: true, .. }))
.count();
let named_type_param_count = param_counts.types - has_self as usize - synth_type_param_count;
let synth_const_param_count = gen_params
.own_params
.iter()
.filter(|param| {
matches!(param.kind, ty::GenericParamDefKind::Const { synthetic: true, .. })
})
.count();
let named_const_param_count = param_counts.consts - synth_const_param_count;
let named_const_param_count = param_counts.consts;
let infer_lifetimes =
(gen_pos != GenericArgPosition::Type || seg.infer_args) && !gen_args.has_lifetime_params();

View file

@ -52,11 +52,11 @@ use rustc_trait_selection::traits::{self, FulfillmentError};
use tracing::{debug, instrument};
use crate::check::check_abi;
use crate::check_c_variadic_abi;
use crate::errors::{AmbiguousLifetimeBound, BadReturnTypeNotation};
use crate::hir_ty_lowering::errors::{GenericsArgsErrExtend, prohibit_assoc_item_constraint};
use crate::hir_ty_lowering::generics::{check_generic_arg_count, lower_generic_args};
use crate::middle::resolve_bound_vars as rbv;
use crate::require_c_abi_if_c_variadic;
/// A path segment that is semantically allowed to have generic arguments.
#[derive(Debug)]
@ -2412,7 +2412,7 @@ impl<'tcx> dyn HirTyLowerer<'tcx> + '_ {
Ty::new_tup_from_iter(tcx, fields.iter().map(|t| self.lower_ty(t)))
}
hir::TyKind::FnPtr(bf) => {
require_c_abi_if_c_variadic(tcx, bf.decl, bf.abi, hir_ty.span);
check_c_variadic_abi(tcx, bf.decl, bf.abi, hir_ty.span);
Ty::new_fn_ptr(
tcx,

View file

@ -90,7 +90,7 @@ mod outlives;
mod variance;
pub use errors::NoVariantNamed;
use rustc_abi::ExternAbi;
use rustc_abi::{CVariadicStatus, ExternAbi};
use rustc_hir::def::DefKind;
use rustc_hir::lints::DelayedLint;
use rustc_hir::{self as hir};
@ -99,7 +99,6 @@ use rustc_middle::mir::interpret::GlobalId;
use rustc_middle::query::Providers;
use rustc_middle::ty::{self, Const, Ty, TyCtxt};
use rustc_session::parse::feature_err;
use rustc_span::symbol::sym;
use rustc_span::{ErrorGuaranteed, Span};
use rustc_trait_selection::traits;
@ -108,46 +107,34 @@ use crate::hir_ty_lowering::{FeedConstTy, HirTyLowerer};
rustc_fluent_macro::fluent_messages! { "../messages.ftl" }
fn require_c_abi_if_c_variadic(
tcx: TyCtxt<'_>,
decl: &hir::FnDecl<'_>,
abi: ExternAbi,
span: Span,
) {
// ABIs which can stably use varargs
if !decl.c_variadic || matches!(abi, ExternAbi::C { .. } | ExternAbi::Cdecl { .. }) {
fn check_c_variadic_abi(tcx: TyCtxt<'_>, decl: &hir::FnDecl<'_>, abi: ExternAbi, span: Span) {
if !decl.c_variadic {
// Not even a variadic function.
return;
}
// ABIs with feature-gated stability
let extended_abi_support = tcx.features().extended_varargs_abi_support();
let extern_system_varargs = tcx.features().extern_system_varargs();
// If the feature gate has been enabled, we can stop here
if extern_system_varargs && let ExternAbi::System { .. } = abi {
return;
};
if extended_abi_support && abi.supports_varargs() {
return;
};
// Looks like we need to pick an error to emit.
// Is there any feature which we could have enabled to make this work?
let unstable_explain =
format!("C-variadic functions with the {abi} calling convention are unstable");
match abi {
ExternAbi::System { .. } => {
feature_err(&tcx.sess, sym::extern_system_varargs, span, unstable_explain)
match abi.supports_c_variadic() {
CVariadicStatus::Stable => {}
CVariadicStatus::NotSupported => {
tcx.dcx()
.create_err(errors::VariadicFunctionCompatibleConvention {
span,
convention: &format!("{abi}"),
})
.emit();
}
abi if abi.supports_varargs() => {
feature_err(&tcx.sess, sym::extended_varargs_abi_support, span, unstable_explain)
CVariadicStatus::Unstable { feature } => {
if !tcx.features().enabled(feature) {
feature_err(
&tcx.sess,
feature,
span,
format!("C-variadic functions with the {abi} calling convention are unstable"),
)
.emit();
}
}
_ => tcx.dcx().create_err(errors::VariadicFunctionCompatibleConvention {
span,
convention: &format!("{abi}"),
}),
}
.emit();
}
/// Adds query implementations to the [Providers] vtable, see [`rustc_middle::query`]

View file

@ -1958,12 +1958,12 @@ impl<'a> State<'a> {
self.print_qpath(qpath, true);
self.nbsp();
self.word("{");
let empty = fields.is_empty() && !etc;
let empty = fields.is_empty() && etc.is_none();
if !empty {
self.space();
}
self.commasep_cmnt(Consistent, fields, |s, f| s.print_patfield(f), |f| f.pat.span);
if etc {
if etc.is_some() {
if !fields.is_empty() {
self.word_space(",");
}
@ -2379,7 +2379,7 @@ impl<'a> State<'a> {
self.print_type(default);
}
}
GenericParamKind::Const { ty, ref default, synthetic: _ } => {
GenericParamKind::Const { ty, ref default } => {
self.word_space(":");
self.print_type(ty);
if let Some(default) = default {

View file

@ -5,7 +5,7 @@ edition = "2024"
[dependencies]
# tidy-alphabetical-start
itertools.workspace = true
itertools = "0.12"
rustc_abi = { path = "../rustc_abi" }
rustc_ast = { path = "../rustc_ast" }
rustc_attr_parsing = { path = "../rustc_attr_parsing" }
@ -25,5 +25,5 @@ rustc_span = { path = "../rustc_span" }
rustc_target = { path = "../rustc_target" }
rustc_trait_selection = { path = "../rustc_trait_selection" }
smallvec = { version = "1.8.1", features = ["union", "may_dangle"] }
tracing.workspace = true
tracing = "0.1"
# tidy-alphabetical-end

View file

@ -117,7 +117,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
Err(Ambiguity(..)) => true,
Err(PrivateMatch(..)) => false,
Err(IllegalSizedBound { .. }) => true,
Err(BadReturnType) => false,
Err(BadReturnType) => true,
Err(ErrorReported(_)) => false,
}
}

View file

@ -962,13 +962,15 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
let (opt_rhs_expr, opt_rhs_ty) = opt_rhs.unzip();
let cause = self.cause(
span,
ObligationCauseCode::BinOp {
lhs_hir_id: lhs_expr.hir_id,
rhs_hir_id: opt_rhs_expr.map(|expr| expr.hir_id),
rhs_span: opt_rhs_expr.map(|expr| expr.span),
rhs_is_lit: opt_rhs_expr
.is_some_and(|expr| matches!(expr.kind, hir::ExprKind::Lit(_))),
output_ty: expected.only_has_type(self),
match opt_rhs_expr {
Some(rhs) => ObligationCauseCode::BinOp {
lhs_hir_id: lhs_expr.hir_id,
rhs_hir_id: rhs.hir_id,
rhs_span: rhs.span,
rhs_is_lit: matches!(rhs.kind, hir::ExprKind::Lit(_)),
output_ty: expected.only_has_type(self),
},
None => ObligationCauseCode::UnOp { hir_id: lhs_expr.hir_id },
},
);

View file

@ -605,7 +605,15 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
},
PatKind::Struct(_, fields, has_rest_pat) => match opt_path_res.unwrap() {
Ok(ResolvedPat { ty, kind: ResolvedPatKind::Struct { variant } }) => self
.check_pat_struct(pat, fields, has_rest_pat, ty, variant, expected, pat_info),
.check_pat_struct(
pat,
fields,
has_rest_pat.is_some(),
ty,
variant,
expected,
pat_info,
),
Err(guar) => {
let ty_err = Ty::new_error(self.tcx, guar);
for field in fields {
@ -2428,7 +2436,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
let len = unmentioned_fields.len();
let (prefix, postfix, sp) = match fields {
[] => match &pat.kind {
PatKind::Struct(path, [], false) => {
PatKind::Struct(path, [], None) => {
(" { ", " }", path.span().shrink_to_hi().until(pat.span.shrink_to_hi()))
}
_ => return err,

View file

@ -19,6 +19,6 @@ rustc_middle = { path = "../rustc_middle" }
rustc_serialize = { path = "../rustc_serialize" }
rustc_session = { path = "../rustc_session" }
rustc_span = { path = "../rustc_span" }
thin-vec.workspace = true
tracing.workspace = true
thin-vec = "0.2.12"
tracing = "0.1"
# tidy-alphabetical-end

View file

@ -8,8 +8,8 @@ proc-macro = true
[dependencies]
# tidy-alphabetical-start
proc-macro2.workspace = true
quote.workspace = true
proc-macro2 = "1"
quote = "1"
syn = { version = "2.0.9", features = ["full", "extra-traits"] }
# tidy-alphabetical-end

View file

@ -18,6 +18,6 @@ rustc_middle = { path = "../rustc_middle" }
rustc_span = { path = "../rustc_span" }
rustc_type_ir = { path = "../rustc_type_ir" }
smallvec = { version = "1.8.1", features = ["union", "may_dangle"] }
thin-vec.workspace = true
tracing.workspace = true
thin-vec = "0.2.12"
tracing = "0.1"
# tidy-alphabetical-end

View file

@ -46,7 +46,7 @@ rustc_thread_pool = { path = "../rustc_thread_pool" }
rustc_trait_selection = { path = "../rustc_trait_selection" }
rustc_traits = { path = "../rustc_traits" }
rustc_ty_utils = { path = "../rustc_ty_utils" }
tracing.workspace = true
tracing = "0.1"
# tidy-alphabetical-end
[dev-dependencies]

View file

@ -41,7 +41,7 @@ use rustc_span::{
Symbol, sym,
};
use rustc_target::spec::PanicStrategy;
use rustc_trait_selection::traits;
use rustc_trait_selection::{solve, traits};
use tracing::{info, instrument};
use crate::interface::Compiler;
@ -895,6 +895,7 @@ pub static DEFAULT_QUERY_PROVIDERS: LazyLock<Providers> = LazyLock::new(|| {
rustc_hir_typeck::provide(providers);
ty::provide(providers);
traits::provide(providers);
solve::provide(providers);
rustc_passes::provide(providers);
rustc_traits::provide(providers);
rustc_ty_utils::provide(providers);

View file

@ -14,7 +14,7 @@ Rust lexer used by rustc. No stability guarantees are provided.
# Note that this crate purposefully does not depend on other rustc crates
[dependencies]
memchr.workspace = true
memchr = "2.7.4"
unicode-properties = { version = "0.1.0", default-features = false, features = ["emoji"] }
unicode-xid = "0.2.0"

View file

@ -550,28 +550,20 @@ impl Cursor<'_> {
self.eat_while(|ch| ch != '\n' && is_whitespace(ch));
let invalid_infostring = self.first() != '\n';
let mut s = self.as_str();
let mut found = false;
let mut size = 0;
while let Some(closing) = s.find(&"-".repeat(length_opening as usize)) {
let preceding_chars_start = s[..closing].rfind("\n").map_or(0, |i| i + 1);
if s[preceding_chars_start..closing].chars().all(is_whitespace) {
// candidate found
self.bump_bytes(size + closing);
// in case like
// ---cargo
// --- blahblah
// or
// ---cargo
// ----
// combine those stuff into this frontmatter token such that it gets detected later.
self.eat_until(b'\n');
found = true;
break;
} else {
s = &s[closing + length_opening as usize..];
size += closing + length_opening as usize;
}
let nl_fence_pattern = format!("\n{:-<1$}", "", length_opening as usize);
if let Some(closing) = self.as_str().find(&nl_fence_pattern) {
// candidate found
self.bump_bytes(closing + nl_fence_pattern.len());
// in case like
// ---cargo
// --- blahblah
// or
// ---cargo
// ----
// combine those stuff into this frontmatter token such that it gets detected later.
self.eat_until(b'\n');
found = true;
}
if !found {

View file

@ -24,6 +24,6 @@ rustc_span = { path = "../rustc_span" }
rustc_target = { path = "../rustc_target" }
rustc_trait_selection = { path = "../rustc_trait_selection" }
smallvec = { version = "1.8.1", features = ["union", "may_dangle"] }
tracing.workspace = true
tracing = "0.1"
unicode-security = "0.1.0"
# tidy-alphabetical-end

View file

@ -5,7 +5,7 @@ edition = "2024"
[dependencies]
# tidy-alphabetical-start
libc.workspace = true
libc = "0.2.73"
# tidy-alphabetical-end
[build-dependencies]

View file

@ -103,28 +103,9 @@ LLVMRustGetSymbols(char *BufPtr, size_t BufLen, void *State,
#define TRUE_PTR (void *)1
#define FALSE_PTR (void *)0
extern "C" bool LLVMRustIs64BitSymbolicFile(char *BufPtr, size_t BufLen) {
std::unique_ptr<MemoryBuffer> Buf = MemoryBuffer::getMemBuffer(
StringRef(BufPtr, BufLen), StringRef("LLVMRustGetSymbolsObject"), false);
SmallString<0> SymNameBuf;
auto SymName = raw_svector_ostream(SymNameBuf);
// Code starting from this line is copied from s64BitSymbolicFile in
// ArchiveWriter.cpp.
// In the scenario when LLVMContext is populated SymbolicFile will contain a
// reference to it, thus SymbolicFile should be destroyed first.
LLVMContext Context;
Expected<std::unique_ptr<object::SymbolicFile>> ObjOrErr =
getSymbolicFile(Buf->getMemBufferRef(), Context);
if (!ObjOrErr) {
return false;
}
std::unique_ptr<object::SymbolicFile> Obj = std::move(*ObjOrErr);
return Obj != nullptr ? Obj->is64Bit() : false;
}
extern "C" bool LLVMRustIsECObject(char *BufPtr, size_t BufLen) {
bool withBufferAsSymbolicFile(
char *BufPtr, size_t BufLen,
std::function<bool(object::SymbolicFile &)> Callback) {
std::unique_ptr<MemoryBuffer> Buf = MemoryBuffer::getMemBuffer(
StringRef(BufPtr, BufLen), StringRef("LLVMRustGetSymbolsObject"), false);
SmallString<0> SymNameBuf;
@ -139,29 +120,63 @@ extern "C" bool LLVMRustIsECObject(char *BufPtr, size_t BufLen) {
return false;
}
std::unique_ptr<object::SymbolicFile> Obj = std::move(*ObjOrErr);
if (Obj == nullptr) {
return false;
}
// Code starting from this line is copied from isECObject in
// ArchiveWriter.cpp with an extra #if to work with LLVM 17.
if (Obj->isCOFF())
return cast<llvm::object::COFFObjectFile>(&*Obj)->getMachine() !=
COFF::IMAGE_FILE_MACHINE_ARM64;
if (Obj->isCOFFImportFile())
return cast<llvm::object::COFFImportFile>(&*Obj)->getMachine() !=
COFF::IMAGE_FILE_MACHINE_ARM64;
if (Obj->isIR()) {
Expected<std::string> TripleStr =
getBitcodeTargetTriple(Obj->getMemoryBufferRef());
if (!TripleStr)
return false;
Triple T(*TripleStr);
return T.isWindowsArm64EC() || T.getArch() == Triple::x86_64;
}
return false;
return Callback(*Obj);
}
extern "C" bool LLVMRustIs64BitSymbolicFile(char *BufPtr, size_t BufLen) {
return withBufferAsSymbolicFile(
BufPtr, BufLen, [](object::SymbolicFile &Obj) { return Obj.is64Bit(); });
}
extern "C" bool LLVMRustIsECObject(char *BufPtr, size_t BufLen) {
return withBufferAsSymbolicFile(
BufPtr, BufLen, [](object::SymbolicFile &Obj) {
// Code starting from this line is copied from isECObject in
// ArchiveWriter.cpp with an extra #if to work with LLVM 17.
if (Obj.isCOFF())
return cast<llvm::object::COFFObjectFile>(&Obj)->getMachine() !=
COFF::IMAGE_FILE_MACHINE_ARM64;
if (Obj.isCOFFImportFile())
return cast<llvm::object::COFFImportFile>(&Obj)->getMachine() !=
COFF::IMAGE_FILE_MACHINE_ARM64;
if (Obj.isIR()) {
Expected<std::string> TripleStr =
getBitcodeTargetTriple(Obj.getMemoryBufferRef());
if (!TripleStr)
return false;
Triple T(*TripleStr);
return T.isWindowsArm64EC() || T.getArch() == Triple::x86_64;
}
return false;
});
}
extern "C" bool LLVMRustIsAnyArm64Coff(char *BufPtr, size_t BufLen) {
return withBufferAsSymbolicFile(
BufPtr, BufLen, [](object::SymbolicFile &Obj) {
// Code starting from this line is copied from isAnyArm64COFF in
// ArchiveWriter.cpp.
if (Obj.isCOFF())
return COFF::isAnyArm64(cast<COFFObjectFile>(&Obj)->getMachine());
if (Obj.isCOFFImportFile())
return COFF::isAnyArm64(cast<COFFImportFile>(&Obj)->getMachine());
if (Obj.isIR()) {
Expected<std::string> TripleStr =
getBitcodeTargetTriple(Obj.getMemoryBufferRef());
if (!TripleStr)
return false;
Triple T(*TripleStr);
return T.isOSWindows() && T.getArch() == Triple::aarch64;
}
return false;
});
}

View file

@ -5,10 +5,10 @@ edition = "2024"
[dependencies]
# tidy-alphabetical-start
tracing = "0.1.28"
tracing-core = "=0.1.30" # FIXME(Nilstrieb) tracing has a deadlock: https://github.com/tokio-rs/tracing/issues/2635
tracing-subscriber = { version = "0.3.3", default-features = false, features = ["fmt", "env-filter", "smallvec", "parking_lot", "ansi"] }
tracing-tree = "0.3.1"
tracing.workspace = true
# tidy-alphabetical-end
[features]

View file

@ -8,8 +8,8 @@ proc-macro = true
[dependencies]
# tidy-alphabetical-start
proc-macro2.workspace = true
quote.workspace = true
proc-macro2 = "1"
quote = "1"
syn = { version = "2.0.9", features = ["full"] }
synstructure = "0.13.0"
# tidy-alphabetical-end

View file

@ -5,9 +5,9 @@ edition = "2024"
[dependencies]
# tidy-alphabetical-start
bitflags.workspace = true
bitflags = "2.4.1"
libloading = "0.8.0"
odht.workspace = true
odht = { version = "0.3.1", features = ["nightly"] }
rustc_abi = { path = "../rustc_abi" }
rustc_ast = { path = "../rustc_ast" }
rustc_attr_parsing = { path = "../rustc_attr_parsing" }
@ -30,11 +30,11 @@ rustc_serialize = { path = "../rustc_serialize" }
rustc_session = { path = "../rustc_session" }
rustc_span = { path = "../rustc_span" }
rustc_target = { path = "../rustc_target" }
tempfile.workspace = true
tracing.workspace = true
tempfile = "3.7.1"
tracing = "0.1"
# tidy-alphabetical-end
[target.'cfg(target_os = "aix")'.dependencies]
# tidy-alphabetical-start
libc.workspace = true
libc = "0.2"
# tidy-alphabetical-end

View file

@ -5,12 +5,12 @@ edition = "2024"
[dependencies]
# tidy-alphabetical-start
bitflags.workspace = true
either.workspace = true
bitflags = "2.4.1"
either = "1.5.0"
gsgdt = "0.1.2"
polonius-engine.workspace = true
polonius-engine = "0.13.0"
rustc_abi = { path = "../rustc_abi" }
rustc_apfloat.workspace = true
rustc_apfloat = "0.2.0"
rustc_arena = { path = "../rustc_arena" }
rustc_ast = { path = "../rustc_ast" }
rustc_ast_ir = { path = "../rustc_ast_ir" }
@ -34,8 +34,8 @@ rustc_target = { path = "../rustc_target" }
rustc_thread_pool = { path = "../rustc_thread_pool" }
rustc_type_ir = { path = "../rustc_type_ir" }
smallvec = { version = "1.8.1", features = ["union", "may_dangle"] }
thin-vec.workspace = true
tracing.workspace = true
thin-vec = "0.2.12"
tracing = "0.1"
# tidy-alphabetical-end
[features]

View file

@ -80,6 +80,7 @@ macro_rules! arena_types {
rustc_middle::infer::canonical::Canonical<'tcx,
rustc_middle::infer::canonical::QueryResponse<'tcx, rustc_middle::ty::Ty<'tcx>>
>,
[] inspect_probe: rustc_middle::traits::solve::inspect::Probe<rustc_middle::ty::TyCtxt<'tcx>>,
[] effective_visibilities: rustc_middle::middle::privacy::EffectiveVisibilities,
[] upvars_mentioned: rustc_data_structures::fx::FxIndexMap<rustc_hir::HirId, rustc_hir::Upvar>,
[] dyn_compatibility_violations: rustc_middle::traits::DynCompatibilityViolation,

View file

@ -62,9 +62,7 @@ pub use terminator::*;
pub use self::generic_graph::graphviz_safe_def_name;
pub use self::graphviz::write_mir_graphviz;
pub use self::pretty::{
PassWhere, create_dump_file, display_allocation, dump_enabled, dump_mir, write_mir_pretty,
};
pub use self::pretty::{MirDumper, PassWhere, display_allocation, write_mir_pretty};
/// Types for locals
pub type LocalDecls<'tcx> = IndexSlice<Local, LocalDecl<'tcx>>;

View file

@ -44,7 +44,7 @@ pub enum PassWhere {
}
/// Cosmetic options for pretty-printing the MIR contents, gathered from the CLI. Each pass can
/// override these when dumping its own specific MIR information with [`dump_mir_with_options`].
/// override these when dumping its own specific MIR information with `dump_mir`.
#[derive(Copy, Clone)]
pub struct PrettyPrintMirOptions {
/// Whether to include extra comments, like span info. From `-Z mir-include-spans`.
@ -58,277 +58,253 @@ impl PrettyPrintMirOptions {
}
}
/// If the session is properly configured, dumps a human-readable representation of the MIR (with
/// default pretty-printing options) into:
///
/// ```text
/// rustc.node<node_id>.<pass_num>.<pass_name>.<disambiguator>
/// ```
///
/// Output from this function is controlled by passing `-Z dump-mir=<filter>`,
/// where `<filter>` takes the following forms:
///
/// - `all` -- dump MIR for all fns, all passes, all everything
/// - a filter defined by a set of substrings combined with `&` and `|`
/// (`&` has higher precedence). At least one of the `|`-separated groups
/// must match; an `|`-separated group matches if all of its `&`-separated
/// substrings are matched.
///
/// Example:
///
/// - `nll` == match if `nll` appears in the name
/// - `foo & nll` == match if `foo` and `nll` both appear in the name
/// - `foo & nll | typeck` == match if `foo` and `nll` both appear in the name
/// or `typeck` appears in the name.
/// - `foo & nll | bar & typeck` == match if `foo` and `nll` both appear in the name
/// or `typeck` and `bar` both appear in the name.
#[inline]
pub fn dump_mir<'tcx, F>(
tcx: TyCtxt<'tcx>,
pass_num: bool,
pass_name: &str,
disambiguator: &dyn Display,
body: &Body<'tcx>,
extra_data: F,
) where
F: FnMut(PassWhere, &mut dyn io::Write) -> io::Result<()>,
{
dump_mir_with_options(
tcx,
pass_num,
pass_name,
disambiguator,
body,
extra_data,
PrettyPrintMirOptions::from_cli(tcx),
);
/// Manages MIR dumping, which is MIR writing done to a file with a specific name. In particular,
/// it makes it impossible to dump MIR to one of these files when it hasn't been requested from the
/// command line. Layered on top of `MirWriter`, which does the actual writing.
pub struct MirDumper<'dis, 'de, 'tcx> {
show_pass_num: bool,
pass_name: &'static str,
disambiguator: &'dis dyn Display,
writer: MirWriter<'de, 'tcx>,
}
/// If the session is properly configured, dumps a human-readable representation of the MIR, with
/// the given [pretty-printing options][PrettyPrintMirOptions].
///
/// See [`dump_mir`] for more details.
///
#[inline]
pub fn dump_mir_with_options<'tcx, F>(
tcx: TyCtxt<'tcx>,
pass_num: bool,
pass_name: &str,
disambiguator: &dyn Display,
body: &Body<'tcx>,
extra_data: F,
options: PrettyPrintMirOptions,
) where
F: FnMut(PassWhere, &mut dyn io::Write) -> io::Result<()>,
{
if !dump_enabled(tcx, pass_name, body.source.def_id()) {
return;
}
dump_matched_mir_node(tcx, pass_num, pass_name, disambiguator, body, extra_data, options);
}
pub fn dump_enabled(tcx: TyCtxt<'_>, pass_name: &str, def_id: DefId) -> bool {
let Some(ref filters) = tcx.sess.opts.unstable_opts.dump_mir else {
return false;
};
// see notes on #41697 below
let node_path = ty::print::with_forced_impl_filename_line!(tcx.def_path_str(def_id));
filters.split('|').any(|or_filter| {
or_filter.split('&').all(|and_filter| {
let and_filter_trimmed = and_filter.trim();
and_filter_trimmed == "all"
|| pass_name.contains(and_filter_trimmed)
|| node_path.contains(and_filter_trimmed)
})
})
}
// #41697 -- we use `with_forced_impl_filename_line()` because
// `def_path_str()` would otherwise trigger `type_of`, and this can
// run while we are already attempting to evaluate `type_of`.
/// Most use-cases of dumping MIR should use the [dump_mir] entrypoint instead, which will also
/// check if dumping MIR is enabled, and if this body matches the filters passed on the CLI.
///
/// That being said, if the above requirements have been validated already, this function is where
/// most of the MIR dumping occurs, if one needs to export it to a file they have created with
/// [create_dump_file], rather than to a new file created as part of [dump_mir], or to stdout/stderr
/// for debugging purposes.
pub fn dump_mir_to_writer<'tcx, F>(
tcx: TyCtxt<'tcx>,
pass_name: &str,
disambiguator: &dyn Display,
body: &Body<'tcx>,
w: &mut dyn io::Write,
mut extra_data: F,
options: PrettyPrintMirOptions,
) -> io::Result<()>
where
F: FnMut(PassWhere, &mut dyn io::Write) -> io::Result<()>,
{
// see notes on #41697 above
let def_path =
ty::print::with_forced_impl_filename_line!(tcx.def_path_str(body.source.def_id()));
// ignore-tidy-odd-backticks the literal below is fine
write!(w, "// MIR for `{def_path}")?;
match body.source.promoted {
None => write!(w, "`")?,
Some(promoted) => write!(w, "::{promoted:?}`")?,
}
writeln!(w, " {disambiguator} {pass_name}")?;
if let Some(ref layout) = body.coroutine_layout_raw() {
writeln!(w, "/* coroutine_layout = {layout:#?} */")?;
}
writeln!(w)?;
extra_data(PassWhere::BeforeCFG, w)?;
write_user_type_annotations(tcx, body, w)?;
write_mir_fn(tcx, body, &mut extra_data, w, options)?;
extra_data(PassWhere::AfterCFG, w)
}
fn dump_matched_mir_node<'tcx, F>(
tcx: TyCtxt<'tcx>,
pass_num: bool,
pass_name: &str,
disambiguator: &dyn Display,
body: &Body<'tcx>,
extra_data: F,
options: PrettyPrintMirOptions,
) where
F: FnMut(PassWhere, &mut dyn io::Write) -> io::Result<()>,
{
let _: io::Result<()> = try {
let mut file = create_dump_file(tcx, "mir", pass_num, pass_name, disambiguator, body)?;
dump_mir_to_writer(tcx, pass_name, disambiguator, body, &mut file, extra_data, options)?;
};
if tcx.sess.opts.unstable_opts.dump_mir_graphviz {
let _: io::Result<()> = try {
let mut file = create_dump_file(tcx, "dot", pass_num, pass_name, disambiguator, body)?;
write_mir_fn_graphviz(tcx, body, false, &mut file)?;
impl<'dis, 'de, 'tcx> MirDumper<'dis, 'de, 'tcx> {
// If dumping should be performed (e.g. because it was requested on the
// CLI), returns a `MirDumper` with default values for the following fields:
// - `show_pass_num`: `false`
// - `disambiguator`: `&0`
// - `writer.extra_data`: a no-op
// - `writer.options`: default options derived from CLI flags
pub fn new(tcx: TyCtxt<'tcx>, pass_name: &'static str, body: &Body<'tcx>) -> Option<Self> {
let dump_enabled = if let Some(ref filters) = tcx.sess.opts.unstable_opts.dump_mir {
// see notes on #41697 below
let node_path =
ty::print::with_forced_impl_filename_line!(tcx.def_path_str(body.source.def_id()));
filters.split('|').any(|or_filter| {
or_filter.split('&').all(|and_filter| {
let and_filter_trimmed = and_filter.trim();
and_filter_trimmed == "all"
|| pass_name.contains(and_filter_trimmed)
|| node_path.contains(and_filter_trimmed)
})
})
} else {
false
};
dump_enabled.then_some(MirDumper {
show_pass_num: false,
pass_name,
disambiguator: &0,
writer: MirWriter::new(tcx),
})
}
}
/// Returns the path to the filename where we should dump a given MIR.
/// Also used by other bits of code (e.g., NLL inference) that dump
/// graphviz data or other things.
fn dump_path<'tcx>(
tcx: TyCtxt<'tcx>,
extension: &str,
pass_num: bool,
pass_name: &str,
disambiguator: &dyn Display,
body: &Body<'tcx>,
) -> PathBuf {
let source = body.source;
let promotion_id = match source.promoted {
Some(id) => format!("-{id:?}"),
None => String::new(),
};
pub fn tcx(&self) -> TyCtxt<'tcx> {
self.writer.tcx
}
let pass_num = if tcx.sess.opts.unstable_opts.dump_mir_exclude_pass_number {
String::new()
} else if pass_num {
let (dialect_index, phase_index) = body.phase.index();
format!(".{}-{}-{:03}", dialect_index, phase_index, body.pass_count)
} else {
".-------".to_string()
};
#[must_use]
pub fn set_show_pass_num(mut self) -> Self {
self.show_pass_num = true;
self
}
let crate_name = tcx.crate_name(source.def_id().krate);
let item_name = tcx.def_path(source.def_id()).to_filename_friendly_no_crate();
// All drop shims have the same DefId, so we have to add the type
// to get unique file names.
let shim_disambiguator = match source.instance {
ty::InstanceKind::DropGlue(_, Some(ty)) => {
// Unfortunately, pretty-printed typed are not very filename-friendly.
// We dome some filtering.
let mut s = ".".to_owned();
s.extend(ty.to_string().chars().filter_map(|c| match c {
' ' => None,
':' | '<' | '>' => Some('_'),
c => Some(c),
}));
s
}
ty::InstanceKind::AsyncDropGlueCtorShim(_, ty) => {
let mut s = ".".to_owned();
s.extend(ty.to_string().chars().filter_map(|c| match c {
' ' => None,
':' | '<' | '>' => Some('_'),
c => Some(c),
}));
s
}
ty::InstanceKind::AsyncDropGlue(_, ty) => {
let ty::Coroutine(_, args) = ty.kind() else {
bug!();
#[must_use]
pub fn set_disambiguator(mut self, disambiguator: &'dis dyn Display) -> Self {
self.disambiguator = disambiguator;
self
}
#[must_use]
pub fn set_extra_data(
mut self,
extra_data: &'de dyn Fn(PassWhere, &mut dyn io::Write) -> io::Result<()>,
) -> Self {
self.writer.extra_data = extra_data;
self
}
#[must_use]
pub fn set_options(mut self, options: PrettyPrintMirOptions) -> Self {
self.writer.options = options;
self
}
/// If the session is properly configured, dumps a human-readable representation of the MIR
/// (with default pretty-printing options) into:
///
/// ```text
/// rustc.node<node_id>.<pass_num>.<pass_name>.<disambiguator>
/// ```
///
/// Output from this function is controlled by passing `-Z dump-mir=<filter>`,
/// where `<filter>` takes the following forms:
///
/// - `all` -- dump MIR for all fns, all passes, all everything
/// - a filter defined by a set of substrings combined with `&` and `|`
/// (`&` has higher precedence). At least one of the `|`-separated groups
/// must match; an `|`-separated group matches if all of its `&`-separated
/// substrings are matched.
///
/// Example:
///
/// - `nll` == match if `nll` appears in the name
/// - `foo & nll` == match if `foo` and `nll` both appear in the name
/// - `foo & nll | typeck` == match if `foo` and `nll` both appear in the name
/// or `typeck` appears in the name.
/// - `foo & nll | bar & typeck` == match if `foo` and `nll` both appear in the name
/// or `typeck` and `bar` both appear in the name.
pub fn dump_mir(&self, body: &Body<'tcx>) {
let _: io::Result<()> = try {
let mut file = self.create_dump_file("mir", body)?;
self.dump_mir_to_writer(body, &mut file)?;
};
if self.tcx().sess.opts.unstable_opts.dump_mir_graphviz {
let _: io::Result<()> = try {
let mut file = self.create_dump_file("dot", body)?;
write_mir_fn_graphviz(self.tcx(), body, false, &mut file)?;
};
let ty = args.first().unwrap().expect_ty();
let mut s = ".".to_owned();
s.extend(ty.to_string().chars().filter_map(|c| match c {
' ' => None,
':' | '<' | '>' => Some('_'),
c => Some(c),
}));
s
}
ty::InstanceKind::FutureDropPollShim(_, proxy_cor, impl_cor) => {
let mut s = ".".to_owned();
s.extend(proxy_cor.to_string().chars().filter_map(|c| match c {
' ' => None,
':' | '<' | '>' => Some('_'),
c => Some(c),
}));
s.push('.');
s.extend(impl_cor.to_string().chars().filter_map(|c| match c {
' ' => None,
':' | '<' | '>' => Some('_'),
c => Some(c),
}));
s
}
_ => String::new(),
};
let mut file_path = PathBuf::new();
file_path.push(Path::new(&tcx.sess.opts.unstable_opts.dump_mir_dir));
let file_name = format!(
"{crate_name}.{item_name}{shim_disambiguator}{promotion_id}{pass_num}.{pass_name}.{disambiguator}.{extension}",
);
file_path.push(&file_name);
file_path
}
/// Attempts to open a file where we should dump a given MIR or other
/// bit of MIR-related data. Used by `mir-dump`, but also by other
/// bits of code (e.g., NLL inference) that dump graphviz data or
/// other things, and hence takes the extension as an argument.
pub fn create_dump_file<'tcx>(
tcx: TyCtxt<'tcx>,
extension: &str,
pass_num: bool,
pass_name: &str,
disambiguator: &dyn Display,
body: &Body<'tcx>,
) -> io::Result<io::BufWriter<fs::File>> {
let file_path = dump_path(tcx, extension, pass_num, pass_name, disambiguator, body);
if let Some(parent) = file_path.parent() {
fs::create_dir_all(parent).map_err(|e| {
io::Error::new(
e.kind(),
format!("IO error creating MIR dump directory: {parent:?}; {e}"),
)
})?;
}
fs::File::create_buffered(&file_path).map_err(|e| {
io::Error::new(e.kind(), format!("IO error creating MIR dump file: {file_path:?}; {e}"))
})
// #41697 -- we use `with_forced_impl_filename_line()` because `def_path_str()` would otherwise
// trigger `type_of`, and this can run while we are already attempting to evaluate `type_of`.
/// Writes the MIR of `body` to `w` in human-readable form: a `// MIR for`
/// header line (with promotion and disambiguator info), the raw coroutine
/// layout if any, user type annotations, and the function itself, with the
/// `extra_data` callback invoked before and after the CFG.
pub fn dump_mir_to_writer(&self, body: &Body<'tcx>, w: &mut dyn io::Write) -> io::Result<()> {
    // see notes on #41697 above
    let def_path = ty::print::with_forced_impl_filename_line!(
        self.tcx().def_path_str(body.source.def_id())
    );
    // ignore-tidy-odd-backticks the literal below is fine
    write!(w, "// MIR for `{def_path}")?;
    match body.source.promoted {
        None => write!(w, "`")?,
        Some(promoted) => write!(w, "::{promoted:?}`")?,
    }
    writeln!(w, " {} {}", self.disambiguator, self.pass_name)?;
    if let Some(ref layout) = body.coroutine_layout_raw() {
        writeln!(w, "/* coroutine_layout = {layout:#?} */")?;
    }
    writeln!(w)?;
    (self.writer.extra_data)(PassWhere::BeforeCFG, w)?;
    write_user_type_annotations(self.tcx(), body, w)?;
    self.writer.write_mir_fn(body, w)?;
    (self.writer.extra_data)(PassWhere::AfterCFG, w)
}
/// Returns the path to the filename where we should dump a given MIR.
/// Also used by other bits of code (e.g., NLL inference) that dump
/// graphviz data or other things.
fn dump_path(&self, extension: &str, body: &Body<'tcx>) -> PathBuf {
let tcx = self.tcx();
let source = body.source;
let promotion_id = match source.promoted {
Some(id) => format!("-{id:?}"),
None => String::new(),
};
let pass_num = if tcx.sess.opts.unstable_opts.dump_mir_exclude_pass_number {
String::new()
} else if self.show_pass_num {
let (dialect_index, phase_index) = body.phase.index();
format!(".{}-{}-{:03}", dialect_index, phase_index, body.pass_count)
} else {
".-------".to_string()
};
let crate_name = tcx.crate_name(source.def_id().krate);
let item_name = tcx.def_path(source.def_id()).to_filename_friendly_no_crate();
// All drop shims have the same DefId, so we have to add the type
// to get unique file names.
let shim_disambiguator = match source.instance {
ty::InstanceKind::DropGlue(_, Some(ty)) => {
// Unfortunately, pretty-printed types are not very filename-friendly.
// We do some filtering.
let mut s = ".".to_owned();
s.extend(ty.to_string().chars().filter_map(|c| match c {
' ' => None,
':' | '<' | '>' => Some('_'),
c => Some(c),
}));
s
}
ty::InstanceKind::AsyncDropGlueCtorShim(_, ty) => {
let mut s = ".".to_owned();
s.extend(ty.to_string().chars().filter_map(|c| match c {
' ' => None,
':' | '<' | '>' => Some('_'),
c => Some(c),
}));
s
}
ty::InstanceKind::AsyncDropGlue(_, ty) => {
let ty::Coroutine(_, args) = ty.kind() else {
bug!();
};
let ty = args.first().unwrap().expect_ty();
let mut s = ".".to_owned();
s.extend(ty.to_string().chars().filter_map(|c| match c {
' ' => None,
':' | '<' | '>' => Some('_'),
c => Some(c),
}));
s
}
ty::InstanceKind::FutureDropPollShim(_, proxy_cor, impl_cor) => {
let mut s = ".".to_owned();
s.extend(proxy_cor.to_string().chars().filter_map(|c| match c {
' ' => None,
':' | '<' | '>' => Some('_'),
c => Some(c),
}));
s.push('.');
s.extend(impl_cor.to_string().chars().filter_map(|c| match c {
' ' => None,
':' | '<' | '>' => Some('_'),
c => Some(c),
}));
s
}
_ => String::new(),
};
let mut file_path = PathBuf::new();
file_path.push(Path::new(&tcx.sess.opts.unstable_opts.dump_mir_dir));
let pass_name = self.pass_name;
let disambiguator = self.disambiguator;
let file_name = format!(
"{crate_name}.{item_name}{shim_disambiguator}{promotion_id}{pass_num}.{pass_name}.{disambiguator}.{extension}",
);
file_path.push(&file_name);
file_path
}
/// Attempts to open a file where we should dump a given MIR or other
/// bit of MIR-related data. Used by `mir-dump`, but also by other
/// bits of code (e.g., NLL inference) that dump graphviz data or
/// other things, and hence takes the extension as an argument.
///
/// Any directories missing from the dump path are created first; I/O
/// errors are annotated with the path that caused them.
pub fn create_dump_file(
    &self,
    extension: &str,
    body: &Body<'tcx>,
) -> io::Result<io::BufWriter<fs::File>> {
    let file_path = self.dump_path(extension, body);

    if let Some(parent) = file_path.parent() {
        // Decorate any failure with the offending directory so the user can
        // tell what went wrong.
        let annotate = |e: io::Error| {
            io::Error::new(
                e.kind(),
                format!("IO error creating MIR dump directory: {parent:?}; {e}"),
            )
        };
        fs::create_dir_all(parent).map_err(annotate)?;
    }

    fs::File::create_buffered(&file_path).map_err(|e| {
        io::Error::new(e.kind(), format!("IO error creating MIR dump file: {file_path:?}; {e}"))
    })
}
}
///////////////////////////////////////////////////////////////////////////
@ -341,7 +317,7 @@ pub fn write_mir_pretty<'tcx>(
single: Option<DefId>,
w: &mut dyn io::Write,
) -> io::Result<()> {
let options = PrettyPrintMirOptions::from_cli(tcx);
let writer = MirWriter::new(tcx);
writeln!(w, "// WARNING: This output format is intended for human consumers only")?;
writeln!(w, "// and is subject to change without notice. Knock yourself out.")?;
@ -357,11 +333,11 @@ pub fn write_mir_pretty<'tcx>(
}
let render_body = |w: &mut dyn io::Write, body| -> io::Result<()> {
write_mir_fn(tcx, body, &mut |_, _| Ok(()), w, options)?;
writer.write_mir_fn(body, w)?;
for body in tcx.promoted_mir(def_id) {
writeln!(w)?;
write_mir_fn(tcx, body, &mut |_, _| Ok(()), w, options)?;
writer.write_mir_fn(body, w)?;
}
Ok(())
};
@ -373,7 +349,7 @@ pub fn write_mir_pretty<'tcx>(
writeln!(w, "// MIR FOR CTFE")?;
// Do not use `render_body`, as that would render the promoteds again, but these
// are shared between mir_for_ctfe and optimized_mir
write_mir_fn(tcx, tcx.mir_for_ctfe(def_id), &mut |_, _| Ok(()), w, options)?;
writer.write_mir_fn(tcx.mir_for_ctfe(def_id), w)?;
} else {
let instance_mir = tcx.instance_mir(ty::InstanceKind::Item(def_id));
render_body(w, instance_mir)?;
@ -382,31 +358,35 @@ pub fn write_mir_pretty<'tcx>(
Ok(())
}
/// Write out a human-readable textual representation for the given function.
pub fn write_mir_fn<'tcx, F>(
/// Does the writing of MIR to output, e.g. a file.
pub struct MirWriter<'de, 'tcx> {
tcx: TyCtxt<'tcx>,
body: &Body<'tcx>,
extra_data: &mut F,
w: &mut dyn io::Write,
extra_data: &'de dyn Fn(PassWhere, &mut dyn io::Write) -> io::Result<()>,
options: PrettyPrintMirOptions,
) -> io::Result<()>
where
F: FnMut(PassWhere, &mut dyn io::Write) -> io::Result<()>,
{
write_mir_intro(tcx, body, w, options)?;
for block in body.basic_blocks.indices() {
extra_data(PassWhere::BeforeBlock(block), w)?;
write_basic_block(tcx, block, body, extra_data, w, options)?;
if block.index() + 1 != body.basic_blocks.len() {
writeln!(w)?;
}
}
impl<'de, 'tcx> MirWriter<'de, 'tcx> {
pub fn new(tcx: TyCtxt<'tcx>) -> Self {
MirWriter { tcx, extra_data: &|_, _| Ok(()), options: PrettyPrintMirOptions::from_cli(tcx) }
}
writeln!(w, "}}")?;
/// Write out a human-readable textual representation for the given function.
pub fn write_mir_fn(&self, body: &Body<'tcx>, w: &mut dyn io::Write) -> io::Result<()> {
write_mir_intro(self.tcx, body, w, self.options)?;
for block in body.basic_blocks.indices() {
(self.extra_data)(PassWhere::BeforeBlock(block), w)?;
self.write_basic_block(block, body, w)?;
if block.index() + 1 != body.basic_blocks.len() {
writeln!(w)?;
}
}
write_allocations(tcx, body, w)?;
writeln!(w, "}}")?;
Ok(())
write_allocations(self.tcx, body, w)?;
Ok(())
}
}
/// Prints local variables in a scope tree.
@ -719,95 +699,88 @@ pub fn dump_mir_def_ids(tcx: TyCtxt<'_>, single: Option<DefId>) -> Vec<DefId> {
///////////////////////////////////////////////////////////////////////////
// Basic blocks and their parts (statements, terminators, ...)
/// Write out a human-readable textual representation for the given basic block.
fn write_basic_block<'tcx, F>(
tcx: TyCtxt<'tcx>,
block: BasicBlock,
body: &Body<'tcx>,
extra_data: &mut F,
w: &mut dyn io::Write,
options: PrettyPrintMirOptions,
) -> io::Result<()>
where
F: FnMut(PassWhere, &mut dyn io::Write) -> io::Result<()>,
{
let data = &body[block];
impl<'de, 'tcx> MirWriter<'de, 'tcx> {
/// Write out a human-readable textual representation for the given basic block.
fn write_basic_block(
&self,
block: BasicBlock,
body: &Body<'tcx>,
w: &mut dyn io::Write,
) -> io::Result<()> {
let data = &body[block];
// Basic block label at the top.
let cleanup_text = if data.is_cleanup { " (cleanup)" } else { "" };
writeln!(w, "{INDENT}{block:?}{cleanup_text}: {{")?;
// Basic block label at the top.
let cleanup_text = if data.is_cleanup { " (cleanup)" } else { "" };
writeln!(w, "{INDENT}{block:?}{cleanup_text}: {{")?;
// List of statements in the middle.
let mut current_location = Location { block, statement_index: 0 };
for statement in &data.statements {
extra_data(PassWhere::BeforeLocation(current_location), w)?;
let indented_body = format!("{INDENT}{INDENT}{statement:?};");
if options.include_extra_comments {
writeln!(
// List of statements in the middle.
let mut current_location = Location { block, statement_index: 0 };
for statement in &data.statements {
(self.extra_data)(PassWhere::BeforeLocation(current_location), w)?;
let indented_body = format!("{INDENT}{INDENT}{statement:?};");
if self.options.include_extra_comments {
writeln!(
w,
"{:A$} // {}{}",
indented_body,
if self.tcx.sess.verbose_internals() {
format!("{current_location:?}: ")
} else {
String::new()
},
comment(self.tcx, statement.source_info),
A = ALIGN,
)?;
} else {
writeln!(w, "{indented_body}")?;
}
write_extra(
self.tcx,
w,
"{:A$} // {}{}",
indented_body,
if tcx.sess.verbose_internals() {
format!("{current_location:?}: ")
} else {
String::new()
},
comment(tcx, statement.source_info),
A = ALIGN,
&|visitor| visitor.visit_statement(statement, current_location),
self.options,
)?;
} else {
writeln!(w, "{indented_body}")?;
(self.extra_data)(PassWhere::AfterLocation(current_location), w)?;
current_location.statement_index += 1;
}
write_extra(
tcx,
w,
|visitor| {
visitor.visit_statement(statement, current_location);
},
options,
)?;
// Terminator at the bottom.
(self.extra_data)(PassWhere::BeforeLocation(current_location), w)?;
if data.terminator.is_some() {
let indented_terminator = format!("{0}{0}{1:?};", INDENT, data.terminator().kind);
if self.options.include_extra_comments {
writeln!(
w,
"{:A$} // {}{}",
indented_terminator,
if self.tcx.sess.verbose_internals() {
format!("{current_location:?}: ")
} else {
String::new()
},
comment(self.tcx, data.terminator().source_info),
A = ALIGN,
)?;
} else {
writeln!(w, "{indented_terminator}")?;
}
extra_data(PassWhere::AfterLocation(current_location), w)?;
current_location.statement_index += 1;
}
// Terminator at the bottom.
extra_data(PassWhere::BeforeLocation(current_location), w)?;
if data.terminator.is_some() {
let indented_terminator = format!("{0}{0}{1:?};", INDENT, data.terminator().kind);
if options.include_extra_comments {
writeln!(
write_extra(
self.tcx,
w,
"{:A$} // {}{}",
indented_terminator,
if tcx.sess.verbose_internals() {
format!("{current_location:?}: ")
} else {
String::new()
},
comment(tcx, data.terminator().source_info),
A = ALIGN,
&|visitor| visitor.visit_terminator(data.terminator(), current_location),
self.options,
)?;
} else {
writeln!(w, "{indented_terminator}")?;
}
write_extra(
tcx,
w,
|visitor| {
visitor.visit_terminator(data.terminator(), current_location);
},
options,
)?;
(self.extra_data)(PassWhere::AfterLocation(current_location), w)?;
(self.extra_data)(PassWhere::AfterTerminator(block), w)?;
writeln!(w, "{INDENT}}}")
}
extra_data(PassWhere::AfterLocation(current_location), w)?;
extra_data(PassWhere::AfterTerminator(block), w)?;
writeln!(w, "{INDENT}}}")
}
impl Debug for Statement<'_> {
@ -1374,15 +1347,12 @@ fn post_fmt_projection(projection: &[PlaceElem<'_>], fmt: &mut Formatter<'_>) ->
/// After we print the main statement, we sometimes dump extra
/// information. There's often a lot of little things "nuzzled up" in
/// a statement.
fn write_extra<'tcx, F>(
fn write_extra<'tcx>(
tcx: TyCtxt<'tcx>,
write: &mut dyn io::Write,
mut visit_op: F,
visit_op: &dyn Fn(&mut ExtraComments<'tcx>),
options: PrettyPrintMirOptions,
) -> io::Result<()>
where
F: FnMut(&mut ExtraComments<'tcx>),
{
) -> io::Result<()> {
if options.include_extra_comments {
let mut extra_comments = ExtraComments { tcx, comments: vec![] };
visit_op(&mut extra_comments);

View file

@ -6,6 +6,7 @@ use rustc_span::ErrorGuaranteed;
use crate::mir::interpret::EvalToValTreeResult;
use crate::query::CyclePlaceholder;
use crate::traits::solve;
use crate::ty::adjustment::CoerceUnsizedInfo;
use crate::ty::{self, Ty, TyCtxt};
use crate::{mir, traits};
@ -219,6 +220,10 @@ impl<T0, T1> EraseType for (&'_ T0, &'_ T1) {
type Result = [u8; size_of::<(&'static (), &'static ())>()];
}
impl<T0> EraseType for (solve::QueryResult<'_>, &'_ T0) {
type Result = [u8; size_of::<(solve::QueryResult<'static>, &'static ())>()];
}
impl<T0, T1> EraseType for (&'_ T0, &'_ [T1]) {
type Result = [u8; size_of::<(&'static (), &'static [()])>()];
}

View file

@ -131,7 +131,7 @@ use crate::traits::query::{
};
use crate::traits::{
CodegenObligationError, DynCompatibilityViolation, EvaluationResult, ImplSource,
ObligationCause, OverflowError, WellFormedLoc, specialization_graph,
ObligationCause, OverflowError, WellFormedLoc, solve, specialization_graph,
};
use crate::ty::fast_reject::SimplifiedType;
use crate::ty::layout::ValidityRequirement;
@ -2563,6 +2563,14 @@ rustc_queries! {
desc { "computing autoderef types for `{}`", goal.canonical.value.value }
}
/// Used by `-Znext-solver` to compute proof trees.
query evaluate_root_goal_for_proof_tree_raw(
goal: solve::CanonicalInput<'tcx>,
) -> (solve::QueryResult<'tcx>, &'tcx solve::inspect::Probe<TyCtxt<'tcx>>) {
no_hash
desc { "computing proof tree for `{}`", goal.canonical.value.goal.predicate }
}
/// Returns the Rust target features for the current target. These are not always the same as LLVM target features!
query rust_target_features(_: CrateNum) -> &'tcx UnordMap<String, rustc_target::target_features::Stability> {
arena_cache

View file

@ -389,10 +389,14 @@ pub enum ObligationCauseCode<'tcx> {
/// against.
MatchImpl(ObligationCause<'tcx>, DefId),
UnOp {
hir_id: HirId,
},
BinOp {
lhs_hir_id: HirId,
rhs_hir_id: Option<HirId>,
rhs_span: Option<Span>,
rhs_hir_id: HirId,
rhs_span: Span,
rhs_is_lit: bool,
output_ty: Option<Ty<'tcx>>,
},

View file

@ -72,9 +72,9 @@ use crate::query::plumbing::QuerySystem;
use crate::query::{IntoQueryParam, LocalCrate, Providers, TyCtxtAt};
use crate::thir::Thir;
use crate::traits;
use crate::traits::solve;
use crate::traits::solve::{
ExternalConstraints, ExternalConstraintsData, PredefinedOpaques, PredefinedOpaquesData,
self, CanonicalInput, ExternalConstraints, ExternalConstraintsData, PredefinedOpaques,
PredefinedOpaquesData, QueryResult, inspect,
};
use crate::ty::predicate::ExistentialPredicateStableCmpExt as _;
use crate::ty::{
@ -737,6 +737,17 @@ impl<'tcx> Interner for TyCtxt<'tcx> {
self.opaque_types_defined_by(defining_anchor).iter().chain(coroutines_defined_by),
)
}
type ProbeRef = &'tcx inspect::Probe<TyCtxt<'tcx>>;
fn mk_probe_ref(self, probe: inspect::Probe<Self>) -> &'tcx inspect::Probe<TyCtxt<'tcx>> {
self.arena.alloc(probe)
}
fn evaluate_root_goal_for_proof_tree_raw(
self,
canonical_goal: CanonicalInput<'tcx>,
) -> (QueryResult<'tcx>, &'tcx inspect::Probe<TyCtxt<'tcx>>) {
self.evaluate_root_goal_for_proof_tree_raw(canonical_goal)
}
}
macro_rules! bidirectional_lang_item_map {

View file

@ -13,7 +13,7 @@ use crate::ty::{EarlyBinder, GenericArgsRef};
pub enum GenericParamDefKind {
Lifetime,
Type { has_default: bool, synthetic: bool },
Const { has_default: bool, synthetic: bool },
Const { has_default: bool },
}
impl GenericParamDefKind {

View file

@ -5,9 +5,9 @@ edition = "2024"
[dependencies]
# tidy-alphabetical-start
itertools.workspace = true
itertools = "0.12"
rustc_abi = { path = "../rustc_abi" }
rustc_apfloat.workspace = true
rustc_apfloat = "0.2.0"
rustc_arena = { path = "../rustc_arena" }
rustc_ast = { path = "../rustc_ast" }
rustc_data_structures = { path = "../rustc_data_structures" }
@ -23,5 +23,5 @@ rustc_pattern_analysis = { path = "../rustc_pattern_analysis" }
rustc_session = { path = "../rustc_session" }
rustc_span = { path = "../rustc_span" }
rustc_trait_selection = { path = "../rustc_trait_selection" }
tracing.workspace = true
tracing = "0.1"
# tidy-alphabetical-end

View file

@ -293,9 +293,11 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
this.diverge_from(loop_block);
// Logic for `match`.
let scrutinee_place_builder =
unpack!(body_block = this.as_place_builder(body_block, scrutinee));
let scrutinee_span = this.thir.exprs[scrutinee].span;
let scrutinee_place_builder = unpack!(
body_block = this.lower_scrutinee(body_block, scrutinee, scrutinee_span)
);
let match_start_span = match_span.shrink_to_lo().to(scrutinee_span);
let mut patterns = Vec::with_capacity(arms.len());
@ -343,7 +345,7 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
expr_span,
|this| {
this.lower_match_arms(
destination,
state_place,
scrutinee_place_builder,
scrutinee_span,
arms,

View file

@ -388,7 +388,7 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
}
/// Evaluate the scrutinee and add the PlaceMention for it.
fn lower_scrutinee(
pub(crate) fn lower_scrutinee(
&mut self,
mut block: BasicBlock,
scrutinee_id: ExprId,

View file

@ -806,10 +806,8 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
);
body.coverage_info_hi = self.coverage_info.as_ref().map(|b| b.as_done());
use rustc_middle::mir::pretty;
let options = pretty::PrettyPrintMirOptions::from_cli(self.tcx);
pretty::write_mir_fn(self.tcx, &body, &mut |_, _| Ok(()), &mut std::io::stdout(), options)
.unwrap();
let writer = pretty::MirWriter::new(self.tcx);
writer.write_mir_fn(&body, &mut std::io::stdout()).unwrap();
}
fn finish(self) -> Body<'tcx> {
@ -827,18 +825,10 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
);
body.coverage_info_hi = self.coverage_info.map(|b| b.into_done());
let writer = pretty::MirWriter::new(self.tcx);
for (index, block) in body.basic_blocks.iter().enumerate() {
if block.terminator.is_none() {
use rustc_middle::mir::pretty;
let options = pretty::PrettyPrintMirOptions::from_cli(self.tcx);
pretty::write_mir_fn(
self.tcx,
&body,
&mut |_, _| Ok(()),
&mut std::io::stdout(),
options,
)
.unwrap();
writer.write_mir_fn(&body, &mut std::io::stdout()).unwrap();
span_bug!(self.fn_span, "no terminator on block {:?}", index);
}
}

View file

@ -5,7 +5,7 @@ edition = "2024"
[dependencies]
# tidy-alphabetical-start
polonius-engine.workspace = true
polonius-engine = "0.13.0"
regex = "1"
rustc_abi = { path = "../rustc_abi" }
rustc_ast = { path = "../rustc_ast" }
@ -18,5 +18,5 @@ rustc_macros = { path = "../rustc_macros" }
rustc_middle = { path = "../rustc_middle" }
rustc_span = { path = "../rustc_span" }
smallvec = { version = "1.8.1", features = ["union", "may_dangle"] }
tracing.workspace = true
tracing = "0.1"
# tidy-alphabetical-end

View file

@ -10,8 +10,7 @@ use std::{io, ops, str};
use regex::Regex;
use rustc_index::bit_set::DenseBitSet;
use rustc_middle::mir::{
self, BasicBlock, Body, Location, create_dump_file, dump_enabled, graphviz_safe_def_name,
traversal,
self, BasicBlock, Body, Location, MirDumper, graphviz_safe_def_name, traversal,
};
use rustc_middle::ty::TyCtxt;
use rustc_middle::ty::print::with_no_trimmed_paths;
@ -61,11 +60,13 @@ where
fs::File::create_buffered(&path)?
}
None if dump_enabled(tcx, A::NAME, def_id) => {
create_dump_file(tcx, "dot", false, A::NAME, &pass_name.unwrap_or("-----"), body)?
None => {
let Some(dumper) = MirDumper::new(tcx, A::NAME, body) else {
return Ok(());
};
let disambiguator = &pass_name.unwrap_or("-----");
dumper.set_disambiguator(disambiguator).create_dump_file("dot", body)?
}
_ => return Ok(()),
}
};
let mut file = match file {

View file

@ -5,8 +5,8 @@ edition = "2024"
[dependencies]
# tidy-alphabetical-start
either.workspace = true
itertools.workspace = true
either = "1"
itertools = "0.12"
rustc_abi = { path = "../rustc_abi" }
rustc_arena = { path = "../rustc_arena" }
rustc_ast = { path = "../rustc_ast" }
@ -26,5 +26,5 @@ rustc_span = { path = "../rustc_span" }
rustc_target = { path = "../rustc_target" }
rustc_trait_selection = { path = "../rustc_trait_selection" }
smallvec = { version = "1.8.1", features = ["union", "may_dangle"] }
tracing.workspace = true
tracing = "0.1"
# tidy-alphabetical-end

View file

@ -1294,7 +1294,9 @@ fn create_coroutine_resume_function<'tcx>(
pm::run_passes_no_validate(tcx, body, &[&abort_unwinding_calls::AbortUnwindingCalls], None);
dump_mir(tcx, false, "coroutine_resume", &0, body, |_, _| Ok(()));
if let Some(dumper) = MirDumper::new(tcx, "coroutine_resume", body) {
dumper.dump_mir(body);
}
}
/// An operation that can be performed on a coroutine.
@ -1446,7 +1448,9 @@ impl<'tcx> crate::MirPass<'tcx> for StateTransform {
assert!(body.coroutine_drop().is_none() && body.coroutine_drop_async().is_none());
dump_mir(tcx, false, "coroutine_before", &0, body, |_, _| Ok(()));
if let Some(dumper) = MirDumper::new(tcx, "coroutine_before", body) {
dumper.dump_mir(body);
}
// The first argument is the coroutine type passed by value
let coroutine_ty = body.local_decls.raw[1].ty;
@ -1506,7 +1510,10 @@ impl<'tcx> crate::MirPass<'tcx> for StateTransform {
) {
let context_mut_ref = transform_async_context(tcx, body);
expand_async_drops(tcx, body, context_mut_ref, coroutine_kind, coroutine_ty);
dump_mir(tcx, false, "coroutine_async_drop_expand", &0, body, |_, _| Ok(()));
if let Some(dumper) = MirDumper::new(tcx, "coroutine_async_drop_expand", body) {
dumper.dump_mir(body);
}
} else {
cleanup_async_drops(body);
}
@ -1605,14 +1612,18 @@ impl<'tcx> crate::MirPass<'tcx> for StateTransform {
// This is expanded to a drop ladder in `elaborate_coroutine_drops`.
let drop_clean = insert_clean_drop(tcx, body, has_async_drops);
dump_mir(tcx, false, "coroutine_pre-elab", &0, body, |_, _| Ok(()));
if let Some(dumper) = MirDumper::new(tcx, "coroutine_pre-elab", body) {
dumper.dump_mir(body);
}
// Expand `drop(coroutine_struct)` to a drop ladder which destroys upvars.
// If any upvars are moved out of, drop elaboration will handle upvar destruction.
// However we need to also elaborate the code generated by `insert_clean_drop`.
elaborate_coroutine_drops(tcx, body);
dump_mir(tcx, false, "coroutine_post-transform", &0, body, |_, _| Ok(()));
if let Some(dumper) = MirDumper::new(tcx, "coroutine_post-transform", body) {
dumper.dump_mir(body);
}
let can_unwind = can_unwind(tcx, body);

View file

@ -77,7 +77,7 @@ use rustc_hir::definitions::DisambiguatorState;
use rustc_middle::bug;
use rustc_middle::hir::place::{Projection, ProjectionKind};
use rustc_middle::mir::visit::MutVisitor;
use rustc_middle::mir::{self, dump_mir};
use rustc_middle::mir::{self, MirDumper};
use rustc_middle::ty::{self, InstanceKind, Ty, TyCtxt, TypeVisitableExt};
pub(crate) fn coroutine_by_move_body_def_id<'tcx>(
@ -225,7 +225,10 @@ pub(crate) fn coroutine_by_move_body_def_id<'tcx>(
);
by_move_body.source =
mir::MirSource::from_instance(InstanceKind::Item(body_def.def_id().to_def_id()));
dump_mir(tcx, false, "built", &"after", &by_move_body, |_, _| Ok(()));
if let Some(dumper) = MirDumper::new(tcx, "built", &by_move_body) {
dumper.set_disambiguator(&"after").dump_mir(&by_move_body);
}
// Feed HIR because we try to access this body's attrs in the inliner.
body_def.feed_hir();

View file

@ -605,7 +605,9 @@ pub(super) fn create_coroutine_drop_shim<'tcx>(
// Temporary change MirSource to coroutine's instance so that dump_mir produces more sensible
// filename.
body.source.instance = coroutine_instance;
dump_mir(tcx, false, "coroutine_drop", &0, &body, |_, _| Ok(()));
if let Some(dumper) = MirDumper::new(tcx, "coroutine_drop", &body) {
dumper.dump_mir(&body);
}
body.source.instance = drop_instance;
// Creating a coroutine drop shim happens on `Analysis(PostCleanup) -> Runtime(Initial)`
@ -696,7 +698,9 @@ pub(super) fn create_coroutine_drop_shim_async<'tcx>(
None,
);
dump_mir(tcx, false, "coroutine_drop_async", &0, &body, |_, _| Ok(()));
if let Some(dumper) = MirDumper::new(tcx, "coroutine_drop_async", &body) {
dumper.dump_mir(&body);
}
body
}
@ -741,7 +745,9 @@ pub(super) fn create_coroutine_drop_shim_proxy_async<'tcx>(
};
body.basic_blocks_mut()[call_bb].terminator = Some(Terminator { source_info, kind });
dump_mir(tcx, false, "coroutine_drop_proxy_async", &0, &body, |_, _| Ok(()));
if let Some(dumper) = MirDumper::new(tcx, "coroutine_drop_proxy_async", &body) {
dumper.dump_mir(&body);
}
body
}

View file

@ -0,0 +1,127 @@
use rustc_data_structures::fx::{FxIndexMap, FxIndexSet, IndexEntry};
use rustc_middle::mir::coverage::BasicCoverageBlock;
use rustc_span::{ExpnId, ExpnKind, Span};
/// A span paired with the coverage-graph node (BCB) it was extracted from.
#[derive(Clone, Copy, Debug)]
pub(crate) struct SpanWithBcb {
    pub(crate) span: Span,
    pub(crate) bcb: BasicCoverageBlock,
}
/// A tree of expansions, keyed by expansion ID, as built by
/// [`build_expn_tree`]. Parent/child links follow expansion call-sites.
#[derive(Debug)]
pub(crate) struct ExpnTree {
    // Map from each expansion ID to its node; insertion order is preserved.
    nodes: FxIndexMap<ExpnId, ExpnNode>,
}
impl ExpnTree {
    /// Looks up the node for a single expansion ID, if present in the tree.
    pub(crate) fn get(&self, expn_id: ExpnId) -> Option<&ExpnNode> {
        self.nodes.get(&expn_id)
    }

    /// Yields the tree node for the given expansion ID (if present), followed
    /// by the nodes of all of its descendants in depth-first order.
    pub(crate) fn iter_node_and_descendants(
        &self,
        root_expn_id: ExpnId,
    ) -> impl Iterator<Item = &ExpnNode> {
        // Implemented as an explicit-stack DFS inside a `gen` block, so the
        // traversal state lives in the returned iterator.
        gen move {
            // An absent root yields an empty iterator.
            let Some(root_node) = self.get(root_expn_id) else { return };
            yield root_node;

            // Stack of child-node-ID iterators that drives the depth-first traversal.
            let mut iter_stack = vec![root_node.child_expn_ids.iter()];

            while let Some(curr_iter) = iter_stack.last_mut() {
                // Pull the next ID from the top of the stack.
                let Some(&curr_id) = curr_iter.next() else {
                    // This level is exhausted; pop back up to its parent.
                    iter_stack.pop();
                    continue;
                };

                // Yield this node.
                // (A child ID without a node is silently skipped.)
                let Some(node) = self.get(curr_id) else { continue };
                yield node;

                // Push the node's children, to be traversed next.
                if !node.child_expn_ids.is_empty() {
                    iter_stack.push(node.child_expn_ids.iter());
                }
            }
        }
    }
}
/// One node of an [`ExpnTree`]: a single expansion, the spans attributed to
/// it, and links to its call-site parent and child expansions.
#[derive(Debug)]
pub(crate) struct ExpnNode {
    /// Storing the expansion ID in its own node is not strictly necessary,
    /// but is helpful for debugging and might be useful later.
    #[expect(dead_code)]
    pub(crate) expn_id: ExpnId,

    // Useful info extracted from `ExpnData`.
    pub(crate) expn_kind: ExpnKind,
    /// Non-dummy `ExpnData::call_site` span; `None` if the call-site span was
    /// a dummy.
    pub(crate) call_site: Option<Span>,
    /// Expansion ID of `call_site`, if present.
    /// This links an expansion node to its parent in the tree.
    pub(crate) call_site_expn_id: Option<ExpnId>,

    /// Spans (and their associated BCBs) belonging to this expansion.
    pub(crate) spans: Vec<SpanWithBcb>,
    /// Expansions whose call-site is in this expansion.
    pub(crate) child_expn_ids: FxIndexSet<ExpnId>,
}
impl ExpnNode {
    /// Creates an empty node for `expn_id`, pre-populating the information
    /// derived from its `ExpnData`: the expansion kind, its non-dummy
    /// call-site span, and the call-site's own expansion ID (which links
    /// this node to its parent in the tree).
    fn new(expn_id: ExpnId) -> Self {
        let data = expn_id.expn_data();

        // Treat a dummy call-site span as "no call-site".
        let call_site = if data.call_site.is_dummy() { None } else { Some(data.call_site) };
        let call_site_expn_id = call_site.map(|sp| sp.ctxt().outer_expn());

        Self {
            expn_id,
            expn_kind: data.kind.clone(),
            call_site,
            call_site_expn_id,
            spans: Vec::new(),
            child_expn_ids: FxIndexSet::default(),
        }
    }
}
/// Given a collection of span/BCB pairs from potentially-different syntax contexts,
/// arranges them into an "expansion tree" based on their expansion call-sites.
pub(crate) fn build_expn_tree(spans: impl IntoIterator<Item = SpanWithBcb>) -> ExpnTree {
let mut nodes = FxIndexMap::default();
let new_node = |&expn_id: &ExpnId| ExpnNode::new(expn_id);
for span_with_bcb in spans {
// Create a node for this span's enclosing expansion, and add the span to it.
let expn_id = span_with_bcb.span.ctxt().outer_expn();
let node = nodes.entry(expn_id).or_insert_with_key(new_node);
node.spans.push(span_with_bcb);
// Now walk up the expansion call-site chain, creating nodes and registering children.
let mut prev = expn_id;
let mut curr_expn_id = node.call_site_expn_id;
while let Some(expn_id) = curr_expn_id {
let entry = nodes.entry(expn_id);
let node_existed = matches!(entry, IndexEntry::Occupied(_));
let node = entry.or_insert_with_key(new_node);
node.child_expn_ids.insert(prev);
if node_existed {
break;
}
prev = expn_id;
curr_expn_id = node.call_site_expn_id;
}
}
ExpnTree { nodes }
}

View file

@ -8,6 +8,7 @@ use crate::coverage::graph::CoverageGraph;
use crate::coverage::mappings::ExtractedMappings;
mod counters;
mod expansion;
mod graph;
mod hir_info;
mod mappings;

View file

@ -1,15 +1,14 @@
use rustc_data_structures::fx::FxHashSet;
use rustc_middle::mir;
use rustc_middle::mir::coverage::{Mapping, MappingKind, START_BCB};
use rustc_middle::ty::TyCtxt;
use rustc_span::source_map::SourceMap;
use rustc_span::{BytePos, DesugaringKind, ExpnKind, MacroKind, Span};
use rustc_span::{BytePos, DesugaringKind, ExpnId, ExpnKind, MacroKind, Span};
use tracing::instrument;
use crate::coverage::expansion::{self, ExpnTree, SpanWithBcb};
use crate::coverage::graph::{BasicCoverageBlock, CoverageGraph};
use crate::coverage::hir_info::ExtractedHirInfo;
use crate::coverage::spans::from_mir::{Hole, RawSpanFromMir, SpanFromMir};
use crate::coverage::unexpand;
use crate::coverage::spans::from_mir::{Hole, RawSpanFromMir};
mod from_mir;
@ -34,19 +33,51 @@ pub(super) fn extract_refined_covspans<'tcx>(
let &ExtractedHirInfo { body_span, .. } = hir_info;
let raw_spans = from_mir::extract_raw_spans_from_mir(mir_body, graph);
let mut covspans = raw_spans
.into_iter()
.filter_map(|RawSpanFromMir { raw_span, bcb }| try {
let (span, expn_kind) =
unexpand::unexpand_into_body_span_with_expn_kind(raw_span, body_span)?;
// Discard any spans that fill the entire body, because they tend
// to represent compiler-inserted code, e.g. implicitly returning `()`.
if span.source_equal(body_span) {
return None;
};
SpanFromMir { span, expn_kind, bcb }
})
.collect::<Vec<_>>();
// Use the raw spans to build a tree of expansions for this function.
let expn_tree = expansion::build_expn_tree(
raw_spans
.into_iter()
.map(|RawSpanFromMir { raw_span, bcb }| SpanWithBcb { span: raw_span, bcb }),
);
let mut covspans = vec![];
let mut push_covspan = |covspan: Covspan| {
let covspan_span = covspan.span;
// Discard any spans not contained within the function body span.
// Also discard any spans that fill the entire body, because they tend
// to represent compiler-inserted code, e.g. implicitly returning `()`.
if !body_span.contains(covspan_span) || body_span.source_equal(covspan_span) {
return;
}
// Each pushed covspan should have the same context as the body span.
// If it somehow doesn't, discard the covspan, or panic in debug builds.
if !body_span.eq_ctxt(covspan_span) {
debug_assert!(
false,
"span context mismatch: body_span={body_span:?}, covspan.span={covspan_span:?}"
);
return;
}
covspans.push(covspan);
};
if let Some(node) = expn_tree.get(body_span.ctxt().outer_expn()) {
for &SpanWithBcb { span, bcb } in &node.spans {
push_covspan(Covspan { span, bcb });
}
// For each expansion with its call-site in the body span, try to
// distill a corresponding covspan.
for &child_expn_id in &node.child_expn_ids {
if let Some(covspan) =
single_covspan_for_child_expn(tcx, graph, &expn_tree, child_expn_id)
{
push_covspan(covspan);
}
}
}
// Only proceed if we found at least one usable span.
if covspans.is_empty() {
@ -57,17 +88,10 @@ pub(super) fn extract_refined_covspans<'tcx>(
// Otherwise, add a fake span at the start of the body, to avoid an ugly
// gap between the start of the body and the first real span.
// FIXME: Find a more principled way to solve this problem.
covspans.push(SpanFromMir::for_fn_sig(
hir_info.fn_sig_span.unwrap_or_else(|| body_span.shrink_to_lo()),
));
// First, perform the passes that need macro information.
covspans.sort_by(|a, b| graph.cmp_in_dominator_order(a.bcb, b.bcb));
remove_unwanted_expansion_spans(&mut covspans);
shrink_visible_macro_spans(tcx, &mut covspans);
// We no longer need the extra information in `SpanFromMir`, so convert to `Covspan`.
let mut covspans = covspans.into_iter().map(SpanFromMir::into_covspan).collect::<Vec<_>>();
covspans.push(Covspan {
span: hir_info.fn_sig_span.unwrap_or_else(|| body_span.shrink_to_lo()),
bcb: START_BCB,
});
let compare_covspans = |a: &Covspan, b: &Covspan| {
compare_spans(a.span, b.span)
@ -117,43 +141,37 @@ pub(super) fn extract_refined_covspans<'tcx>(
}));
}
/// Macros that expand into branches (e.g. `assert!`, `trace!`) tend to generate
/// multiple condition/consequent blocks that have the span of the whole macro
/// invocation, which is unhelpful. Keeping only the first such span seems to
/// give better mappings, so remove the others.
///
/// Similarly, `await` expands to a branch on the discriminant of `Poll`, which
/// leads to incorrect coverage if the `Future` is immediately ready (#98712).
///
/// (The input spans should be sorted in BCB dominator order, so that the
/// retained "first" span is likely to dominate the others.)
fn remove_unwanted_expansion_spans(covspans: &mut Vec<SpanFromMir>) {
let mut deduplicated_spans = FxHashSet::default();
/// For a single child expansion, try to distill it into a single span+BCB mapping.
fn single_covspan_for_child_expn(
tcx: TyCtxt<'_>,
graph: &CoverageGraph,
expn_tree: &ExpnTree,
expn_id: ExpnId,
) -> Option<Covspan> {
let node = expn_tree.get(expn_id)?;
covspans.retain(|covspan| {
match covspan.expn_kind {
// Retain only the first await-related or macro-expanded covspan with this span.
Some(ExpnKind::Desugaring(DesugaringKind::Await)) => {
deduplicated_spans.insert(covspan.span)
}
Some(ExpnKind::Macro(MacroKind::Bang, _)) => deduplicated_spans.insert(covspan.span),
// Ignore (retain) other spans.
_ => true,
let bcbs =
expn_tree.iter_node_and_descendants(expn_id).flat_map(|n| n.spans.iter().map(|s| s.bcb));
let bcb = match node.expn_kind {
// For bang-macros (e.g. `assert!`, `trace!`) and for `await`, taking
// the "first" BCB in dominator order seems to give good results.
ExpnKind::Macro(MacroKind::Bang, _) | ExpnKind::Desugaring(DesugaringKind::Await) => {
bcbs.min_by(|&a, &b| graph.cmp_in_dominator_order(a, b))?
}
});
}
// For other kinds of expansion, taking the "last" (most-dominated) BCB
// seems to give good results.
_ => bcbs.max_by(|&a, &b| graph.cmp_in_dominator_order(a, b))?,
};
/// When a span corresponds to a macro invocation that is visible from the
/// function body, truncate it to just the macro name plus `!`.
/// This seems to give better results for code that uses macros.
fn shrink_visible_macro_spans(tcx: TyCtxt<'_>, covspans: &mut Vec<SpanFromMir>) {
let source_map = tcx.sess.source_map();
for covspan in covspans {
if matches!(covspan.expn_kind, Some(ExpnKind::Macro(MacroKind::Bang, _))) {
covspan.span = source_map.span_through_char(covspan.span, '!');
}
// For bang-macro expansions, limit the call-site span to just the macro
// name plus `!`, excluding the macro arguments.
let mut span = node.call_site?;
if matches!(node.expn_kind, ExpnKind::Macro(MacroKind::Bang, _)) {
span = tcx.sess.source_map().span_through_char(span, '!');
}
Some(Covspan { span, bcb })
}
/// Discard all covspans that overlap a hole.

View file

@ -5,10 +5,9 @@ use rustc_middle::mir::coverage::CoverageKind;
use rustc_middle::mir::{
self, FakeReadCause, Statement, StatementKind, Terminator, TerminatorKind,
};
use rustc_span::{ExpnKind, Span};
use rustc_span::Span;
use crate::coverage::graph::{BasicCoverageBlock, CoverageGraph, START_BCB};
use crate::coverage::spans::Covspan;
use crate::coverage::graph::{BasicCoverageBlock, CoverageGraph};
#[derive(Debug)]
pub(crate) struct RawSpanFromMir {
@ -160,32 +159,3 @@ impl Hole {
true
}
}
#[derive(Debug)]
pub(crate) struct SpanFromMir {
/// A span that has been extracted from MIR and then "un-expanded" back to
/// within the current function's `body_span`. After various intermediate
/// processing steps, this span is emitted as part of the final coverage
/// mappings.
///
/// With the exception of `fn_sig_span`, this should always be contained
/// within `body_span`.
pub(crate) span: Span,
pub(crate) expn_kind: Option<ExpnKind>,
pub(crate) bcb: BasicCoverageBlock,
}
impl SpanFromMir {
pub(crate) fn for_fn_sig(fn_sig_span: Span) -> Self {
Self::new(fn_sig_span, None, START_BCB)
}
pub(crate) fn new(span: Span, expn_kind: Option<ExpnKind>, bcb: BasicCoverageBlock) -> Self {
Self { span, expn_kind, bcb }
}
pub(crate) fn into_covspan(self) -> Covspan {
let Self { span, expn_kind: _, bcb } = self;
Covspan { span, bcb }
}
}

View file

@ -1,4 +1,4 @@
use rustc_span::{ExpnKind, Span};
use rustc_span::Span;
/// Walks through the expansion ancestors of `original_span` to find a span that
/// is contained in `body_span` and has the same [syntax context] as `body_span`.
@ -7,49 +7,3 @@ pub(crate) fn unexpand_into_body_span(original_span: Span, body_span: Span) -> O
// we can just delegate directly to `find_ancestor_inside_same_ctxt`.
original_span.find_ancestor_inside_same_ctxt(body_span)
}
/// Walks through the expansion ancestors of `original_span` to find a span that
/// is contained in `body_span` and has the same [syntax context] as `body_span`.
///
/// If the returned span represents a bang-macro invocation (e.g. `foo!(..)`),
/// the returned symbol will be the name of that macro (e.g. `foo`).
pub(crate) fn unexpand_into_body_span_with_expn_kind(
original_span: Span,
body_span: Span,
) -> Option<(Span, Option<ExpnKind>)> {
let (span, prev) = unexpand_into_body_span_with_prev(original_span, body_span)?;
let expn_kind = prev.map(|prev| prev.ctxt().outer_expn_data().kind);
Some((span, expn_kind))
}
/// Walks through the expansion ancestors of `original_span` to find a span that
/// is contained in `body_span` and has the same [syntax context] as `body_span`.
/// The ancestor that was traversed just before the matching span (if any) is
/// also returned.
///
/// For example, a return value of `Some((ancestor, Some(prev)))` means that:
/// - `ancestor == original_span.find_ancestor_inside_same_ctxt(body_span)`
/// - `prev.parent_callsite() == ancestor`
///
/// [syntax context]: rustc_span::SyntaxContext
fn unexpand_into_body_span_with_prev(
original_span: Span,
body_span: Span,
) -> Option<(Span, Option<Span>)> {
let mut prev = None;
let mut curr = original_span;
while !body_span.contains(curr) || !curr.eq_ctxt(body_span) {
prev = Some(curr);
curr = curr.parent_callsite()?;
}
debug_assert_eq!(Some(curr), original_span.find_ancestor_inside_same_ctxt(body_span));
if let Some(prev) = prev {
debug_assert_eq!(Some(curr), prev.parent_callsite());
}
Some((curr, prev))
}

View file

@ -137,8 +137,8 @@ use rustc_index::interval::SparseIntervalMatrix;
use rustc_middle::bug;
use rustc_middle::mir::visit::{MutVisitor, PlaceContext, Visitor};
use rustc_middle::mir::{
Body, HasLocalDecls, InlineAsmOperand, Local, LocalKind, Location, Operand, PassWhere, Place,
Rvalue, Statement, StatementKind, TerminatorKind, dump_mir, traversal,
Body, HasLocalDecls, InlineAsmOperand, Local, LocalKind, Location, MirDumper, Operand,
PassWhere, Place, Rvalue, Statement, StatementKind, TerminatorKind, traversal,
};
use rustc_middle::ty::TyCtxt;
use rustc_mir_dataflow::Analysis;
@ -810,11 +810,15 @@ fn dest_prop_mir_dump<'tcx>(
let location = points.point_from_location(location);
live.rows().filter(|&r| live.contains(r, location)).collect::<Vec<_>>()
};
dump_mir(tcx, false, "DestinationPropagation-dataflow", &round, body, |pass_where, w| {
if let PassWhere::BeforeLocation(loc) = pass_where {
writeln!(w, " // live: {:?}", locals_live_at(loc))?;
}
Ok(())
});
if let Some(dumper) = MirDumper::new(tcx, "DestinationPropagation-dataflow", body) {
let extra_data = &|pass_where, w: &mut dyn std::io::Write| {
if let PassWhere::BeforeLocation(loc) = pass_where {
writeln!(w, " // live: {:?}", locals_live_at(loc))?;
}
Ok(())
};
dumper.set_disambiguator(&round).set_extra_data(extra_data).dump_mir(body)
}
}

View file

@ -5,6 +5,7 @@
#![feature(const_type_name)]
#![feature(cow_is_borrowed)]
#![feature(file_buffered)]
#![feature(gen_blocks)]
#![feature(if_let_guard)]
#![feature(impl_trait_in_assoc_type)]
#![feature(try_blocks)]

View file

@ -12,8 +12,8 @@ use rustc_index::{IndexSlice, IndexVec};
use rustc_macros::{LintDiagnostic, Subdiagnostic};
use rustc_middle::bug;
use rustc_middle::mir::{
self, BasicBlock, Body, ClearCrossCrate, Local, Location, Place, StatementKind, TerminatorKind,
dump_mir,
self, BasicBlock, Body, ClearCrossCrate, Local, Location, MirDumper, Place, StatementKind,
TerminatorKind,
};
use rustc_middle::ty::significant_drop_order::{
extract_component_with_significant_dtor, ty_dtor_span,
@ -227,7 +227,10 @@ pub(crate) fn run_lint<'tcx>(tcx: TyCtxt<'tcx>, def_id: LocalDefId, body: &Body<
return;
}
dump_mir(tcx, false, "lint_tail_expr_drop_order", &0 as _, body, |_, _| Ok(()));
if let Some(dumper) = MirDumper::new(tcx, "lint_tail_expr_drop_order", body) {
dumper.dump_mir(body);
}
let locals_with_user_names = collect_user_names(body);
let is_closure_like = tcx.is_closure_like(def_id.to_def_id());

View file

@ -2,7 +2,7 @@ use std::cell::RefCell;
use std::collections::hash_map::Entry;
use rustc_data_structures::fx::{FxHashMap, FxIndexSet};
use rustc_middle::mir::{self, Body, MirPhase, RuntimePhase};
use rustc_middle::mir::{Body, MirDumper, MirPhase, RuntimePhase};
use rustc_middle::ty::TyCtxt;
use rustc_session::Session;
use tracing::trace;
@ -281,16 +281,22 @@ fn run_passes_inner<'tcx>(
let lint = tcx.sess.opts.unstable_opts.lint_mir;
for pass in passes {
let name = pass.name();
let pass_name = pass.name();
if !should_run_pass(tcx, *pass, optimizations) {
continue;
};
let dump_enabled = pass.is_mir_dump_enabled();
let dumper = if pass.is_mir_dump_enabled()
&& let Some(dumper) = MirDumper::new(tcx, pass_name, body)
{
Some(dumper.set_show_pass_num().set_disambiguator(&"before"))
} else {
None
};
if dump_enabled {
dump_mir_for_pass(tcx, body, name, false);
if let Some(dumper) = dumper.as_ref() {
dumper.dump_mir(body);
}
if let Some(prof_arg) = &prof_arg {
@ -302,14 +308,15 @@ fn run_passes_inner<'tcx>(
pass.run_pass(tcx, body);
}
if dump_enabled {
dump_mir_for_pass(tcx, body, name, true);
if let Some(dumper) = dumper {
dumper.set_disambiguator(&"after").dump_mir(body);
}
if validate {
validate_body(tcx, body, format!("after pass {name}"));
validate_body(tcx, body, format!("after pass {pass_name}"));
}
if lint {
lint_body(tcx, body, format!("after pass {name}"));
lint_body(tcx, body, format!("after pass {pass_name}"));
}
body.pass_count += 1;
@ -345,18 +352,9 @@ pub(super) fn validate_body<'tcx>(tcx: TyCtxt<'tcx>, body: &mut Body<'tcx>, when
validate::Validator { when }.run_pass(tcx, body);
}
fn dump_mir_for_pass<'tcx>(tcx: TyCtxt<'tcx>, body: &Body<'tcx>, pass_name: &str, is_after: bool) {
mir::dump_mir(
tcx,
true,
pass_name,
if is_after { &"after" } else { &"before" },
body,
|_, _| Ok(()),
);
}
pub(super) fn dump_mir_for_phase_change<'tcx>(tcx: TyCtxt<'tcx>, body: &Body<'tcx>) {
assert_eq!(body.pass_count, 0);
mir::dump_mir(tcx, true, body.phase.name(), &"after", body, |_, _| Ok(()))
if let Some(dumper) = MirDumper::new(tcx, body.phase.name(), body) {
dumper.set_show_pass_num().set_disambiguator(&"after").dump_mir(body)
}
}

View file

@ -1242,14 +1242,12 @@ fn build_construct_coroutine_by_move_shim<'tcx>(
let body =
new_body(source, IndexVec::from_elem_n(start_block, 1), locals, sig.inputs().len(), span);
dump_mir(
tcx,
false,
if receiver_by_ref { "coroutine_closure_by_ref" } else { "coroutine_closure_by_move" },
&0,
&body,
|_, _| Ok(()),
);
let pass_name =
if receiver_by_ref { "coroutine_closure_by_ref" } else { "coroutine_closure_by_move" };
if let Some(dumper) = MirDumper::new(tcx, pass_name, &body) {
dumper.dump_mir(&body);
}
body
}

View file

@ -16,6 +16,6 @@ rustc_session = { path = "../rustc_session" }
rustc_span = { path = "../rustc_span" }
rustc_target = { path = "../rustc_target" }
serde = "1"
serde_json.workspace = true
tracing.workspace = true
serde_json = "1"
tracing = "0.1"
# tidy-alphabetical-end

View file

@ -5,13 +5,13 @@ edition = "2024"
[dependencies]
# tidy-alphabetical-start
derive-where.workspace = true
derive-where = "1.2.7"
rustc_data_structures = { path = "../rustc_data_structures", optional = true }
rustc_index = { path = "../rustc_index", default-features = false }
rustc_macros = { path = "../rustc_macros", optional = true }
rustc_type_ir = { path = "../rustc_type_ir", default-features = false }
rustc_type_ir_macros = { path = "../rustc_type_ir_macros" }
tracing.workspace = true
tracing = "0.1"
# tidy-alphabetical-end
[features]

View file

@ -56,22 +56,23 @@ where
///
/// This expects `goal` and `opaque_types` to be eager resolved.
pub(super) fn canonicalize_goal(
&self,
delegate: &D,
goal: Goal<I, I::Predicate>,
opaque_types: Vec<(ty::OpaqueTypeKey<I>, I::Ty)>,
) -> (Vec<I::GenericArg>, CanonicalInput<I, I::Predicate>) {
let mut orig_values = Default::default();
let canonical = Canonicalizer::canonicalize_input(
self.delegate,
delegate,
&mut orig_values,
QueryInput {
goal,
predefined_opaques_in_body: self
predefined_opaques_in_body: delegate
.cx()
.mk_predefined_opaques_in_body(PredefinedOpaquesData { opaque_types }),
},
);
let query_input = ty::CanonicalQueryInput { canonical, typing_mode: self.typing_mode() };
let query_input =
ty::CanonicalQueryInput { canonical, typing_mode: delegate.typing_mode() };
(orig_values, query_input)
}
@ -271,28 +272,23 @@ where
/// - we apply the `external_constraints` returned by the query, returning
/// the `normalization_nested_goals`
pub(super) fn instantiate_and_apply_query_response(
&mut self,
delegate: &D,
param_env: I::ParamEnv,
original_values: &[I::GenericArg],
response: CanonicalResponse<I>,
span: I::Span,
) -> (NestedNormalizationGoals<I>, Certainty) {
let instantiation = Self::compute_query_response_instantiation_values(
self.delegate,
delegate,
&original_values,
&response,
self.origin_span,
span,
);
let Response { var_values, external_constraints, certainty } =
self.delegate.instantiate_canonical(response, instantiation);
delegate.instantiate_canonical(response, instantiation);
Self::unify_query_var_values(
self.delegate,
param_env,
&original_values,
var_values,
self.origin_span,
);
Self::unify_query_var_values(delegate, param_env, &original_values, var_values, span);
let ExternalConstraintsData {
region_constraints,
@ -300,8 +296,8 @@ where
normalization_nested_goals,
} = &*external_constraints;
self.register_region_constraints(region_constraints);
self.register_new_opaque_types(opaque_types);
Self::register_region_constraints(delegate, region_constraints, span);
Self::register_new_opaque_types(delegate, opaque_types, span);
(normalization_nested_goals.clone(), certainty)
}
@ -424,21 +420,26 @@ where
}
fn register_region_constraints(
&mut self,
delegate: &D,
outlives: &[ty::OutlivesPredicate<I, I::GenericArg>],
span: I::Span,
) {
for &ty::OutlivesPredicate(lhs, rhs) in outlives {
match lhs.kind() {
ty::GenericArgKind::Lifetime(lhs) => self.register_region_outlives(lhs, rhs),
ty::GenericArgKind::Type(lhs) => self.register_ty_outlives(lhs, rhs),
ty::GenericArgKind::Lifetime(lhs) => delegate.sub_regions(rhs, lhs, span),
ty::GenericArgKind::Type(lhs) => delegate.register_ty_outlives(lhs, rhs, span),
ty::GenericArgKind::Const(_) => panic!("const outlives: {lhs:?}: {rhs:?}"),
}
}
}
fn register_new_opaque_types(&mut self, opaque_types: &[(ty::OpaqueTypeKey<I>, I::Ty)]) {
fn register_new_opaque_types(
delegate: &D,
opaque_types: &[(ty::OpaqueTypeKey<I>, I::Ty)],
span: I::Span,
) {
for &(key, ty) in opaque_types {
let prev = self.delegate.register_hidden_type_in_storage(key, ty, self.origin_span);
let prev = delegate.register_hidden_type_in_storage(key, ty, span);
// We eagerly resolve inference variables when computing the query response.
// This can cause previously distinct opaque type keys to now be structurally equal.
//
@ -447,7 +448,7 @@ where
// types here. However, doing so is difficult as it may result in nested goals and
// any errors may make it harder to track the control flow for diagnostics.
if let Some(prev) = prev {
self.delegate.add_duplicate_opaque_type(key, prev, self.origin_span);
delegate.add_duplicate_opaque_type(key, prev, span);
}
}
}

Some files were not shown because too many files have changed in this diff Show more