Auto merge of #3265 - rust-lang:rustup-2024-01-13, r=saethlin

Automatic Rustup

commit 12c0f090af
401 changed files with 7539 additions and 2399 deletions
Cargo.lock
@@ -119,6 +119,16 @@ dependencies = [
  "yansi-term",
 ]
 
+[[package]]
+name = "annotate-snippets"
+version = "0.10.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "0a433302f833baa830c0092100c481c7ea768c5981a3c36f549517a502f246dd"
+dependencies = [
+ "anstyle",
+ "unicode-width",
+]
+
 [[package]]
 name = "ansi_term"
 version = "0.12.1"
@@ -3771,7 +3781,7 @@ dependencies = [
 name = "rustc_errors"
 version = "0.0.0"
 dependencies = [
- "annotate-snippets",
+ "annotate-snippets 0.10.1",
  "derive_setters",
  "rustc_ast",
  "rustc_ast_pretty",
@@ -3831,7 +3841,7 @@ dependencies = [
 name = "rustc_fluent_macro"
 version = "0.0.0"
 dependencies = [
- "annotate-snippets",
+ "annotate-snippets 0.10.1",
  "fluent-bundle",
  "fluent-syntax",
  "proc-macro2",
@@ -4564,6 +4574,7 @@ checksum = "8ba09476327c4b70ccefb6180f046ef588c26a24cf5d269a9feba316eb4f029f"
 name = "rustc_trait_selection"
 version = "0.0.0"
 dependencies = [
+ "bitflags 2.4.1",
  "itertools",
  "rustc_ast",
  "rustc_attr",
@@ -4738,7 +4749,7 @@ dependencies = [
 name = "rustfmt-nightly"
 version = "1.7.0"
 dependencies = [
- "annotate-snippets",
+ "annotate-snippets 0.9.1",
  "anyhow",
  "bytecount",
  "cargo_metadata 0.15.4",
@@ -5728,7 +5739,7 @@ version = "0.21.2"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "aaf4bf7c184b8dfc7a4d3b90df789b1eb992ee42811cd115f32a7a1eb781058d"
 dependencies = [
- "annotate-snippets",
+ "annotate-snippets 0.9.1",
  "anyhow",
  "bstr",
  "cargo-platform",
@@ -5859,9 +5870,9 @@ checksum = "1dd624098567895118886609431a7c3b8f516e41d30e0643f03d94592a147e36"
 
 [[package]]
 name = "unicode-width"
-version = "0.1.10"
+version = "0.1.11"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "c0edd1e5b14653f783770bce4a4dabb4a5108a5370a5f5d8cfe8710c361f6c8b"
+checksum = "e51733f11c9c4f72aa0c160008246859e340b00807569a0da0e7a1079b27ba85"
 dependencies = [
  "compiler_builtins",
  "rustc-std-workspace-core",
@@ -64,7 +64,7 @@ exclude = [
 ]
 
 [profile.release.package.compiler_builtins]
-# The compiler-builtins crate cannot reference libcore, and it's own CI will
+# The compiler-builtins crate cannot reference libcore, and its own CI will
 # verify that this is the case. This requires, however, that the crate is built
 # without overflow checks and debug assertions. Forcefully disable debug
 # assertions and overflow checks here which should ensure that even if these
@@ -2399,10 +2399,10 @@ mod error {
         /// and we want only the best of those errors.
         ///
         /// The `report_use_of_moved_or_uninitialized` function checks this map and replaces the
-        /// diagnostic (if there is one) if the `Place` of the error being reported is a prefix of the
-        /// `Place` of the previous most diagnostic. This happens instead of buffering the error. Once
-        /// all move errors have been reported, any diagnostics in this map are added to the buffer
-        /// to be emitted.
+        /// diagnostic (if there is one) if the `Place` of the error being reported is a prefix of
+        /// the `Place` of the previous most diagnostic. This happens instead of buffering the
+        /// error. Once all move errors have been reported, any diagnostics in this map are added
+        /// to the buffer to be emitted.
         ///
         /// `BTreeMap` is used to preserve the order of insertions when iterating. This is necessary
         /// when errors in the map are being re-added to the error buffer so that errors with the
@@ -2410,7 +2410,8 @@ mod error {
         buffered_move_errors:
             BTreeMap<Vec<MoveOutIndex>, (PlaceRef<'tcx>, DiagnosticBuilder<'tcx>)>,
         buffered_mut_errors: FxIndexMap<Span, (DiagnosticBuilder<'tcx>, usize)>,
-        /// Diagnostics to be reported buffer.
+        /// Buffer of diagnostics to be reported. Uses `Diagnostic` rather than `DiagnosticBuilder`
+        /// because it has a mixture of error diagnostics and non-error diagnostics.
         buffered: Vec<Diagnostic>,
         /// Set to Some if we emit an error during borrowck
         tainted_by_errors: Option<ErrorGuaranteed>,
@@ -2434,11 +2435,11 @@ mod error {
                     "diagnostic buffered but not emitted",
                 ))
             }
-            t.buffer(&mut self.buffered);
+            self.buffered.push(t.into_diagnostic());
         }
 
         pub fn buffer_non_error_diag(&mut self, t: DiagnosticBuilder<'_, ()>) {
-            t.buffer(&mut self.buffered);
+            self.buffered.push(t.into_diagnostic());
         }
 
         pub fn set_tainted_by_errors(&mut self, e: ErrorGuaranteed) {
@@ -2486,13 +2487,13 @@ mod error {
         // Buffer any move errors that we collected and de-duplicated.
         for (_, (_, diag)) in std::mem::take(&mut self.errors.buffered_move_errors) {
             // We have already set tainted for this error, so just buffer it.
-            diag.buffer(&mut self.errors.buffered);
+            self.errors.buffered.push(diag.into_diagnostic());
         }
         for (_, (mut diag, count)) in std::mem::take(&mut self.errors.buffered_mut_errors) {
             if count > 10 {
                 diag.note(format!("...and {} other attempted mutable borrows", count - 10));
             }
-            diag.buffer(&mut self.errors.buffered);
+            self.errors.buffered.push(diag.into_diagnostic());
         }
 
         if !self.errors.buffered.is_empty() {
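(Not part of the diff.) The doc comment in the first borrowck hunk above describes a prefix-replacement scheme: a newly reported error whose `Place` is a prefix of an already-buffered error's `Place` supersedes the more specific one. A toy version, with places reduced to `Vec<u32>` projection paths:

```rust
use std::collections::BTreeMap;

// Keep only the "best" (outermost-prefix) error per place, as the doc
// comment above describes for `buffered_move_errors`.
fn record_error(map: &mut BTreeMap<Vec<u32>, String>, place: Vec<u32>, msg: String) {
    // If the new place is a prefix of an already-buffered error's place,
    // the new error is more general: replace the old one.
    let superseded: Vec<Vec<u32>> =
        map.keys().filter(|k| k.starts_with(&place[..])).cloned().collect();
    for k in superseded {
        map.remove(&k);
    }
    map.insert(place, msg);
}

fn main() {
    let mut errors = BTreeMap::new();
    record_error(&mut errors, vec![0, 1, 2], "use of moved value `a.b.c`".into());
    // `a` is a prefix of `a.b.c`: the more specific diagnostic is replaced.
    record_error(&mut errors, vec![0], "use of moved value `a`".into());
    assert_eq!(errors.len(), 1);
    assert!(errors.contains_key(&vec![0]));
}
```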
@@ -18,7 +18,7 @@ fn lookup_env<'cx>(cx: &'cx ExtCtxt<'_>, var: Symbol) -> Option<Symbol> {
     if let Some(value) = cx.sess.opts.logical_env.get(var) {
         return Some(Symbol::intern(value));
     }
-    // If the environment variable was not defined with the `--env` option, we try to retrieve it
+    // If the environment variable was not defined with the `--env-set` option, we try to retrieve it
     // from rustc's environment.
     env::var(var).ok().as_deref().map(Symbol::intern)
 }
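(Not part of the diff.) A usage note: `--env-set` is the unstable flag the corrected comment refers to; it feeds `env!`/`option_env!` a "logical" value without touching the real process environment, with the fallback to `std::env` shown in `lookup_env` above. A minimal sketch — the exact invocation may differ on your nightly:

```rust
// Built with something like (the flag is unstable; check `rustc --help -v`):
//   rustc -Zunstable-options --env-set BUILD_CHANNEL=nightly main.rs
// BUILD_CHANNEL is a made-up variable name for illustration.
fn main() {
    // Resolved at compile time: first from the logical env set by the flag,
    // then from rustc's own process environment (the fallback path above).
    println!("channel: {}", env!("BUILD_CHANNEL"));
}
```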
@@ -60,7 +60,7 @@ fn prepare_lto(
     };
 
     let symbol_filter = &|&(ref name, info): &(String, SymbolExportInfo)| {
-        if info.level.is_below_threshold(export_threshold) || info.used || info.used_compiler {
+        if info.level.is_below_threshold(export_threshold) || info.used {
             Some(CString::new(name.as_str()).unwrap())
         } else {
             None
@@ -569,6 +569,7 @@ pub(crate) unsafe fn llvm_optimize(
         unroll_loops,
         config.vectorize_slp,
         config.vectorize_loop,
+        config.no_builtins,
         config.emit_lifetime_markers,
         sanitizer_options.as_ref(),
         pgo_gen_path.as_ref().map_or(std::ptr::null(), |s| s.as_ptr()),
@@ -677,6 +678,7 @@ pub(crate) unsafe fn codegen(
     unsafe fn with_codegen<'ll, F, R>(
         tm: &'ll llvm::TargetMachine,
         llmod: &'ll llvm::Module,
+        no_builtins: bool,
         f: F,
     ) -> R
     where
@@ -684,7 +686,7 @@ pub(crate) unsafe fn codegen(
     {
         let cpm = llvm::LLVMCreatePassManager();
         llvm::LLVMAddAnalysisPasses(tm, cpm);
-        llvm::LLVMRustAddLibraryInfo(cpm, llmod);
+        llvm::LLVMRustAddLibraryInfo(cpm, llmod, no_builtins);
         f(cpm)
     }
 
@@ -785,7 +787,7 @@ pub(crate) unsafe fn codegen(
     } else {
         llmod
     };
-    with_codegen(tm, llmod, |cpm| {
+    with_codegen(tm, llmod, config.no_builtins, |cpm| {
         write_output_file(
             dcx,
             tm,
@@ -820,7 +822,7 @@ pub(crate) unsafe fn codegen(
         (_, SplitDwarfKind::Split) => Some(dwo_out.as_path()),
     };
 
-    with_codegen(tm, llmod, |cpm| {
+    with_codegen(tm, llmod, config.no_builtins, |cpm| {
         write_output_file(
             dcx,
             tm,
@@ -2173,8 +2173,13 @@ extern "C" {
         ArgsCstrBuff: *const c_char,
         ArgsCstrBuffLen: usize,
     ) -> *mut TargetMachine;
 
     pub fn LLVMRustDisposeTargetMachine(T: *mut TargetMachine);
-    pub fn LLVMRustAddLibraryInfo<'a>(PM: &PassManager<'a>, M: &'a Module);
+    pub fn LLVMRustAddLibraryInfo<'a>(
+        PM: &PassManager<'a>,
+        M: &'a Module,
+        DisableSimplifyLibCalls: bool,
+    );
     pub fn LLVMRustWriteOutputFile<'a>(
         T: &'a TargetMachine,
         PM: &PassManager<'a>,
@@ -2196,6 +2201,7 @@ extern "C" {
         UnrollLoops: bool,
         SLPVectorize: bool,
         LoopVectorize: bool,
+        DisableSimplifyLibCalls: bool,
         EmitLifetimeMarkers: bool,
         SanitizerOptions: Option<&SanitizerOptions>,
         PGOGenPath: *const c_char,
@@ -270,14 +270,8 @@ pub fn each_linked_rlib(
 
     for &cnum in crates {
         match fmts.get(cnum.as_usize() - 1) {
-            Some(&Linkage::NotLinked | &Linkage::Dynamic) => continue,
-            Some(&Linkage::IncludedFromDylib) => {
-                // We always link crate `compiler_builtins` statically. When enabling LTO, we include it as well.
-                if info.compiler_builtins != Some(cnum) {
-                    continue;
-                }
-            }
-            Some(&Linkage::Static) => {}
+            Some(&Linkage::NotLinked | &Linkage::Dynamic | &Linkage::IncludedFromDylib) => continue,
+            Some(_) => {}
             None => return Err(errors::LinkRlibError::MissingFormat),
         }
         let crate_name = info.crate_name[&cnum];
@@ -526,7 +520,8 @@ fn link_staticlib<'a>(
         &codegen_results.crate_info,
         Some(CrateType::Staticlib),
         &mut |cnum, path| {
-            let lto = are_upstream_rust_objects_already_included(sess);
+            let lto = are_upstream_rust_objects_already_included(sess)
+                && !ignored_for_lto(sess, &codegen_results.crate_info, cnum);
 
             let native_libs = codegen_results.crate_info.native_libraries[&cnum].iter();
             let relevant = native_libs.clone().filter(|lib| relevant_lib(sess, lib));
@@ -1277,6 +1272,24 @@ fn link_sanitizer_runtime(
     }
 }
 
+/// Returns a boolean indicating whether the specified crate should be ignored
+/// during LTO.
+///
+/// Crates ignored during LTO are not lumped together in the "massive object
+/// file" that we create and are linked in their normal rlib states. See
+/// comments below for what crates do not participate in LTO.
+///
+/// It's unusual for a crate to not participate in LTO. Typically only
+/// compiler-specific and unstable crates have a reason to not participate in
+/// LTO.
+pub fn ignored_for_lto(sess: &Session, info: &CrateInfo, cnum: CrateNum) -> bool {
+    // If our target enables builtin function lowering in LLVM then the
+    // crates providing these functions don't participate in LTO (e.g.
+    // no_builtins or compiler builtins crates).
+    !sess.target.no_builtins
+        && (info.compiler_builtins == Some(cnum) || info.is_no_builtins.contains(&cnum))
+}
+
 /// This functions tries to determine the appropriate linker (and corresponding LinkerFlavor) to use
 pub fn linker_and_flavor(sess: &Session) -> (PathBuf, LinkerFlavor) {
     fn infer_from(
@@ -2742,6 +2755,10 @@ fn rehome_sysroot_lib_dir<'a>(sess: &'a Session, lib_dir: &Path) -> PathBuf {
     // symbols). We must continue to include the rest of the rlib, however, as
     // it may contain static native libraries which must be linked in.
     //
+    // (*) Crates marked with `#![no_builtins]` don't participate in LTO and
+    // their bytecode wasn't included. The object files in those libraries must
+    // still be passed to the linker.
+    //
     // Note, however, that if we're not doing LTO we can just pass the rlib
     // blindly to the linker (fast) because it's fine if it's not actually
     // included as we're at the end of the dependency chain.
@@ -2767,7 +2784,9 @@ fn add_static_crate<'a>(
         cmd.link_rlib(&rlib_path);
     };
 
-    if !are_upstream_rust_objects_already_included(sess) {
+    if !are_upstream_rust_objects_already_included(sess)
+        || ignored_for_lto(sess, &codegen_results.crate_info, cnum)
+    {
         link_upstream(cratepath);
         return;
     }
@@ -2781,6 +2800,8 @@ fn add_static_crate<'a>(
     let canonical_name = name.replace('-', "_");
     let upstream_rust_objects_already_included =
         are_upstream_rust_objects_already_included(sess);
+    let is_builtins =
+        sess.target.no_builtins || !codegen_results.crate_info.is_no_builtins.contains(&cnum);
 
     let mut archive = archive_builder_builder.new_archive_builder(sess);
     if let Err(error) = archive.add_archive(
@@ -2797,8 +2818,9 @@ fn add_static_crate<'a>(
 
             // If we're performing LTO and this is a rust-generated object
             // file, then we don't need the object file as it's part of the
-            // LTO module.
-            if upstream_rust_objects_already_included && is_rust_object {
+            // LTO module. Note that `#![no_builtins]` is excluded from LTO,
+            // though, so we let that object file slide.
+            if upstream_rust_objects_already_included && is_rust_object && is_builtins {
                 return true;
             }
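(Not part of the diff.) A toy illustration of the predicate restored above: crates providing LLVM-lowered builtins are kept out of the LTO "massive object file" and linked as plain rlibs. `Crate` and its fields below are made-up stand-ins:

```rust
// Illustration only: filtering a crate list with an "ignored for LTO"
// predicate, mirroring the shape of the rlib walk above.
struct Crate {
    name: &'static str,
    is_compiler_builtins: bool,
    is_no_builtins: bool,
}

// A crate skips LTO when the target lets LLVM lower calls to builtins,
// because the crates *providing* those builtins must stay ordinary objects.
fn ignored_for_lto(target_no_builtins: bool, krate: &Crate) -> bool {
    !target_no_builtins && (krate.is_compiler_builtins || krate.is_no_builtins)
}

fn main() {
    let crates = [
        Crate { name: "serde", is_compiler_builtins: false, is_no_builtins: false },
        Crate { name: "compiler_builtins", is_compiler_builtins: true, is_no_builtins: false },
    ];
    let lto_input: Vec<_> =
        crates.iter().filter(|c| !ignored_for_lto(false, c)).map(|c| c.name).collect();
    assert_eq!(lto_input, ["serde"]); // compiler_builtins stays a plain rlib
}
```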
@@ -54,8 +54,8 @@ fn reachable_non_generics_provider(tcx: TyCtxt<'_>, _: LocalCrate) -> DefIdMap<S
     // export level, however, as they're just implementation details.
     // Down below we'll hardwire all of the symbols to the `Rust` export
     // level instead.
-    let is_compiler_builtins = tcx.is_compiler_builtins(LOCAL_CRATE);
-    let special_runtime_crate = tcx.is_panic_runtime(LOCAL_CRATE) || is_compiler_builtins;
+    let special_runtime_crate =
+        tcx.is_panic_runtime(LOCAL_CRATE) || tcx.is_compiler_builtins(LOCAL_CRATE);
 
     let mut reachable_non_generics: DefIdMap<_> = tcx
         .reachable_set(())
@@ -105,14 +105,8 @@ fn reachable_non_generics_provider(tcx: TyCtxt<'_>, _: LocalCrate) -> DefIdMap<S
             }
         })
         .map(|def_id| {
-            let codegen_attrs = tcx.codegen_fn_attrs(def_id.to_def_id());
             // We won't link right if this symbol is stripped during LTO.
             let name = tcx.symbol_name(Instance::mono(tcx, def_id.to_def_id())).name;
-            // We have to preserve the symbols of the built-in functions during LTO.
-            let is_builtin_fn = is_compiler_builtins
-                && symbol_export_level(tcx, def_id.to_def_id())
-                    .is_below_threshold(SymbolExportLevel::C)
-                && codegen_attrs.flags.contains(CodegenFnAttrFlags::NO_MANGLE);
             let used = name == "rust_eh_personality";
 
             let export_level = if special_runtime_crate {
@@ -120,6 +114,7 @@ fn reachable_non_generics_provider(tcx: TyCtxt<'_>, _: LocalCrate) -> DefIdMap<S
             } else {
                 symbol_export_level(tcx, def_id.to_def_id())
             };
+            let codegen_attrs = tcx.codegen_fn_attrs(def_id.to_def_id());
             debug!(
                 "EXPORTED SYMBOL (local): {} ({:?})",
                 tcx.symbol_name(Instance::mono(tcx, def_id.to_def_id())),
@@ -139,7 +134,6 @@ fn reachable_non_generics_provider(tcx: TyCtxt<'_>, _: LocalCrate) -> DefIdMap<S
                 used: codegen_attrs.flags.contains(CodegenFnAttrFlags::USED)
                     || codegen_attrs.flags.contains(CodegenFnAttrFlags::USED_LINKER)
                     || used,
-                used_compiler: is_builtin_fn,
             };
             (def_id.to_def_id(), info)
         })
@@ -152,7 +146,6 @@ fn reachable_non_generics_provider(tcx: TyCtxt<'_>, _: LocalCrate) -> DefIdMap<S
                 level: SymbolExportLevel::C,
                 kind: SymbolExportKind::Data,
                 used: false,
-                used_compiler: false,
             },
         );
     }
@@ -201,7 +194,6 @@ fn exported_symbols_provider_local(
                 level: info.level,
                 kind: SymbolExportKind::Text,
                 used: info.used,
-                used_compiler: false,
             },
         )
     })
@@ -218,7 +210,6 @@ fn exported_symbols_provider_local(
             level: SymbolExportLevel::C,
             kind: SymbolExportKind::Text,
             used: false,
-            used_compiler: false,
         },
     ));
 }
@@ -238,7 +229,6 @@ fn exported_symbols_provider_local(
             level: SymbolExportLevel::Rust,
            kind: SymbolExportKind::Text,
            used: false,
-            used_compiler: false,
        },
    ));
 }
@@ -251,7 +241,6 @@ fn exported_symbols_provider_local(
             level: SymbolExportLevel::Rust,
             kind: SymbolExportKind::Data,
             used: false,
-            used_compiler: false,
         },
     ))
 }
@@ -271,7 +260,6 @@ fn exported_symbols_provider_local(
             level: SymbolExportLevel::C,
             kind: SymbolExportKind::Data,
             used: false,
-            used_compiler: false,
         },
     )
 }));
@@ -297,7 +285,6 @@ fn exported_symbols_provider_local(
             level: SymbolExportLevel::C,
             kind: SymbolExportKind::Data,
             used: false,
-            used_compiler: false,
         },
     )
 }));
@@ -315,7 +302,6 @@ fn exported_symbols_provider_local(
             level: SymbolExportLevel::C,
             kind: SymbolExportKind::Data,
             used: true,
-            used_compiler: false,
         },
     ));
 }
@@ -356,7 +342,6 @@ fn exported_symbols_provider_local(
             level: SymbolExportLevel::Rust,
             kind: SymbolExportKind::Text,
             used: false,
-            used_compiler: false,
         },
     ));
 }
@@ -373,7 +358,6 @@ fn exported_symbols_provider_local(
             level: SymbolExportLevel::Rust,
             kind: SymbolExportKind::Text,
             used: false,
-            used_compiler: false,
         },
     ));
 }
@@ -148,12 +148,23 @@ impl ModuleConfig {
 
         let emit_obj = if !should_emit_obj {
             EmitObj::None
-        } else if sess.target.obj_is_bitcode || sess.opts.cg.linker_plugin_lto.enabled() {
+        } else if sess.target.obj_is_bitcode
+            || (sess.opts.cg.linker_plugin_lto.enabled() && !no_builtins)
+        {
             // This case is selected if the target uses objects as bitcode, or
             // if linker plugin LTO is enabled. In the linker plugin LTO case
             // the assumption is that the final link-step will read the bitcode
             // and convert it to object code. This may be done by either the
             // native linker or rustc itself.
+            //
+            // Note, however, that the linker-plugin-lto requested here is
+            // explicitly ignored for `#![no_builtins]` crates. These crates are
+            // specifically ignored by rustc's LTO passes and wouldn't work if
+            // loaded into the linker. These crates define symbols that LLVM
+            // lowers intrinsics to, and these symbol dependencies aren't known
+            // until after codegen. As a result any crate marked
+            // `#![no_builtins]` is assumed to not participate in LTO and
+            // instead goes on to generate object code.
             EmitObj::Bitcode
         } else if need_bitcode_in_object(tcx) {
             EmitObj::ObjectCode(BitcodeSection::Full)
@@ -1023,6 +1034,9 @@ fn start_executing_work<B: ExtraBackendMethods>(
 
     let mut each_linked_rlib_for_lto = Vec::new();
     drop(link::each_linked_rlib(crate_info, None, &mut |cnum, path| {
+        if link::ignored_for_lto(sess, crate_info, cnum) {
+            return;
+        }
         each_linked_rlib_for_lto.push((cnum, path.to_path_buf()));
    }));
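(Not part of the diff.) The comment block above encodes a small decision table; here it is as a standalone function with simplified boolean inputs (rustc's real types differ):

```rust
// Illustration only (not rustc's real types): the object-emission decision
// documented by the comment above, as a small pure function.
#[derive(Debug, PartialEq)]
enum EmitObj {
    None,
    Bitcode,    // the final link step converts bitcode to object code
    ObjectCode, // an ordinary object file
}

fn decide_emit_obj(
    should_emit_obj: bool,
    obj_is_bitcode: bool,
    linker_plugin_lto: bool,
    no_builtins: bool,
) -> EmitObj {
    if !should_emit_obj {
        EmitObj::None
    } else if obj_is_bitcode || (linker_plugin_lto && !no_builtins) {
        EmitObj::Bitcode
    } else {
        // `#![no_builtins]` crates opt out of linker-plugin LTO: LLVM may
        // lower intrinsics to calls into them only after codegen, so they
        // must stay ordinary object files.
        EmitObj::ObjectCode
    }
}

fn main() {
    assert_eq!(decide_emit_obj(true, false, true, false), EmitObj::Bitcode);
    assert_eq!(decide_emit_obj(true, false, true, true), EmitObj::ObjectCode);
}
```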
@@ -859,6 +859,7 @@ impl CrateInfo {
             local_crate_name,
             compiler_builtins,
             profiler_runtime: None,
+            is_no_builtins: Default::default(),
             native_libraries: Default::default(),
             used_libraries: tcx.native_libraries(LOCAL_CRATE).iter().map(Into::into).collect(),
             crate_name: Default::default(),
@@ -885,6 +886,9 @@ impl CrateInfo {
             if tcx.is_profiler_runtime(cnum) {
                 info.profiler_runtime = Some(cnum);
             }
+            if tcx.is_no_builtins(cnum) {
+                info.is_no_builtins.insert(cnum);
+            }
         }
 
         // Handle circular dependencies in the standard library.
@@ -892,7 +896,9 @@ impl CrateInfo {
         // If global LTO is enabled then almost everything (*) is glued into a single object file,
         // so this logic is not necessary and can cause issues on some targets (due to weak lang
         // item symbols being "privatized" to that object file), so we disable it.
-        // (*) Native libs are not glued, and we assume that they cannot define weak lang items.
+        // (*) Native libs, and `#[compiler_builtins]` and `#[no_builtins]` crates are not glued,
+        // and we assume that they cannot define weak lang items. This is not currently enforced
+        // by the compiler, but that's ok because all this stuff is unstable anyway.
         let target = &tcx.sess.target;
         if !are_upstream_rust_objects_already_included(tcx.sess) {
             let missing_weak_lang_items: FxHashSet<Symbol> = info
@@ -25,7 +25,7 @@ extern crate tracing;
 extern crate rustc_middle;
 
 use rustc_ast as ast;
-use rustc_data_structures::fx::FxHashMap;
+use rustc_data_structures::fx::{FxHashMap, FxHashSet};
 use rustc_data_structures::sync::Lrc;
 use rustc_hir::def_id::CrateNum;
 use rustc_middle::dep_graph::WorkProduct;
@@ -158,6 +158,7 @@ pub struct CrateInfo {
     pub local_crate_name: Symbol,
     pub compiler_builtins: Option<CrateNum>,
     pub profiler_runtime: Option<CrateNum>,
+    pub is_no_builtins: FxHashSet<CrateNum>,
     pub native_libraries: FxHashMap<CrateNum, Vec<NativeLib>>,
     pub crate_name: FxHashMap<CrateNum, Symbol>,
     pub used_libraries: Vec<NativeLib>,
@@ -1,6 +1,6 @@
 //! The `Visitor` responsible for actually checking a `mir::Body` for invalid operations.
 
-use rustc_errors::{Diagnostic, ErrorGuaranteed};
+use rustc_errors::{DiagnosticBuilder, ErrorGuaranteed};
 use rustc_hir as hir;
 use rustc_hir::def_id::DefId;
 use rustc_index::bit_set::BitSet;
@@ -214,7 +214,7 @@ pub struct Checker<'mir, 'tcx> {
     local_has_storage_dead: Option<BitSet<Local>>,
 
     error_emitted: Option<ErrorGuaranteed>,
-    secondary_errors: Vec<Diagnostic>,
+    secondary_errors: Vec<DiagnosticBuilder<'tcx>>,
 }
 
 impl<'mir, 'tcx> Deref for Checker<'mir, 'tcx> {
@@ -272,14 +272,17 @@ impl<'mir, 'tcx> Checker<'mir, 'tcx> {
         }
 
         // If we got through const-checking without emitting any "primary" errors, emit any
-        // "secondary" errors if they occurred.
+        // "secondary" errors if they occurred. Otherwise, cancel the "secondary" errors.
         let secondary_errors = mem::take(&mut self.secondary_errors);
         if self.error_emitted.is_none() {
             for error in secondary_errors {
-                self.tcx.dcx().emit_diagnostic(error);
+                error.emit();
             }
-        }
+        } else {
+            assert!(self.tcx.dcx().has_errors().is_some());
+            for error in secondary_errors {
+                error.cancel();
+            }
+        }
     }
@@ -347,7 +350,7 @@ impl<'mir, 'tcx> Checker<'mir, 'tcx> {
                 self.error_emitted = Some(reported);
             }
 
-            ops::DiagnosticImportance::Secondary => err.buffer(&mut self.secondary_errors),
+            ops::DiagnosticImportance::Secondary => self.secondary_errors.push(err),
         }
     }
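(Not part of the diff.) The emit-or-cancel pattern above, in miniature. In rustc a `DiagnosticBuilder` that is neither emitted nor cancelled aborts on drop, which is why the buffered secondary errors must be explicitly cancelled once a primary error has already been reported. `Diag` below is a made-up stand-in:

```rust
// Toy model: secondary diagnostics are buffered, then emitted only if no
// primary error fired, and cancelled otherwise.
struct Diag(String);

impl Diag {
    fn emit(self) {
        eprintln!("error: {}", self.0);
    }
    fn cancel(self) {
        // Dropped without being reported.
    }
}

fn flush_secondary(secondary: Vec<Diag>, primary_emitted: bool) {
    if !primary_emitted {
        for d in secondary {
            d.emit();
        }
    } else {
        // The primary error already explains the failure; suppress the rest.
        for d in secondary {
            d.cancel();
        }
    }
}

fn main() {
    let buffered = vec![Diag("secondary problem".into())];
    flush_secondary(buffered, true); // prints nothing
}
```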
@@ -5,7 +5,8 @@ edition = "2021"
 
 [dependencies]
 # tidy-alphabetical-start
-annotate-snippets = "0.9"
+annotate-snippets = "0.10"
 derive_setters = "0.1.6"
 rustc_ast = { path = "../rustc_ast" }
 rustc_ast_pretty = { path = "../rustc_ast_pretty" }
@@ -12,8 +12,7 @@ use crate::{
     CodeSuggestion, Diagnostic, DiagnosticId, DiagnosticMessage, Emitter, FluentBundle,
     LazyFallbackBundle, Level, MultiSpan, Style, SubDiagnostic,
 };
-use annotate_snippets::display_list::{DisplayList, FormatOptions};
-use annotate_snippets::snippet::*;
+use annotate_snippets::{Annotation, AnnotationType, Renderer, Slice, Snippet, SourceAnnotation};
 use rustc_data_structures::sync::Lrc;
 use rustc_error_messages::FluentArgs;
 use rustc_span::source_map::SourceMap;
@@ -86,7 +85,7 @@ fn source_string(file: Lrc<SourceFile>, line: &Line) -> String {
 /// Maps `Diagnostic::Level` to `snippet::AnnotationType`
 fn annotation_type_for_level(level: Level) -> AnnotationType {
     match level {
-        Level::Bug | Level::DelayedBug | Level::Fatal | Level::Error => AnnotationType::Error,
+        Level::Bug | Level::DelayedBug(_) | Level::Fatal | Level::Error => AnnotationType::Error,
         Level::ForceWarning(_) | Level::Warning => AnnotationType::Warning,
         Level::Note | Level::OnceNote => AnnotationType::Note,
         Level::Help | Level::OnceHelp => AnnotationType::Help,
@@ -190,11 +189,6 @@ impl AnnotateSnippetEmitter {
                 annotation_type: annotation_type_for_level(*level),
             }),
             footer: vec![],
-            opt: FormatOptions {
-                color: true,
-                anonymized_line_numbers: self.ui_testing,
-                margin: None,
-            },
             slices: annotated_files
                 .iter()
                 .map(|(file_name, source, line_index, annotations)| {
@@ -222,7 +216,8 @@ impl AnnotateSnippetEmitter {
             // FIXME(#59346): Figure out if we can _always_ print to stderr or not.
             // `emitter.rs` has the `Destination` enum that lists various possible output
             // destinations.
-            eprintln!("{}", DisplayList::from(snippet))
+            let renderer = Renderer::plain().anonymized_line_numbers(self.ui_testing);
+            eprintln!("{}", renderer.render(snippet))
         }
         // FIXME(#59346): Is it ok to return None if there's no source_map?
     }
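(Not part of the diff.) The same 0.9 → 0.10 migration in a standalone program, against `annotate-snippets = "0.10"` as this rustup pins it: the per-snippet `opt: FormatOptions` field and `DisplayList` printing are replaced by a configurable `Renderer`. A sketch:

```rust
use annotate_snippets::{Annotation, AnnotationType, Renderer, Slice, Snippet, SourceAnnotation};

fn main() {
    let snippet = Snippet {
        title: Some(Annotation {
            id: Some("E0308"),
            label: Some("mismatched types"),
            annotation_type: AnnotationType::Error,
        }),
        footer: vec![],
        slices: vec![Slice {
            source: "let x: i32 = \"hi\";",
            line_start: 1,
            origin: Some("example.rs"),
            fold: false,
            annotations: vec![SourceAnnotation {
                range: (13, 17), // the `"hi"` literal
                label: "expected `i32`, found `&str`",
                annotation_type: AnnotationType::Error,
            }],
        }],
    };
    // 0.9: eprintln!("{}", DisplayList::from(snippet)) with an `opt: FormatOptions` field.
    // 0.10: formatting options live on the Renderer instead.
    let renderer = Renderer::plain().anonymized_line_numbers(false);
    eprintln!("{}", renderer.render(snippet));
}
```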
@@ -1,7 +1,7 @@
 use crate::snippet::Style;
 use crate::{
-    CodeSuggestion, DiagnosticBuilder, DiagnosticMessage, EmissionGuarantee, Level, MultiSpan,
-    SubdiagnosticMessage, Substitution, SubstitutionPart, SuggestionStyle,
+    CodeSuggestion, DelayedBugKind, DiagnosticBuilder, DiagnosticMessage, EmissionGuarantee, Level,
+    MultiSpan, SubdiagnosticMessage, Substitution, SubstitutionPart, SuggestionStyle,
 };
 use rustc_data_structures::fx::{FxHashMap, FxIndexMap};
 use rustc_error_messages::fluent_value_from_str_list_sep_by_and;
@@ -243,12 +243,15 @@ impl Diagnostic {
 
     pub fn is_error(&self) -> bool {
         match self.level {
-            Level::Bug | Level::DelayedBug | Level::Fatal | Level::Error | Level::FailureNote => {
-                true
-            }
+            Level::Bug
+            | Level::DelayedBug(DelayedBugKind::Normal)
+            | Level::Fatal
+            | Level::Error
+            | Level::FailureNote => true,
+
             Level::ForceWarning(_)
             | Level::Warning
+            | Level::DelayedBug(DelayedBugKind::GoodPath)
             | Level::Note
             | Level::OnceNote
             | Level::Help
@@ -318,7 +321,7 @@ impl Diagnostic {
             "downgrade_to_delayed_bug: cannot downgrade {:?} to DelayedBug: not an error",
             self.level
         );
-        self.level = Level::DelayedBug;
+        self.level = Level::DelayedBug(DelayedBugKind::Normal);
     }
 
     /// Appends a labeled span to the diagnostic.
@@ -255,35 +255,13 @@ impl<'a, G: EmissionGuarantee> DiagnosticBuilder<'a, G> {
     /// Stashes diagnostic for possible later improvement in a different,
     /// later stage of the compiler. The diagnostic can be accessed with
     /// the provided `span` and `key` through [`DiagCtxt::steal_diagnostic()`].
-    ///
-    /// As with `buffer`, this is unless the dcx has disabled such buffering.
     pub fn stash(self, span: Span, key: StashKey) {
-        if let Some((diag, dcx)) = self.into_diagnostic() {
-            dcx.stash_diagnostic(span, key, diag);
-        }
+        self.dcx.stash_diagnostic(span, key, self.into_diagnostic());
     }
 
-    /// Converts the builder to a `Diagnostic` for later emission,
-    /// unless dcx has disabled such buffering.
-    fn into_diagnostic(mut self) -> Option<(Diagnostic, &'a DiagCtxt)> {
-        if self.dcx.inner.lock().flags.treat_err_as_bug.is_some() {
-            self.emit();
-            return None;
-        }
-
-        let diag = self.take_diag();
-
-        // Logging here is useful to help track down where in logs an error was
-        // actually emitted.
-        debug!("buffer: diag={:?}", diag);
-
-        Some((diag, self.dcx))
-    }
-
-    /// Buffers the diagnostic for later emission,
-    /// unless dcx has disabled such buffering.
-    pub fn buffer(self, buffered_diagnostics: &mut Vec<Diagnostic>) {
-        buffered_diagnostics.extend(self.into_diagnostic().map(|(diag, _)| diag));
+    /// Converts the builder to a `Diagnostic` for later emission.
+    pub fn into_diagnostic(mut self) -> Diagnostic {
+        self.take_diag()
     }
 
     /// Delay emission of this diagnostic as a bug.
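(Not part of the diff.) What callers of the removed `buffer` helper migrate to, with rustc's types reduced to stand-ins: `into_diagnostic` is now total, buffering is an ordinary `Vec::push`, and the old `-Ztreat-err-as-bug` special case moved into `DiagCtxt::emit_diagnostic` (see the `treat_next_err_as_bug` hunk below).

```rust
// Toy stand-ins for rustc's types; only the call-site shape matters here.
struct Diagnostic(String);
struct DiagnosticBuilder(Diagnostic);

impl DiagnosticBuilder {
    // The old signature was `fn into_diagnostic(self) -> Option<(Diagnostic, &DiagCtxt)>`
    // and could emit eagerly instead; the new one always converts.
    fn into_diagnostic(self) -> Diagnostic {
        self.0
    }
}

fn main() {
    let mut buffered: Vec<Diagnostic> = Vec::new();
    let builder = DiagnosticBuilder(Diagnostic("mismatched types".into()));
    // Replaces the removed `builder.buffer(&mut buffered)` helper.
    buffered.push(builder.into_diagnostic());
    for diag in &buffered {
        eprintln!("buffered: {}", diag.0);
    }
}
```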
@@ -519,6 +519,12 @@ fn default_track_diagnostic(diag: Diagnostic, f: &mut dyn FnMut(Diagnostic)) {
 pub static TRACK_DIAGNOSTIC: AtomicRef<fn(Diagnostic, &mut dyn FnMut(Diagnostic))> =
     AtomicRef::new(&(default_track_diagnostic as _));
 
+#[derive(Copy, PartialEq, Eq, Clone, Hash, Debug, Encodable, Decodable)]
+pub enum DelayedBugKind {
+    Normal,
+    GoodPath,
+}
+
 #[derive(Copy, Clone, Default)]
 pub struct DiagCtxtFlags {
     /// If false, warning-level lints are suppressed.
@@ -527,6 +533,9 @@ pub struct DiagCtxtFlags {
     /// If Some, the Nth error-level diagnostic is upgraded to bug-level.
     /// (rustc: see `-Z treat-err-as-bug`)
     pub treat_err_as_bug: Option<NonZeroUsize>,
+    /// Eagerly emit delayed bugs as errors, so that the compiler debugger may
+    /// see all of the errors being emitted at once.
+    pub eagerly_emit_delayed_bugs: bool,
     /// Show macro backtraces.
     /// (rustc: see `-Z macro-backtrace`)
     pub macro_backtrace: bool,
@@ -541,8 +550,7 @@ impl Drop for DiagCtxtInner {
         self.emit_stashed_diagnostics();
 
         if !self.has_errors() {
-            let bugs = std::mem::replace(&mut self.span_delayed_bugs, Vec::new());
-            self.flush_delayed(bugs, "no errors encountered even though `span_delayed_bug` issued");
+            self.flush_delayed(DelayedBugKind::Normal)
         }
 
         // FIXME(eddyb) this explains what `good_path_delayed_bugs` are!
@@ -551,11 +559,7 @@ impl Drop for DiagCtxtInner {
         // lints can be `#[allow]`'d, potentially leading to this triggering.
         // Also, "good path" should be replaced with a better naming.
         if !self.has_printed && !self.suppressed_expected_diag && !std::thread::panicking() {
-            let bugs = std::mem::replace(&mut self.good_path_delayed_bugs, Vec::new());
-            self.flush_delayed(
-                bugs,
-                "no warnings or errors encountered even though `good_path_delayed_bugs` issued",
-            );
+            self.flush_delayed(DelayedBugKind::GoodPath);
         }
 
         if self.check_unstable_expect_diagnostics {
@@ -865,7 +869,8 @@ impl DiagCtxt {
         if treat_next_err_as_bug {
             self.bug(msg);
         }
-        DiagnosticBuilder::<ErrorGuaranteed>::new(self, DelayedBug, msg).emit()
+        DiagnosticBuilder::<ErrorGuaranteed>::new(self, DelayedBug(DelayedBugKind::Normal), msg)
+            .emit()
     }
 
     /// Like `delayed_bug`, but takes an additional span.
@@ -882,16 +887,15 @@ impl DiagCtxt {
         if treat_next_err_as_bug {
             self.span_bug(sp, msg);
         }
-        DiagnosticBuilder::<ErrorGuaranteed>::new(self, DelayedBug, msg).with_span(sp).emit()
+        DiagnosticBuilder::<ErrorGuaranteed>::new(self, DelayedBug(DelayedBugKind::Normal), msg)
+            .with_span(sp)
+            .emit()
     }
 
     // FIXME(eddyb) note the comment inside `impl Drop for DiagCtxtInner`, that's
     // where the explanation of what "good path" is (also, it should be renamed).
     pub fn good_path_delayed_bug(&self, msg: impl Into<DiagnosticMessage>) {
-        let mut inner = self.inner.borrow_mut();
-        let diagnostic = Diagnostic::new(DelayedBug, msg);
-        let backtrace = std::backtrace::Backtrace::capture();
-        inner.good_path_delayed_bugs.push(DelayedDiagnostic::with_backtrace(diagnostic, backtrace));
+        DiagnosticBuilder::<()>::new(self, DelayedBug(DelayedBugKind::GoodPath), msg).emit()
     }
 
     #[track_caller]
@@ -981,6 +985,10 @@ impl DiagCtxt {
 
         inner.emit_stashed_diagnostics();
 
+        if inner.treat_err_as_bug() {
+            return;
+        }
+
         let warnings = match inner.deduplicated_warn_count {
             0 => Cow::from(""),
             1 => Cow::from("1 warning emitted"),
@@ -991,9 +999,6 @@ impl DiagCtxt {
             1 => Cow::from("aborting due to 1 previous error"),
             count => Cow::from(format!("aborting due to {count} previous errors")),
         };
-        if inner.treat_err_as_bug() {
-            return;
-        }
 
         match (errors.len(), warnings.len()) {
             (0, 0) => return,
@@ -1168,7 +1173,8 @@ impl DiagCtxt {
         let mut inner = self.inner.borrow_mut();
 
         if loud && lint_level.is_error() {
-            inner.bump_err_count();
+            inner.err_count += 1;
+            inner.panic_if_treat_err_as_bug();
         }
 
         inner.emitter.emit_unused_externs(lint_level, unused_externs)
@@ -1216,9 +1222,7 @@ impl DiagCtxt {
     }
 
     pub fn flush_delayed(&self) {
-        let mut inner = self.inner.borrow_mut();
-        let bugs = std::mem::replace(&mut inner.span_delayed_bugs, Vec::new());
-        inner.flush_delayed(bugs, "no errors encountered even though `span_delayed_bug` issued");
+        self.inner.borrow_mut().flush_delayed(DelayedBugKind::Normal);
     }
 }
 
@@ -1255,7 +1259,7 @@ impl DiagCtxtInner {
     }
 
     fn emit_diagnostic(&mut self, mut diagnostic: Diagnostic) -> Option<ErrorGuaranteed> {
-        if matches!(diagnostic.level, Error | Fatal) && self.treat_err_as_bug() {
+        if matches!(diagnostic.level, Error | Fatal) && self.treat_next_err_as_bug() {
             diagnostic.level = Bug;
         }
 
@@ -1268,17 +1272,30 @@ impl DiagCtxtInner {
             return None;
         }
 
-        if diagnostic.level == DelayedBug {
-            // FIXME(eddyb) this should check for `has_errors` and stop pushing
-            // once *any* errors were emitted (and truncate `span_delayed_bugs`
-            // when an error is first emitted, also), but maybe there's a case
-            // in which that's not sound? otherwise this is really inefficient.
-            let backtrace = std::backtrace::Backtrace::capture();
-            self.span_delayed_bugs
-                .push(DelayedDiagnostic::with_backtrace(diagnostic.clone(), backtrace));
+        // FIXME(eddyb) this should check for `has_errors` and stop pushing
+        // once *any* errors were emitted (and truncate `span_delayed_bugs`
+        // when an error is first emitted, also), but maybe there's a case
+        // in which that's not sound? otherwise this is really inefficient.
+        match diagnostic.level {
+            DelayedBug(_) if self.flags.eagerly_emit_delayed_bugs => {
+                diagnostic.level = Error;
+            }
+            DelayedBug(DelayedBugKind::Normal) => {
+                let backtrace = std::backtrace::Backtrace::capture();
+                self.span_delayed_bugs
+                    .push(DelayedDiagnostic::with_backtrace(diagnostic.clone(), backtrace));
 
-            #[allow(deprecated)]
-            return Some(ErrorGuaranteed::unchecked_claim_error_was_emitted());
-        }
+                #[allow(deprecated)]
+                return Some(ErrorGuaranteed::unchecked_claim_error_was_emitted());
+            }
+            DelayedBug(DelayedBugKind::GoodPath) => {
+                let backtrace = std::backtrace::Backtrace::capture();
+                self.good_path_delayed_bugs
+                    .push(DelayedDiagnostic::with_backtrace(diagnostic.clone(), backtrace));
+
+                return None;
+            }
+            _ => {}
+        }
 
         if diagnostic.has_future_breakage() {
@@ -1353,10 +1370,11 @@ impl DiagCtxtInner {
         }
         if diagnostic.is_error() {
             if diagnostic.is_lint {
-                self.bump_lint_err_count();
+                self.lint_err_count += 1;
             } else {
-                self.bump_err_count();
+                self.err_count += 1;
             }
+            self.panic_if_treat_err_as_bug();
 
             #[allow(deprecated)]
             {
@@ -1393,11 +1411,18 @@ impl DiagCtxtInner {
         self.emit_diagnostic(Diagnostic::new(FailureNote, msg));
     }
 
-    fn flush_delayed(
-        &mut self,
-        bugs: Vec<DelayedDiagnostic>,
-        explanation: impl Into<DiagnosticMessage> + Copy,
-    ) {
+    fn flush_delayed(&mut self, kind: DelayedBugKind) {
+        let (bugs, explanation) = match kind {
+            DelayedBugKind::Normal => (
+                std::mem::take(&mut self.span_delayed_bugs),
+                "no errors encountered even though `span_delayed_bug` issued",
+            ),
+            DelayedBugKind::GoodPath => (
+                std::mem::take(&mut self.good_path_delayed_bugs),
+                "no warnings or errors encountered even though `good_path_delayed_bugs` issued",
+            ),
+        };
+
         if bugs.is_empty() {
             return;
         }
@@ -1430,7 +1455,7 @@ impl DiagCtxtInner {
             if backtrace || self.ice_file.is_none() { bug.decorate() } else { bug.inner };
 
         // "Undelay" the `DelayedBug`s (into plain `Bug`s).
-        if bug.level != DelayedBug {
+        if !matches!(bug.level, DelayedBug(_)) {
             // NOTE(eddyb) not panicking here because we're already producing
             // an ICE, and the more information the merrier.
             bug.subdiagnostic(InvalidFlushedDelayedDiagnosticLevel {
@@ -1447,16 +1472,6 @@ impl DiagCtxtInner {
         panic::panic_any(DelayedBugPanic);
     }
 
-    fn bump_lint_err_count(&mut self) {
-        self.lint_err_count += 1;
-        self.panic_if_treat_err_as_bug();
-    }
-
-    fn bump_err_count(&mut self) {
-        self.err_count += 1;
-        self.panic_if_treat_err_as_bug();
-    }
-
     fn panic_if_treat_err_as_bug(&self) {
         if self.treat_err_as_bug() {
             match (
@@ -1528,8 +1543,9 @@ pub enum Level {
     /// silently dropped. I.e. "expect other errors are emitted" semantics. Useful on code paths
     /// that should only be reached when compiling erroneous code.
     ///
-    /// Its `EmissionGuarantee` is `ErrorGuaranteed`.
-    DelayedBug,
+    /// Its `EmissionGuarantee` is `ErrorGuaranteed` for `Normal` delayed bugs, and `()` for
+    /// `GoodPath` delayed bugs.
+    DelayedBug(DelayedBugKind),
 
     /// An error that causes an immediate abort. Used for things like configuration errors,
     /// internal overflows, some file operation errors.
@@ -1604,7 +1620,7 @@ impl Level {
     fn color(self) -> ColorSpec {
         let mut spec = ColorSpec::new();
         match self {
-            Bug | DelayedBug | Fatal | Error => {
+            Bug | DelayedBug(_) | Fatal | Error => {
                 spec.set_fg(Some(Color::Red)).set_intense(true);
             }
             ForceWarning(_) | Warning => {
@@ -1624,7 +1640,7 @@ impl Level {
 
     pub fn to_str(self) -> &'static str {
         match self {
-            Bug | DelayedBug => "error: internal compiler error",
+            Bug | DelayedBug(_) => "error: internal compiler error",
             Fatal | Error => "error",
             ForceWarning(_) | Warning => "warning",
             Note | OnceNote => "note",
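(Not part of the diff.) The two-variant split introduced above, reduced to a standalone sketch; rustc's real `Level` has many more variants:

```rust
// Stand-in for the new Level::DelayedBug(DelayedBugKind) split.
enum DelayedBugKind {
    Normal,   // "this should already have errored": an ICE if it survives
    GoodPath, // success-path bookkeeping: never a user-visible error
}

enum Level {
    Error,
    Warning,
    DelayedBug(DelayedBugKind),
}

fn is_error(level: &Level) -> bool {
    // GoodPath delayed bugs deliberately do not count as errors, which is
    // why their `EmissionGuarantee` can be `()` rather than `ErrorGuaranteed`.
    matches!(level, Level::Error | Level::DelayedBug(DelayedBugKind::Normal))
}

fn main() {
    assert!(is_error(&Level::DelayedBug(DelayedBugKind::Normal)));
    assert!(!is_error(&Level::DelayedBug(DelayedBugKind::GoodPath)));
    assert!(!is_error(&Level::Warning));
}
```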
@@ -5,7 +5,8 @@ use termcolor::{BufferWriter, ColorChoice};
 use super::*;
 
 const INPUT: &str = include_str!("input.md");
-const OUTPUT_PATH: &[&str] = &[env!("CARGO_MANIFEST_DIR"), "src","markdown","tests","output.stdout"];
+const OUTPUT_PATH: &[&str] =
+    &[env!("CARGO_MANIFEST_DIR"), "src", "markdown", "tests", "output.stdout"];
 
 const TEST_WIDTH: usize = 80;
 
@@ -34,7 +35,7 @@ quis dolor non venenatis. Aliquam ut. ";
 fn test_wrapping_write() {
     WIDTH.with(|w| w.set(TEST_WIDTH));
     let mut buf = BufWriter::new(Vec::new());
-    let txt = TXT.replace("-\n","-").replace("_\n","_").replace('\n', " ").replace("  ", "");
+    let txt = TXT.replace("-\n", "-").replace("_\n", "_").replace('\n', " ").replace("  ", "");
     write_wrapping(&mut buf, &txt, 0, None).unwrap();
     write_wrapping(&mut buf, &txt, 4, None).unwrap();
     write_wrapping(
@@ -77,7 +77,7 @@ declare_features! (
     /// Allows empty structs and enum variants with braces.
     (accepted, braced_empty_structs, "1.8.0", Some(29720)),
     /// Allows `c"foo"` literals.
-    (accepted, c_str_literals, "1.76.0", Some(105723)),
+    (accepted, c_str_literals, "CURRENT_RUSTC_VERSION", Some(105723)),
     /// Allows `#[cfg_attr(predicate, multiple, attributes, here)]`.
     (accepted, cfg_attr_multi, "1.33.0", Some(54881)),
     /// Allows the use of `#[cfg(doctest)]`, set when rustdoc is collecting doctests.
@@ -450,7 +450,7 @@ declare_features! (
     (unstable, doc_masked, "1.21.0", Some(44027)),
     /// Allows `dyn* Trait` objects.
     (incomplete, dyn_star, "1.65.0", Some(102425)),
-    // Uses generic effect parameters for ~const bounds
+    /// Uses generic effect parameters for ~const bounds
     (unstable, effects, "1.72.0", Some(102090)),
     /// Allows `X..Y` patterns.
     (unstable, exclusive_range_pattern, "1.11.0", Some(37854)),
@@ -8,7 +8,7 @@ proc-macro = true
 
 [dependencies]
 # tidy-alphabetical-start
-annotate-snippets = "0.9"
+annotate-snippets = "0.10"
 fluent-bundle = "0.15.2"
 fluent-syntax = "0.11"
 proc-macro2 = "1"
@@ -1,7 +1,4 @@
-use annotate_snippets::{
-    display_list::DisplayList,
-    snippet::{Annotation, AnnotationType, Slice, Snippet, SourceAnnotation},
-};
+use annotate_snippets::{Annotation, AnnotationType, Renderer, Slice, Snippet, SourceAnnotation};
 use fluent_bundle::{FluentBundle, FluentError, FluentResource};
 use fluent_syntax::{
     ast::{
@@ -179,10 +176,9 @@ pub(crate) fn fluent_messages(input: proc_macro::TokenStream) -> proc_macro::Tok
                 range: (pos.start, pos.end - 1),
             }],
         }],
-            opt: Default::default(),
         };
-        let dl = DisplayList::from(snippet);
-        eprintln!("{dl}\n");
+        let renderer = Renderer::plain();
+        eprintln!("{}\n", renderer.render(snippet));
     }
 
     return failed(&crate_name);
@@ -300,13 +300,15 @@ impl<'tcx> dyn AstConv<'tcx> + '_ {
             .expect("missing associated item");
 
         if !assoc_item.visibility(tcx).is_accessible_from(def_scope, tcx) {
-            tcx.dcx()
+            let reported = tcx
+                .dcx()
                 .struct_span_err(
                     binding.span,
                     format!("{} `{}` is private", assoc_item.kind, binding.item_name),
                 )
                 .with_span_label(binding.span, format!("private {}", assoc_item.kind))
                 .emit();
+            self.set_tainted_by_errors(reported);
         }
         tcx.check_stability(assoc_item.def_id, Some(hir_ref_id), binding.span, None);
@@ -354,7 +354,9 @@ impl<'o, 'tcx> dyn AstConv<'tcx> + 'o {
         );
         err.span_label(name.span, format!("multiple `{name}` found"));
         self.note_ambiguous_inherent_assoc_type(&mut err, candidates, span);
-        err.emit()
+        let reported = err.emit();
+        self.set_tainted_by_errors(reported);
+        reported
     }
 
     // FIXME(fmease): Heavily adapted from `rustc_hir_typeck::method::suggest`. Deduplicate.
@@ -843,7 +845,7 @@ impl<'o, 'tcx> dyn AstConv<'tcx> + 'o {
             }
         }
 
-        err.emit();
+        self.set_tainted_by_errors(err.emit());
     }
 }
@@ -390,6 +390,12 @@ impl<'o, 'tcx> dyn AstConv<'tcx> + 'o {
             infer_args,
         );
 
+        if let Err(err) = &arg_count.correct
+            && let Some(reported) = err.reported
+        {
+            self.set_tainted_by_errors(reported);
+        }
+
         // Skip processing if type has no generic parameters.
         // Traits always have `Self` as a generic parameter, which means they will not return early
         // here and so associated type bindings will be handled regardless of whether there are any
@@ -568,6 +574,7 @@ impl<'o, 'tcx> dyn AstConv<'tcx> + 'o {
                 span,
                 modifier: constness.as_str(),
             });
+            self.set_tainted_by_errors(e);
             arg_count.correct =
                 Err(GenericArgCountMismatch { reported: Some(e), invalid_args: vec![] });
         }
@@ -966,7 +973,9 @@ impl<'o, 'tcx> dyn AstConv<'tcx> + 'o {
                 }
             }
         }
-        err.emit()
+        let reported = err.emit();
+        self.set_tainted_by_errors(reported);
+        reported
     }
 
     // Search for a bound on a type parameter which includes the associated item
@@ -1043,6 +1052,7 @@ impl<'o, 'tcx> dyn AstConv<'tcx> + 'o {
                 span,
                 binding,
             );
+            self.set_tainted_by_errors(reported);
             return Err(reported);
         };
         debug!(?bound);
@@ -1120,6 +1130,7 @@ impl<'o, 'tcx> dyn AstConv<'tcx> + 'o {
             ));
         }
         let reported = err.emit();
+        self.set_tainted_by_errors(reported);
         if !where_bounds.is_empty() {
             return Err(reported);
         }
@@ -1374,6 +1385,7 @@ impl<'o, 'tcx> dyn AstConv<'tcx> + 'o {
                     assoc_ident.name,
                 )
             };
+            self.set_tainted_by_errors(reported);
             return Err(reported);
         }
     };
@@ -1616,12 +1628,14 @@ impl<'o, 'tcx> dyn AstConv<'tcx> + 'o {
             let kind = tcx.def_kind_descr(kind, item);
             let msg = format!("{kind} `{name}` is private");
             let def_span = tcx.def_span(item);
-            tcx.dcx()
+            let reported = tcx
+                .dcx()
                 .struct_span_err(span, msg)
                 .with_code(rustc_errors::error_code!(E0624))
                 .with_span_label(span, format!("private {kind}"))
                 .with_span_label(def_span, format!("{kind} defined here"))
                 .emit();
+            self.set_tainted_by_errors(reported);
         }
         tcx.check_stability(item, Some(block), span, None);
     }
@@ -1862,7 +1876,7 @@ impl<'o, 'tcx> dyn AstConv<'tcx> + 'o {
                 err.span_label(span, format!("not allowed on {what}"));
             }
             extend(&mut err);
-            err.emit();
+            self.set_tainted_by_errors(err.emit());
             emitted = true;
         }
@@ -2184,7 +2198,9 @@ impl<'o, 'tcx> dyn AstConv<'tcx> + 'o {
                 {
                     err.span_note(impl_.self_ty.span, "not a concrete type");
                 }
-                Ty::new_error(tcx, err.emit())
+                let reported = err.emit();
+                self.set_tainted_by_errors(reported);
+                Ty::new_error(tcx, reported)
             } else {
                 ty
             }
@@ -2586,7 +2602,7 @@ impl<'o, 'tcx> dyn AstConv<'tcx> + 'o {
             );
         }
 
-        diag.emit();
+        self.set_tainted_by_errors(diag.emit());
     }
 
     // Find any late-bound regions declared in return type that do
@@ -2686,7 +2702,7 @@ impl<'o, 'tcx> dyn AstConv<'tcx> + 'o {
             err.note("consider introducing a named lifetime parameter");
         }
 
-        err.emit();
+        self.set_tainted_by_errors(err.emit());
     }
 }
@@ -2725,7 +2741,7 @@ impl<'o, 'tcx> dyn AstConv<'tcx> + 'o {
         // error.
         let r = derived_region_bounds[0];
         if derived_region_bounds[1..].iter().any(|r1| r != *r1) {
-            tcx.dcx().emit_err(AmbiguousLifetimeBound { span });
+            self.set_tainted_by_errors(tcx.dcx().emit_err(AmbiguousLifetimeBound { span }));
         }
         Some(r)
     }
@@ -116,7 +116,7 @@ impl<'o, 'tcx> dyn AstConv<'tcx> + 'o {
                 for more information on them, visit \
                 <https://doc.rust-lang.org/reference/special-types-and-traits.html#auto-traits>",
             );
-            err.emit();
+            self.set_tainted_by_errors(err.emit());
         }
 
         if regular_traits.is_empty() && auto_traits.is_empty() {
@@ -127,6 +127,7 @@ impl<'o, 'tcx> dyn AstConv<'tcx> + 'o {
                 .map(|trait_ref| tcx.def_span(trait_ref));
             let reported =
                 tcx.dcx().emit_err(TraitObjectDeclaredWithNoTraits { span, trait_alias_span });
+            self.set_tainted_by_errors(reported);
             return Ty::new_error(tcx, reported);
         }
 
@@ -290,7 +291,7 @@ impl<'o, 'tcx> dyn AstConv<'tcx> + 'o {
 
         if references_self {
             let def_id = i.bottom().0.def_id();
-            struct_span_code_err!(
+            let reported = struct_span_code_err!(
                 tcx.dcx(),
                 i.bottom().1,
                 E0038,
@@ -303,6 +304,7 @@ impl<'o, 'tcx> dyn AstConv<'tcx> + 'o {
                 .error_msg(),
             )
             .emit();
+            self.set_tainted_by_errors(reported);
         }
 
         ty::ExistentialTraitRef { def_id: trait_ref.def_id, args }
@@ -389,6 +391,7 @@ impl<'o, 'tcx> dyn AstConv<'tcx> + 'o {
             } else {
                 err.emit()
             };
+            self.set_tainted_by_errors(e);
             ty::Region::new_error(tcx, e)
         })
     }
@@ -74,7 +74,7 @@ impl<'a, 'tcx> Iterator for Autoderef<'a, 'tcx> {
         // we have some type like `&<Ty as Trait>::Assoc`, since users of
         // autoderef expect this type to have been structurally normalized.
         if self.infcx.next_trait_solver()
-            && let ty::Alias(ty::Projection | ty::Inherent | ty::Weak, _) = ty.kind()
+            && let ty::Alias(..) = ty.kind()
         {
             let (normalized_ty, obligations) = self.structurally_normalize(ty)?;
             self.state.obligations.extend(obligations);
@@ -35,6 +35,7 @@ use rustc_target::spec::abi;
 use rustc_trait_selection::infer::InferCtxtExt;
 use rustc_trait_selection::traits::error_reporting::suggestions::NextTypeParamName;
 use rustc_trait_selection::traits::ObligationCtxt;
+use std::cell::Cell;
 use std::iter;
 use std::ops::Bound;
 
@@ -119,6 +120,7 @@ pub fn provide(providers: &mut Providers) {
 pub struct ItemCtxt<'tcx> {
     tcx: TyCtxt<'tcx>,
     item_def_id: LocalDefId,
+    tainted_by_errors: Cell<Option<ErrorGuaranteed>>,
 }
 
 ///////////////////////////////////////////////////////////////////////////
@@ -343,7 +345,7 @@ fn bad_placeholder<'tcx>(
 
 impl<'tcx> ItemCtxt<'tcx> {
     pub fn new(tcx: TyCtxt<'tcx>, item_def_id: LocalDefId) -> ItemCtxt<'tcx> {
-        ItemCtxt { tcx, item_def_id }
+        ItemCtxt { tcx, item_def_id, tainted_by_errors: Cell::new(None) }
     }
 
     pub fn to_ty(&self, ast_ty: &hir::Ty<'_>) -> Ty<'tcx> {
@@ -357,6 +359,13 @@ impl<'tcx> ItemCtxt<'tcx> {
     pub fn node(&self) -> hir::Node<'tcx> {
         self.tcx.hir_node(self.hir_id())
     }
+
+    fn check_tainted_by_errors(&self) -> Result<(), ErrorGuaranteed> {
+        match self.tainted_by_errors.get() {
+            Some(err) => Err(err),
+            None => Ok(()),
+        }
+    }
 }
 
 impl<'tcx> AstConv<'tcx> for ItemCtxt<'tcx> {
@@ -492,8 +501,8 @@ impl<'tcx> AstConv<'tcx> for ItemCtxt<'tcx> {
         ty.ty_adt_def()
     }
 
-    fn set_tainted_by_errors(&self, _: ErrorGuaranteed) {
-        // There's no obvious place to track this, so just let it go.
+    fn set_tainted_by_errors(&self, err: ErrorGuaranteed) {
+        self.tainted_by_errors.set(Some(err));
     }
 
     fn record_ty(&self, _hir_id: hir::HirId, _ty: Ty<'tcx>, _span: Span) {
@@ -513,7 +513,11 @@ pub(super) fn type_of(tcx: TyCtxt<'_>, def_id: LocalDefId) -> ty::EarlyBinder<Ty
             bug!("unexpected sort of node in type_of(): {:?}", x);
         }
     };
-    ty::EarlyBinder::bind(output)
+    if let Err(e) = icx.check_tainted_by_errors() {
+        ty::EarlyBinder::bind(Ty::new_error(tcx, e))
+    } else {
+        ty::EarlyBinder::bind(output)
+    }
 }
 
 pub(super) fn type_of_opaque(
@@ -802,7 +802,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
                 .explicit_item_bounds(def_id)
                 .iter_instantiated_copied(self.tcx, args)
                 .find_map(|(p, s)| get_future_output(p.as_predicate(), s))?,
-            ty::Error(_) => return None,
+            ty::Error(_) => return Some(ret_ty),
             _ => span_bug!(
                 closure_span,
                 "async fn coroutine return type not an inference variable: {ret_ty}"
@@ -498,14 +498,14 @@ impl<'cx, 'tcx> WritebackCx<'cx, 'tcx> {
                 // order when emitting them.
                 let err =
                     self.tcx().dcx().struct_span_err(span, format!("user args: {user_args:?}"));
-                err.buffer(&mut errors_buffer);
+                errors_buffer.push(err);
             }
         }
 
         if !errors_buffer.is_empty() {
             errors_buffer.sort_by_key(|diag| diag.span.primary_span());
-            for diag in errors_buffer {
-                self.tcx().dcx().emit_diagnostic(diag);
+            for err in errors_buffer {
+                err.emit();
             }
         }
     }
@@ -82,7 +82,7 @@ pub(crate) fn parse_cfg(dcx: &DiagCtxt, cfgs: Vec<String>) -> Cfg {
                 Ok(..) => {}
                 Err(err) => err.cancel(),
             },
-            Err(errs) => drop(errs),
+            Err(errs) => errs.into_iter().for_each(|err| err.cancel()),
         }
 
         // If the user tried to use a key="value" flag, but is missing the quotes, provide
@@ -129,9 +129,12 @@ pub(crate) fn parse_check_cfg(dcx: &DiagCtxt, specs: Vec<String>) -> CheckCfg {
             error!("expected `cfg(name, values(\"value1\", \"value2\", ... \"valueN\"))`")
         };
 
-        let Ok(mut parser) = maybe_new_parser_from_source_str(&sess, filename, s.to_string())
-        else {
-            expected_error();
+        let mut parser = match maybe_new_parser_from_source_str(&sess, filename, s.to_string()) {
+            Ok(parser) => parser,
+            Err(errs) => {
+                errs.into_iter().for_each(|err| err.cancel());
+                expected_error();
+            }
         };
 
         let meta_item = match parser.parse_meta_item() {
@@ -532,8 +532,14 @@ lint_unknown_gated_lint =
 
 lint_unknown_lint =
     unknown lint: `{$name}`
-    .suggestion = did you mean
-    .help = did you mean: `{$replace}`
+    .suggestion = {$from_rustc ->
+        [true] a lint with a similar name exists in `rustc` lints
+        *[false] did you mean
+    }
+    .help = {$from_rustc ->
+        [true] a lint with a similar name exists in `rustc` lints: `{$replace}`
+        *[false] did you mean: `{$replace}`
+    }
 
 lint_unknown_tool_in_scoped_lint = unknown tool name `{$tool_name}` found in scoped lint: `{$tool_name}::{$lint_name}`
     .help = add `#![register_tool({$tool_name})]` to the crate root
@ -33,7 +33,7 @@ use rustc_middle::ty::{self, print::Printer, GenericArg, RegisteredTools, Ty, Ty
|
|||
use rustc_session::lint::{BuiltinLintDiagnostics, LintExpectationId};
|
||||
use rustc_session::lint::{FutureIncompatibleInfo, Level, Lint, LintBuffer, LintId};
|
||||
use rustc_session::{LintStoreMarker, Session};
|
||||
use rustc_span::edit_distance::find_best_match_for_name;
|
||||
use rustc_span::edit_distance::find_best_match_for_names;
|
||||
use rustc_span::symbol::{sym, Ident, Symbol};
|
||||
use rustc_span::Span;
|
||||
use rustc_target::abi;
|
||||
|
|
@ -117,7 +117,7 @@ struct LintGroup {
|
|||
pub enum CheckLintNameResult<'a> {
|
||||
Ok(&'a [LintId]),
|
||||
/// Lint doesn't exist. Potentially contains a suggestion for a correct lint name.
|
||||
NoLint(Option<Symbol>),
|
||||
NoLint(Option<(Symbol, bool)>),
|
||||
/// The lint refers to a tool that has not been registered.
|
||||
NoTool,
|
||||
/// The lint has been renamed to a new name.
|
||||
|
|
@@ -377,7 +377,7 @@ impl LintStore {
             debug!("lints={:?}", self.by_name.keys().collect::<Vec<_>>());
             let tool_prefix = format!("{tool_name}::");
             return if self.by_name.keys().any(|lint| lint.starts_with(&tool_prefix)) {
-                self.no_lint_suggestion(&complete_name)
+                self.no_lint_suggestion(&complete_name, tool_name.as_str())
             } else {
                 // 2. The tool isn't currently running, so no lints will be registered.
                 // To avoid giving a false positive, ignore all unknown lints.

@@ -419,13 +419,14 @@ impl LintStore {
         }
     }

-    fn no_lint_suggestion(&self, lint_name: &str) -> CheckLintNameResult<'_> {
+    fn no_lint_suggestion(&self, lint_name: &str, tool_name: &str) -> CheckLintNameResult<'_> {
         let name_lower = lint_name.to_lowercase();

         if lint_name.chars().any(char::is_uppercase) && self.find_lints(&name_lower).is_ok() {
             // First check if the lint name is (partly) in upper case instead of lower case...
-            return CheckLintNameResult::NoLint(Some(Symbol::intern(&name_lower)));
+            return CheckLintNameResult::NoLint(Some((Symbol::intern(&name_lower), false)));
         }

         // ...if not, search for lints with a similar name
         // Note: find_best_match_for_name depends on the sort order of its input vector.
         // To ensure deterministic output, sort elements of the lint_groups hash map.

@@ -441,7 +442,16 @@ impl LintStore {
         let groups = groups.iter().map(|k| Symbol::intern(k));
         let lints = self.lints.iter().map(|l| Symbol::intern(&l.name_lower()));
         let names: Vec<Symbol> = groups.chain(lints).collect();
-        let suggestion = find_best_match_for_name(&names, Symbol::intern(&name_lower), None);
+        let mut lookups = vec![Symbol::intern(&name_lower)];
+        if let Some(stripped) = name_lower.split("::").last() {
+            lookups.push(Symbol::intern(stripped));
+        }
+        let res = find_best_match_for_names(&names, &lookups, None);
+        let is_rustc = res.map_or_else(
+            || false,
+            |s| name_lower.contains("::") && !s.as_str().starts_with(tool_name),
+        );
+        let suggestion = res.map(|s| (s, is_rustc));
         CheckLintNameResult::NoLint(suggestion)
     }

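For reference, a minimal self-contained sketch of the two-lookup idea used above — trying both the full scoped name and its final `::` segment against the known names. A plain Levenshtein distance stands in for `rustc_span`'s more refined `find_best_match_for_names`; this is an illustration, not the compiler's actual helper:

    // Plain Levenshtein distance; a stand-in for rustc_span's helper.
    fn edit_distance(a: &str, b: &str) -> usize {
        let (a, b): (Vec<char>, Vec<char>) = (a.chars().collect(), b.chars().collect());
        let mut prev: Vec<usize> = (0..=b.len()).collect();
        for (i, ca) in a.iter().enumerate() {
            let mut cur = vec![i + 1];
            for (j, cb) in b.iter().enumerate() {
                let cost = usize::from(ca != cb);
                cur.push((prev[j] + cost).min(prev[j + 1] + 1).min(cur[j] + 1));
            }
            prev = cur;
        }
        prev[b.len()]
    }

    // Try every candidate against both lookups and keep the closest match.
    fn suggest<'a>(names: &[&'a str], lint_name: &str) -> Option<&'a str> {
        let name_lower = lint_name.to_lowercase();
        let mut lookups = vec![name_lower.clone()];
        if let Some(stripped) = name_lower.split("::").last() {
            lookups.push(stripped.to_string());
        }
        names
            .iter()
            .map(|&cand| (cand, lookups.iter().map(|l| edit_distance(cand, l)).min().unwrap()))
            .filter(|&(_, d)| d <= 3) // only reasonably close names
            .min_by_key(|&(_, d)| d)
            .map(|(cand, _)| cand)
    }

With this shape, `suggest(&["unused_mut"], "clippy::unused_muts")` can still find `unused_mut` via the stripped lookup, which is what lets the real code flag a near-miss as "a lint with a similar name exists in `rustc` lints".
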
@@ -454,7 +464,7 @@ impl LintStore {
         match self.by_name.get(&complete_name) {
             None => match self.lint_groups.get(&*complete_name) {
                 // Now we are sure, that this lint exists nowhere
-                None => self.no_lint_suggestion(lint_name),
+                None => self.no_lint_suggestion(lint_name, tool_name),
                 Some(LintGroup { lint_ids, depr, .. }) => {
                     // Reaching this would be weird, but let's cover this case anyway
                     if let Some(LintAlias { name, silent }) = depr {

@@ -356,6 +356,12 @@ pub(super) fn builtin(
             }
         }

+        // We don't want to suggest adding values to well known names
+        // since those are defined by rustc itself. Users can still
+        // do it if they want, but we should not encourage them.
+        let is_cfg_a_well_know_name =
+            sess.parse_sess.check_config.well_known_names.contains(&name);
+
         let inst = if let Some((value, _value_span)) = value {
             let pre = if is_from_cargo { "\\" } else { "" };
             format!("cfg({name}, values({pre}\"{value}{pre}\"))")

@@ -368,12 +374,14 @@ pub(super) fn builtin(
                 if let Some((value, _value_span)) = value {
                     db.help(format!("consider adding `{value}` as a feature in `Cargo.toml`"));
                 }
-            } else {
+            } else if !is_cfg_a_well_know_name {
                 db.help(format!("consider using a Cargo feature instead or adding `println!(\"cargo:rustc-check-cfg={inst}\");` to the top of a `build.rs`"));
             }
             db.note("see <https://doc.rust-lang.org/nightly/cargo/reference/unstable.html#check-cfg> for more information about checking conditional configuration");
         } else {
-            db.help(format!("to expect this configuration use `--check-cfg={inst}`"));
+            if !is_cfg_a_well_know_name {
+                db.help(format!("to expect this configuration use `--check-cfg={inst}`"));
+            }
             db.note("see <https://doc.rust-lang.org/nightly/unstable-book/compiler-flags/check-cfg.html> for more information about checking conditional configuration");
         }
     }

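The `build.rs` line suggested by the help text above looks like this in practice — a hedged sketch where the cfg names (`fuzzing`, `backend`) are invented for illustration, but the directive syntax matches the `cfg({name}, values(...))` format built into `inst` above:

    // build.rs -- declare custom cfgs so check-cfg doesn't flag them as unexpected.
    fn main() {
        // A bare name: allows `#[cfg(fuzzing)]` with no value.
        println!("cargo:rustc-check-cfg=cfg(fuzzing)");
        // A name with an expected value list: allows `#[cfg(backend = "gl")]` etc.
        println!("cargo:rustc-check-cfg=cfg(backend, values(\"gl\", \"vulkan\"))");
    }
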
@@ -582,8 +582,9 @@ impl<'s, P: LintLevelsProvider> LintLevelsBuilder<'s, P> {
                 }
                 CheckLintNameResult::NoLint(suggestion) => {
                     let name = lint_name.clone();
-                    let suggestion =
-                        suggestion.map(|replace| UnknownLintSuggestion::WithoutSpan { replace });
+                    let suggestion = suggestion.map(|(replace, from_rustc)| {
+                        UnknownLintSuggestion::WithoutSpan { replace, from_rustc }
+                    });
                     let requested_level = RequestedLevel { level, lint_name };
                     let lint = UnknownLintFromCommandLine { name, suggestion, requested_level };
                     self.emit_lint(UNKNOWN_LINTS, lint);

@@ -990,8 +991,8 @@ impl<'s, P: LintLevelsProvider> LintLevelsBuilder<'s, P> {
                     } else {
                         name.to_string()
                     };
-                    let suggestion = suggestion.map(|replace| {
-                        UnknownLintSuggestion::WithSpan { suggestion: sp, replace }
+                    let suggestion = suggestion.map(|(replace, from_rustc)| {
+                        UnknownLintSuggestion::WithSpan { suggestion: sp, replace, from_rustc }
                     });
                     let lint = UnknownLint { name, suggestion };
                     self.emit_spanned_lint(UNKNOWN_LINTS, sp.into(), lint);

@@ -1050,9 +1050,10 @@ pub enum UnknownLintSuggestion {
         #[primary_span]
         suggestion: Span,
         replace: Symbol,
+        from_rustc: bool,
     },
     #[help(lint_help)]
-    WithoutSpan { replace: Symbol },
+    WithoutSpan { replace: Symbol, from_rustc: bool },
 }

 #[derive(LintDiagnostic)]

@@ -1071,17 +1071,31 @@ impl UnusedParens {
             self.emit_unused_delims(cx, value.span, spans, "pattern", keep_space, false);
         }
     }

+    fn cast_followed_by_lt(&self, expr: &ast::Expr) -> Option<ast::NodeId> {
+        if let ExprKind::Binary(op, lhs, _rhs) = &expr.kind
+            && (op.node == ast::BinOpKind::Lt || op.node == ast::BinOpKind::Shl)
+        {
+            let mut cur = lhs;
+            while let ExprKind::Binary(_, _, rhs) = &cur.kind {
+                cur = rhs;
+            }
+
+            if let ExprKind::Cast(_, ty) = &cur.kind
+                && let ast::TyKind::Paren(_) = &ty.kind
+            {
+                return Some(ty.id);
+            }
+        }
+        None
+    }
 }

 impl EarlyLintPass for UnusedParens {
     #[inline]
     fn check_expr(&mut self, cx: &EarlyContext<'_>, e: &ast::Expr) {
-        if let ExprKind::Binary(op, lhs, _rhs) = &e.kind
-            && (op.node == ast::BinOpKind::Lt || op.node == ast::BinOpKind::Shl)
-            && let ExprKind::Cast(_expr, ty) = &lhs.kind
-            && let ast::TyKind::Paren(_) = &ty.kind
-        {
-            self.parens_in_cast_in_lt.push(ty.id);
+        if let Some(ty_id) = self.cast_followed_by_lt(e) {
+            self.parens_in_cast_in_lt.push(ty_id);
         }

         match e.kind {

@@ -1133,17 +1147,13 @@ impl EarlyLintPass for UnusedParens {
     }

     fn check_expr_post(&mut self, _cx: &EarlyContext<'_>, e: &ast::Expr) {
-        if let ExprKind::Binary(op, lhs, _rhs) = &e.kind
-            && (op.node == ast::BinOpKind::Lt || op.node == ast::BinOpKind::Shl)
-            && let ExprKind::Cast(_expr, ty) = &lhs.kind
-            && let ast::TyKind::Paren(_) = &ty.kind
-        {
+        if let Some(ty_id) = self.cast_followed_by_lt(e) {
             let id = self
                 .parens_in_cast_in_lt
                 .pop()
                 .expect("check_expr and check_expr_post must balance");
             assert_eq!(
-                id, ty.id,
+                id, ty_id,
                 "check_expr, check_ty, and check_expr_post are called, in that order, by the visitor"
             );
         }

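The parentheses this lint must now keep are load-bearing: on the left of `<`, removing them changes how a cast parses. A small illustration in ordinary user code (not compiler internals):

    fn main() {
        let x = 3_i64;
        let y: usize = 5;
        // The parens around the cast are required: `x as usize < y` fails to
        // parse, because `<` is interpreted as the start of generic arguments
        // for `usize` rather than a comparison.
        if (x as usize) < y {
            println!("smaller");
        }
        // The new helper walks binary-op chains, so a cast at the end of the
        // left-hand side is recognized too:
        if 1 + (x as usize) < y {
            println!("still needs parens");
        }
    }
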
@@ -531,9 +531,12 @@ extern "C" void LLVMRustDisposeTargetMachine(LLVMTargetMachineRef TM) {

 // Unfortunately, the LLVM C API doesn't provide a way to create the
 // TargetLibraryInfo pass, so we use this method to do so.
-extern "C" void LLVMRustAddLibraryInfo(LLVMPassManagerRef PMR, LLVMModuleRef M) {
+extern "C" void LLVMRustAddLibraryInfo(LLVMPassManagerRef PMR, LLVMModuleRef M,
+                                       bool DisableSimplifyLibCalls) {
   Triple TargetTriple(unwrap(M)->getTargetTriple());
   TargetLibraryInfoImpl TLII(TargetTriple);
+  if (DisableSimplifyLibCalls)
+    TLII.disableAllFunctions();
   unwrap(PMR)->add(new TargetLibraryInfoWrapperPass(TLII));
 }

@@ -700,7 +703,7 @@ LLVMRustOptimize(
     bool IsLinkerPluginLTO,
     bool NoPrepopulatePasses, bool VerifyIR, bool UseThinLTOBuffers,
     bool MergeFunctions, bool UnrollLoops, bool SLPVectorize, bool LoopVectorize,
-    bool EmitLifetimeMarkers,
+    bool DisableSimplifyLibCalls, bool EmitLifetimeMarkers,
     LLVMRustSanitizerOptions *SanitizerOptions,
     const char *PGOGenPath, const char *PGOUsePath,
     bool InstrumentCoverage, const char *InstrProfileOutput,

@@ -800,6 +803,8 @@ LLVMRustOptimize(

   Triple TargetTriple(TheModule->getTargetTriple());
   std::unique_ptr<TargetLibraryInfoImpl> TLII(new TargetLibraryInfoImpl(TargetTriple));
+  if (DisableSimplifyLibCalls)
+    TLII->disableAllFunctions();
   FAM.registerPass([&] { return TargetLibraryAnalysis(*TLII); });

   PB.registerModuleAnalyses(MAM);

@@ -76,8 +76,17 @@ fn decodable_body(
         ty_name,
         variants.len()
     );
+    let tag = if variants.len() < u8::MAX as usize {
+        quote! {
+            ::rustc_serialize::Decoder::read_u8(__decoder) as usize
+        }
+    } else {
+        quote! {
+            ::rustc_serialize::Decoder::read_usize(__decoder)
+        }
+    };
     quote! {
-        match ::rustc_serialize::Decoder::read_usize(__decoder) {
+        match #tag {
             #match_inner
             n => panic!(#message, n),
         }

@@ -206,11 +215,20 @@ fn encodable_body(
             variant_idx += 1;
             result
         });
-        quote! {
-            let disc = match *self {
-                #encode_inner
-            };
-            ::rustc_serialize::Encoder::emit_usize(__encoder, disc);
+        if variant_idx < u8::MAX as usize {
+            quote! {
+                let disc = match *self {
+                    #encode_inner
+                };
+                ::rustc_serialize::Encoder::emit_u8(__encoder, disc as u8);
+            }
+        } else {
+            quote! {
+                let disc = match *self {
+                    #encode_inner
+                };
+                ::rustc_serialize::Encoder::emit_usize(__encoder, disc);
+            }
         }
     };

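A hand-expanded sketch of what the derive now generates for a small enum (fewer than `u8::MAX` variants); the trait methods mirror the `rustc_serialize` ones used in the quoted fragments, with minimal stand-in traits so the sketch is self-contained:

    // Minimal stand-ins for the rustc_serialize traits used below.
    trait Encoder { fn emit_u8(&mut self, v: u8); }
    trait Decoder { fn read_u8(&mut self) -> u8; }

    enum Color { Red, Green, Blue }

    fn encode_color<S: Encoder>(c: &Color, s: &mut S) {
        let disc: usize = match *c {
            Color::Red => 0,
            Color::Green => 1,
            Color::Blue => 2,
        };
        // Small enum, so the tag is a single byte instead of a usize.
        s.emit_u8(disc as u8);
    }

    fn decode_color<D: Decoder>(d: &mut D) -> Color {
        match d.read_u8() as usize {
            0 => Color::Red,
            1 => Color::Green,
            2 => Color::Blue,
            n => panic!("invalid Color variant: {n}"),
        }
    }
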
@@ -113,6 +113,7 @@ macro_rules! arena_types {
             [] stripped_cfg_items: rustc_ast::expand::StrippedCfgItem,
             [] mod_child: rustc_middle::metadata::ModChild,
             [] features: rustc_feature::Features,
+            [decode] specialization_graph: rustc_middle::traits::specialization_graph::Graph,
         ]);
     )
 }

@@ -35,12 +35,7 @@ pub enum SymbolExportKind {
 pub struct SymbolExportInfo {
     pub level: SymbolExportLevel,
     pub kind: SymbolExportKind,
     /// Used to mark these symbols not to be internalized by LTO. These symbols
     /// are also added to `symbols.o` to avoid circular dependencies when linking.
     pub used: bool,
-    /// Also used to mark these symbols not to be internalized by LTO. But will
-    /// not be added to `symbols.o`. Currently there are only builtin functions.
-    pub used_compiler: bool,
 }

 #[derive(Eq, PartialEq, Debug, Copy, Clone, TyEncodable, TyDecodable, HashStable)]

@@ -1294,8 +1294,7 @@ rustc_queries! {
         desc { |tcx| "finding trait impls of `{}`", tcx.def_path_str(trait_id) }
     }

-    query specialization_graph_of(trait_id: DefId) -> &'tcx specialization_graph::Graph {
-        arena_cache
+    query specialization_graph_of(trait_id: DefId) -> Result<&'tcx specialization_graph::Graph, ErrorGuaranteed> {
         desc { |tcx| "building specialization graph of trait `{}`", tcx.def_path_str(trait_id) }
         cache_on_disk_if { true }
     }

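Since the query now returns a `Result`, callers propagate failure with `?`, exactly as `ancestors` does further down in this diff. A hedged sketch of a caller — the surrounding function is invented for illustration, only the query call matches the diff:

    // Illustrative only: a failed specialization graph build now surfaces as
    // an ErrorGuaranteed instead of a flag carried on the graph itself.
    fn trait_has_specializations(
        tcx: TyCtxt<'_>,
        trait_id: DefId,
    ) -> Result<bool, ErrorGuaranteed> {
        let graph = tcx.specialization_graph_of(trait_id)?;
        Ok(!graph.children.is_empty())
    }
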
@@ -786,6 +786,15 @@ impl<'a, 'tcx> Decodable<CacheDecoder<'a, 'tcx>> for &'tcx [rustc_ast::InlineAsmTemplatePiece] {
     }
 }

+impl<'a, 'tcx> Decodable<CacheDecoder<'a, 'tcx>>
+    for &'tcx crate::traits::specialization_graph::Graph
+{
+    #[inline]
+    fn decode(d: &mut CacheDecoder<'a, 'tcx>) -> Self {
+        RefDecodable::decode(d)
+    }
+}
+
 macro_rules! impl_ref_decoder {
     (<$tcx:tt> $($ty:ty,)*) => {
         $(impl<'a, $tcx> Decodable<CacheDecoder<'a, $tcx>> for &$tcx [$ty] {

@@ -73,6 +73,7 @@ pub struct CanonicalGoalEvaluation<'tcx> {
 pub enum CanonicalGoalEvaluationKind<'tcx> {
     Overflow,
     CycleInStack,
+    ProvisionalCacheHit,
     Evaluation { revisions: &'tcx [GoalEvaluationStep<'tcx>] },
 }
 impl Debug for GoalEvaluation<'_> {

@@ -77,6 +77,9 @@ impl<'a, 'b> ProofTreeFormatter<'a, 'b> {
             CanonicalGoalEvaluationKind::CycleInStack => {
                 writeln!(self.f, "CYCLE IN STACK: {:?}", eval.result)
             }
+            CanonicalGoalEvaluationKind::ProvisionalCacheHit => {
+                writeln!(self.f, "PROVISIONAL CACHE HIT: {:?}", eval.result)
+            }
             CanonicalGoalEvaluationKind::Evaluation { revisions } => {
                 for (n, step) in revisions.iter().enumerate() {
                     writeln!(self.f, "REVISION {n}")?;

@@ -30,18 +30,16 @@ pub struct Graph {

     /// The "root" impls are found by looking up the trait's def_id.
     pub children: DefIdMap<Children>,
-
-    /// Whether an error was emitted while constructing the graph.
-    pub has_errored: Option<ErrorGuaranteed>,
 }

 impl Graph {
     pub fn new() -> Graph {
-        Graph { parent: Default::default(), children: Default::default(), has_errored: None }
+        Graph { parent: Default::default(), children: Default::default() }
     }

     /// The parent of a given impl, which is the `DefId` of the trait when the
     /// impl is a "specialization root".
+    #[track_caller]
     pub fn parent(&self, child: DefId) -> DefId {
         *self.parent.get(&child).unwrap_or_else(|| panic!("Failed to get parent for {child:?}"))
     }

@@ -255,13 +253,9 @@ pub fn ancestors(
     trait_def_id: DefId,
     start_from_impl: DefId,
 ) -> Result<Ancestors<'_>, ErrorGuaranteed> {
-    let specialization_graph = tcx.specialization_graph_of(trait_def_id);
+    let specialization_graph = tcx.specialization_graph_of(trait_def_id)?;

-    if let Some(reported) = specialization_graph.has_errored {
-        Err(reported)
-    } else if let Err(reported) =
-        tcx.type_of(start_from_impl).instantiate_identity().error_reported()
-    {
+    if let Err(reported) = tcx.type_of(start_from_impl).instantiate_identity().error_reported() {
         Err(reported)
     } else {
         Ok(Ancestors {

@@ -32,6 +32,7 @@ pub enum SimplifiedType {
     CoroutineWitness(DefId),
     Function(usize),
     Placeholder,
+    Error,
 }

 /// Generic parameters are pretty much just bound variables, e.g.

@@ -153,7 +154,8 @@ pub fn simplify_type<'tcx>(
             TreatParams::ForLookup | TreatParams::AsCandidateKey => None,
         },
         ty::Foreign(def_id) => Some(SimplifiedType::Foreign(def_id)),
-        ty::Bound(..) | ty::Infer(_) | ty::Error(_) => None,
+        ty::Error(_) => Some(SimplifiedType::Error),
+        ty::Bound(..) | ty::Infer(_) => None,
     }
 }

@@ -1,6 +1,5 @@
 use crate::traits::specialization_graph;
 use crate::ty::fast_reject::{self, SimplifiedType, TreatParams, TreatProjections};
-use crate::ty::visit::TypeVisitableExt;
 use crate::ty::{Ident, Ty, TyCtxt};
 use hir::def_id::LOCAL_CRATE;
 use rustc_hir as hir;

@@ -241,9 +240,6 @@ pub(super) fn trait_impls_of_provider(tcx: TyCtxt<'_>, trait_id: DefId) -> TraitImpls {
         let impl_def_id = impl_def_id.to_def_id();

         let impl_self_ty = tcx.type_of(impl_def_id).instantiate_identity();
-        if impl_self_ty.references_error() {
-            continue;
-        }

         if let Some(simplified_self_ty) =
             fast_reject::simplify_type(tcx, impl_self_ty, TreatParams::AsCandidateKey)

@@ -566,17 +566,28 @@ fn run_optimization_passes<'tcx>(tcx: TyCtxt<'tcx>, body: &mut Body<'tcx>) {
         body,
         &[
             &check_alignment::CheckAlignment,
-            &lower_slice_len::LowerSliceLenCalls, // has to be done before inlining, otherwise actual call will be almost always inlined. Also simple, so can just do first
+            // Before inlining: trim down MIR with passes to reduce inlining work.
+
+            // Has to be done before inlining, otherwise actual call will be almost always inlined.
+            // Also simple, so can just do first
+            &lower_slice_len::LowerSliceLenCalls,
+            // Perform inlining, which may add a lot of code.
             &inline::Inline,
             // Substitutions during inlining may introduce switch on enums with uninhabited branches.
+            // Code from other crates may have storage markers, so this needs to happen after inlining.
+            &remove_storage_markers::RemoveStorageMarkers,
+            // Inlining and substitution may introduce ZST and useless drops.
+            &remove_zsts::RemoveZsts,
+            &remove_unneeded_drops::RemoveUnneededDrops,
+            // Type substitution may create uninhabited enums.
             &uninhabited_enum_branching::UninhabitedEnumBranching,
             &unreachable_prop::UnreachablePropagation,
             &o1(simplify::SimplifyCfg::AfterUninhabitedEnumBranching),
-            &remove_storage_markers::RemoveStorageMarkers,
-            &remove_zsts::RemoveZsts,
-            &normalize_array_len::NormalizeArrayLen, // has to run after `slice::len` lowering
+            // Inlining may have introduced a lot of redundant code and a large move pattern.
+            // Now, we need to shrink the generated MIR.
+
+            // Has to run after `slice::len` lowering
+            &normalize_array_len::NormalizeArrayLen,
             &const_goto::ConstGoto,
-            &remove_unneeded_drops::RemoveUnneededDrops,
             &ref_prop::ReferencePropagation,
             &sroa::ScalarReplacementOfAggregates,
             &match_branches::MatchBranchSimplification,

@@ -20,6 +20,9 @@ monomorphize_recursion_limit =
     reached the recursion limit while instantiating `{$shrunk}`
     .note = `{$def_path_str}` defined here

+monomorphize_start_not_found = using `fn main` requires the standard library
+    .help = use `#![no_main]` to bypass the Rust generated entrypoint and declare a platform specific entrypoint yourself, usually with `#[no_mangle]`
+
 monomorphize_symbol_already_defined = symbol `{$symbol}` is already defined

 monomorphize_type_length_limit = reached the type-length limit while instantiating `{$shrunk}`

@@ -194,7 +194,7 @@ use rustc_target::abi::Size;
 use std::path::PathBuf;

 use crate::errors::{
-    EncounteredErrorWhileInstantiating, LargeAssignmentsLint, NoOptimizedMir, RecursionLimit,
+    self, EncounteredErrorWhileInstantiating, LargeAssignmentsLint, NoOptimizedMir, RecursionLimit,
     TypeLengthLimit,
 };

@@ -1272,7 +1272,9 @@ impl<'v> RootCollector<'_, 'v> {
             return;
         };

-        let start_def_id = self.tcx.require_lang_item(LangItem::Start, None);
+        let Some(start_def_id) = self.tcx.lang_items().start_fn() else {
+            self.tcx.dcx().emit_fatal(errors::StartNotFound);
+        };
         let main_ret_ty = self.tcx.fn_sig(main_def_id).no_bound_vars().unwrap().output();

         // Given that `main()` has no arguments,

@@ -94,6 +94,11 @@ pub struct EncounteredErrorWhileInstantiating {
     pub formatted_item: String,
 }

+#[derive(Diagnostic)]
+#[diag(monomorphize_start_not_found)]
+#[help]
+pub struct StartNotFound;
+
 #[derive(Diagnostic)]
 #[diag(monomorphize_unknown_cgu_collection_mode)]
 pub struct UnknownCguCollectionMode<'a> {

@@ -7,7 +7,7 @@ use rustc_ast::ast::{self, AttrStyle};
 use rustc_ast::token::{self, CommentKind, Delimiter, Token, TokenKind};
 use rustc_ast::tokenstream::TokenStream;
 use rustc_ast::util::unicode::contains_text_flow_control_chars;
-use rustc_errors::{error_code, Applicability, DiagCtxt, Diagnostic, StashKey};
+use rustc_errors::{error_code, Applicability, DiagCtxt, DiagnosticBuilder, StashKey};
 use rustc_lexer::unescape::{self, EscapeError, Mode};
 use rustc_lexer::{Base, DocStyle, RawStrError};
 use rustc_lexer::{Cursor, LiteralKind};

@@ -42,12 +42,12 @@ pub struct UnmatchedDelim {
     pub candidate_span: Option<Span>,
 }

-pub(crate) fn parse_token_trees<'a>(
-    sess: &'a ParseSess,
-    mut src: &'a str,
+pub(crate) fn parse_token_trees<'sess, 'src>(
+    sess: &'sess ParseSess,
+    mut src: &'src str,
     mut start_pos: BytePos,
     override_span: Option<Span>,
-) -> Result<TokenStream, Vec<Diagnostic>> {
+) -> Result<TokenStream, Vec<DiagnosticBuilder<'sess>>> {
     // Skip `#!`, if present.
     if let Some(shebang_len) = rustc_lexer::strip_shebang(src) {
         src = &src[shebang_len..];

@@ -76,13 +76,13 @@ pub(crate) fn parse_token_trees<'a>(
             let mut buffer = Vec::with_capacity(1);
             for unmatched in unmatched_delims {
                 if let Some(err) = make_unclosed_delims_error(unmatched, sess) {
-                    err.buffer(&mut buffer);
+                    buffer.push(err);
                 }
             }
             if let Err(errs) = res {
                 // Add unclosing delimiter or diff marker errors
                 for err in errs {
-                    err.buffer(&mut buffer);
+                    buffer.push(err);
                 }
             }
             Err(buffer)

@@ -90,16 +90,16 @@
     }
 }

-struct StringReader<'a> {
-    sess: &'a ParseSess,
+struct StringReader<'sess, 'src> {
+    sess: &'sess ParseSess,
     /// Initial position, read-only.
     start_pos: BytePos,
     /// The absolute offset within the source_map of the current character.
     pos: BytePos,
     /// Source text to tokenize.
-    src: &'a str,
+    src: &'src str,
     /// Cursor for getting lexer tokens.
-    cursor: Cursor<'a>,
+    cursor: Cursor<'src>,
     override_span: Option<Span>,
     /// When a "unknown start of token: \u{a0}" has already been emitted earlier
     /// in this file, it's safe to treat further occurrences of the non-breaking

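Splitting the old single `'a` into `'sess` and `'src` lets slices of the source text outlive any borrow of the parse session. A stand-alone sketch of the pattern with toy types (not compiler code):

    struct Session;

    struct Reader<'sess, 'src> {
        sess: &'sess Session,
        src: &'src str,
        pos: usize,
    }

    impl<'sess, 'src> Reader<'sess, 'src> {
        // The returned slice borrows only the source text, not the session.
        fn rest(&self) -> &'src str {
            &self.src[self.pos..]
        }
    }

    fn main() {
        let src = String::from("fn main() {}");
        let rest;
        {
            let sess = Session;
            let reader = Reader { sess: &sess, src: &src, pos: 3 };
            rest = reader.rest();
        } // the session is gone, but `rest` still only borrows `src`
        println!("{rest}");
    }
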
@@ -107,8 +107,8 @@ struct StringReader<'a> {
     nbsp_is_whitespace: bool,
 }

-impl<'a> StringReader<'a> {
-    pub fn dcx(&self) -> &'a DiagCtxt {
+impl<'sess, 'src> StringReader<'sess, 'src> {
+    pub fn dcx(&self) -> &'sess DiagCtxt {
         &self.sess.dcx
     }

@@ -526,7 +526,7 @@ impl<'a> StringReader<'a> {

     /// Slice of the source text from `start` up to but excluding `self.pos`,
     /// meaning the slice does not include the character `self.ch`.
-    fn str_from(&self, start: BytePos) -> &'a str {
+    fn str_from(&self, start: BytePos) -> &'src str {
         self.str_from_to(start, self.pos)
     }

@@ -537,12 +537,12 @@ impl<'a> StringReader<'a> {
     }

     /// Slice of the source text spanning from `start` up to but excluding `end`.
-    fn str_from_to(&self, start: BytePos, end: BytePos) -> &'a str {
+    fn str_from_to(&self, start: BytePos, end: BytePos) -> &'src str {
         &self.src[self.src_index(start)..self.src_index(end)]
     }

     /// Slice of the source text spanning from `start` until the end
-    fn str_from_to_end(&self, start: BytePos) -> &'a str {
+    fn str_from_to_end(&self, start: BytePos) -> &'src str {
         &self.src[self.src_index(start)..]
     }

@@ -8,18 +8,18 @@ use rustc_ast_pretty::pprust::token_to_string;
 use rustc_errors::{Applicability, PErr};
 use rustc_span::symbol::kw;

-pub(super) struct TokenTreesReader<'a> {
-    string_reader: StringReader<'a>,
+pub(super) struct TokenTreesReader<'sess, 'src> {
+    string_reader: StringReader<'sess, 'src>,
     /// The "next" token, which has been obtained from the `StringReader` but
     /// not yet handled by the `TokenTreesReader`.
     token: Token,
     diag_info: TokenTreeDiagInfo,
 }

-impl<'a> TokenTreesReader<'a> {
+impl<'sess, 'src> TokenTreesReader<'sess, 'src> {
     pub(super) fn parse_all_token_trees(
-        string_reader: StringReader<'a>,
-    ) -> (TokenStream, Result<(), Vec<PErr<'a>>>, Vec<UnmatchedDelim>) {
+        string_reader: StringReader<'sess, 'src>,
+    ) -> (TokenStream, Result<(), Vec<PErr<'sess>>>, Vec<UnmatchedDelim>) {
         let mut tt_reader = TokenTreesReader {
             string_reader,
             token: Token::dummy(),

@@ -35,7 +35,7 @@ impl<'a> TokenTreesReader<'a> {
     fn parse_token_trees(
         &mut self,
         is_delimited: bool,
-    ) -> (Spacing, TokenStream, Result<(), Vec<PErr<'a>>>) {
+    ) -> (Spacing, TokenStream, Result<(), Vec<PErr<'sess>>>) {
         // Move past the opening delimiter.
         let (_, open_spacing) = self.bump(false);

@@ -71,7 +71,7 @@ impl<'a> TokenTreesReader<'a> {
         }
     }

-    fn eof_err(&mut self) -> PErr<'a> {
+    fn eof_err(&mut self) -> PErr<'sess> {
         let msg = "this file contains an unclosed delimiter";
         let mut err = self.string_reader.sess.dcx.struct_span_err(self.token.span, msg);
         for &(_, sp) in &self.diag_info.open_braces {

@@ -99,7 +99,7 @@ impl<'a> TokenTreesReader<'a> {
     fn parse_token_tree_open_delim(
         &mut self,
         open_delim: Delimiter,
-    ) -> Result<TokenTree, Vec<PErr<'a>>> {
+    ) -> Result<TokenTree, Vec<PErr<'sess>>> {
         // The span for beginning of the delimited section
         let pre_span = self.token.span;

@@ -229,7 +229,11 @@ impl<'a> TokenTreesReader<'a> {
         (this_tok, this_spacing)
     }

-    fn unclosed_delim_err(&mut self, tts: TokenStream, mut errs: Vec<PErr<'a>>) -> Vec<PErr<'a>> {
+    fn unclosed_delim_err(
+        &mut self,
+        tts: TokenStream,
+        mut errs: Vec<PErr<'sess>>,
+    ) -> Vec<PErr<'sess>> {
         // If there are unclosed delims, see if there are diff markers and if so, point them
         // out instead of complaining about the unclosed delims.
         let mut parser = crate::stream_to_parser(self.string_reader.sess, tts, None);

@@ -285,7 +289,7 @@ impl<'a> TokenTreesReader<'a> {
         return errs;
     }

-    fn close_delim_err(&mut self, delim: Delimiter) -> PErr<'a> {
+    fn close_delim_err(&mut self, delim: Delimiter) -> PErr<'sess> {
         // An unexpected closing delimiter (i.e., there is no
         // matching opening delimiter).
         let token_str = token_to_string(&self.token);

@@ -337,7 +337,7 @@ const ASCII_ARRAY: &[(&str, &str, Option<token::TokenKind>)] = &[
 ];

 pub(super) fn check_for_substitution(
-    reader: &StringReader<'_>,
+    reader: &StringReader<'_, '_>,
     pos: BytePos,
     ch: char,
     count: usize,

@@ -19,7 +19,7 @@ use rustc_ast::tokenstream::TokenStream;
 use rustc_ast::{AttrItem, Attribute, MetaItem};
 use rustc_ast_pretty::pprust;
 use rustc_data_structures::sync::Lrc;
-use rustc_errors::{Diagnostic, FatalError, Level, PResult};
+use rustc_errors::{DiagnosticBuilder, FatalError, PResult};
 use rustc_session::parse::ParseSess;
 use rustc_span::{FileName, SourceFile, Span};

@@ -45,14 +45,13 @@ rustc_fluent_macro::fluent_messages! { "../messages.ftl" }
 /// A variant of 'panictry!' that works on a `Vec<Diagnostic>` instead of a single
 /// `DiagnosticBuilder`.
 macro_rules! panictry_buffer {
-    ($handler:expr, $e:expr) => {{
-        use rustc_errors::FatalError;
+    ($e:expr) => {{
         use std::result::Result::{Err, Ok};
         match $e {
             Ok(e) => e,
             Err(errs) => {
                 for e in errs {
-                    $handler.emit_diagnostic(e);
+                    e.emit();
                 }
                 FatalError.raise()
             }

@@ -100,36 +99,41 @@ pub fn parse_stream_from_source_str(

 /// Creates a new parser from a source string.
 pub fn new_parser_from_source_str(sess: &ParseSess, name: FileName, source: String) -> Parser<'_> {
-    panictry_buffer!(&sess.dcx, maybe_new_parser_from_source_str(sess, name, source))
+    panictry_buffer!(maybe_new_parser_from_source_str(sess, name, source))
 }

 /// Creates a new parser from a source string. Returns any buffered errors from lexing the initial
-/// token stream.
+/// token stream; these must be consumed via `emit`, `cancel`, etc., otherwise a panic will occur
+/// when they are dropped.
 pub fn maybe_new_parser_from_source_str(
     sess: &ParseSess,
     name: FileName,
     source: String,
-) -> Result<Parser<'_>, Vec<Diagnostic>> {
+) -> Result<Parser<'_>, Vec<DiagnosticBuilder<'_>>> {
     maybe_source_file_to_parser(sess, sess.source_map().new_source_file(name, source))
 }

-/// Creates a new parser, handling errors as appropriate if the file doesn't exist.
-/// If a span is given, that is used on an error as the source of the problem.
+/// Creates a new parser, aborting if the file doesn't exist. If a span is given, that is used on
+/// an error as the source of the problem.
 pub fn new_parser_from_file<'a>(sess: &'a ParseSess, path: &Path, sp: Option<Span>) -> Parser<'a> {
-    source_file_to_parser(sess, file_to_source_file(sess, path, sp))
+    let source_file = sess.source_map().load_file(path).unwrap_or_else(|e| {
+        let msg = format!("couldn't read {}: {}", path.display(), e);
+        let mut err = sess.dcx.struct_fatal(msg);
+        if let Some(sp) = sp {
+            err.span(sp);
+        }
+        err.emit();
+    });
+
+    panictry_buffer!(maybe_source_file_to_parser(sess, source_file))
 }

-/// Given a session and a `source_file`, returns a parser.
-fn source_file_to_parser(sess: &ParseSess, source_file: Lrc<SourceFile>) -> Parser<'_> {
-    panictry_buffer!(&sess.dcx, maybe_source_file_to_parser(sess, source_file))
-}
-
-/// Given a session and a `source_file`, return a parser. Returns any buffered errors from lexing the
-/// initial token stream.
+/// Given a session and a `source_file`, return a parser. Returns any buffered errors from lexing
+/// the initial token stream.
 fn maybe_source_file_to_parser(
     sess: &ParseSess,
     source_file: Lrc<SourceFile>,
-) -> Result<Parser<'_>, Vec<Diagnostic>> {
+) -> Result<Parser<'_>, Vec<DiagnosticBuilder<'_>>> {
     let end_pos = source_file.end_position();
     let stream = maybe_file_to_stream(sess, source_file, None)?;
     let mut parser = stream_to_parser(sess, stream, None);

@@ -142,52 +146,22 @@ fn maybe_source_file_to_parser(

 // Base abstractions

-/// Given a session and a path and an optional span (for error reporting),
-/// add the path to the session's source_map and return the new source_file or
-/// error when a file can't be read.
-fn try_file_to_source_file(
-    sess: &ParseSess,
-    path: &Path,
-    spanopt: Option<Span>,
-) -> Result<Lrc<SourceFile>, Diagnostic> {
-    sess.source_map().load_file(path).map_err(|e| {
-        let msg = format!("couldn't read {}: {}", path.display(), e);
-        let mut diag = Diagnostic::new(Level::Fatal, msg);
-        if let Some(sp) = spanopt {
-            diag.span(sp);
-        }
-        diag
-    })
-}
-
-/// Given a session and a path and an optional span (for error reporting),
-/// adds the path to the session's `source_map` and returns the new `source_file`.
-fn file_to_source_file(sess: &ParseSess, path: &Path, spanopt: Option<Span>) -> Lrc<SourceFile> {
-    match try_file_to_source_file(sess, path, spanopt) {
-        Ok(source_file) => source_file,
-        Err(d) => {
-            sess.dcx.emit_diagnostic(d);
-            FatalError.raise();
-        }
-    }
-}
-
 /// Given a `source_file`, produces a sequence of token trees.
 pub fn source_file_to_stream(
     sess: &ParseSess,
     source_file: Lrc<SourceFile>,
     override_span: Option<Span>,
 ) -> TokenStream {
-    panictry_buffer!(&sess.dcx, maybe_file_to_stream(sess, source_file, override_span))
+    panictry_buffer!(maybe_file_to_stream(sess, source_file, override_span))
 }

 /// Given a source file, produces a sequence of token trees. Returns any buffered errors from
 /// parsing the token stream.
-pub fn maybe_file_to_stream(
-    sess: &ParseSess,
+fn maybe_file_to_stream<'sess>(
+    sess: &'sess ParseSess,
     source_file: Lrc<SourceFile>,
     override_span: Option<Span>,
-) -> Result<TokenStream, Vec<Diagnostic>> {
+) -> Result<TokenStream, Vec<DiagnosticBuilder<'sess>>> {
     let src = source_file.src.as_ref().unwrap_or_else(|| {
         sess.dcx.bug(format!(
             "cannot lex `source_file` without source: {}",

@@ -575,6 +575,11 @@ passes_outside_loop =

 passes_outside_loop_suggestion = consider labeling this block to be able to break within it

+passes_panic_unwind_without_std =
+    unwinding panics are not supported without std
+    .note = since the core library is usually precompiled with panic="unwind", rebuilding your crate with panic="abort" may not be enough to fix the problem
+    .help = using nightly cargo, use -Zbuild-std with panic="abort" to avoid unwinding
+
 passes_params_not_allowed =
     referencing function parameters is not allowed in naked functions
     .help = follow the calling convention in asm block to use parameters

@@ -812,6 +812,12 @@ pub struct UnknownExternLangItem {
 #[diag(passes_missing_panic_handler)]
 pub struct MissingPanicHandler;

+#[derive(Diagnostic)]
+#[diag(passes_panic_unwind_without_std)]
+#[help]
+#[note]
+pub struct PanicUnwindWithoutStd;
+
 #[derive(Diagnostic)]
 #[diag(passes_missing_lang_item)]
 #[note]

@@ -9,7 +9,9 @@ use rustc_middle::middle::lang_items::required;
 use rustc_middle::ty::TyCtxt;
 use rustc_session::config::CrateType;

-use crate::errors::{MissingLangItem, MissingPanicHandler, UnknownExternLangItem};
+use crate::errors::{
+    MissingLangItem, MissingPanicHandler, PanicUnwindWithoutStd, UnknownExternLangItem,
+};

 /// Checks the crate for usage of weak lang items, returning a vector of all the
 /// language items required by this crate, but not defined yet.

@@ -76,6 +78,8 @@ fn verify(tcx: TyCtxt<'_>, items: &lang_items::LanguageItems) {
         if missing.contains(&item) && required(tcx, item) && items.get(item).is_none() {
             if item == LangItem::PanicImpl {
                 tcx.dcx().emit_err(MissingPanicHandler);
+            } else if item == LangItem::EhPersonality {
+                tcx.dcx().emit_err(PanicUnwindWithoutStd);
             } else {
                 tcx.dcx().emit_err(MissingLangItem { name: item.name() });
             }

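For orientation, a sketch of the crate shape that reaches the new `EhPersonality` branch — a `#![no_std]` binary built with the default panic=unwind, per the help text added above. The crate content is illustrative:

    // src/main.rs of a hypothetical no_std binary. Without std there is no
    // `eh_personality` lang item, so a panic=unwind build now gets the new
    // "unwinding panics are not supported without std" error with its help
    // text, rather than a bare "missing lang item" message.
    #![no_std]
    #![no_main]

    use core::panic::PanicInfo;

    #[panic_handler]
    fn panic(_info: &PanicInfo) -> ! {
        loop {}
    }
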
@@ -124,8 +124,10 @@ pub fn analyze_match<'p, 'tcx>(

     let pat_column = PatternColumn::new(arms);

-    // Lint on ranges that overlap on their endpoints, which is likely a mistake.
-    lint_overlapping_range_endpoints(cx, &pat_column)?;
+    // Lint ranges that overlap on their endpoints, which is likely a mistake.
+    if !report.overlapping_range_endpoints.is_empty() {
+        lint_overlapping_range_endpoints(cx, &report.overlapping_range_endpoints);
+    }

     // Run the non_exhaustive_omitted_patterns lint. Only run on refutable patterns to avoid hitting
     // `if let`s. Only run if the match is exhaustive otherwise the error is redundant.

@@ -1,20 +1,14 @@
-use smallvec::SmallVec;
-
 use rustc_data_structures::captures::Captures;
 use rustc_middle::ty;
 use rustc_session::lint;
 use rustc_session::lint::builtin::NON_EXHAUSTIVE_OMITTED_PATTERNS;
-use rustc_span::{ErrorGuaranteed, Span};
+use rustc_span::ErrorGuaranteed;

-use crate::constructor::{IntRange, MaybeInfiniteInt};
 use crate::errors::{
-    NonExhaustiveOmittedPattern, NonExhaustiveOmittedPatternLintOnArm, Overlap,
-    OverlappingRangeEndpoints, Uncovered,
+    self, NonExhaustiveOmittedPattern, NonExhaustiveOmittedPatternLintOnArm, Uncovered,
 };
 use crate::pat::PatOrWild;
 use crate::rustc::{
-    Constructor, DeconstructedPat, MatchArm, MatchCtxt, PlaceCtxt, RevealedTy, RustcMatchCheckCtxt,
-    SplitConstructorSet, WitnessPat,
+    self, Constructor, DeconstructedPat, MatchArm, MatchCtxt, PlaceCtxt, RevealedTy,
+    RustcMatchCheckCtxt, SplitConstructorSet, WitnessPat,
 };

 /// A column of patterns in the matrix, where a column is the intuitive notion of "subpatterns that

@@ -68,10 +62,6 @@ impl<'p, 'tcx> PatternColumn<'p, 'tcx> {
         Ok(ctors_for_ty.split(pcx, column_ctors))
     }

-    fn iter(&self) -> impl Iterator<Item = &'p DeconstructedPat<'p, 'tcx>> + Captures<'_> {
-        self.patterns.iter().copied()
-    }
-
     /// Does specialization: given a constructor, this takes the patterns from the column that match
     /// the constructor, and outputs their fields.
     /// This returns one column per field of the constructor. They usually all have the same length

@@ -207,78 +197,25 @@ pub(crate) fn lint_nonexhaustive_missing_variants<'a, 'p, 'tcx>(
     Ok(())
 }

-/// Traverse the patterns to warn the user about ranges that overlap on their endpoints.
-#[instrument(level = "debug", skip(cx))]
 pub(crate) fn lint_overlapping_range_endpoints<'a, 'p, 'tcx>(
     cx: MatchCtxt<'a, 'p, 'tcx>,
-    column: &PatternColumn<'p, 'tcx>,
-) -> Result<(), ErrorGuaranteed> {
-    let Some(ty) = column.head_ty() else {
-        return Ok(());
-    };
-    let pcx = &PlaceCtxt::new_dummy(cx, ty);
-    let rcx: &RustcMatchCheckCtxt<'_, '_> = cx.tycx;
-
-    let set = column.analyze_ctors(pcx)?;
-
-    if matches!(ty.kind(), ty::Char | ty::Int(_) | ty::Uint(_)) {
-        let emit_lint = |overlap: &IntRange, this_span: Span, overlapped_spans: &[Span]| {
-            let overlap_as_pat = rcx.hoist_pat_range(overlap, ty);
-            let overlaps: Vec<_> = overlapped_spans
-                .iter()
-                .copied()
-                .map(|span| Overlap { range: overlap_as_pat.clone(), span })
-                .collect();
-            rcx.tcx.emit_spanned_lint(
-                lint::builtin::OVERLAPPING_RANGE_ENDPOINTS,
-                rcx.match_lint_level,
-                this_span,
-                OverlappingRangeEndpoints { overlap: overlaps, range: this_span },
-            );
-        };
-
-        // If two ranges overlapped, the split set will contain their intersection as a singleton.
-        let split_int_ranges = set.present.iter().filter_map(|c| c.as_int_range());
-        for overlap_range in split_int_ranges.clone() {
-            if overlap_range.is_singleton() {
-                let overlap: MaybeInfiniteInt = overlap_range.lo;
-                // Ranges that look like `lo..=overlap`.
-                let mut prefixes: SmallVec<[_; 1]> = Default::default();
-                // Ranges that look like `overlap..=hi`.
-                let mut suffixes: SmallVec<[_; 1]> = Default::default();
-                // Iterate on patterns that contained `overlap`.
-                for pat in column.iter() {
-                    let Constructor::IntRange(this_range) = pat.ctor() else { continue };
-                    let this_span = pat.data().unwrap().span;
-                    if this_range.is_singleton() {
-                        // Don't lint when one of the ranges is a singleton.
-                        continue;
-                    }
-                    if this_range.lo == overlap {
-                        // `this_range` looks like `overlap..=this_range.hi`; it overlaps with any
-                        // ranges that look like `lo..=overlap`.
-                        if !prefixes.is_empty() {
-                            emit_lint(overlap_range, this_span, &prefixes);
-                        }
-                        suffixes.push(this_span)
-                    } else if this_range.hi == overlap.plus_one() {
-                        // `this_range` looks like `this_range.lo..=overlap`; it overlaps with any
-                        // ranges that look like `overlap..=hi`.
-                        if !suffixes.is_empty() {
-                            emit_lint(overlap_range, this_span, &suffixes);
-                        }
-                        prefixes.push(this_span)
-                    }
-                }
-            }
-        }
-    } else {
-        // Recurse into the fields.
-        for ctor in set.present {
-            for col in column.specialize(pcx, &ctor) {
-                lint_overlapping_range_endpoints(cx, &col)?;
-            }
-        }
+    overlapping_range_endpoints: &[rustc::OverlappingRanges<'p, 'tcx>],
+) {
+    let rcx = cx.tycx;
+    for overlap in overlapping_range_endpoints {
+        let overlap_as_pat = rcx.hoist_pat_range(&overlap.overlaps_on, overlap.pat.ty());
+        let overlaps: Vec<_> = overlap
+            .overlaps_with
+            .iter()
+            .map(|pat| pat.data().unwrap().span)
+            .map(|span| errors::Overlap { range: overlap_as_pat.clone(), span })
+            .collect();
+        let pat_span = overlap.pat.data().unwrap().span;
+        rcx.tcx.emit_spanned_lint(
+            lint::builtin::OVERLAPPING_RANGE_ENDPOINTS,
+            rcx.match_lint_level,
+            pat_span,
+            errors::OverlappingRangeEndpoints { overlap: overlaps, range: pat_span },
+        );
     }
-    Ok(())
 }

@@ -34,6 +34,8 @@ pub type DeconstructedPat<'p, 'tcx> =
     crate::pat::DeconstructedPat<'p, RustcMatchCheckCtxt<'p, 'tcx>>;
 pub type MatchArm<'p, 'tcx> = crate::MatchArm<'p, RustcMatchCheckCtxt<'p, 'tcx>>;
 pub type MatchCtxt<'a, 'p, 'tcx> = crate::MatchCtxt<'a, 'p, RustcMatchCheckCtxt<'p, 'tcx>>;
+pub type OverlappingRanges<'p, 'tcx> =
+    crate::usefulness::OverlappingRanges<'p, RustcMatchCheckCtxt<'p, 'tcx>>;
 pub(crate) type PlaceCtxt<'a, 'p, 'tcx> =
     crate::usefulness::PlaceCtxt<'a, 'p, RustcMatchCheckCtxt<'p, 'tcx>>;
 pub(crate) type SplitConstructorSet<'p, 'tcx> =

@@ -712,10 +712,11 @@
 //! I (Nadrieril) prefer to put new tests in `ui/pattern/usefulness` unless there's a specific
 //! reason not to, for example if they crucially depend on a particular feature like `or_patterns`.

+use rustc_index::bit_set::BitSet;
 use smallvec::{smallvec, SmallVec};
 use std::fmt;

-use crate::constructor::{Constructor, ConstructorSet};
+use crate::constructor::{Constructor, ConstructorSet, IntRange};
 use crate::pat::{DeconstructedPat, PatOrWild, WitnessPat};
 use crate::{Captures, MatchArm, MatchCtxt, TypeCx};

@@ -911,6 +912,11 @@ struct MatrixRow<'p, Cx: TypeCx> {
     /// [`compute_exhaustiveness_and_usefulness`] if the arm is found to be useful.
     /// This is reset to `false` when specializing.
     useful: bool,
+    /// Tracks which rows above this one have an intersection with this one, i.e. such that there is
+    /// a value that matches both rows.
+    /// Note: Because of relevancy we may miss some intersections. The intersections we do find are
+    /// correct.
+    intersects: BitSet<usize>,
 }

 impl<'p, Cx: TypeCx> MatrixRow<'p, Cx> {

@@ -938,6 +944,7 @@ impl<'p, Cx: TypeCx> MatrixRow<'p, Cx> {
             parent_row: self.parent_row,
             is_under_guard: self.is_under_guard,
             useful: false,
+            intersects: BitSet::new_empty(0), // Initialized in `Matrix::expand_and_push`.
         })
     }

@@ -955,6 +962,7 @@ impl<'p, Cx: TypeCx> MatrixRow<'p, Cx> {
             parent_row,
             is_under_guard: self.is_under_guard,
             useful: false,
+            intersects: BitSet::new_empty(0), // Initialized in `Matrix::expand_and_push`.
         }
     }
 }

@@ -993,13 +1001,15 @@ struct Matrix<'p, Cx: TypeCx> {
 impl<'p, Cx: TypeCx> Matrix<'p, Cx> {
     /// Pushes a new row to the matrix. If the row starts with an or-pattern, this recursively
     /// expands it. Internal method, prefer [`Matrix::new`].
-    fn expand_and_push(&mut self, row: MatrixRow<'p, Cx>) {
+    fn expand_and_push(&mut self, mut row: MatrixRow<'p, Cx>) {
         if !row.is_empty() && row.head().is_or_pat() {
             // Expand nested or-patterns.
-            for new_row in row.expand_or_pat() {
+            for mut new_row in row.expand_or_pat() {
+                new_row.intersects = BitSet::new_empty(self.rows.len());
                 self.rows.push(new_row);
             }
         } else {
+            row.intersects = BitSet::new_empty(self.rows.len());
             self.rows.push(row);
         }
     }

@@ -1019,9 +1029,10 @@ impl<'p, Cx: TypeCx> Matrix<'p, Cx> {
         for (row_id, arm) in arms.iter().enumerate() {
             let v = MatrixRow {
                 pats: PatStack::from_pattern(arm.pat),
-                parent_row: row_id, // dummy, we won't read it
+                parent_row: row_id, // dummy, we don't read it
                 is_under_guard: arm.has_guard,
                 useful: false,
+                intersects: BitSet::new_empty(0), // Initialized in `Matrix::expand_and_push`.
             };
             matrix.expand_and_push(v);
         }

@@ -1317,6 +1328,83 @@ impl<Cx: TypeCx> WitnessMatrix<Cx> {
     }
 }

+/// Collect ranges that overlap like `lo..=overlap`/`overlap..=hi`. Must be called during
+/// exhaustiveness checking, if we find a singleton range after constructor splitting. This reuses
+/// row intersection information to only detect ranges that truly overlap.
+///
+/// If two ranges overlapped, the split set will contain their intersection as a singleton.
+/// Specialization will then select rows that match the overlap, and exhaustiveness will compute
+/// which rows have an intersection that includes the overlap. That gives us all the info we need to
+/// compute overlapping ranges without false positives.
+///
+/// We can however get false negatives because exhaustiveness does not explore all cases. See the
+/// section on relevancy at the top of the file.
+fn collect_overlapping_range_endpoints<'p, Cx: TypeCx>(
+    overlap_range: IntRange,
+    matrix: &Matrix<'p, Cx>,
+    specialized_matrix: &Matrix<'p, Cx>,
+    overlapping_range_endpoints: &mut Vec<OverlappingRanges<'p, Cx>>,
+) {
+    let overlap = overlap_range.lo;
+    // Ranges that look like `lo..=overlap`.
+    let mut prefixes: SmallVec<[_; 1]> = Default::default();
+    // Ranges that look like `overlap..=hi`.
+    let mut suffixes: SmallVec<[_; 1]> = Default::default();
+    // Iterate on patterns that contained `overlap`. We iterate on `specialized_matrix` which
+    // contains only rows that matched the current `ctor` as well as accurate intersection
+    // information. It doesn't contain the column that contains the range; that can be found in
+    // `matrix`.
+    for (child_row_id, child_row) in specialized_matrix.rows().enumerate() {
+        let PatOrWild::Pat(pat) = matrix.rows[child_row.parent_row].head() else { continue };
+        let Constructor::IntRange(this_range) = pat.ctor() else { continue };
+        // Don't lint when one of the ranges is a singleton.
+        if this_range.is_singleton() {
+            continue;
+        }
+        if this_range.lo == overlap {
+            // `this_range` looks like `overlap..=this_range.hi`; it overlaps with any
+            // ranges that look like `lo..=overlap`.
+            if !prefixes.is_empty() {
+                let overlaps_with: Vec<_> = prefixes
+                    .iter()
+                    .filter(|&&(other_child_row_id, _)| {
+                        child_row.intersects.contains(other_child_row_id)
+                    })
+                    .map(|&(_, pat)| pat)
+                    .collect();
+                if !overlaps_with.is_empty() {
+                    overlapping_range_endpoints.push(OverlappingRanges {
+                        pat,
+                        overlaps_on: overlap_range,
+                        overlaps_with,
+                    });
+                }
+            }
+            suffixes.push((child_row_id, pat))
+        } else if this_range.hi == overlap.plus_one() {
+            // `this_range` looks like `this_range.lo..=overlap`; it overlaps with any
+            // ranges that look like `overlap..=hi`.
+            if !suffixes.is_empty() {
+                let overlaps_with: Vec<_> = suffixes
+                    .iter()
+                    .filter(|&&(other_child_row_id, _)| {
+                        child_row.intersects.contains(other_child_row_id)
+                    })
+                    .map(|&(_, pat)| pat)
+                    .collect();
+                if !overlaps_with.is_empty() {
+                    overlapping_range_endpoints.push(OverlappingRanges {
+                        pat,
+                        overlaps_on: overlap_range,
+                        overlaps_with,
+                    });
+                }
+            }
+            prefixes.push((child_row_id, pat))
+        }
+    }
+}
+
 /// The core of the algorithm.
 ///
 /// This recursively computes witnesses of the non-exhaustiveness of `matrix` (if any). Also tracks

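At the user level, the overlaps this collector records look like the following (ordinary match code; the lint it ultimately feeds is `overlapping_range_endpoints`):

    fn classify(n: u32) -> &'static str {
        match n {
            0..=10 => "low",
            // `10` is covered by both arms: the shared endpoint is exactly the
            // singleton intersection the collector above looks for.
            10..=20 => "mid",
            _ => "high",
        }
    }
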
@@ -1335,6 +1423,7 @@ impl<Cx: TypeCx> WitnessMatrix<Cx> {
 fn compute_exhaustiveness_and_usefulness<'a, 'p, Cx: TypeCx>(
     mcx: MatchCtxt<'a, 'p, Cx>,
     matrix: &mut Matrix<'p, Cx>,
+    overlapping_range_endpoints: &mut Vec<OverlappingRanges<'p, Cx>>,
     is_top_level: bool,
 ) -> Result<WitnessMatrix<Cx>, Cx::Error> {
     debug_assert!(matrix.rows().all(|r| r.len() == matrix.column_count()));

@@ -1349,21 +1438,19 @@ fn compute_exhaustiveness_and_usefulness<'a, 'p, Cx: TypeCx>(
     let Some(ty) = matrix.head_ty() else {
         // The base case: there are no columns in the matrix. We are morally pattern-matching on ().
         // A row is useful iff it has no (unguarded) rows above it.
-        for row in matrix.rows_mut() {
-            // All rows are useful until they're not.
-            row.useful = true;
-            // When there's an unguarded row, the match is exhaustive and any subsequent row is not
-            // useful.
-            if !row.is_under_guard {
-                return Ok(WitnessMatrix::empty());
-            }
+        let mut useful = true; // Whether the next row is useful.
+        for (i, row) in matrix.rows_mut().enumerate() {
+            row.useful = useful;
+            row.intersects.insert_range(0..i);
+            // The next row stays useful if this one is under a guard.
+            useful &= row.is_under_guard;
         }
-        // No (unguarded) rows, so the match is not exhaustive. We return a new witness unless
-        // irrelevant.
-        return if matrix.wildcard_row_is_relevant {
+        return if useful && matrix.wildcard_row_is_relevant {
+            // The wildcard row is useful; the match is non-exhaustive.
             Ok(WitnessMatrix::unit_witness())
         } else {
-            // We choose to not report anything here; see at the top for details.
+            // Either the match is exhaustive, or we choose not to report anything because of
+            // relevancy. See at the top for details.
             Ok(WitnessMatrix::empty())
         };
     };

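Why `useful &= row.is_under_guard` above matters at the source level: a guarded arm does not shadow identical arms below it, since the guard can fail at runtime. A minimal illustration:

    fn demo(n: i32, flag: bool) -> &'static str {
        match n {
            0 if flag => "guarded zero", // guard may fail...
            0 => "plain zero",           // ...so this row is still useful
            _ => "other",
        }
    }
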
@@ -1416,7 +1503,12 @@ fn compute_exhaustiveness_and_usefulness<'a, 'p, Cx: TypeCx>(
         let ctor_is_relevant = matches!(ctor, Constructor::Missing) || missing_ctors.is_empty();
         let mut spec_matrix = matrix.specialize_constructor(pcx, &ctor, ctor_is_relevant);
         let mut witnesses = ensure_sufficient_stack(|| {
-            compute_exhaustiveness_and_usefulness(mcx, &mut spec_matrix, false)
+            compute_exhaustiveness_and_usefulness(
+                mcx,
+                &mut spec_matrix,
+                overlapping_range_endpoints,
+                false,
+            )
         })?;

         // Transform witnesses for `spec_matrix` into witnesses for `matrix`.

@@ -1424,10 +1516,34 @@ fn compute_exhaustiveness_and_usefulness<'a, 'p, Cx: TypeCx>(
         // Accumulate the found witnesses.
         ret.extend(witnesses);

-        // A parent row is useful if any of its children is.
         for child_row in spec_matrix.rows() {
-            let parent_row = &mut matrix.rows[child_row.parent_row];
-            parent_row.useful = parent_row.useful || child_row.useful;
+            let parent_row_id = child_row.parent_row;
+            let parent_row = &mut matrix.rows[parent_row_id];
+            // A parent row is useful if any of its children is.
+            parent_row.useful |= child_row.useful;
+            for child_intersection in child_row.intersects.iter() {
+                // Convert the intersecting ids into ids for the parent matrix.
+                let parent_intersection = spec_matrix.rows[child_intersection].parent_row;
+                // Note: self-intersection can happen with or-patterns.
+                if parent_intersection != parent_row_id {
+                    parent_row.intersects.insert(parent_intersection);
+                }
+            }
+        }
+
+        // Detect ranges that overlap on their endpoints.
+        if let Constructor::IntRange(overlap_range) = ctor {
+            if overlap_range.is_singleton()
+                && spec_matrix.rows.len() >= 2
+                && spec_matrix.rows.iter().any(|row| !row.intersects.is_empty())
+            {
+                collect_overlapping_range_endpoints(
+                    overlap_range,
+                    matrix,
+                    &spec_matrix,
+                    overlapping_range_endpoints,
+                );
+            }
         }
     }

@@ -1453,6 +1569,15 @@ pub enum Usefulness<'p, Cx: TypeCx> {
     Redundant,
 }

+/// Indicates that the range `pat` overlapped with all the ranges in `overlaps_with`, where the
+/// range they overlapped over is `overlaps_on`. We only detect singleton overlaps.
+#[derive(Clone, Debug)]
+pub struct OverlappingRanges<'p, Cx: TypeCx> {
+    pub pat: &'p DeconstructedPat<'p, Cx>,
+    pub overlaps_on: IntRange,
+    pub overlaps_with: Vec<&'p DeconstructedPat<'p, Cx>>,
+}
+
 /// The output of checking a match for exhaustiveness and arm usefulness.
 pub struct UsefulnessReport<'p, Cx: TypeCx> {
     /// For each arm of the input, whether that arm is useful after the arms above it.

@@ -1460,6 +1585,7 @@ pub struct UsefulnessReport<'p, Cx: TypeCx> {
     /// If the match is exhaustive, this is empty. If not, this contains witnesses for the lack of
     /// exhaustiveness.
     pub non_exhaustiveness_witnesses: Vec<WitnessPat<Cx>>,
+    pub overlapping_range_endpoints: Vec<OverlappingRanges<'p, Cx>>,
 }

 /// Computes whether a match is exhaustive and which of its arms are useful.

@@ -1470,9 +1596,14 @@ pub fn compute_match_usefulness<'p, Cx: TypeCx>(
     scrut_ty: Cx::Ty,
     scrut_validity: ValidityConstraint,
 ) -> Result<UsefulnessReport<'p, Cx>, Cx::Error> {
+    let mut overlapping_range_endpoints = Vec::new();
     let mut matrix = Matrix::new(arms, scrut_ty, scrut_validity);
-    let non_exhaustiveness_witnesses =
-        compute_exhaustiveness_and_usefulness(cx, &mut matrix, true)?;
+    let non_exhaustiveness_witnesses = compute_exhaustiveness_and_usefulness(
+        cx,
+        &mut matrix,
+        &mut overlapping_range_endpoints,
+        true,
+    )?;

     let non_exhaustiveness_witnesses: Vec<_> = non_exhaustiveness_witnesses.single_column();
     let arm_usefulness: Vec<_> = arms

@@ -1489,5 +1620,10 @@ pub fn compute_match_usefulness<'p, Cx: TypeCx>(
             (arm, usefulness)
         })
         .collect();
-    Ok(UsefulnessReport { arm_usefulness, non_exhaustiveness_witnesses })
+
+    Ok(UsefulnessReport {
+        arm_usefulness,
+        non_exhaustiveness_witnesses,
+        overlapping_range_endpoints,
+    })
 }

@@ -70,14 +70,6 @@ pub trait Encoder {
     }

     fn emit_raw_bytes(&mut self, s: &[u8]);
-
-    fn emit_enum_variant<F>(&mut self, v_id: usize, f: F)
-    where
-        F: FnOnce(&mut Self),
-    {
-        self.emit_usize(v_id);
-        f(self);
-    }
 }

 // Note: all the methods in this trait are infallible, which may be surprising.

@@ -132,10 +124,6 @@ pub trait Decoder {

     fn read_raw_bytes(&mut self, len: usize) -> &[u8];

-    // Although there is an `emit_enum_variant` method in `Encoder`, the code
-    // patterns in decoding are different enough to encoding that there is no
-    // need for a corresponding `read_enum_variant` method here.
-
     fn peek_byte(&self) -> u8;
     fn position(&self) -> usize;
 }

@@ -372,15 +360,18 @@ impl<'a, D: Decoder> Decodable<D> for Cow<'a, str> {
 impl<S: Encoder, T: Encodable<S>> Encodable<S> for Option<T> {
     fn encode(&self, s: &mut S) {
         match *self {
-            None => s.emit_enum_variant(0, |_| {}),
-            Some(ref v) => s.emit_enum_variant(1, |s| v.encode(s)),
+            None => s.emit_u8(0),
+            Some(ref v) => {
+                s.emit_u8(1);
+                v.encode(s);
+            }
         }
     }
 }

 impl<D: Decoder, T: Decodable<D>> Decodable<D> for Option<T> {
     fn decode(d: &mut D) -> Option<T> {
-        match d.read_usize() {
+        match d.read_u8() {
             0 => None,
             1 => Some(Decodable::decode(d)),
             _ => panic!("Encountered invalid discriminant while decoding `Option`."),

@ -391,15 +382,21 @@ impl<D: Decoder, T: Decodable<D>> Decodable<D> for Option<T> {
|
|||
impl<S: Encoder, T1: Encodable<S>, T2: Encodable<S>> Encodable<S> for Result<T1, T2> {
|
||||
fn encode(&self, s: &mut S) {
|
||||
match *self {
|
||||
Ok(ref v) => s.emit_enum_variant(0, |s| v.encode(s)),
|
||||
Err(ref v) => s.emit_enum_variant(1, |s| v.encode(s)),
|
||||
Ok(ref v) => {
|
||||
s.emit_u8(0);
|
||||
v.encode(s);
|
||||
}
|
||||
Err(ref v) => {
|
||||
s.emit_u8(1);
|
||||
v.encode(s);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl<D: Decoder, T1: Decodable<D>, T2: Decodable<D>> Decodable<D> for Result<T1, T2> {
|
||||
fn decode(d: &mut D) -> Result<T1, T2> {
|
||||
match d.read_usize() {
|
||||
match d.read_u8() {
|
||||
0 => Ok(T1::decode(d)),
|
||||
1 => Err(T2::decode(d)),
|
||||
_ => panic!("Encountered invalid discriminant while decoding `Result`."),
|
||||
|
|
|
|||
|
|
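The shape of the change: a two-variant enum only needs a single tag byte, so `emit_u8` replaces the `usize`-based `emit_enum_variant` helper. A self-contained sketch against a toy byte sink (none of these names are rustc_serialize's real API):

```rust
struct ByteEncoder {
    out: Vec<u8>,
}

impl ByteEncoder {
    fn emit_u8(&mut self, v: u8) {
        self.out.push(v);
    }
}

fn encode_option(enc: &mut ByteEncoder, v: Option<u8>) {
    match v {
        None => enc.emit_u8(0), // one tag byte instead of a full usize discriminant
        Some(x) => {
            enc.emit_u8(1);
            enc.emit_u8(x);
        }
    }
}

fn main() {
    let mut enc = ByteEncoder { out: Vec::new() };
    encode_option(&mut enc, Some(7));
    assert_eq!(enc.out, vec![1, 7]);
}
```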
@ -1146,6 +1146,7 @@ impl UnstableOptions {
        DiagCtxtFlags {
            can_emit_warnings,
            treat_err_as_bug: self.treat_err_as_bug,
            eagerly_emit_delayed_bugs: self.eagerly_emit_delayed_bugs,
            macro_backtrace: self.macro_backtrace,
            deduplicate_diagnostics: self.deduplicate_diagnostics,
            track_diagnostics: self.track_diagnostics,
@ -1379,6 +1380,8 @@ pub struct CheckCfg {
    pub exhaustive_values: bool,
    /// All the expected values for a config name
    pub expecteds: FxHashMap<Symbol, ExpectedValues<Symbol>>,
    /// Well known names (only used for diagnostics purposes)
    pub well_known_names: FxHashSet<Symbol>,
}

pub enum ExpectedValues<T> {
@ -1431,9 +1434,10 @@ impl CheckCfg {
        };

        macro_rules! ins {
            ($name:expr, $values:expr) => {
            ($name:expr, $values:expr) => {{
                self.well_known_names.insert($name);
                self.expecteds.entry($name).or_insert_with($values)
            };
            }};
        }

        // Symbols are inserted in alphabetical order as much as possible.
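Why the doubled braces: `{{ ... }}` makes the expansion a single block expression, so the macro can run a statement for its side effect and still be usable where an expression is expected. An illustrative standalone version (hypothetical names):

```rust
macro_rules! ins {
    ($name:expr, $value:expr) => {{
        // first statement runs for its side effect...
        println!("inserting {}", $name);
        // ...and the block evaluates to its final expression
        $value
    }};
}

fn main() {
    let v = ins!("key", 42); // expands to a block that evaluates to 42
    assert_eq!(v, 42);
}
```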
@ -1823,7 +1827,7 @@ pub fn rustc_optgroups() -> Vec<RustcOptGroup> {
            "Remap source names in all output (compiler messages and output files)",
            "FROM=TO",
        ),
        opt::multi("", "env", "Inject an environment variable", "VAR=VALUE"),
        opt::multi("", "env-set", "Inject an environment variable", "VAR=VALUE"),
    ]);
    opts
}
@ -2599,11 +2603,11 @@ fn parse_logical_env(
) -> FxIndexMap<String, String> {
    let mut vars = FxIndexMap::default();

    for arg in matches.opt_strs("env") {
    for arg in matches.opt_strs("env-set") {
        if let Some((name, val)) = arg.split_once('=') {
            vars.insert(name.to_string(), val.to_string());
        } else {
            early_dcx.early_fatal(format!("`--env`: specify value for variable `{arg}`"));
            early_dcx.early_fatal(format!("`--env-set`: specify value for variable `{arg}`"));
        }
    }

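The `VAR=VALUE` split is the whole parsing story behind the renamed flag. A minimal sketch using a plain `HashMap` in place of rustc's `FxIndexMap` (function name invented for illustration):

```rust
use std::collections::HashMap;

fn parse_env_set_args<'a>(
    args: impl IntoIterator<Item = &'a str>,
) -> Result<HashMap<String, String>, String> {
    let mut vars = HashMap::new();
    for arg in args {
        match arg.split_once('=') {
            Some((name, val)) => {
                vars.insert(name.to_string(), val.to_string());
            }
            None => return Err(format!("`--env-set`: specify value for variable `{arg}`")),
        }
    }
    Ok(vars)
}

fn main() {
    let vars = parse_env_set_args(["FOO=bar"]).unwrap();
    assert_eq!(vars["FOO"], "bar");
    assert!(parse_env_set_args(["FOO"]).is_err()); // missing `=VALUE` is rejected
}
```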
@ -1583,6 +1583,9 @@ options! {
        "version of DWARF debug information to emit (default: 2 or 4, depending on platform)"),
    dylib_lto: bool = (false, parse_bool, [UNTRACKED],
        "enables LTO for dylib crate type"),
    eagerly_emit_delayed_bugs: bool = (false, parse_bool, [UNTRACKED],
        "emit delayed bugs eagerly as errors instead of stashing them and emitting \
        them only if an error has not been emitted"),
    ehcont_guard: bool = (false, parse_bool, [TRACKED],
        "generate Windows EHCont Guard tables"),
    emit_stack_sizes: bool = (false, parse_bool, [UNTRACKED],

@ -170,6 +170,34 @@ pub fn find_best_match_for_name(
    find_best_match_for_name_impl(false, candidates, lookup, dist)
}

/// Find the best match for multiple words
///
/// This function is intended for use when the desired match would never be
/// returned due to a substring in `lookup` which is superfluous.
///
/// For example, when looking for the closest lint name to `clippy:missing_docs`,
/// we would find `clippy::erasing_op`, despite `missing_docs` existing and being a better suggestion.
/// `missing_docs` would have a larger edit distance because it does not contain the `clippy` tool prefix.
/// In order to find `missing_docs`, this function takes multiple lookup strings, computes the best match
/// for each and returns the match which had the lowest edit distance. In our example, `clippy:missing_docs` and
/// `missing_docs` would be `lookups`, enabling `missing_docs` to be the best match, as desired.
pub fn find_best_match_for_names(
    candidates: &[Symbol],
    lookups: &[Symbol],
    dist: Option<usize>,
) -> Option<Symbol> {
    lookups
        .iter()
        .map(|s| (s, find_best_match_for_name_impl(false, candidates, *s, dist)))
        .filter_map(|(s, r)| r.map(|r| (s, r)))
        .min_by(|(s1, r1), (s2, r2)| {
            let d1 = edit_distance(s1.as_str(), r1.as_str(), usize::MAX).unwrap();
            let d2 = edit_distance(s2.as_str(), r2.as_str(), usize::MAX).unwrap();
            d1.cmp(&d2)
        })
        .map(|(_, r)| r)
}

#[cold]
fn find_best_match_for_name_impl(
    use_substring_score: bool,

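The same idea in a self-contained form: score every candidate against each lookup, then keep the candidate with the globally smallest edit distance. The `levenshtein` helper below is a plain textbook implementation, not rustc's `edit_distance`:

```rust
fn levenshtein(a: &str, b: &str) -> usize {
    let (a, b): (Vec<char>, Vec<char>) = (a.chars().collect(), b.chars().collect());
    let mut prev: Vec<usize> = (0..=b.len()).collect();
    for (i, ca) in a.iter().enumerate() {
        let mut cur = vec![i + 1];
        for (j, cb) in b.iter().enumerate() {
            let cost = usize::from(ca != cb);
            // min of substitution, deletion, insertion
            cur.push((prev[j] + cost).min(prev[j + 1] + 1).min(cur[j] + 1));
        }
        prev = cur;
    }
    prev[b.len()]
}

fn best_match_for_lookups(candidates: &[&str], lookups: &[&str]) -> Option<String> {
    lookups
        .iter()
        .filter_map(|lookup| {
            candidates
                .iter()
                .map(|c| (levenshtein(lookup, c), c.to_string()))
                .min_by_key(|(d, _)| *d)
        })
        .min_by_key(|(d, _)| *d)
        .map(|(_, best)| best)
}

fn main() {
    let candidates = ["erasing_op", "missing_docs"];
    // The extra lookup without the tool prefix lets the better suggestion win.
    let lookups = ["clippy:missing_docs", "missing_docs"];
    assert_eq!(best_match_for_lookups(&candidates, &lookups), Some("missing_docs".into()));
}
```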
@ -203,18 +203,19 @@ impl Hash for RealFileName {
impl<S: Encoder> Encodable<S> for RealFileName {
    fn encode(&self, encoder: &mut S) {
        match *self {
            RealFileName::LocalPath(ref local_path) => encoder.emit_enum_variant(0, |encoder| {
            RealFileName::LocalPath(ref local_path) => {
                encoder.emit_u8(0);
                local_path.encode(encoder);
            }),
            }

            RealFileName::Remapped { ref local_path, ref virtual_name } => encoder
                .emit_enum_variant(1, |encoder| {
                    // For privacy and build reproducibility, we must not embed host-dependant path
                    // in artifacts if they have been remapped by --remap-path-prefix
                    assert!(local_path.is_none());
                    local_path.encode(encoder);
                    virtual_name.encode(encoder);
                }),
            RealFileName::Remapped { ref local_path, ref virtual_name } => {
                encoder.emit_u8(1);
                // For privacy and build reproducibility, we must not embed host-dependant path
                // in artifacts if they have been remapped by --remap-path-prefix
                assert!(local_path.is_none());
                local_path.encode(encoder);
                virtual_name.encode(encoder);
            }
        }
    }
}

@ -3396,19 +3396,22 @@ impl Hash for TargetTriple {
impl<S: Encoder> Encodable<S> for TargetTriple {
    fn encode(&self, s: &mut S) {
        match self {
            TargetTriple::TargetTriple(triple) => s.emit_enum_variant(0, |s| s.emit_str(triple)),
            TargetTriple::TargetJson { path_for_rustdoc: _, triple, contents } => s
                .emit_enum_variant(1, |s| {
                    s.emit_str(triple);
                    s.emit_str(contents)
                }),
            TargetTriple::TargetTriple(triple) => {
                s.emit_u8(0);
                s.emit_str(triple);
            }
            TargetTriple::TargetJson { path_for_rustdoc: _, triple, contents } => {
                s.emit_u8(1);
                s.emit_str(triple);
                s.emit_str(contents);
            }
        }
    }
}

impl<D: Decoder> Decodable<D> for TargetTriple {
    fn decode(d: &mut D) -> Self {
        match d.read_usize() {
        match d.read_u8() {
            0 => TargetTriple::TargetTriple(d.read_str().to_owned()),
            1 => TargetTriple::TargetJson {
                path_for_rustdoc: PathBuf::new(),

@ -5,6 +5,7 @@ edition = "2021"

[dependencies]
# tidy-alphabetical-start
bitflags = "2.4.1"
itertools = "0.11.0"
rustc_ast = { path = "../rustc_ast" }
rustc_attr = { path = "../rustc_attr" }

@ -19,6 +19,7 @@
#![feature(control_flow_enum)]
#![feature(extract_if)]
#![feature(let_chains)]
#![feature(option_take_if)]
#![feature(if_let_guard)]
#![feature(never_type)]
#![feature(type_alias_impl_trait)]

@ -171,7 +171,8 @@ impl<'a, 'tcx> InspectGoal<'a, 'tcx> {
        let mut candidates = vec![];
        let last_eval_step = match self.evaluation.evaluation.kind {
            inspect::CanonicalGoalEvaluationKind::Overflow
            | inspect::CanonicalGoalEvaluationKind::CycleInStack => {
            | inspect::CanonicalGoalEvaluationKind::CycleInStack
            | inspect::CanonicalGoalEvaluationKind::ProvisionalCacheHit => {
                warn!("unexpected root evaluation: {:?}", self.evaluation);
                return vec![];
            }

@ -118,6 +118,7 @@ pub(in crate::solve) enum WipGoalEvaluationKind<'tcx> {
pub(in crate::solve) enum WipCanonicalGoalEvaluationKind<'tcx> {
    Overflow,
    CycleInStack,
    ProvisionalCacheHit,
    Interned { revisions: &'tcx [inspect::GoalEvaluationStep<'tcx>] },
}

@ -126,6 +127,7 @@ impl std::fmt::Debug for WipCanonicalGoalEvaluationKind<'_> {
        match self {
            Self::Overflow => write!(f, "Overflow"),
            Self::CycleInStack => write!(f, "CycleInStack"),
            Self::ProvisionalCacheHit => write!(f, "ProvisionalCacheHit"),
            Self::Interned { revisions: _ } => f.debug_struct("Interned").finish_non_exhaustive(),
        }
    }
@ -151,6 +153,9 @@ impl<'tcx> WipCanonicalGoalEvaluation<'tcx> {
            WipCanonicalGoalEvaluationKind::CycleInStack => {
                inspect::CanonicalGoalEvaluationKind::CycleInStack
            }
            WipCanonicalGoalEvaluationKind::ProvisionalCacheHit => {
                inspect::CanonicalGoalEvaluationKind::ProvisionalCacheHit
            }
            WipCanonicalGoalEvaluationKind::Interned { revisions } => {
                inspect::CanonicalGoalEvaluationKind::Evaluation { revisions }
            }

@ -8,7 +8,7 @@ use rustc_infer::infer::InferCtxt;
use rustc_infer::traits::TraitEngineExt;
use rustc_infer::traits::{FulfillmentError, Obligation, TraitEngine};
use rustc_middle::infer::unify_key::{ConstVariableOrigin, ConstVariableOriginKind};
use rustc_middle::traits::{ObligationCause, Reveal};
use rustc_middle::traits::ObligationCause;
use rustc_middle::ty::{self, AliasTy, Ty, TyCtxt, UniverseIndex};
use rustc_middle::ty::{FallibleTypeFolder, TypeFolder, TypeSuperFoldable};
use rustc_middle::ty::{TypeFoldable, TypeVisitableExt};
@ -52,14 +52,16 @@ struct NormalizationFolder<'me, 'tcx> {
impl<'tcx> NormalizationFolder<'_, 'tcx> {
    fn normalize_alias_ty(
        &mut self,
        alias: AliasTy<'tcx>,
        alias_ty: Ty<'tcx>,
    ) -> Result<Ty<'tcx>, Vec<FulfillmentError<'tcx>>> {
        assert!(matches!(alias_ty.kind(), ty::Alias(..)));

        let infcx = self.at.infcx;
        let tcx = infcx.tcx;
        let recursion_limit = tcx.recursion_limit();
        if !recursion_limit.value_within_limit(self.depth) {
            self.at.infcx.err_ctxt().report_overflow_error(
                &alias.to_ty(tcx),
                &alias_ty,
                self.at.cause.span,
                true,
                |_| {},
@ -76,7 +78,11 @@ impl<'tcx> NormalizationFolder<'_, 'tcx> {
            tcx,
            self.at.cause.clone(),
            self.at.param_env,
            ty::NormalizesTo { alias, term: new_infer_ty.into() },
            ty::PredicateKind::AliasRelate(
                alias_ty.into(),
                new_infer_ty.into(),
                ty::AliasRelationDirection::Equate,
            ),
        );

        // Do not emit an error if normalization is known to fail but instead
@ -90,9 +96,12 @@ impl<'tcx> NormalizationFolder<'_, 'tcx> {
            return Err(errors);
        }
        let ty = infcx.resolve_vars_if_possible(new_infer_ty);
        ty.try_fold_with(self)?

        // Alias is guaranteed to be fully structurally resolved,
        // so we can super fold here.
        ty.try_super_fold_with(self)?
        } else {
        alias.to_ty(tcx).try_super_fold_with(self)?
        alias_ty.try_super_fold_with(self)?
        };

        self.depth -= 1;
@ -170,24 +179,18 @@ impl<'tcx> FallibleTypeFolder<TyCtxt<'tcx>> for NormalizationFolder<'_, 'tcx> {
    }

    fn try_fold_ty(&mut self, ty: Ty<'tcx>) -> Result<Ty<'tcx>, Self::Error> {
        let reveal = self.at.param_env.reveal();
        let infcx = self.at.infcx;
        debug_assert_eq!(ty, infcx.shallow_resolve(ty));
        if !needs_normalization(&ty, reveal) {
        if !ty.has_projections() {
            return Ok(ty);
        }

        // We don't normalize opaque types unless we have
        // `Reveal::All`, even if we're in the defining scope.
        let data = match *ty.kind() {
            ty::Alias(kind, alias_ty) if kind != ty::Opaque || reveal == Reveal::All => alias_ty,
            _ => return ty.try_super_fold_with(self),
        };
        let ty::Alias(..) = *ty.kind() else { return ty.try_super_fold_with(self) };

        if data.has_escaping_bound_vars() {
            let (data, mapped_regions, mapped_types, mapped_consts) =
                BoundVarReplacer::replace_bound_vars(infcx, &mut self.universes, data);
            let result = ensure_sufficient_stack(|| self.normalize_alias_ty(data))?;
        if ty.has_escaping_bound_vars() {
            let (ty, mapped_regions, mapped_types, mapped_consts) =
                BoundVarReplacer::replace_bound_vars(infcx, &mut self.universes, ty);
            let result = ensure_sufficient_stack(|| self.normalize_alias_ty(ty))?;
            Ok(PlaceholderReplacer::replace_placeholders(
                infcx,
                mapped_regions,
@ -197,7 +200,7 @@ impl<'tcx> FallibleTypeFolder<TyCtxt<'tcx>> for NormalizationFolder<'_, 'tcx> {
                result,
            ))
        } else {
            ensure_sufficient_stack(|| self.normalize_alias_ty(data))
            ensure_sufficient_stack(|| self.normalize_alias_ty(ty))
        }
    }

@ -11,29 +11,100 @@ use rustc_middle::traits::solve::{CanonicalInput, Certainty, EvaluationCache, Qu
use rustc_middle::ty;
use rustc_middle::ty::TyCtxt;
use rustc_session::Limit;
use std::collections::hash_map::Entry;
use std::mem;

rustc_index::newtype_index! {
    #[orderable]
    pub struct StackDepth {}
}

bitflags::bitflags! {
    /// Whether and how this goal has been used as the root of a
    /// cycle. We track the kind of cycle as we're otherwise forced
    /// to always rerun at least once.
    #[derive(Debug, Clone, Copy, PartialEq, Eq)]
    struct HasBeenUsed: u8 {
        const INDUCTIVE_CYCLE = 1 << 0;
        const COINDUCTIVE_CYCLE = 1 << 1;
    }
}

#[derive(Debug)]
struct StackEntry<'tcx> {
    input: CanonicalInput<'tcx>,

    available_depth: Limit,
    // The maximum depth reached by this stack entry, only up-to date
    // for the top of the stack and lazily updated for the rest.

    /// The maximum depth reached by this stack entry, only up-to date
    /// for the top of the stack and lazily updated for the rest.
    reached_depth: StackDepth,
    // In case of a cycle, the depth of the root.
    cycle_root_depth: StackDepth,

    /// Whether this entry is a non-root cycle participant.
    ///
    /// We must not move the result of non-root cycle participants to the
    /// global cache. See [SearchGraph::cycle_participants] for more details.
    /// We store the highest stack depth of a head of a cycle this goal is involved
    /// in. This necessary to soundly cache its provisional result.
    non_root_cycle_participant: Option<StackDepth>,

    encountered_overflow: bool,
    has_been_used: bool,

    has_been_used: HasBeenUsed,
    /// Starts out as `None` and gets set when rerunning this
    /// goal in case we encounter a cycle.
    provisional_result: Option<QueryResult<'tcx>>,
}

/// The provisional result for a goal which is not on the stack.
struct DetachedEntry<'tcx> {
    /// The head of the smallest non-trivial cycle involving this entry.
    ///
    /// Given the following rules, when proving `A` the head for
    /// the provisional entry of `C` would be `B`.
    /// ```plain
    /// A :- B
    /// B :- C
    /// C :- A + B + C
    /// ```
    head: StackDepth,
    result: QueryResult<'tcx>,
}

/// Stores the stack depth of a currently evaluated goal *and* already
/// computed results for goals which depend on other goals still on the stack.
///
/// The provisional result may depend on whether the stack above it is inductive
/// or coinductive. Because of this, we store separate provisional results for
/// each case. If an provisional entry is not applicable, it may be the case
/// that we already have provisional result while computing a goal. In this case
/// we prefer the provisional result to potentially avoid fixpoint iterations.
/// See tests/ui/traits/next-solver/cycles/mixed-cycles-2.rs for an example.
///
/// The provisional cache can theoretically result in changes to the observable behavior,
/// see tests/ui/traits/next-solver/cycles/provisional-cache-impacts-behavior.rs.
#[derive(Default)]
struct ProvisionalCacheEntry<'tcx> {
    stack_depth: Option<StackDepth>,
    with_inductive_stack: Option<DetachedEntry<'tcx>>,
    with_coinductive_stack: Option<DetachedEntry<'tcx>>,
}

impl<'tcx> ProvisionalCacheEntry<'tcx> {
    fn is_empty(&self) -> bool {
        self.stack_depth.is_none()
            && self.with_inductive_stack.is_none()
            && self.with_coinductive_stack.is_none()
    }
}

pub(super) struct SearchGraph<'tcx> {
    mode: SolverMode,
    local_overflow_limit: usize,
    /// The stack of goals currently being computed.
    ///
    /// An element is *deeper* in the stack if its index is *lower*.
    stack: IndexVec<StackDepth, StackEntry<'tcx>>,
    provisional_cache: FxHashMap<CanonicalInput<'tcx>, ProvisionalCacheEntry<'tcx>>,
    /// We put only the root goal of a coinductive cycle into the global cache.
    ///
    /// If we were to use that result when later trying to prove another cycle
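A sketch of the bitflags semantics the new `HasBeenUsed` type relies on (assuming the `bitflags` 2.x crate, as added to Cargo.toml in an earlier hunk): a cycle head can be used inductively, coinductively, or both, and `|=` accumulates that information.

```rust
bitflags::bitflags! {
    #[derive(Debug, Clone, Copy, PartialEq, Eq)]
    struct HasBeenUsed: u8 {
        const INDUCTIVE_CYCLE = 1 << 0;
        const COINDUCTIVE_CYCLE = 1 << 1;
    }
}

fn main() {
    let mut used = HasBeenUsed::empty();
    used |= HasBeenUsed::INDUCTIVE_CYCLE;
    // Equality against a single flag distinguishes a "pure" cycle kind...
    assert_eq!(used, HasBeenUsed::INDUCTIVE_CYCLE);
    used |= HasBeenUsed::COINDUCTIVE_CYCLE;
    // ...from a mixed one, which carries both bits.
    assert!(used.contains(HasBeenUsed::COINDUCTIVE_CYCLE));
    assert_ne!(used, HasBeenUsed::COINDUCTIVE_CYCLE);
    assert!(!used.is_empty());
}
```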
@ -44,23 +115,14 @@ struct StackEntry<'tcx> {
    cycle_participants: FxHashSet<CanonicalInput<'tcx>>,
}

pub(super) struct SearchGraph<'tcx> {
    mode: SolverMode,
    local_overflow_limit: usize,
    /// The stack of goals currently being computed.
    ///
    /// An element is *deeper* in the stack if its index is *lower*.
    stack: IndexVec<StackDepth, StackEntry<'tcx>>,
    stack_entries: FxHashMap<CanonicalInput<'tcx>, StackDepth>,
}

impl<'tcx> SearchGraph<'tcx> {
    pub(super) fn new(tcx: TyCtxt<'tcx>, mode: SolverMode) -> SearchGraph<'tcx> {
        Self {
            mode,
            local_overflow_limit: tcx.recursion_limit().0.checked_ilog2().unwrap_or(0) as usize,
            stack: Default::default(),
            stack_entries: Default::default(),
            provisional_cache: Default::default(),
            cycle_participants: Default::default(),
        }
    }

@ -89,7 +151,6 @@ impl<'tcx> SearchGraph<'tcx> {
    /// would cause us to not track overflow and recursion depth correctly.
    fn pop_stack(&mut self) -> StackEntry<'tcx> {
        let elem = self.stack.pop().unwrap();
        assert!(self.stack_entries.remove(&elem.input).is_some());
        if let Some(last) = self.stack.raw.last_mut() {
            last.reached_depth = last.reached_depth.max(elem.reached_depth);
            last.encountered_overflow |= elem.encountered_overflow;
@ -109,7 +170,13 @@ impl<'tcx> SearchGraph<'tcx> {
    }

    pub(super) fn is_empty(&self) -> bool {
        self.stack.is_empty()
        if self.stack.is_empty() {
            debug_assert!(self.provisional_cache.is_empty());
            debug_assert!(self.cycle_participants.is_empty());
            true
        } else {
            false
        }
    }

    pub(super) fn current_goal_is_normalizes_to(&self) -> bool {
@ -146,6 +213,52 @@ impl<'tcx> SearchGraph<'tcx> {
        }
    }

    fn stack_coinductive_from(
        tcx: TyCtxt<'tcx>,
        stack: &IndexVec<StackDepth, StackEntry<'tcx>>,
        head: StackDepth,
    ) -> bool {
        stack.raw[head.index()..]
            .iter()
            .all(|entry| entry.input.value.goal.predicate.is_coinductive(tcx))
    }

    // When encountering a solver cycle, the result of the current goal
    // depends on goals lower on the stack.
    //
    // We have to therefore be careful when caching goals. Only the final result
    // of the cycle root, i.e. the lowest goal on the stack involved in this cycle,
    // is moved to the global cache while all others are stored in a provisional cache.
    //
    // We update both the head of this cycle to rerun its evaluation until
    // we reach a fixpoint and all other cycle participants to make sure that
    // their result does not get moved to the global cache.
    fn tag_cycle_participants(
        stack: &mut IndexVec<StackDepth, StackEntry<'tcx>>,
        cycle_participants: &mut FxHashSet<CanonicalInput<'tcx>>,
        usage_kind: HasBeenUsed,
        head: StackDepth,
    ) {
        stack[head].has_been_used |= usage_kind;
        debug_assert!(!stack[head].has_been_used.is_empty());
        for entry in &mut stack.raw[head.index() + 1..] {
            entry.non_root_cycle_participant = entry.non_root_cycle_participant.max(Some(head));
            cycle_participants.insert(entry.input);
        }
    }

    fn clear_dependent_provisional_results(
        provisional_cache: &mut FxHashMap<CanonicalInput<'tcx>, ProvisionalCacheEntry<'tcx>>,
        head: StackDepth,
    ) {
        #[allow(rustc::potential_query_instability)]
        provisional_cache.retain(|_, entry| {
            entry.with_coinductive_stack.take_if(|p| p.head == head);
            entry.with_inductive_stack.take_if(|p| p.head == head);
            !entry.is_empty()
        });
    }

    /// Probably the most involved method of the whole solver.
    ///
    /// Given some goal which is proven via the `prove_goal` closure, this
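`clear_dependent_provisional_results` leans on `Option::take_if`, which is why the `option_take_if` feature gate appears in an earlier hunk (the method was later stabilized, in Rust 1.80). Its semantics in isolation:

```rust
fn main() {
    let mut entry = Some(3);
    // Predicate holds: the value is moved out, leaving None behind.
    assert_eq!(entry.take_if(|v| *v == 3), Some(3));
    assert_eq!(entry, None);

    let mut kept = Some(4);
    // Predicate fails: the Option is left untouched.
    assert_eq!(kept.take_if(|v| *v == 3), None);
    assert_eq!(kept, Some(4));
}
```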
@ -200,82 +313,79 @@ impl<'tcx> SearchGraph<'tcx> {
            return result;
        }

        // Check whether we're in a cycle.
        match self.stack_entries.entry(input) {
        // Check whether the goal is in the provisional cache.
        // The provisional result may rely on the path to its cycle roots,
        // so we have to check the path of the current goal matches that of
        // the cache entry.
        let cache_entry = self.provisional_cache.entry(input).or_default();
        if let Some(entry) = cache_entry
            .with_coinductive_stack
            .as_ref()
            .filter(|p| Self::stack_coinductive_from(tcx, &self.stack, p.head))
            .or_else(|| {
                cache_entry
                    .with_inductive_stack
                    .as_ref()
                    .filter(|p| !Self::stack_coinductive_from(tcx, &self.stack, p.head))
            })
        {
            // We have a nested goal which is already in the provisional cache, use
            // its result. We do not provide any usage kind as that should have been
            // already set correctly while computing the cache entry.
            inspect
                .goal_evaluation_kind(inspect::WipCanonicalGoalEvaluationKind::ProvisionalCacheHit);
            Self::tag_cycle_participants(
                &mut self.stack,
                &mut self.cycle_participants,
                HasBeenUsed::empty(),
                entry.head,
            );
            return entry.result;
        } else if let Some(stack_depth) = cache_entry.stack_depth {
            debug!("encountered cycle with depth {stack_depth:?}");
            // We have a nested goal which directly relies on a goal deeper in the stack.
            //
            // We start by tagging all cycle participants, as that's necessary for caching.
            //
            // Finally we can return either the provisional response or the initial response
            // in case we're in the first fixpoint iteration for this goal.
            inspect.goal_evaluation_kind(inspect::WipCanonicalGoalEvaluationKind::CycleInStack);
            let is_coinductive_cycle = Self::stack_coinductive_from(tcx, &self.stack, stack_depth);
            let usage_kind = if is_coinductive_cycle {
                HasBeenUsed::COINDUCTIVE_CYCLE
            } else {
                HasBeenUsed::INDUCTIVE_CYCLE
            };
            Self::tag_cycle_participants(
                &mut self.stack,
                &mut self.cycle_participants,
                usage_kind,
                stack_depth,
            );

            // Return the provisional result or, if we're in the first iteration,
            // start with no constraints.
            return if let Some(result) = self.stack[stack_depth].provisional_result {
                result
            } else if is_coinductive_cycle {
                Self::response_no_constraints(tcx, input, Certainty::Yes)
            } else {
                Self::response_no_constraints(tcx, input, Certainty::OVERFLOW)
            };
        } else {
            // No entry, we push this goal on the stack and try to prove it.
            Entry::Vacant(v) => {
                let depth = self.stack.next_index();
                let entry = StackEntry {
                    input,
                    available_depth,
                    reached_depth: depth,
                    cycle_root_depth: depth,
                    encountered_overflow: false,
                    has_been_used: false,
                    provisional_result: None,
                    cycle_participants: Default::default(),
                };
                assert_eq!(self.stack.push(entry), depth);
                v.insert(depth);
            }
            // We have a nested goal which relies on a goal `root` deeper in the stack.
            //
            // We first store that we may have to reprove `root` in case the provisional
            // response is not equal to the final response. We also update the depth of all
            // goals which recursively depend on our current goal to depend on `root`
            // instead.
            //
            // Finally we can return either the provisional response for that goal if we have a
            // coinductive cycle or an ambiguous result if the cycle is inductive.
            Entry::Occupied(entry) => {
                inspect.goal_evaluation_kind(inspect::WipCanonicalGoalEvaluationKind::CycleInStack);

                let stack_depth = *entry.get();
                debug!("encountered cycle with depth {stack_depth:?}");
                // We start by updating the root depth of all cycle participants, and
                // add all cycle participants to the root.
                let root_depth = self.stack[stack_depth].cycle_root_depth;
                let (prev, participants) = self.stack.raw.split_at_mut(stack_depth.as_usize() + 1);
                let root = &mut prev[root_depth.as_usize()];
                for entry in participants {
                    debug_assert!(entry.cycle_root_depth >= root_depth);
                    entry.cycle_root_depth = root_depth;
                    root.cycle_participants.insert(entry.input);
                    // FIXME(@lcnr): I believe that this line is needed as we could
                    // otherwise access a cache entry for the root of a cycle while
                    // computing the result for a cycle participant. This can result
                    // in unstable results due to incompleteness.
                    //
                    // However, a test for this would be an even more complex version of
                    // tests/ui/traits/next-solver/coinduction/incompleteness-unstable-result.rs.
                    // I did not bother to write such a test and we have no regression test
                    // for this. It would be good to have such a test :)
                    #[allow(rustc::potential_query_instability)]
                    root.cycle_participants.extend(entry.cycle_participants.drain());
                }

                // If we're in a cycle, we have to retry proving the cycle head
                // until we reach a fixpoint. It is not enough to simply retry the
                // `root` goal of this cycle.
                //
                // See tests/ui/traits/next-solver/cycles/fixpoint-rerun-all-cycle-heads.rs
                // for an example.
                self.stack[stack_depth].has_been_used = true;
                return if let Some(result) = self.stack[stack_depth].provisional_result {
                    result
                } else {
                    // If we don't have a provisional result yet we're in the first iteration,
                    // so we start with no constraints.
                    let is_coinductive = self.stack.raw[stack_depth.index()..]
                        .iter()
                        .all(|entry| entry.input.value.goal.predicate.is_coinductive(tcx));
                    if is_coinductive {
                        Self::response_no_constraints(tcx, input, Certainty::Yes)
                    } else {
                        Self::response_no_constraints(tcx, input, Certainty::OVERFLOW)
                    }
                };
            }
            let depth = self.stack.next_index();
            let entry = StackEntry {
                input,
                available_depth,
                reached_depth: depth,
                non_root_cycle_participant: None,
                encountered_overflow: false,
                has_been_used: HasBeenUsed::empty(),
                provisional_result: None,
            };
            assert_eq!(self.stack.push(entry), depth);
            cache_entry.stack_depth = Some(depth);
        }

        // This is for global caching, so we properly track query dependencies.
@ -290,29 +400,58 @@ impl<'tcx> SearchGraph<'tcx> {
            // point we are done.
            for _ in 0..self.local_overflow_limit() {
                let result = prove_goal(self, inspect);

                // Check whether the current goal is the root of a cycle and whether
                // we have to rerun because its provisional result differed from the
                // final result.
                let stack_entry = self.pop_stack();
                debug_assert_eq!(stack_entry.input, input);
                if stack_entry.has_been_used
                    && stack_entry.provisional_result.map_or(true, |r| r != result)
                {
                    // If so, update its provisional result and reevaluate it.

                // If the current goal is not the root of a cycle, we are done.
                if stack_entry.has_been_used.is_empty() {
                    return (stack_entry, result);
                }

                // If it is a cycle head, we have to keep trying to prove it until
                // we reach a fixpoint. We need to do so for all cycle heads,
                // not only for the root.
                //
                // See tests/ui/traits/next-solver/cycles/fixpoint-rerun-all-cycle-heads.rs
                // for an example.

                // Start by clearing all provisional cache entries which depend on this
                // the current goal.
                Self::clear_dependent_provisional_results(
                    &mut self.provisional_cache,
                    self.stack.next_index(),
                );

                // Check whether we reached a fixpoint, either because the final result
                // is equal to the provisional result of the previous iteration, or because
                // this was only the root of either coinductive or inductive cycles, and the
                // final result is equal to the initial response for that case.
                let reached_fixpoint = if let Some(r) = stack_entry.provisional_result {
                    r == result
                } else if stack_entry.has_been_used == HasBeenUsed::COINDUCTIVE_CYCLE {
                    Self::response_no_constraints(tcx, input, Certainty::Yes) == result
                } else if stack_entry.has_been_used == HasBeenUsed::INDUCTIVE_CYCLE {
                    Self::response_no_constraints(tcx, input, Certainty::OVERFLOW) == result
                } else {
                    false
                };

                // If we did not reach a fixpoint, update the provisional result and reevaluate.
                if reached_fixpoint {
                    return (stack_entry, result);
                } else {
                    let depth = self.stack.push(StackEntry {
                        has_been_used: false,
                        has_been_used: HasBeenUsed::empty(),
                        provisional_result: Some(result),
                        ..stack_entry
                    });
                    assert_eq!(self.stack_entries.insert(input, depth), None);
                } else {
                    return (stack_entry, result);
                    debug_assert_eq!(self.provisional_cache[&input].stack_depth, Some(depth));
                }
            }

            debug!("canonical cycle overflow");
            let current_entry = self.pop_stack();
            debug_assert!(current_entry.has_been_used.is_empty());
            let result = Self::response_no_constraints(tcx, input, Certainty::OVERFLOW);
            (current_entry, result)
        });
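The control flow above is a classic fixpoint loop. Stripped of all solver details, its shape is roughly the following (illustrative names only, no rustc APIs):

```rust
// Seed a provisional result, recompute, and stop once the result stops
// changing or an iteration budget is exhausted (the "overflow" case).
fn fixpoint<R: PartialEq>(
    initial: R,
    limit: usize,
    mut step: impl FnMut(&R) -> R,
) -> Option<R> {
    let mut provisional = initial;
    for _ in 0..limit {
        let result = step(&provisional);
        if result == provisional {
            return Some(result); // reached a fixpoint
        }
        provisional = result; // rerun with the updated provisional result
    }
    None // overflow: no fixpoint within the budget
}

fn main() {
    // Converges: x -> min(x + 1, 10) has fixpoint 10.
    assert_eq!(fixpoint(0u32, 64, |x| (x + 1).min(10)), Some(10));
}
```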
@ -322,26 +461,35 @@ impl<'tcx> SearchGraph<'tcx> {
        // We're now done with this goal. In case this goal is involved in a larger cycle
        // do not remove it from the provisional cache and update its provisional result.
        // We only add the root of cycles to the global cache.
        //
        // It is not possible for any nested goal to depend on something deeper on the
        // stack, as this would have also updated the depth of the current goal.
        if final_entry.cycle_root_depth == self.stack.next_index() {
        if let Some(head) = final_entry.non_root_cycle_participant {
            let coinductive_stack = Self::stack_coinductive_from(tcx, &self.stack, head);

            let entry = self.provisional_cache.get_mut(&input).unwrap();
            entry.stack_depth = None;
            if coinductive_stack {
                entry.with_coinductive_stack = Some(DetachedEntry { head, result });
            } else {
                entry.with_inductive_stack = Some(DetachedEntry { head, result });
            }
        } else {
            self.provisional_cache.remove(&input);
            let reached_depth = final_entry.reached_depth.as_usize() - self.stack.len();
            let cycle_participants = mem::take(&mut self.cycle_participants);
            // When encountering a cycle, both inductive and coinductive, we only
            // move the root into the global cache. We also store all other cycle
            // participants involved.
            //
            // We disable the global cache entry of the root goal if a cycle
            // We must not use the global cache entry of a root goal if a cycle
            // participant is on the stack. This is necessary to prevent unstable
            // results. See the comment of `StackEntry::cycle_participants` for
            // results. See the comment of `SearchGraph::cycle_participants` for
            // more details.
            let reached_depth = final_entry.reached_depth.as_usize() - self.stack.len();
            self.global_cache(tcx).insert(
                tcx,
                input,
                proof_tree,
                reached_depth,
                final_entry.encountered_overflow,
                final_entry.cycle_participants,
                cycle_participants,
                dep_node,
                result,
            )

@ -26,7 +26,7 @@ use rustc_middle::ty::{self, ImplSubject, Ty, TyCtxt, TypeVisitableExt};
use rustc_middle::ty::{GenericArgs, GenericArgsRef};
use rustc_session::lint::builtin::COHERENCE_LEAK_CHECK;
use rustc_session::lint::builtin::ORDER_DEPENDENT_TRAIT_OBJECTS;
use rustc_span::{Span, DUMMY_SP};
use rustc_span::{ErrorGuaranteed, Span, DUMMY_SP};

use super::util;
use super::SelectionContext;
@ -258,7 +258,7 @@ fn fulfill_implication<'tcx>(
pub(super) fn specialization_graph_provider(
    tcx: TyCtxt<'_>,
    trait_id: DefId,
) -> specialization_graph::Graph {
) -> Result<&'_ specialization_graph::Graph, ErrorGuaranteed> {
    let mut sg = specialization_graph::Graph::new();
    let overlap_mode = specialization_graph::OverlapMode::get(tcx, trait_id);

@ -271,6 +271,8 @@ pub(super) fn specialization_graph_provider(
    trait_impls
        .sort_unstable_by_key(|def_id| (-(def_id.krate.as_u32() as i64), def_id.index.index()));

    let mut errored = Ok(());

    for impl_def_id in trait_impls {
        if let Some(impl_def_id) = impl_def_id.as_local() {
            // This is where impl overlap checking happens:
@ -283,15 +285,21 @@ pub(super) fn specialization_graph_provider(
            };

            if let Some(overlap) = overlap {
                report_overlap_conflict(tcx, overlap, impl_def_id, used_to_be_allowed, &mut sg);
                errored = errored.and(report_overlap_conflict(
                    tcx,
                    overlap,
                    impl_def_id,
                    used_to_be_allowed,
                ));
            }
        } else {
            let parent = tcx.impl_parent(impl_def_id).unwrap_or(trait_id);
            sg.record_impl_from_cstore(tcx, parent, impl_def_id)
        }
    }
    errored?;

    sg
    Ok(tcx.arena.alloc(sg))
}

// This function is only used when
@ -304,36 +312,31 @@ fn report_overlap_conflict<'tcx>(
    overlap: OverlapError<'tcx>,
    impl_def_id: LocalDefId,
    used_to_be_allowed: Option<FutureCompatOverlapErrorKind>,
    sg: &mut specialization_graph::Graph,
) {
) -> Result<(), ErrorGuaranteed> {
    let impl_polarity = tcx.impl_polarity(impl_def_id.to_def_id());
    let other_polarity = tcx.impl_polarity(overlap.with_impl);
    match (impl_polarity, other_polarity) {
        (ty::ImplPolarity::Negative, ty::ImplPolarity::Positive) => {
            report_negative_positive_conflict(
            Err(report_negative_positive_conflict(
                tcx,
                &overlap,
                impl_def_id,
                impl_def_id.to_def_id(),
                overlap.with_impl,
                sg,
            );
            ))
        }

        (ty::ImplPolarity::Positive, ty::ImplPolarity::Negative) => {
            report_negative_positive_conflict(
            Err(report_negative_positive_conflict(
                tcx,
                &overlap,
                impl_def_id,
                overlap.with_impl,
                impl_def_id.to_def_id(),
                sg,
            );
            ))
        }

        _ => {
            report_conflicting_impls(tcx, overlap, impl_def_id, used_to_be_allowed, sg);
        }
        _ => report_conflicting_impls(tcx, overlap, impl_def_id, used_to_be_allowed),
    }
}

@ -343,16 +346,16 @@ fn report_negative_positive_conflict<'tcx>(
    local_impl_def_id: LocalDefId,
    negative_impl_def_id: DefId,
    positive_impl_def_id: DefId,
    sg: &mut specialization_graph::Graph,
) {
    let err = tcx.dcx().create_err(NegativePositiveConflict {
        impl_span: tcx.def_span(local_impl_def_id),
        trait_desc: overlap.trait_ref,
        self_ty: overlap.self_ty,
        negative_impl_span: tcx.span_of_impl(negative_impl_def_id),
        positive_impl_span: tcx.span_of_impl(positive_impl_def_id),
    });
    sg.has_errored = Some(err.emit());
) -> ErrorGuaranteed {
    tcx.dcx()
        .create_err(NegativePositiveConflict {
            impl_span: tcx.def_span(local_impl_def_id),
            trait_desc: overlap.trait_ref,
            self_ty: overlap.self_ty,
            negative_impl_span: tcx.span_of_impl(negative_impl_def_id),
            positive_impl_span: tcx.span_of_impl(positive_impl_def_id),
        })
        .emit()
}

fn report_conflicting_impls<'tcx>(
@ -360,8 +363,7 @@ fn report_conflicting_impls<'tcx>(
    overlap: OverlapError<'tcx>,
    impl_def_id: LocalDefId,
    used_to_be_allowed: Option<FutureCompatOverlapErrorKind>,
    sg: &mut specialization_graph::Graph,
) {
) -> Result<(), ErrorGuaranteed> {
    let impl_span = tcx.def_span(impl_def_id);

    // Work to be done after we've built the DiagnosticBuilder. We have to define it
@ -429,14 +431,11 @@ fn report_conflicting_impls<'tcx>(
                let mut err = tcx.dcx().struct_span_err(impl_span, msg);
                err.code(error_code!(E0119));
                decorate(tcx, &overlap, impl_span, &mut err);
                Some(err.emit())
                err.emit()
            } else {
                Some(
                    tcx.dcx()
                        .span_delayed_bug(impl_span, "impl should have failed the orphan check"),
                )
                tcx.dcx().span_delayed_bug(impl_span, "impl should have failed the orphan check")
            };
            sg.has_errored = reported;
            Err(reported)
        }
        Some(kind) => {
            let lint = match kind {
@ -452,8 +451,9 @@ fn report_conflicting_impls<'tcx>(
                    decorate(tcx, &overlap, impl_span, err);
                },
            );
            Ok(())
        }
    };
    }
}

/// Recovers the "impl X for Y" signature from `impl_def_id` and returns it as a

@ -22,8 +22,7 @@ impl<'tcx> StructurallyNormalizeExt<'tcx> for At<'_, 'tcx> {
        assert!(!ty.is_ty_var(), "should have resolved vars before calling");

        if self.infcx.next_trait_solver() {
            // FIXME(-Znext-solver): Should we resolve opaques here?
            let ty::Alias(ty::Projection | ty::Inherent | ty::Weak, _) = *ty.kind() else {
            let ty::Alias(..) = *ty.kind() else {
                return Ok(ty);
            };

@ -963,7 +963,7 @@ pub enum PointerCoercion {
    /// Go from a safe fn pointer to an unsafe fn pointer.
    UnsafeFnPointer,

    /// Go from a non-capturing closure to an fn pointer or an unsafe fn pointer.
    /// Go from a non-capturing closure to a fn pointer or an unsafe fn pointer.
    /// It cannot convert a closure that requires unsafe.
    ClosureFnPointer(Safety),

@ -1037,21 +1037,24 @@ impl Place {
    /// locals from the function body where this place originates from.
    pub fn ty(&self, locals: &[LocalDecl]) -> Result<Ty, Error> {
        let start_ty = locals[self.local].ty;
        self.projection.iter().fold(Ok(start_ty), |place_ty, elem| {
            let ty = place_ty?;
            match elem {
                ProjectionElem::Deref => Self::deref_ty(ty),
                ProjectionElem::Field(_idx, fty) => Ok(*fty),
                ProjectionElem::Index(_) | ProjectionElem::ConstantIndex { .. } => {
                    Self::index_ty(ty)
                }
                ProjectionElem::Subslice { from, to, from_end } => {
                    Self::subslice_ty(ty, from, to, from_end)
                }
                ProjectionElem::Downcast(_) => Ok(ty),
                ProjectionElem::OpaqueCast(ty) | ProjectionElem::Subtype(ty) => Ok(*ty),
        self.projection.iter().fold(Ok(start_ty), |place_ty, elem| elem.ty(place_ty?))
    }
}

impl ProjectionElem {
    /// Get the expected type after applying this projection to a given place type.
    pub fn ty(&self, place_ty: Ty) -> Result<Ty, Error> {
        let ty = place_ty;
        match &self {
            ProjectionElem::Deref => Self::deref_ty(ty),
            ProjectionElem::Field(_idx, fty) => Ok(*fty),
            ProjectionElem::Index(_) | ProjectionElem::ConstantIndex { .. } => Self::index_ty(ty),
            ProjectionElem::Subslice { from, to, from_end } => {
                Self::subslice_ty(ty, from, to, from_end)
            }
            })
            ProjectionElem::Downcast(_) => Ok(ty),
            ProjectionElem::OpaqueCast(ty) | ProjectionElem::Subtype(ty) => Ok(*ty),
        }
    }

    fn index_ty(ty: Ty) -> Result<Ty, Error> {

|
|||
|
||||
use crate::mir::*;
|
||||
use crate::ty::{Const, GenericArgs, Region, Ty};
|
||||
use crate::{Opaque, Span};
|
||||
use crate::{Error, Opaque, Span};
|
||||
|
||||
pub trait MirVisitor {
|
||||
fn visit_body(&mut self, body: &Body) {
|
||||
|
|
@ -76,12 +76,14 @@ pub trait MirVisitor {
|
|||
self.super_place(place, ptx, location)
|
||||
}
|
||||
|
||||
fn visit_projection_elem(
|
||||
fn visit_projection_elem<'a>(
|
||||
&mut self,
|
||||
place_ref: PlaceRef<'a>,
|
||||
elem: &ProjectionElem,
|
||||
ptx: PlaceContext,
|
||||
location: Location,
|
||||
) {
|
||||
let _ = place_ref;
|
||||
self.super_projection_elem(elem, ptx, location);
|
||||
}
|
||||
|
||||
|
|
@ -284,8 +286,9 @@ pub trait MirVisitor {
|
|||
let _ = ptx;
|
||||
self.visit_local(&place.local, ptx, location);
|
||||
|
||||
for elem in &place.projection {
|
||||
self.visit_projection_elem(elem, ptx, location);
|
||||
for (idx, elem) in place.projection.iter().enumerate() {
|
||||
let place_ref = PlaceRef { local: place.local, projection: &place.projection[..idx] };
|
||||
self.visit_projection_elem(place_ref, elem, ptx, location);
|
||||
}
|
||||
}
|
||||
|
||||
|
|
@ -453,6 +456,19 @@ impl Location {
|
|||
}
|
||||
}
|
||||
|
||||
/// Reference to a place used to represent a partial projection.
|
||||
pub struct PlaceRef<'a> {
|
||||
pub local: Local,
|
||||
pub projection: &'a [ProjectionElem],
|
||||
}
|
||||
|
||||
impl<'a> PlaceRef<'a> {
|
||||
/// Get the type of this place.
|
||||
pub fn ty(&self, locals: &[LocalDecl]) -> Result<Ty, Error> {
|
||||
self.projection.iter().fold(Ok(locals[self.local].ty), |place_ty, elem| elem.ty(place_ty?))
|
||||
}
|
||||
}
|
||||
|
||||
/// Information about a place's usage.
|
||||
#[derive(Copy, Clone, Debug, PartialEq, Eq, Hash)]
|
||||
pub struct PlaceContext {
|
||||
|
|
|
|||
|
|
@ -552,7 +552,6 @@ fn handle_reserve(result: Result<(), TryReserveError>) {
|
|||
// `> isize::MAX` bytes will surely fail. On 32-bit and 16-bit we need to add
|
||||
// an extra guard for this in case we're running on a platform which can use
|
||||
// all 4GB in user-space, e.g., PAE or x32.
|
||||
|
||||
#[inline]
|
||||
fn alloc_guard(alloc_size: usize) -> Result<(), TryReserveError> {
|
||||
if usize::BITS < 64 && alloc_size > isize::MAX as usize {
|
||||
|
|
|
|||
|
|
@ -55,12 +55,7 @@ fn test_btree_map() {

    require_send_sync(async {
        let _v = None::<
            alloc::collections::btree_map::ExtractIf<
                '_,
                &u32,
                &u32,
                fn(&&u32, &mut &u32) -> bool,
            >,
            alloc::collections::btree_map::ExtractIf<'_, &u32, &u32, fn(&&u32, &mut &u32) -> bool>,
        >;
        async {}.await;
    });

@ -1,7 +1,7 @@
use core::alloc::{Allocator, Layout};
use core::{assert_eq, assert_ne};
use core::num::NonZeroUsize;
use core::ptr::NonNull;
use core::{assert_eq, assert_ne};
use std::alloc::System;
use std::assert_matches::assert_matches;
use std::borrow::Cow;
@ -1212,7 +1212,7 @@ fn test_in_place_specialization_step_up_down() {
    assert_eq!(sink.len(), 2);

    let mut src: Vec<[u8; 3]> = Vec::with_capacity(17);
    src.resize( 8, [0; 3]);
    src.resize(8, [0; 3]);
    let iter = src.into_iter().map(|[a, b, _]| [a, b]);
    assert_in_place_trait(&iter);
    let sink: Vec<[u8; 2]> = iter.collect();
@ -1221,11 +1221,7 @@ fn test_in_place_specialization_step_up_down() {

    let src = vec![[0u8; 4]; 256];
    let srcptr = src.as_ptr();
    let iter = src
        .into_iter()
        .flat_map(|a| {
            a.into_iter().map(|b| b.wrapping_add(1))
        });
    let iter = src.into_iter().flat_map(|a| a.into_iter().map(|b| b.wrapping_add(1)));
    assert_in_place_trait(&iter);
    let sink = iter.collect::<Vec<_>>();
    assert_eq!(srcptr as *const u8, sink.as_ptr());

99  library/core/benches/num/int_pow/mod.rs  Normal file
@ -0,0 +1,99 @@
use rand::Rng;
use test::{black_box, Bencher};

const ITERATIONS: usize = 128; // Uses an ITERATIONS * 20 Byte stack allocation
type IntType = i128; // Hardest native type to multiply
const EXPONENT_MAX: u32 = 31;
const MAX_BASE: IntType = 17; // +-17 ** 31 <= IntType::MAX

macro_rules! pow_bench_template {
    ($name:ident, $inner_macro:ident, $base_macro:ident) => {
        #[bench]
        fn $name(bench: &mut Bencher) {
            // Frequent black_box calls can add latency and prevent optimizations, so for
            // variable parameters we premake an array and pass the
            // reference through black_box outside of the loop.
            let mut rng = crate::bench_rng();
            let base_array: [IntType; ITERATIONS] =
                core::array::from_fn(|_| rng.gen_range((-MAX_BASE..=MAX_BASE)));
            let exp_array: [u32; ITERATIONS] =
                core::array::from_fn(|_| rng.gen_range((0..=EXPONENT_MAX)));

            bench.iter(|| {
                #[allow(unused, unused_mut)]
                let mut base_iter = black_box(&base_array).into_iter();
                let mut exp_iter = black_box(&exp_array).into_iter();

                (0..ITERATIONS).fold((0 as IntType, false), |acc, _| {
                    // Sometimes constants don't propogate all the way to the
                    // inside of the loop, so we call a custom expression every cycle
                    // rather than iter::repeat(CONST)
                    let base: IntType = $base_macro!(base_iter);
                    let exp: u32 = *exp_iter.next().unwrap();

                    let r: (IntType, bool) = $inner_macro!(base, exp);
                    (acc.0 ^ r.0, acc.1 ^ r.1)
                })
            });
        }
    };
}

// This may panic if it overflows.
macro_rules! inner_pow {
    ($base:ident, $exp:ident) => {
        ($base.pow($exp), false)
    };
}

macro_rules! inner_wrapping {
    ($base:ident, $exp:ident) => {
        ($base.wrapping_pow($exp), false)
    };
}

macro_rules! inner_overflowing {
    ($base:ident, $exp:ident) => {
        $base.overflowing_pow($exp)
    };
}

// This will panic if it overflows.
macro_rules! inner_checked_unwrapped {
    ($base:ident, $exp:ident) => {
        ($base.checked_pow($exp).unwrap(), false)
    };
}

macro_rules! inner_saturating {
    ($base:ident, $exp:ident) => {
        ($base.saturating_pow($exp), false)
    };
}

macro_rules! make_const_base {
    ($name:ident, $x:literal) => {
        macro_rules! $name {
            ($iter:ident) => {
                $x
            };
        }
    };
}

make_const_base!(const_base_m7, -7);
make_const_base!(const_base_m8, -8);

macro_rules! variable_base {
    ($iter:ident) => {
        *$iter.next().unwrap()
    };
}

pow_bench_template!(pow_variable, inner_pow, variable_base);
pow_bench_template!(wrapping_pow_variable, inner_wrapping, variable_base);
pow_bench_template!(overflowing_pow_variable, inner_overflowing, variable_base);
pow_bench_template!(checked_pow_variable, inner_checked_unwrapped, variable_base);
pow_bench_template!(saturating_pow_variable, inner_saturating, variable_base);
pow_bench_template!(pow_m7, inner_pow, const_base_m7);
pow_bench_template!(pow_m8, inner_pow, const_base_m8);

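What the five benchmarked flavors actually do with the same overflowing input, in one self-contained check (using `i8` so the overflow is easy to see; 3^5 = 243 does not fit in -128..=127):

```rust
fn main() {
    assert_eq!(3i8.pow(2), 9); // would panic on overflow when overflow checks are on
    assert_eq!(3i8.wrapping_pow(5), -13i8); // 243 wraps modulo 256 to -13
    assert_eq!(3i8.overflowing_pow(5), (-13, true)); // wrapped value plus an overflow flag
    assert_eq!(3i8.checked_pow(5), None); // overflow becomes None
    assert_eq!(3i8.saturating_pow(5), i8::MAX); // clamps to 127
}
```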
@ -1,6 +1,7 @@
mod dec2flt;
mod flt2dec;
mod int_log;
mod int_pow;

use std::str::FromStr;
use test::{black_box, Bencher};

@ -1787,8 +1787,9 @@ extern "rust-intrinsic" {
    /// so this rounds half-way cases to the number with an even least significant digit.
    ///
    /// May raise an inexact floating-point exception if the argument is not an integer.
    /// However, Rust assumes floating-point exceptions cannot be observed, so this is not something that
    /// can actually be used from Rust code.
    /// However, Rust assumes floating-point exceptions cannot be observed, so these exceptions
    /// cannot actually be utilized from Rust code.
    /// In other words, this intrinsic is equivalent in behavior to `nearbyintf32` and `roundevenf32`.
    ///
    /// The stabilized version of this intrinsic is
    /// [`f32::round_ties_even`](../../std/primitive.f32.html#method.round_ties_even)
@ -1798,8 +1799,9 @@ extern "rust-intrinsic" {
    /// so this rounds half-way cases to the number with an even least significant digit.
    ///
    /// May raise an inexact floating-point exception if the argument is not an integer.
    /// However, Rust assumes floating-point exceptions cannot be observed, so this is not something that
    /// can actually be used from Rust code.
    /// However, Rust assumes floating-point exceptions cannot be observed, so these exceptions
    /// cannot actually be utilized from Rust code.
    /// In other words, this intrinsic is equivalent in behavior to `nearbyintf64` and `roundevenf64`.
    ///
    /// The stabilized version of this intrinsic is
    /// [`f64::round_ties_even`](../../std/primitive.f64.html#method.round_ties_even)

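Round-half-to-even in action, via the `round_ties_even` methods the docs point to:

```rust
fn main() {
    assert_eq!(2.5f32.round_ties_even(), 2.0); // tie: rounds to the even neighbor
    assert_eq!(3.5f32.round_ties_even(), 4.0); // tie: rounds to the even neighbor
    assert_eq!(2.6f64.round_ties_even(), 3.0); // not a tie: ordinary rounding
}
```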
@ -104,6 +104,18 @@ macro_rules! nonzero_integers {
            #[inline]
            #[rustc_const_stable(feature = "const_nonzero_get", since = "1.34.0")]
            pub const fn get(self) -> $Int {
                // FIXME: Remove this after LLVM supports `!range` metadata for function
                // arguments https://github.com/llvm/llvm-project/issues/76628
                //
                // Rustc can set range metadata only if it loads `self` from
                // memory somewhere. If the value of `self` was from by-value argument
                // of some not-inlined function, LLVM don't have range metadata
                // to understand that the value cannot be zero.

                // SAFETY: It is an invariant of this type.
                unsafe {
                    intrinsics::assume(self.0 != 0);
                }
                self.0
            }

@ -114,7 +126,9 @@ macro_rules! nonzero_integers {
            #[doc = concat!("Converts a `", stringify!($Ty), "` into an `", stringify!($Int), "`")]
            #[inline]
            fn from(nonzero: $Ty) -> Self {
                nonzero.0
                // Call nonzero to keep information range information
                // from get method.
                nonzero.get()
            }
        }

@ -233,7 +247,7 @@ macro_rules! nonzero_leading_trailing_zeros {
            #[inline]
            pub const fn leading_zeros(self) -> u32 {
                // SAFETY: since `self` cannot be zero, it is safe to call `ctlz_nonzero`.
                unsafe { intrinsics::ctlz_nonzero(self.0 as $Uint) as u32 }
                unsafe { intrinsics::ctlz_nonzero(self.get() as $Uint) as u32 }
            }

            /// Returns the number of trailing zeros in the binary representation
@ -257,7 +271,7 @@ macro_rules! nonzero_leading_trailing_zeros {
            #[inline]
            pub const fn trailing_zeros(self) -> u32 {
                // SAFETY: since `self` cannot be zero, it is safe to call `cttz_nonzero`.
                unsafe { intrinsics::cttz_nonzero(self.0 as $Uint) as u32 }
                unsafe { intrinsics::cttz_nonzero(self.get() as $Uint) as u32 }
            }

        }
@ -515,7 +529,7 @@ macro_rules! nonzero_unsigned_operations {
                without modifying the original"]
            #[inline]
            pub const fn ilog10(self) -> u32 {
                super::int_log10::$Int(self.0)
                super::int_log10::$Int(self.get())
            }

            /// Calculates the middle point of `self` and `rhs`.

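The user-visible contract the `assume` encodes: a `NonZero*` value is never zero, which is what makes the `_nonzero` bit-counting intrinsics sound. Observable behavior from stable code:

```rust
use std::num::NonZeroU32;

fn main() {
    let n = NonZeroU32::new(8).unwrap();
    assert_eq!(n.get(), 8);
    // 8 = 0b1000 in a 32-bit word: 28 leading and 3 trailing zeros.
    assert_eq!(n.leading_zeros(), 28);
    assert_eq!(n.trailing_zeros(), 3);
    assert_eq!(NonZeroU32::new(0), None); // zero is unrepresentable
}
```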
@ -1,6 +1,6 @@
use core::{array, assert_eq};
use core::num::NonZeroUsize;
use core::sync::atomic::{AtomicUsize, Ordering};
use core::{array, assert_eq};

#[test]
fn array_from_ref() {

@ -466,14 +466,14 @@ fn const_cells() {
    const CELL: Cell<i32> = Cell::new(3);
    const _: i32 = CELL.into_inner();

/* FIXME(#110395)
    const UNSAFE_CELL_FROM: UnsafeCell<i32> = UnsafeCell::from(3);
    const _: i32 = UNSAFE_CELL.into_inner();
    /* FIXME(#110395)
    const UNSAFE_CELL_FROM: UnsafeCell<i32> = UnsafeCell::from(3);
    const _: i32 = UNSAFE_CELL.into_inner();

    const REF_CELL_FROM: RefCell<i32> = RefCell::from(3);
    const _: i32 = REF_CELL.into_inner();
    const REF_CELL_FROM: RefCell<i32> = RefCell::from(3);
    const _: i32 = REF_CELL.into_inner();

    const CELL_FROM: Cell<i32> = Cell::from(3);
    const _: i32 = CELL.into_inner();
*/
    const CELL_FROM: Cell<i32> = Cell::from(3);
    const _: i32 = CELL.into_inner();
    */
}

@ -1,4 +1,4 @@
use core::error::{request_value, request_ref, Request};
use core::error::{request_ref, request_value, Request};

// Test the `Request` API.
#[derive(Debug)]

@ -22,11 +22,11 @@ fn test_pointer_formats_data_pointer() {
#[test]
fn test_estimated_capacity() {
    assert_eq!(format_args!("").estimated_capacity(), 0);
    assert_eq!(format_args!("{}", {""}).estimated_capacity(), 0);
    assert_eq!(format_args!("{}", { "" }).estimated_capacity(), 0);
    assert_eq!(format_args!("Hello").estimated_capacity(), 5);
    assert_eq!(format_args!("Hello, {}!", {""}).estimated_capacity(), 16);
    assert_eq!(format_args!("{}, hello!", {"World"}).estimated_capacity(), 0);
    assert_eq!(format_args!("{}. 16-bytes piece", {"World"}).estimated_capacity(), 32);
    assert_eq!(format_args!("Hello, {}!", { "" }).estimated_capacity(), 16);
    assert_eq!(format_args!("{}, hello!", { "World" }).estimated_capacity(), 0);
    assert_eq!(format_args!("{}. 16-bytes piece", { "World" }).estimated_capacity(), 32);
}

#[test]

@@ -35,7 +35,8 @@ impl Hasher for MyHasher {
#[test]
fn test_writer_hasher() {
    // FIXME(#110395)
    /* const */ fn hash<T: Hash>(t: &T) -> u64 {
    /* const */
    fn hash<T: Hash>(t: &T) -> u64 {
        let mut s = MyHasher { hash: 0 };
        t.hash(&mut s);
        s.finish()
@@ -140,7 +141,8 @@ impl Hash for Custom {
#[test]
fn test_custom_state() {
    // FIXME(#110395)
    /* const */ fn hash<T: Hash>(t: &T) -> u64 {
    /* const */
    fn hash<T: Hash>(t: &T) -> u64 {
        let mut c = CustomHasher { output: 0 };
        t.hash(&mut c);
        c.finish()
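The `hash` helpers above (which the FIXME would like to make `const` once #110395 is resolved) follow the usual pattern of feeding a value into a `Hasher` and reading back `finish()`. A self-contained sketch of that pattern, with a hypothetical `SumHasher` standing in for the test's `MyHasher`/`CustomHasher`:

use std::hash::{Hash, Hasher};

// Toy hasher for illustration only: sums the bytes it is fed.
struct SumHasher(u64);

impl Hasher for SumHasher {
    fn write(&mut self, bytes: &[u8]) {
        for &b in bytes {
            self.0 = self.0.wrapping_add(b as u64);
        }
    }
    fn finish(&self) -> u64 {
        self.0
    }
}

fn hash<T: Hash>(t: &T) -> u64 {
    let mut s = SumHasher(0);
    t.hash(&mut s);
    s.finish()
}

fn main() {
    assert_eq!(hash(&5u8), hash(&5u8));
}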
@@ -42,7 +42,10 @@ fn test_iterator_chain_advance_by() {
        let mut iter = Unfuse::new(xs).chain(Unfuse::new(ys));
        assert_eq!(iter.advance_by(xs.len() + i), Ok(()));
        assert_eq!(iter.next(), Some(&ys[i]));
        assert_eq!(iter.advance_by(100), Err(NonZeroUsize::new(100 - (ys.len() - i - 1)).unwrap()));
        assert_eq!(
            iter.advance_by(100),
            Err(NonZeroUsize::new(100 - (ys.len() - i - 1)).unwrap())
        );
        assert_eq!(iter.advance_by(0), Ok(()));
    }
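These tests rely on `advance_by` reporting, on failure, how many of the requested steps could not be taken, as a `NonZeroUsize`. At the time of this commit the method is still unstable behind the `iter_advance_by` feature, so a sketch of the semantics needs nightly:

#![feature(iter_advance_by)]
use std::num::NonZeroUsize;

fn main() {
    let mut it = [1, 2, 3].iter();
    assert_eq!(it.advance_by(2), Ok(()));   // skips 1 and 2
    assert_eq!(it.next(), Some(&3));
    // The iterator is now empty, so none of the 5 requested steps succeed.
    assert_eq!(it.advance_by(5), Err(NonZeroUsize::new(5).unwrap()));
}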
@@ -71,7 +74,10 @@ fn test_iterator_chain_advance_back_by() {
        let mut iter = Unfuse::new(xs).chain(Unfuse::new(ys));
        assert_eq!(iter.advance_back_by(i), Ok(()));
        assert_eq!(iter.next_back(), Some(&ys[ys.len() - i - 1]));
        assert_eq!(iter.advance_back_by(100), Err(NonZeroUsize::new(100 - (len - i - 1)).unwrap()));
        assert_eq!(
            iter.advance_back_by(100),
            Err(NonZeroUsize::new(100 - (len - i - 1)).unwrap())
        );
        assert_eq!(iter.advance_back_by(0), Ok(()));
    }
@@ -79,7 +85,10 @@ fn test_iterator_chain_advance_back_by() {
        let mut iter = Unfuse::new(xs).chain(Unfuse::new(ys));
        assert_eq!(iter.advance_back_by(ys.len() + i), Ok(()));
        assert_eq!(iter.next_back(), Some(&xs[xs.len() - i - 1]));
        assert_eq!(iter.advance_back_by(100), Err(NonZeroUsize::new(100 - (xs.len() - i - 1)).unwrap()));
        assert_eq!(
            iter.advance_back_by(100),
            Err(NonZeroUsize::new(100 - (xs.len() - i - 1)).unwrap())
        );
        assert_eq!(iter.advance_back_by(0), Ok(()));
    }
@@ -1,5 +1,5 @@
use core::assert_eq;
use super::*;
use core::assert_eq;
use core::iter::*;
use core::num::NonZeroUsize;
@@ -245,7 +245,6 @@ fn test_step_by_skip() {
    assert_eq!((200..=255u8).step_by(10).nth(3), Some(230));
}


struct DeOpt<I: Iterator>(I);

impl<I: Iterator> Iterator for DeOpt<I> {
@@ -265,17 +264,15 @@ impl<I: DoubleEndedIterator> DoubleEndedIterator for DeOpt<I> {
#[test]
fn test_step_by_fold_range_specialization() {
    macro_rules! t {
        ($range:expr, $var: ident, $body:tt) => {
            {
                // run the same tests for the non-optimized version
                let mut $var = DeOpt($range);
                $body
            }
            {
                let mut $var = $range;
                $body
            }
        ($range:expr, $var: ident, $body:tt) => {{
            // run the same tests for the non-optimized version
            let mut $var = DeOpt($range);
            $body
        }
        {
            let mut $var = $range;
            $body
        }};
    }

    t!((1usize..5).step_by(1), r, {
@@ -288,13 +285,12 @@ fn test_step_by_fold_range_specialization() {
        assert_eq!(r.sum::<usize>(), 2);
    });


    t!((0usize..5).step_by(2), r, {
        assert_eq!(r.next(), Some(0));
        assert_eq!(r.sum::<usize>(), 6);
    });

    t!((usize::MAX - 6 .. usize::MAX).step_by(5), r, {
    t!((usize::MAX - 6..usize::MAX).step_by(5), r, {
        assert_eq!(r.next(), Some(usize::MAX - 6));
        assert_eq!(r.sum::<usize>(), usize::MAX - 1);
    });
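The `t!` macro runs each body twice, once through `DeOpt` (which hides the range type and so bypasses the specialized `fold`) and once on the range directly, so both code paths must agree. A plain-Rust illustration of the values being checked:

fn main() {
    // (0..5).step_by(2) yields 0, 2, 4; next() takes the 0,
    // and the remaining sum is 2 + 4 = 6.
    let mut r = (0usize..5).step_by(2);
    assert_eq!(r.next(), Some(0));
    assert_eq!(r.sum::<usize>(), 6);
}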
@@ -93,7 +93,10 @@ fn test_take_advance_by() {
    assert_eq!((0..2).take(1).advance_back_by(10), Err(NonZeroUsize::new(9).unwrap()));
    assert_eq!((0..0).take(1).advance_back_by(1), Err(NonZeroUsize::new(1).unwrap()));
    assert_eq!((0..0).take(1).advance_back_by(0), Ok(()));
    assert_eq!((0..usize::MAX).take(100).advance_back_by(usize::MAX), Err(NonZeroUsize::new(usize::MAX - 100).unwrap()));
    assert_eq!(
        (0..usize::MAX).take(100).advance_back_by(usize::MAX),
        Err(NonZeroUsize::new(usize::MAX - 100).unwrap())
    );
}

#[test]
@@ -184,7 +184,7 @@ fn test_zip_nested_sideffectful() {
        let it = xs.iter_mut().map(|x| *x = 1).enumerate().zip(&ys);
        it.count();
    }
    let length_aware = &xs == &[1, 1, 1, 1, 0, 0];
    let length_aware = &xs == &[1, 1, 1, 1, 0, 0];
    let probe_first = &xs == &[1, 1, 1, 1, 1, 0];

    // either implementation is valid according to zip documentation
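The test accepts two outcomes because `zip` is documented to be free either to stop exactly at the shorter length or to probe one extra element of the longer side. A small sketch of the same observation (assuming the side effect runs either 4 or 5 times for a 6/4 pairing):

fn main() {
    let mut calls = 0;
    let xs = [0u8; 6];
    let ys = [0u8; 4];
    let pairs = xs.iter().map(|_| calls += 1).zip(ys.iter()).count();
    assert_eq!(pairs, 4);
    // Whether the longer side was probed once past the end is
    // implementation-defined, so both 4 and 5 are acceptable.
    assert!(calls == 4 || calls == 5);
}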
@@ -168,7 +168,10 @@ fn test_iterator_advance_back_by() {
        let mut iter = v.iter();
        assert_eq!(iter.advance_back_by(i), Ok(()));
        assert_eq!(iter.next_back().unwrap(), &v[v.len() - 1 - i]);
        assert_eq!(iter.advance_back_by(100), Err(NonZeroUsize::new(100 - (v.len() - 1 - i)).unwrap()));
        assert_eq!(
            iter.advance_back_by(100),
            Err(NonZeroUsize::new(100 - (v.len() - 1 - i)).unwrap())
        );
    }

    assert_eq!(v.iter().advance_back_by(v.len()), Ok(()));
@@ -183,7 +186,10 @@ fn test_iterator_rev_advance_back_by() {
        let mut iter = v.iter().rev();
        assert_eq!(iter.advance_back_by(i), Ok(()));
        assert_eq!(iter.next_back().unwrap(), &v[i]);
        assert_eq!(iter.advance_back_by(100), Err(NonZeroUsize::new(100 - (v.len() - 1 - i)).unwrap()));
        assert_eq!(
            iter.advance_back_by(100),
            Err(NonZeroUsize::new(100 - (v.len() - 1 - i)).unwrap())
        );
    }

    assert_eq!(v.iter().rev().advance_back_by(v.len()), Ok(()));
@@ -664,7 +664,11 @@ fn ipv6_properties() {
        &[0x20, 1, 0, 0x20, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
        global | unicast_global
    );
    check!("2001:30::", &[0x20, 1, 0, 0x30, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], global | unicast_global);
    check!(
        "2001:30::",
        &[0x20, 1, 0, 0x30, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
        global | unicast_global
    );
    check!("2001:40::", &[0x20, 1, 0, 0x40, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], unicast_global);

    check!(
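The test's `check!` macro compares an address parsed from a string against its raw octets and a set of expected properties (`is_global`, `is_unicast_global`, and friends, which are still unstable behind the `ip` feature at the time of this commit). The stable part of the pattern looks like this:

use std::net::Ipv6Addr;

fn main() {
    let addr: Ipv6Addr = "2001:30::".parse().unwrap();
    // The first two 16-bit segments correspond to the leading octets
    // 0x20, 0x01, 0x00, 0x30 used in the test above.
    assert_eq!(addr.segments()[0], 0x2001);
    assert_eq!(addr.segments()[1], 0x0030);
    assert!(!addr.is_loopback());
}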
Some files were not shown because too many files have changed in this diff.