Merge pull request #21541 from rust-lang/rustc-pull

Rustc pull update
This commit is contained in:
Laurențiu Nicola 2026-01-29 16:20:26 +00:00 committed by GitHub
commit a36549333a
No known key found for this signature in database
GPG key ID: B5690EEEBB952194
433 changed files with 18908 additions and 13963 deletions

2
.gitmodules vendored
View file

@ -25,7 +25,7 @@
[submodule "src/llvm-project"]
path = src/llvm-project
url = https://github.com/rust-lang/llvm-project.git
branch = rustc/21.1-2025-08-01
branch = rustc/22.1-2026-01-27
shallow = true
[submodule "src/doc/embedded-book"]
path = src/doc/embedded-book

View file

@ -184,9 +184,9 @@ checksum = "7c02d123df017efcdfbd739ef81735b36c5ba83ec3c59c80a9d7ecc718f92e50"
[[package]]
name = "askama"
version = "0.15.1"
version = "0.15.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "bb7125972258312e79827b60c9eb93938334100245081cf701a2dee981b17427"
checksum = "03341eae1125472b0672fbf35cc9aa7b74cd8e0c3d02f02c28a04678f12aaa7a"
dependencies = [
"askama_macros",
"itoa",
@ -197,9 +197,9 @@ dependencies = [
[[package]]
name = "askama_derive"
version = "0.15.1"
version = "0.15.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8ba5e7259a1580c61571e3116ebaaa01e3c001b2132b17c4cc5c70780ca3e994"
checksum = "461bd78f3da90b5e44eee4272cfb1c4832aa3dcdb6c370aedd3eb253d2b9e3ca"
dependencies = [
"askama_parser",
"basic-toml",
@ -214,18 +214,18 @@ dependencies = [
[[package]]
name = "askama_macros"
version = "0.15.1"
version = "0.15.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "236ce20b77cb13506eaf5024899f4af6e12e8825f390bd943c4c37fd8f322e46"
checksum = "ba49fb22ee3074574b8510abd9495d4f0bb9b8f87e8e45ee31e2cee508f7a8e5"
dependencies = [
"askama_derive",
]
[[package]]
name = "askama_parser"
version = "0.15.1"
version = "0.15.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f3c63392767bb2df6aa65a6e1e3b80fd89bb7af6d58359b924c0695620f1512e"
checksum = "7e33eb7484958aaa1f27e9adb556f5d557331cd891bdbb33781bc1f9550b6f6e"
dependencies = [
"rustc-hash 2.1.1",
"serde",
@ -4170,6 +4170,7 @@ version = "0.0.0"
dependencies = [
"bitflags",
"rustc_abi",
"rustc_apfloat",
"rustc_ast",
"rustc_ast_pretty",
"rustc_attr_parsing",
@ -6419,13 +6420,13 @@ dependencies = [
[[package]]
name = "windows-bindgen"
version = "0.61.1"
version = "0.66.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9b4e97b01190d32f268a2dfbd3f006f77840633746707fbe40bcee588108a231"
checksum = "81b7ec123a4eadd44d1f44f76804316b477b2537abed9a2ab950b3c54afa1fcf"
dependencies = [
"serde",
"serde_json",
"windows-threading 0.1.0",
"windows-threading 0.2.1",
]
[[package]]

View file

@ -53,7 +53,7 @@ path = [
]
precedence = "override"
SPDX-FileCopyrightText = "The Rust Project Developers (see https://thanks.rust-lang.org)"
SPDX-License-Identifier = "MIT or Apache-2.0"
SPDX-License-Identifier = "MIT OR Apache-2.0"
[[annotations]]
path = "compiler/rustc_llvm/llvm-wrapper/SymbolWrapper.cpp"

View file

@ -51,6 +51,8 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> {
| asm::InlineAsmArch::LoongArch32
| asm::InlineAsmArch::LoongArch64
| asm::InlineAsmArch::S390x
| asm::InlineAsmArch::PowerPC
| asm::InlineAsmArch::PowerPC64
);
if !is_stable
&& !self.tcx.features().asm_experimental_arch()

View file

@ -62,57 +62,23 @@ impl scc::Annotations<RegionVid> for SccAnnotations<'_, '_, RegionTracker> {
}
#[derive(Copy, Debug, Clone, PartialEq, Eq)]
enum PlaceholderReachability {
/// This SCC reaches no placeholders.
NoPlaceholders,
/// This SCC reaches at least one placeholder.
Placeholders {
/// The largest-universed placeholder we can reach
max_universe: (UniverseIndex, RegionVid),
struct PlaceholderReachability {
/// The largest-universed placeholder we can reach
max_universe: (UniverseIndex, RegionVid),
/// The placeholder with the smallest ID
min_placeholder: RegionVid,
/// The placeholder with the smallest ID
min_placeholder: RegionVid,
/// The placeholder with the largest ID
max_placeholder: RegionVid,
},
/// The placeholder with the largest ID
max_placeholder: RegionVid,
}
impl PlaceholderReachability {
/// Merge the reachable placeholders of two graph components.
fn merge(self, other: PlaceholderReachability) -> PlaceholderReachability {
use PlaceholderReachability::*;
match (self, other) {
(NoPlaceholders, NoPlaceholders) => NoPlaceholders,
(NoPlaceholders, p @ Placeholders { .. })
| (p @ Placeholders { .. }, NoPlaceholders) => p,
(
Placeholders {
min_placeholder: min_pl,
max_placeholder: max_pl,
max_universe: max_u,
},
Placeholders { min_placeholder, max_placeholder, max_universe },
) => Placeholders {
min_placeholder: min_pl.min(min_placeholder),
max_placeholder: max_pl.max(max_placeholder),
max_universe: max_u.max(max_universe),
},
}
}
fn max_universe(&self) -> Option<(UniverseIndex, RegionVid)> {
match self {
Self::NoPlaceholders => None,
Self::Placeholders { max_universe, .. } => Some(*max_universe),
}
}
/// If we have reached placeholders, determine if they can
/// be named from this universe.
fn can_be_named_by(&self, from: UniverseIndex) -> bool {
self.max_universe()
.is_none_or(|(max_placeholder_universe, _)| from.can_name(max_placeholder_universe))
fn merge(&mut self, other: &Self) {
self.max_universe = self.max_universe.max(other.max_universe);
self.min_placeholder = self.min_placeholder.min(other.min_placeholder);
self.max_placeholder = self.max_placeholder.max(other.max_placeholder);
}
}
@ -120,7 +86,7 @@ impl PlaceholderReachability {
/// the values of its elements. This annotates a single SCC.
#[derive(Copy, Debug, Clone)]
pub(crate) struct RegionTracker {
reachable_placeholders: PlaceholderReachability,
reachable_placeholders: Option<PlaceholderReachability>,
/// The largest universe nameable from this SCC.
/// It is the smallest nameable universes of all
@ -135,13 +101,13 @@ impl RegionTracker {
pub(crate) fn new(rvid: RegionVid, definition: &RegionDefinition<'_>) -> Self {
let reachable_placeholders =
if matches!(definition.origin, NllRegionVariableOrigin::Placeholder(_)) {
PlaceholderReachability::Placeholders {
Some(PlaceholderReachability {
max_universe: (definition.universe, rvid),
min_placeholder: rvid,
max_placeholder: rvid,
}
})
} else {
PlaceholderReachability::NoPlaceholders
None
};
Self {
@ -159,43 +125,46 @@ impl RegionTracker {
}
pub(crate) fn max_placeholder_universe_reached(self) -> UniverseIndex {
if let Some((universe, _)) = self.reachable_placeholders.max_universe() {
universe
} else {
UniverseIndex::ROOT
}
self.reachable_placeholders.map(|pls| pls.max_universe.0).unwrap_or(UniverseIndex::ROOT)
}
/// Can all reachable placeholders be named from `from`?
/// True vacuously in case no placeholders were reached.
fn placeholders_can_be_named_by(&self, from: UniverseIndex) -> bool {
self.reachable_placeholders.is_none_or(|pls| from.can_name(pls.max_universe.0))
}
/// Determine if we can name all the placeholders in `other`.
pub(crate) fn can_name_all_placeholders(&self, other: Self) -> bool {
other.reachable_placeholders.can_be_named_by(self.max_nameable_universe.0)
// HACK: We first check whether we can name the highest existential universe
// of `other`. This only exists to avoid errors in case that scc already
// depends on a placeholder it cannot name itself.
self.max_nameable_universe().can_name(other.max_nameable_universe())
|| other.placeholders_can_be_named_by(self.max_nameable_universe.0)
}
/// If this SCC reaches a placeholder it can't name, return it.
fn unnameable_placeholder(&self) -> Option<(UniverseIndex, RegionVid)> {
self.reachable_placeholders.max_universe().filter(|&(placeholder_universe, _)| {
!self.max_nameable_universe().can_name(placeholder_universe)
})
self.reachable_placeholders
.filter(|pls| !self.max_nameable_universe().can_name(pls.max_universe.0))
.map(|pls| pls.max_universe)
}
}
impl scc::Annotation for RegionTracker {
fn merge_scc(self, other: Self) -> Self {
fn update_scc(&mut self, other: &Self) {
trace!("{:?} << {:?}", self.representative, other.representative);
Self {
representative: self.representative.min(other.representative),
max_nameable_universe: self.max_nameable_universe.min(other.max_nameable_universe),
reachable_placeholders: self.reachable_placeholders.merge(other.reachable_placeholders),
}
self.representative = self.representative.min(other.representative);
self.update_reachable(other);
}
fn merge_reached(self, other: Self) -> Self {
Self {
max_nameable_universe: self.max_nameable_universe.min(other.max_nameable_universe),
reachable_placeholders: self.reachable_placeholders.merge(other.reachable_placeholders),
representative: self.representative,
}
fn update_reachable(&mut self, other: &Self) {
self.max_nameable_universe = self.max_nameable_universe.min(other.max_nameable_universe);
match (self.reachable_placeholders.as_mut(), other.reachable_placeholders.as_ref()) {
(None, None) | (Some(_), None) => (),
(None, Some(theirs)) => self.reachable_placeholders = Some(*theirs),
(Some(ours), Some(theirs)) => ours.merge(theirs),
};
}
}

View file

@ -275,7 +275,15 @@ fn load_binary_file(
}
};
match cx.source_map().load_binary_file(&resolved_path) {
Ok(data) => Ok(data),
Ok(data) => {
cx.sess
.psess
.file_depinfo
.borrow_mut()
.insert(Symbol::intern(&resolved_path.to_string_lossy()));
Ok(data)
}
Err(io_err) => {
let mut err = cx.dcx().struct_span_err(
macro_span,

View file

@ -1,6 +1,3 @@
// SPDX-License-Identifier: MIT OR Apache-2.0
// SPDX-FileCopyrightText: The Rust Project Developers (see https://thanks.rust-lang.org)
#![no_std]
#![feature(allocator_api, rustc_private)]

View file

@ -188,19 +188,6 @@ impl<'a, 'll, CX: Borrow<SCx<'ll>>> GenericBuilder<'a, 'll, CX> {
load
}
}
fn memset(&mut self, ptr: &'ll Value, fill_byte: &'ll Value, size: &'ll Value, align: Align) {
unsafe {
llvm::LLVMRustBuildMemSet(
self.llbuilder,
ptr,
align.bytes() as c_uint,
fill_byte,
size,
false,
);
}
}
}
/// Empty string, to be used where LLVM expects an instruction name, indicating

View file

@ -19,8 +19,6 @@ pub(crate) struct OffloadGlobals<'ll> {
pub launcher_fn: &'ll llvm::Value,
pub launcher_ty: &'ll llvm::Type,
pub bin_desc: &'ll llvm::Type,
pub kernel_args_ty: &'ll llvm::Type,
pub offload_entry_ty: &'ll llvm::Type,
@ -31,8 +29,8 @@ pub(crate) struct OffloadGlobals<'ll> {
pub ident_t_global: &'ll llvm::Value,
pub register_lib: &'ll llvm::Value,
pub unregister_lib: &'ll llvm::Value,
// FIXME(offload): Drop this, once we fully automated our offload compilation pipeline, since
// LLVM will initialize them for us if it sees gpu kernels being registered.
pub init_rtls: &'ll llvm::Value,
}
@ -44,15 +42,6 @@ impl<'ll> OffloadGlobals<'ll> {
let (begin_mapper, _, end_mapper, mapper_fn_ty) = gen_tgt_data_mappers(cx);
let ident_t_global = generate_at_one(cx);
let tptr = cx.type_ptr();
let ti32 = cx.type_i32();
let tgt_bin_desc_ty = vec![ti32, tptr, tptr, tptr];
let bin_desc = cx.type_named_struct("struct.__tgt_bin_desc");
cx.set_struct_body(bin_desc, &tgt_bin_desc_ty, false);
let reg_lib_decl = cx.type_func(&[cx.type_ptr()], cx.type_void());
let register_lib = declare_offload_fn(&cx, "__tgt_register_lib", reg_lib_decl);
let unregister_lib = declare_offload_fn(&cx, "__tgt_unregister_lib", reg_lib_decl);
let init_ty = cx.type_func(&[], cx.type_void());
let init_rtls = declare_offload_fn(cx, "__tgt_init_all_rtls", init_ty);
@ -63,20 +52,84 @@ impl<'ll> OffloadGlobals<'ll> {
OffloadGlobals {
launcher_fn,
launcher_ty,
bin_desc,
kernel_args_ty,
offload_entry_ty,
begin_mapper,
end_mapper,
mapper_fn_ty,
ident_t_global,
register_lib,
unregister_lib,
init_rtls,
}
}
}
// We need to register offload before using it. We also should unregister it once we are done, for
// good measures. Previously we have done so before and after each individual offload intrinsic
// call, but that comes at a performance cost. The repeated (un)register calls might also confuse
// the LLVM ompOpt pass, which tries to move operations to a better location. The easiest solution,
// which we copy from clang, is to just have those two calls once, in the global ctor/dtor section
// of the final binary.
//
// Concretely this emits, once per module:
//   * a `.omp_offloading.descriptor` global,
//   * `.omp_offloading.descriptor_reg` / `_unreg` functions in `.text.startup`,
//   * an `llvm.global_ctors` entry calling the register function, which in turn
//     schedules the unregister function via `atexit`.
pub(crate) fn register_offload<'ll>(cx: &CodegenCx<'ll, '_>) {
// First we check quickly whether we already have done our setup, in which case we return early.
// Shouldn't be needed for correctness.
let register_lib_name = "__tgt_register_lib";
if cx.get_function(register_lib_name).is_some() {
return;
}
let reg_lib_decl = cx.type_func(&[cx.type_ptr()], cx.type_void());
let register_lib = declare_offload_fn(&cx, register_lib_name, reg_lib_decl);
let unregister_lib = declare_offload_fn(&cx, "__tgt_unregister_lib", reg_lib_decl);
let ptr_null = cx.const_null(cx.type_ptr());
// The descriptor struct: { i32, ptr, ptr, ptr }, currently all zero/null.
let const_struct = cx.const_struct(&[cx.get_const_i32(0), ptr_null, ptr_null, ptr_null], false);
let omp_descriptor =
add_global(cx, ".omp_offloading.descriptor", const_struct, InternalLinkage);
// @.omp_offloading.descriptor = internal constant %__tgt_bin_desc { i32 1, ptr @.omp_offloading.device_images, ptr @__start_llvm_offload_entries, ptr @__stop_llvm_offload_entries }
// @.omp_offloading.descriptor = internal constant %__tgt_bin_desc { i32 0, ptr null, ptr null, ptr null }
// NOTE(review): the first IR line above shows the fully-populated descriptor clang emits;
// the code currently emits the second, all-null form — confirm that is intended.
// `atexit` is declared so the register ctor can schedule the unregister call at exit.
let atexit = cx.type_func(&[cx.type_ptr()], cx.type_i32());
let atexit_fn = declare_offload_fn(cx, "atexit", atexit);
let desc_ty = cx.type_func(&[], cx.type_void());
let reg_name = ".omp_offloading.descriptor_reg";
let unreg_name = ".omp_offloading.descriptor_unreg";
let desc_reg_fn = declare_offload_fn(cx, reg_name, desc_ty);
let desc_unreg_fn = declare_offload_fn(cx, unreg_name, desc_ty);
llvm::set_linkage(desc_reg_fn, InternalLinkage);
llvm::set_linkage(desc_unreg_fn, InternalLinkage);
// Both helpers live in `.text.startup`, mirroring clang's output.
llvm::set_section(desc_reg_fn, c".text.startup");
llvm::set_section(desc_unreg_fn, c".text.startup");
// define internal void @.omp_offloading.descriptor_reg() section ".text.startup" {
// entry:
//   call void @__tgt_register_lib(ptr @.omp_offloading.descriptor)
//   %0 = call i32 @atexit(ptr @.omp_offloading.descriptor_unreg)
//   ret void
// }
let bb = Builder::append_block(cx, desc_reg_fn, "entry");
let mut a = Builder::build(cx, bb);
a.call(reg_lib_decl, None, None, register_lib, &[omp_descriptor], None, None);
a.call(atexit, None, None, atexit_fn, &[desc_unreg_fn], None, None);
a.ret_void();
// define internal void @.omp_offloading.descriptor_unreg() section ".text.startup" {
// entry:
//   call void @__tgt_unregister_lib(ptr @.omp_offloading.descriptor)
//   ret void
// }
let bb = Builder::append_block(cx, desc_unreg_fn, "entry");
let mut a = Builder::build(cx, bb);
a.call(reg_lib_decl, None, None, unregister_lib, &[omp_descriptor], None, None);
a.ret_void();
// @llvm.global_ctors = appending global [1 x { i32, ptr, ptr }] [{ i32, ptr, ptr } { i32 101, ptr @.omp_offloading.descriptor_reg, ptr null }]
// 101 is the ctor priority used in the IR snippet above.
let args = vec![cx.get_const_i32(101), desc_reg_fn, ptr_null];
let const_struct = cx.const_struct(&args, false);
let arr = cx.const_array(cx.val_ty(const_struct), &[const_struct]);
add_global(cx, "llvm.global_ctors", arr, AppendingLinkage);
}
pub(crate) struct OffloadKernelDims<'ll> {
num_workgroups: &'ll Value,
threads_per_block: &'ll Value,
@ -487,9 +540,6 @@ pub(crate) fn gen_call_handling<'ll, 'tcx>(
let tgt_decl = offload_globals.launcher_fn;
let tgt_target_kernel_ty = offload_globals.launcher_ty;
// %struct.__tgt_bin_desc = type { i32, ptr, ptr, ptr }
let tgt_bin_desc = offload_globals.bin_desc;
let tgt_kernel_decl = offload_globals.kernel_args_ty;
let begin_mapper_decl = offload_globals.begin_mapper;
let end_mapper_decl = offload_globals.end_mapper;
@ -513,12 +563,9 @@ pub(crate) fn gen_call_handling<'ll, 'tcx>(
}
// Step 0)
// %struct.__tgt_bin_desc = type { i32, ptr, ptr, ptr }
// %6 = alloca %struct.__tgt_bin_desc, align 8
unsafe {
llvm::LLVMRustPositionBuilderPastAllocas(&builder.llbuilder, builder.llfn());
}
let tgt_bin_desc_alloca = builder.direct_alloca(tgt_bin_desc, Align::EIGHT, "EmptyDesc");
let ty = cx.type_array(cx.type_ptr(), num_args);
// Baseptr are just the input pointer to the kernel, stored in a local alloca
@ -536,7 +583,6 @@ pub(crate) fn gen_call_handling<'ll, 'tcx>(
unsafe {
llvm::LLVMPositionBuilderAtEnd(&builder.llbuilder, bb);
}
builder.memset(tgt_bin_desc_alloca, cx.get_const_i8(0), cx.get_const_i64(32), Align::EIGHT);
// Now we allocate once per function param, a copy to be passed to one of our maps.
let mut vals = vec![];
@ -574,15 +620,9 @@ pub(crate) fn gen_call_handling<'ll, 'tcx>(
geps.push(gep);
}
let mapper_fn_ty = cx.type_func(&[cx.type_ptr()], cx.type_void());
let register_lib_decl = offload_globals.register_lib;
let unregister_lib_decl = offload_globals.unregister_lib;
let init_ty = cx.type_func(&[], cx.type_void());
let init_rtls_decl = offload_globals.init_rtls;
// FIXME(offload): Later we want to add them to the wrapper code, rather than our main function.
// call void @__tgt_register_lib(ptr noundef %6)
builder.call(mapper_fn_ty, None, None, register_lib_decl, &[tgt_bin_desc_alloca], None, None);
// call void @__tgt_init_all_rtls()
builder.call(init_ty, None, None, init_rtls_decl, &[], None, None);
@ -679,6 +719,4 @@ pub(crate) fn gen_call_handling<'ll, 'tcx>(
num_args,
s_ident_t,
);
builder.call(mapper_fn_ty, None, None, unregister_lib_decl, &[tgt_bin_desc_alloca], None, None);
}

View file

@ -124,6 +124,10 @@ impl<'ll, CX: Borrow<SCx<'ll>>> GenericCx<'ll, CX> {
pub(crate) fn const_null(&self, t: &'ll Type) -> &'ll Value {
unsafe { llvm::LLVMConstNull(t) }
}
/// Builds a constant (anonymous) LLVM struct value from `elts`,
/// forwarding `packed` as the struct's packed-layout flag.
/// Thin wrapper around `struct_in_context` using this context's `llcx`.
pub(crate) fn const_struct(&self, elts: &[&'ll Value], packed: bool) -> &'ll Value {
struct_in_context(self.llcx(), elts, packed)
}
}
impl<'ll, 'tcx> ConstCodegenMethods for CodegenCx<'ll, 'tcx> {

View file

@ -30,7 +30,9 @@ use tracing::debug;
use crate::abi::FnAbiLlvmExt;
use crate::builder::Builder;
use crate::builder::autodiff::{adjust_activity_to_abi, generate_enzyme_call};
use crate::builder::gpu_offload::{OffloadKernelDims, gen_call_handling, gen_define_handling};
use crate::builder::gpu_offload::{
OffloadKernelDims, gen_call_handling, gen_define_handling, register_offload,
};
use crate::context::CodegenCx;
use crate::declare::declare_raw_fn;
use crate::errors::{
@ -1402,6 +1404,7 @@ fn codegen_offload<'ll, 'tcx>(
return;
}
};
register_offload(cx);
let offload_data = gen_define_handling(&cx, &metadata, target_symbol, offload_globals);
gen_call_handling(bx, &offload_data, &args, &types, &metadata, offload_globals, &offload_dims);
}

View file

@ -1208,10 +1208,23 @@ impl<'a> Linker for EmLinker<'a> {
fn set_output_kind(
&mut self,
_output_kind: LinkOutputKind,
output_kind: LinkOutputKind,
_crate_type: CrateType,
_out_filename: &Path,
) {
match output_kind {
LinkOutputKind::DynamicNoPicExe | LinkOutputKind::DynamicPicExe => {
self.cmd.arg("-sMAIN_MODULE=2");
}
LinkOutputKind::DynamicDylib | LinkOutputKind::StaticDylib => {
self.cmd.arg("-sSIDE_MODULE=2");
}
// -fno-pie is the default on Emscripten.
LinkOutputKind::StaticNoPicExe | LinkOutputKind::StaticPicExe => {}
LinkOutputKind::WasiReactorExe => {
unreachable!();
}
}
}
fn link_dylib_by_name(&mut self, name: &str, _verbatim: bool, _as_needed: bool) {

View file

@ -0,0 +1,178 @@
use rustc_middle::mir::interpret::{CtfeProvenance, InterpResult, Scalar, interp_ok};
use rustc_middle::ty::{Region, Ty};
use rustc_middle::{span_bug, ty};
use rustc_span::def_id::DefId;
use rustc_span::sym;
use crate::const_eval::CompileTimeMachine;
use crate::interpret::{Immediate, InterpCx, MPlaceTy, MemoryKind, Writeable};
// CTFE support for building the introspection value describing a `dyn Trait` type:
// a `DynTrait { predicates: &[DynTraitPredicate { trait_ty: Trait { ty, is_auto } }] }`
// structure written into interpreter memory.
impl<'tcx> InterpCx<'tcx, CompileTimeMachine<'tcx>> {
/// Writes the type info of a `dyn` type into `dyn_place`.
///
/// `data` is the list of existential predicates of the `dyn` type and `region` its
/// lifetime. The predicates are split into the principal trait (kept together with its
/// projection bounds) and the auto traits, and the result is written into the
/// `predicates` slice field of the `DynTrait` ADT that `dyn_place` points to.
pub(crate) fn write_dyn_trait_type_info(
&mut self,
dyn_place: impl Writeable<'tcx, CtfeProvenance>,
data: &'tcx ty::List<ty::Binder<'tcx, ty::ExistentialPredicate<'tcx>>>,
region: Region<'tcx>,
) -> InterpResult<'tcx> {
let tcx = self.tcx.tcx;
// Find the principal trait ref (for super trait collection), collect auto traits,
// and collect all projection predicates (used when computing TypeId for each supertrait).
// NOTE(review): `projections` is populated but never read within this function —
// confirm whether it is still needed.
let mut principal: Option<ty::Binder<'tcx, ty::ExistentialTraitRef<'tcx>>> = None;
let mut auto_traits_def_ids: Vec<ty::Binder<'tcx, DefId>> = Vec::new();
let mut projections: Vec<ty::Binder<'tcx, ty::ExistentialProjection<'tcx>>> = Vec::new();
for b in data.iter() {
match b.skip_binder() {
ty::ExistentialPredicate::Trait(tr) => principal = Some(b.rebind(tr)),
ty::ExistentialPredicate::AutoTrait(did) => auto_traits_def_ids.push(b.rebind(did)),
ty::ExistentialPredicate::Projection(p) => projections.push(b.rebind(p)),
}
}
// This is to make principal dyn type include Trait and projection predicates, excluding auto traits.
let principal_ty: Option<Ty<'tcx>> = principal.map(|_tr| {
let preds = tcx
.mk_poly_existential_predicates_from_iter(data.iter().filter(|b| {
!matches!(b.skip_binder(), ty::ExistentialPredicate::AutoTrait(_))
}));
Ty::new_dynamic(tcx, preds, region)
});
// DynTrait { predicates: &'static [Trait] }
// Walk the fields of the target ADT so a library-side layout change is caught here.
for (field_idx, field) in
dyn_place.layout().ty.ty_adt_def().unwrap().non_enum_variant().fields.iter_enumerated()
{
let field_place = self.project_field(&dyn_place, field_idx)?;
match field.name {
sym::predicates => {
self.write_dyn_trait_predicates_slice(
&field_place,
principal_ty,
&auto_traits_def_ids,
region,
)?;
}
other => {
span_bug!(self.tcx.def_span(field.did), "unimplemented DynTrait field {other}")
}
}
}
interp_ok(())
}
/// Builds a `dyn AutoTrait` type containing a single auto-trait predicate,
/// preserving the bound vars of the original predicate.
fn mk_dyn_principal_auto_trait_ty(
&self,
auto_trait_def_id: ty::Binder<'tcx, DefId>,
region: Region<'tcx>,
) -> Ty<'tcx> {
let tcx = self.tcx.tcx;
// Preserve the binder vars from the original auto-trait predicate.
let pred_inner = ty::ExistentialPredicate::AutoTrait(auto_trait_def_id.skip_binder());
let pred = ty::Binder::bind_with_vars(pred_inner, auto_trait_def_id.bound_vars());
let preds = tcx.mk_poly_existential_predicates_from_iter([pred].into_iter());
Ty::new_dynamic(tcx, preds, region)
}
/// Allocates an array of `DynTraitPredicate` (one entry for the principal type, if
/// any, followed by one per auto trait), fills it, and writes a slice pointing at it
/// into `slice_place`.
fn write_dyn_trait_predicates_slice(
&mut self,
slice_place: &impl Writeable<'tcx, CtfeProvenance>,
principal_ty: Option<Ty<'tcx>>,
auto_trait_def_ids: &[ty::Binder<'tcx, DefId>],
region: Region<'tcx>,
) -> InterpResult<'tcx> {
let tcx = self.tcx.tcx;
// total entries in DynTrait predicates
let total_len = principal_ty.map(|_| 1).unwrap_or(0) + auto_trait_def_ids.len();
// element type = DynTraitPredicate
let slice_ty = slice_place.layout().ty.builtin_deref(false).unwrap(); // [DynTraitPredicate]
let elem_ty = slice_ty.sequence_element_type(tcx); // DynTraitPredicate
let arr_layout = self.layout_of(Ty::new_array(tcx, elem_ty, total_len as u64))?;
let arr_place = self.allocate(arr_layout, MemoryKind::Stack)?;
let mut elems = self.project_array_fields(&arr_place)?;
// principal entry (if any) - NOT an auto trait
if let Some(principal_ty) = principal_ty {
let Some((_i, elem_place)) = elems.next(self)? else {
span_bug!(self.tcx.span, "DynTrait.predicates length computed wrong (principal)");
};
self.write_dyn_trait_predicate(elem_place, principal_ty, false)?;
}
// auto trait entries - these ARE auto traits
for auto in auto_trait_def_ids {
let Some((_i, elem_place)) = elems.next(self)? else {
span_bug!(self.tcx.span, "DynTrait.predicates length computed wrong (auto)");
};
let auto_ty = self.mk_dyn_principal_auto_trait_ty(*auto, region);
self.write_dyn_trait_predicate(elem_place, auto_ty, true)?;
}
// Freeze the array allocation before handing out a reference to it.
let arr_place = arr_place.map_provenance(CtfeProvenance::as_immutable);
let imm = Immediate::new_slice(arr_place.ptr(), total_len as u64, self);
self.write_immediate(imm, slice_place)
}
/// Writes one `DynTraitPredicate { trait_ty }` entry describing `trait_ty`
/// into `predicate_place`.
fn write_dyn_trait_predicate(
&mut self,
predicate_place: MPlaceTy<'tcx>,
trait_ty: Ty<'tcx>,
is_auto: bool,
) -> InterpResult<'tcx> {
// DynTraitPredicate { trait_ty: Trait }
for (field_idx, field) in predicate_place
.layout
.ty
.ty_adt_def()
.unwrap()
.non_enum_variant()
.fields
.iter_enumerated()
{
let field_place = self.project_field(&predicate_place, field_idx)?;
match field.name {
sym::trait_ty => {
// Now write the Trait struct
self.write_trait(field_place, trait_ty, is_auto)?;
}
other => {
span_bug!(
self.tcx.def_span(field.did),
"unimplemented DynTraitPredicate field {other}"
)
}
}
}
interp_ok(())
}
/// Writes a `Trait { ty, is_auto }` value for `trait_ty` into `trait_place`:
/// the `ty` field gets the `TypeId` of `trait_ty`, `is_auto` the given flag.
fn write_trait(
&mut self,
trait_place: MPlaceTy<'tcx>,
trait_ty: Ty<'tcx>,
is_auto: bool,
) -> InterpResult<'tcx> {
// Trait { ty: TypeId, is_auto: bool }
for (field_idx, field) in
trait_place.layout.ty.ty_adt_def().unwrap().non_enum_variant().fields.iter_enumerated()
{
let field_place = self.project_field(&trait_place, field_idx)?;
match field.name {
sym::ty => {
self.write_type_id(trait_ty, &field_place)?;
}
sym::is_auto => {
self.write_scalar(Scalar::from_bool(is_auto), &field_place)?;
}
other => {
span_bug!(self.tcx.def_span(field.did), "unimplemented Trait field {other}")
}
}
}
interp_ok(())
}
}

View file

@ -9,6 +9,7 @@ use tracing::instrument;
use crate::interpret::InterpCx;
mod dummy_machine;
mod dyn_trait;
mod error;
mod eval_queries;
mod fn_queries;

View file

@ -129,13 +129,18 @@ impl<'tcx> InterpCx<'tcx, CompileTimeMachine<'tcx>> {
variant
}
ty::Dynamic(predicates, region) => {
let (variant, variant_place) = downcast(sym::DynTrait)?;
let dyn_place = self.project_field(&variant_place, FieldIdx::ZERO)?;
self.write_dyn_trait_type_info(dyn_place, *predicates, *region)?;
variant
}
ty::Adt(_, _)
| ty::Foreign(_)
| ty::Pat(_, _)
| ty::FnDef(..)
| ty::FnPtr(..)
| ty::UnsafeBinder(..)
| ty::Dynamic(..)
| ty::Closure(..)
| ty::CoroutineClosure(..)
| ty::Coroutine(..)

View file

@ -27,26 +27,18 @@ mod tests;
/// the max/min element of the SCC, or all of the above.
///
/// Concretely, the both merge operations must commute, e.g. where `merge`
/// is `merge_scc` and `merge_reached`: `a.merge(b) == b.merge(a)`
/// is `update_scc` and `update_reached`: `a.merge(b) == b.merge(a)`
///
/// In general, what you want is probably always min/max according
/// to some ordering, potentially with side constraints (min x such
/// that P holds).
pub trait Annotation: Debug + Copy {
/// Merge two existing annotations into one during
/// path compression.o
fn merge_scc(self, other: Self) -> Self;
/// path compression.
fn update_scc(&mut self, other: &Self);
/// Merge a successor into this annotation.
fn merge_reached(self, other: Self) -> Self;
fn update_scc(&mut self, other: Self) {
*self = self.merge_scc(other)
}
fn update_reachable(&mut self, other: Self) {
*self = self.merge_reached(other)
}
fn update_reachable(&mut self, other: &Self);
}
/// An accumulator for annotations.
@ -70,12 +62,8 @@ impl<N: Idx, S: Idx + Ord> Annotations<N> for NoAnnotations<S> {
/// The empty annotation, which does nothing.
impl Annotation for () {
fn merge_reached(self, _other: Self) -> Self {
()
}
fn merge_scc(self, _other: Self) -> Self {
()
}
fn update_reachable(&mut self, _other: &Self) {}
fn update_scc(&mut self, _other: &Self) {}
}
/// Strongly connected components (SCC) of a graph. The type `N` is
@ -614,7 +602,7 @@ where
*min_depth = successor_min_depth;
*min_cycle_root = successor_node;
}
current_component_annotation.update_scc(successor_annotation);
current_component_annotation.update_scc(&successor_annotation);
}
// The starting node `node` is succeeded by a fully identified SCC
// which is now added to the set under `scc_index`.
@ -629,7 +617,7 @@ where
// the `successors_stack` for later.
trace!(?node, ?successor_scc_index);
successors_stack.push(successor_scc_index);
current_component_annotation.update_reachable(successor_annotation);
current_component_annotation.update_reachable(&successor_annotation);
}
// `node` has no more (direct) successors; search recursively.
None => {

View file

@ -32,12 +32,12 @@ impl Maxes {
}
impl Annotation for MaxReached {
fn merge_scc(self, other: Self) -> Self {
Self(std::cmp::max(other.0, self.0))
fn update_scc(&mut self, other: &Self) {
self.0 = self.0.max(other.0);
}
fn merge_reached(self, other: Self) -> Self {
Self(std::cmp::max(other.0, self.0))
fn update_reachable(&mut self, other: &Self) {
self.0 = self.0.max(other.0);
}
}
@ -75,13 +75,12 @@ impl Annotations<usize> for MinMaxes {
}
impl Annotation for MinMaxIn {
fn merge_scc(self, other: Self) -> Self {
Self { min: std::cmp::min(self.min, other.min), max: std::cmp::max(self.max, other.max) }
fn update_scc(&mut self, other: &Self) {
self.min = self.min.min(other.min);
self.max = self.max.max(other.max);
}
fn merge_reached(self, _other: Self) -> Self {
self
}
fn update_reachable(&mut self, _other: &Self) {}
}
#[test]

View file

@ -849,6 +849,9 @@ pub struct SyntaxExtension {
/// Should debuginfo for the macro be collapsed to the outermost expansion site (in other
/// words, was the macro definition annotated with `#[collapse_debuginfo]`)?
pub collapse_debuginfo: bool,
/// Suppresses the "this error originates in the macro" note when a diagnostic points at this
/// macro.
pub hide_backtrace: bool,
}
impl SyntaxExtension {
@ -882,6 +885,7 @@ impl SyntaxExtension {
allow_internal_unsafe: false,
local_inner_macros: false,
collapse_debuginfo: false,
hide_backtrace: false,
}
}
@ -912,6 +916,12 @@ impl SyntaxExtension {
collapse_table[flag as usize][attr as usize]
}
/// Whether diagnostics pointing at this macro should suppress the
/// "this error originates in the macro" backtrace note.
/// Currently approximated by the presence of `#[rustc_diagnostic_item]`.
fn get_hide_backtrace(attrs: &[hir::Attribute]) -> bool {
// FIXME(estebank): instead of reusing `#[rustc_diagnostic_item]` as a proxy, introduce a
// new attribute purely for this under the `#[diagnostic]` namespace.
ast::attr::find_by_name(attrs, sym::rustc_diagnostic_item).is_some()
}
/// Constructs a syntax extension with the given properties
/// and other properties converted from attributes.
pub fn new(
@ -948,6 +958,7 @@ impl SyntaxExtension {
// Not a built-in macro
None => (None, helper_attrs),
};
let hide_backtrace = builtin_name.is_some() || Self::get_hide_backtrace(attrs);
let stability = find_attr!(attrs, AttributeKind::Stability { stability, .. } => *stability);
@ -982,6 +993,7 @@ impl SyntaxExtension {
allow_internal_unsafe,
local_inner_macros,
collapse_debuginfo,
hide_backtrace,
}
}
@ -1061,7 +1073,7 @@ impl SyntaxExtension {
self.allow_internal_unsafe,
self.local_inner_macros,
self.collapse_debuginfo,
self.builtin_name.is_some(),
self.hide_backtrace,
)
}
}

View file

@ -458,13 +458,11 @@ impl<'a, 'b> Rustc<'a, 'b> {
}
}
impl server::Types for Rustc<'_, '_> {
impl server::Server for Rustc<'_, '_> {
type TokenStream = TokenStream;
type Span = Span;
type Symbol = Symbol;
}
impl server::Server for Rustc<'_, '_> {
fn globals(&mut self) -> ExpnGlobals<Self::Span> {
ExpnGlobals {
def_site: self.def_site,

View file

@ -3281,6 +3281,63 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
}
}
/// Checks if we can suggest a derive macro for the unmet trait bound.
///
/// Returns `Some(derives)` — a list of `(self type name, def span, trait diagnostic
/// name)` tuples for the trait and each of its supertraits with a diagnostic name —
/// when the trait is derivable and no conflicting manual impl for `adt` exists;
/// `None` otherwise.
fn consider_suggesting_derives_for_ty(
    &self,
    trait_pred: ty::TraitPredicate<'tcx>,
    adt: ty::AdtDef<'tcx>,
) -> Option<Vec<(String, Span, Symbol)>> {
    let diagnostic_name = self.tcx.get_diagnostic_name(trait_pred.def_id())?;

    // Only these std traits have a built-in derive macro we can suggest.
    let can_derive = matches!(
        diagnostic_name,
        sym::Default
            | sym::Eq
            | sym::PartialEq
            | sym::Ord
            | sym::PartialOrd
            | sym::Clone
            | sym::Copy
            | sym::Hash
            | sym::Debug
    );
    if !can_derive {
        return None;
    }

    let trait_def_id = trait_pred.def_id();
    let self_ty = trait_pred.self_ty();

    // We need to check if there is already a manual implementation of the trait
    // for this specific ADT to avoid suggesting `#[derive(..)]` that would conflict.
    if self.tcx.non_blanket_impls_for_ty(trait_def_id, self_ty).any(|impl_def_id| {
        self.tcx
            .type_of(impl_def_id)
            .instantiate_identity()
            .ty_adt_def()
            .is_some_and(|def| def.did() == adt.did())
    }) {
        return None;
    }

    let mut derives = Vec::new();
    let self_name = self_ty.to_string();
    let self_span = self.tcx.def_span(adt.did());

    // Supertraits with diagnostic names are suggested first (e.g. `PartialOrd` for
    // `Ord`), then the trait itself.
    for super_trait in supertraits(self.tcx, ty::Binder::dummy(trait_pred.trait_ref)) {
        if let Some(parent_diagnostic_name) = self.tcx.get_diagnostic_name(super_trait.def_id())
        {
            derives.push((self_name.clone(), self_span, parent_diagnostic_name));
        }
    }
    derives.push((self_name, self_span, diagnostic_name));

    Some(derives)
}
fn note_predicate_source_and_get_derives(
&self,
err: &mut Diag<'_>,
@ -3298,35 +3355,8 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
Some(adt) if adt.did().is_local() => adt,
_ => continue,
};
if let Some(diagnostic_name) = self.tcx.get_diagnostic_name(trait_pred.def_id()) {
let can_derive = match diagnostic_name {
sym::Default
| sym::Eq
| sym::PartialEq
| sym::Ord
| sym::PartialOrd
| sym::Clone
| sym::Copy
| sym::Hash
| sym::Debug => true,
_ => false,
};
if can_derive {
let self_name = trait_pred.self_ty().to_string();
let self_span = self.tcx.def_span(adt.did());
for super_trait in
supertraits(self.tcx, ty::Binder::dummy(trait_pred.trait_ref))
{
if let Some(parent_diagnostic_name) =
self.tcx.get_diagnostic_name(super_trait.def_id())
{
derives.push((self_name.clone(), self_span, parent_diagnostic_name));
}
}
derives.push((self_name, self_span, diagnostic_name));
} else {
traits.push(trait_pred.def_id());
}
if let Some(new_derives) = self.consider_suggesting_derives_for_ty(trait_pred, adt) {
derives.extend(new_derives);
} else {
traits.push(trait_pred.def_id());
}

View file

@ -9,6 +9,7 @@ use rustc_ast::{self as ast, CRATE_NODE_ID};
use rustc_attr_parsing::{AttributeParser, Early, ShouldEmit};
use rustc_codegen_ssa::traits::CodegenBackend;
use rustc_codegen_ssa::{CodegenResults, CrateInfo};
use rustc_data_structures::indexmap::IndexMap;
use rustc_data_structures::jobserver::Proxy;
use rustc_data_structures::steal::Steal;
use rustc_data_structures::sync::{AppendOnlyIndexVec, FreezeLock, WorkerLocal};
@ -584,7 +585,7 @@ fn write_out_deps(tcx: TyCtxt<'_>, outputs: &OutputFilenames, out_filenames: &[P
let result: io::Result<()> = try {
// Build a list of files used to compile the output and
// write Makefile-compatible dependency rules
let mut files: Vec<(String, u64, Option<SourceFileHash>)> = sess
let mut files: IndexMap<String, (u64, Option<SourceFileHash>)> = sess
.source_map()
.files()
.iter()
@ -593,10 +594,12 @@ fn write_out_deps(tcx: TyCtxt<'_>, outputs: &OutputFilenames, out_filenames: &[P
.map(|fmap| {
(
escape_dep_filename(&fmap.name.prefer_local_unconditionally().to_string()),
// This needs to be unnormalized,
// as external tools wouldn't know how rustc normalizes them
fmap.unnormalized_source_len as u64,
fmap.checksum_hash,
(
// This needs to be unnormalized,
// as external tools wouldn't know how rustc normalizes them
fmap.unnormalized_source_len as u64,
fmap.checksum_hash,
),
)
})
.collect();
@ -614,7 +617,7 @@ fn write_out_deps(tcx: TyCtxt<'_>, outputs: &OutputFilenames, out_filenames: &[P
fn hash_iter_files<P: AsRef<Path>>(
it: impl Iterator<Item = P>,
checksum_hash_algo: Option<SourceFileHashAlgorithm>,
) -> impl Iterator<Item = (P, u64, Option<SourceFileHash>)> {
) -> impl Iterator<Item = (P, (u64, Option<SourceFileHash>))> {
it.map(move |path| {
match checksum_hash_algo.and_then(|algo| {
fs::File::open(path.as_ref())
@ -630,8 +633,8 @@ fn write_out_deps(tcx: TyCtxt<'_>, outputs: &OutputFilenames, out_filenames: &[P
})
.ok()
}) {
Some((file_len, checksum)) => (path, file_len, Some(checksum)),
None => (path, 0, None),
Some((file_len, checksum)) => (path, (file_len, Some(checksum))),
None => (path, (0, None)),
}
})
}
@ -705,18 +708,14 @@ fn write_out_deps(tcx: TyCtxt<'_>, outputs: &OutputFilenames, out_filenames: &[P
file,
"{}: {}\n",
path.display(),
files
.iter()
.map(|(path, _file_len, _checksum_hash_algo)| path.as_str())
.intersperse(" ")
.collect::<String>()
files.keys().map(String::as_str).intersperse(" ").collect::<String>()
)?;
}
// Emit a fake target for each input file to the compilation. This
// prevents `make` from spitting out an error if a file is later
// deleted. For more info see #28735
for (path, _file_len, _checksum_hash_algo) in &files {
for path in files.keys() {
writeln!(file, "{path}:")?;
}
@ -745,7 +744,7 @@ fn write_out_deps(tcx: TyCtxt<'_>, outputs: &OutputFilenames, out_filenames: &[P
if sess.opts.unstable_opts.checksum_hash_algorithm().is_some() {
files
.iter()
.filter_map(|(path, file_len, hash_algo)| {
.filter_map(|(path, (file_len, hash_algo))| {
hash_algo.map(|hash_algo| (path, file_len, hash_algo))
})
.try_for_each(|(path, file_len, checksum_hash)| {

View file

@ -7,6 +7,7 @@ edition = "2024"
# tidy-alphabetical-start
bitflags = "2.4.1"
rustc_abi = { path = "../rustc_abi" }
rustc_apfloat = "0.2.0"
rustc_ast = { path = "../rustc_ast" }
rustc_ast_pretty = { path = "../rustc_ast_pretty" }
rustc_attr_parsing = { path = "../rustc_attr_parsing" }

View file

@ -1,10 +1,12 @@
use hir::{ExprKind, Node};
use rustc_abi::{Integer, Size};
use rustc_apfloat::Float;
use rustc_apfloat::ieee::{DoubleS, HalfS, IeeeFloat, QuadS, Semantics, SingleS};
use rustc_hir::{HirId, attrs};
use rustc_middle::ty::Ty;
use rustc_middle::ty::layout::IntegerExt;
use rustc_middle::{bug, ty};
use rustc_span::Span;
use rustc_span::{Span, Symbol};
use {rustc_ast as ast, rustc_hir as hir};
use crate::LateContext;
@ -383,6 +385,13 @@ fn lint_uint_literal<'tcx>(
}
}
/// `None` if `v` does not parse as the float type, otherwise indicates whether a literal rounds
/// to infinity.
///
/// Parsing goes through `rustc_apfloat`'s `IeeeFloat<S>` for the chosen semantics `S`
/// (half/single/double/quad at the call sites), rather than host `f32`/`f64` parsing.
fn float_is_infinite<S: Semantics>(v: Symbol) -> Option<bool> {
let x: IeeeFloat<S> = v.as_str().parse().ok()?;
Some(x.is_infinite())
}
pub(crate) fn lint_literal<'tcx>(
cx: &LateContext<'tcx>,
type_limits: &TypeLimits,
@ -405,18 +414,18 @@ pub(crate) fn lint_literal<'tcx>(
lint_uint_literal(cx, hir_id, span, lit, t)
}
ty::Float(t) => {
let (is_infinite, sym) = match lit.node {
ast::LitKind::Float(v, _) => match t {
// FIXME(f16_f128): add this check once `is_infinite` is reliable (ABI
// issues resolved).
ty::FloatTy::F16 => (Ok(false), v),
ty::FloatTy::F32 => (v.as_str().parse().map(f32::is_infinite), v),
ty::FloatTy::F64 => (v.as_str().parse().map(f64::is_infinite), v),
ty::FloatTy::F128 => (Ok(false), v),
},
_ => bug!(),
let ast::LitKind::Float(v, _) = lit.node else {
bug!();
};
if is_infinite == Ok(true) {
let is_infinite = match t {
ty::FloatTy::F16 => float_is_infinite::<HalfS>(v),
ty::FloatTy::F32 => float_is_infinite::<SingleS>(v),
ty::FloatTy::F64 => float_is_infinite::<DoubleS>(v),
ty::FloatTy::F128 => float_is_infinite::<QuadS>(v),
};
if is_infinite == Some(true) {
cx.emit_span_lint(
OVERFLOWING_LITERALS,
span,
@ -426,7 +435,7 @@ pub(crate) fn lint_literal<'tcx>(
.sess()
.source_map()
.span_to_snippet(lit.span)
.unwrap_or_else(|_| sym.to_string()),
.unwrap_or_else(|_| v.to_string()),
},
);
}

View file

@ -3,8 +3,8 @@
use std::ffi::OsStr;
use rustc_ast::tokenstream::TokenStream;
use rustc_hir::def_id::{CrateNum, DefId, LOCAL_CRATE, LocalDefId, LocalModDefId, ModDefId};
use rustc_hir::hir_id::{HirId, OwnerId};
use rustc_hir::def_id::{CrateNum, DefId, LOCAL_CRATE, LocalDefId, LocalModDefId};
use rustc_hir::hir_id::OwnerId;
use rustc_query_system::dep_graph::DepNodeIndex;
use rustc_query_system::query::{DefIdCache, DefaultCache, SingleCache, VecCache};
use rustc_span::{DUMMY_SP, Ident, LocalExpnId, Span, Symbol};
@ -12,7 +12,7 @@ use rustc_span::{DUMMY_SP, Ident, LocalExpnId, Span, Symbol};
use crate::infer::canonical::CanonicalQueryInput;
use crate::mir::mono::CollectionMode;
use crate::ty::fast_reject::SimplifiedType;
use crate::ty::layout::{TyAndLayout, ValidityRequirement};
use crate::ty::layout::ValidityRequirement;
use crate::ty::{self, GenericArg, GenericArgsRef, Ty, TyCtxt};
use crate::{mir, traits};
@ -29,15 +29,7 @@ pub trait Key: Sized {
/// constraint is not enforced here.
///
/// [`QueryCache`]: rustc_query_system::query::QueryCache
// N.B. Most of the keys down below have `type Cache<V> = DefaultCache<Self, V>;`,
// it would be reasonable to use associated type defaults, to remove the duplication...
//
// ...But r-a doesn't support them yet and using a default here causes r-a to not infer
// return types of queries which is very annoying. Thus, until r-a support associated
// type defaults, please restrain from using them here <3
//
// r-a issue: <https://github.com/rust-lang/rust-analyzer/issues/13693>
type Cache<V>;
type Cache<V> = DefaultCache<Self, V>;
/// In the event that a cycle occurs, if no explicit span has been
/// given for a query with key `self`, what span should we use?
@ -72,49 +64,30 @@ impl Key for () {
}
impl<'tcx> Key for ty::InstanceKind<'tcx> {
type Cache<V> = DefaultCache<Self, V>;
fn default_span(&self, tcx: TyCtxt<'_>) -> Span {
tcx.def_span(self.def_id())
}
}
impl<'tcx> AsLocalKey for ty::InstanceKind<'tcx> {
type LocalKey = Self;
#[inline(always)]
fn as_local_key(&self) -> Option<Self::LocalKey> {
self.def_id().is_local().then(|| *self)
}
}
impl<'tcx> Key for ty::Instance<'tcx> {
type Cache<V> = DefaultCache<Self, V>;
fn default_span(&self, tcx: TyCtxt<'_>) -> Span {
tcx.def_span(self.def_id())
}
}
impl<'tcx> Key for mir::interpret::GlobalId<'tcx> {
type Cache<V> = DefaultCache<Self, V>;
fn default_span(&self, tcx: TyCtxt<'_>) -> Span {
self.instance.default_span(tcx)
}
}
impl<'tcx> Key for (Ty<'tcx>, Option<ty::ExistentialTraitRef<'tcx>>) {
type Cache<V> = DefaultCache<Self, V>;
fn default_span(&self, _: TyCtxt<'_>) -> Span {
DUMMY_SP
}
}
impl<'tcx> Key for mir::interpret::LitToConstInput<'tcx> {
type Cache<V> = DefaultCache<Self, V>;
fn default_span(&self, _tcx: TyCtxt<'_>) -> Span {
DUMMY_SP
}
@ -184,8 +157,6 @@ impl AsLocalKey for DefId {
}
impl Key for LocalModDefId {
type Cache<V> = DefaultCache<Self, V>;
fn default_span(&self, tcx: TyCtxt<'_>) -> Span {
tcx.def_span(*self)
}
@ -196,79 +167,19 @@ impl Key for LocalModDefId {
}
}
impl Key for ModDefId {
type Cache<V> = DefaultCache<Self, V>;
fn default_span(&self, tcx: TyCtxt<'_>) -> Span {
tcx.def_span(*self)
}
#[inline(always)]
fn key_as_def_id(&self) -> Option<DefId> {
Some(self.to_def_id())
}
}
impl AsLocalKey for ModDefId {
type LocalKey = LocalModDefId;
#[inline(always)]
fn as_local_key(&self) -> Option<Self::LocalKey> {
self.as_local()
}
}
impl Key for SimplifiedType {
type Cache<V> = DefaultCache<Self, V>;
fn default_span(&self, _: TyCtxt<'_>) -> Span {
DUMMY_SP
}
}
impl Key for (DefId, DefId) {
type Cache<V> = DefaultCache<Self, V>;
fn default_span(&self, tcx: TyCtxt<'_>) -> Span {
self.1.default_span(tcx)
}
}
impl<'tcx> Key for (ty::Instance<'tcx>, LocalDefId) {
type Cache<V> = DefaultCache<Self, V>;
fn default_span(&self, tcx: TyCtxt<'_>) -> Span {
self.0.default_span(tcx)
}
}
impl Key for (DefId, LocalDefId) {
type Cache<V> = DefaultCache<Self, V>;
fn default_span(&self, tcx: TyCtxt<'_>) -> Span {
self.1.default_span(tcx)
}
}
impl Key for (LocalDefId, DefId) {
type Cache<V> = DefaultCache<Self, V>;
fn default_span(&self, tcx: TyCtxt<'_>) -> Span {
self.0.default_span(tcx)
}
}
impl Key for (LocalDefId, LocalDefId) {
type Cache<V> = DefaultCache<Self, V>;
fn default_span(&self, tcx: TyCtxt<'_>) -> Span {
self.0.default_span(tcx)
}
}
impl Key for (DefId, Ident) {
type Cache<V> = DefaultCache<Self, V>;
fn default_span(&self, tcx: TyCtxt<'_>) -> Span {
tcx.def_span(self.0)
}
@ -280,16 +191,12 @@ impl Key for (DefId, Ident) {
}
impl Key for (LocalDefId, LocalDefId, Ident) {
type Cache<V> = DefaultCache<Self, V>;
fn default_span(&self, tcx: TyCtxt<'_>) -> Span {
self.1.default_span(tcx)
}
}
impl Key for (CrateNum, DefId) {
type Cache<V> = DefaultCache<Self, V>;
fn default_span(&self, tcx: TyCtxt<'_>) -> Span {
self.1.default_span(tcx)
}
@ -305,8 +212,6 @@ impl AsLocalKey for (CrateNum, DefId) {
}
impl Key for (CrateNum, SimplifiedType) {
type Cache<V> = DefaultCache<Self, V>;
fn default_span(&self, _: TyCtxt<'_>) -> Span {
DUMMY_SP
}
@ -321,121 +226,37 @@ impl AsLocalKey for (CrateNum, SimplifiedType) {
}
}
impl Key for (DefId, SimplifiedType) {
type Cache<V> = DefaultCache<Self, V>;
fn default_span(&self, tcx: TyCtxt<'_>) -> Span {
self.0.default_span(tcx)
}
}
impl Key for (DefId, ty::SizedTraitKind) {
type Cache<V> = DefaultCache<Self, V>;
fn default_span(&self, tcx: TyCtxt<'_>) -> Span {
self.0.default_span(tcx)
}
}
impl<'tcx> Key for GenericArgsRef<'tcx> {
type Cache<V> = DefaultCache<Self, V>;
fn default_span(&self, _: TyCtxt<'_>) -> Span {
DUMMY_SP
}
}
impl<'tcx> Key for (DefId, GenericArgsRef<'tcx>) {
type Cache<V> = DefaultCache<Self, V>;
fn default_span(&self, tcx: TyCtxt<'_>) -> Span {
self.0.default_span(tcx)
}
}
impl<'tcx> Key for (ty::UnevaluatedConst<'tcx>, ty::UnevaluatedConst<'tcx>) {
type Cache<V> = DefaultCache<Self, V>;
fn default_span(&self, tcx: TyCtxt<'_>) -> Span {
(self.0).def.default_span(tcx)
}
}
impl<'tcx> Key for (LocalDefId, DefId, GenericArgsRef<'tcx>) {
type Cache<V> = DefaultCache<Self, V>;
fn default_span(&self, tcx: TyCtxt<'_>) -> Span {
self.0.default_span(tcx)
}
}
impl<'tcx> Key for (ty::ParamEnv<'tcx>, ty::TraitRef<'tcx>) {
type Cache<V> = DefaultCache<Self, V>;
fn default_span(&self, tcx: TyCtxt<'_>) -> Span {
tcx.def_span(self.1.def_id)
}
}
impl<'tcx> Key for ty::ParamEnvAnd<'tcx, Ty<'tcx>> {
type Cache<V> = DefaultCache<Self, V>;
fn default_span(&self, _tcx: TyCtxt<'_>) -> Span {
DUMMY_SP
}
}
impl<'tcx> Key for ty::TraitRef<'tcx> {
type Cache<V> = DefaultCache<Self, V>;
fn default_span(&self, tcx: TyCtxt<'_>) -> Span {
tcx.def_span(self.def_id)
}
}
impl<'tcx> Key for ty::PolyTraitRef<'tcx> {
type Cache<V> = DefaultCache<Self, V>;
fn default_span(&self, tcx: TyCtxt<'_>) -> Span {
tcx.def_span(self.def_id())
}
}
impl<'tcx> Key for ty::PolyExistentialTraitRef<'tcx> {
type Cache<V> = DefaultCache<Self, V>;
fn default_span(&self, tcx: TyCtxt<'_>) -> Span {
tcx.def_span(self.def_id())
}
}
impl<'tcx> Key for (ty::PolyTraitRef<'tcx>, ty::PolyTraitRef<'tcx>) {
type Cache<V> = DefaultCache<Self, V>;
fn default_span(&self, tcx: TyCtxt<'_>) -> Span {
tcx.def_span(self.0.def_id())
}
}
impl<'tcx> Key for GenericArg<'tcx> {
type Cache<V> = DefaultCache<Self, V>;
fn default_span(&self, _: TyCtxt<'_>) -> Span {
DUMMY_SP
}
}
impl<'tcx> Key for ty::Const<'tcx> {
type Cache<V> = DefaultCache<Self, V>;
fn default_span(&self, _: TyCtxt<'_>) -> Span {
DUMMY_SP
}
}
impl<'tcx> Key for Ty<'tcx> {
type Cache<V> = DefaultCache<Self, V>;
fn default_span(&self, _: TyCtxt<'_>) -> Span {
DUMMY_SP
}
@ -449,41 +270,19 @@ impl<'tcx> Key for Ty<'tcx> {
}
}
impl<'tcx> Key for TyAndLayout<'tcx> {
type Cache<V> = DefaultCache<Self, V>;
fn default_span(&self, _: TyCtxt<'_>) -> Span {
DUMMY_SP
}
}
impl<'tcx> Key for (Ty<'tcx>, Ty<'tcx>) {
type Cache<V> = DefaultCache<Self, V>;
fn default_span(&self, _: TyCtxt<'_>) -> Span {
DUMMY_SP
}
}
impl<'tcx> Key for ty::Clauses<'tcx> {
type Cache<V> = DefaultCache<Self, V>;
fn default_span(&self, _: TyCtxt<'_>) -> Span {
DUMMY_SP
}
}
impl<'tcx> Key for ty::ParamEnv<'tcx> {
type Cache<V> = DefaultCache<Self, V>;
fn default_span(&self, _: TyCtxt<'_>) -> Span {
DUMMY_SP
}
}
impl<'tcx, T: Key> Key for ty::PseudoCanonicalInput<'tcx, T> {
type Cache<V> = DefaultCache<Self, V>;
fn default_span(&self, tcx: TyCtxt<'_>) -> Span {
self.value.default_span(tcx)
}
@ -494,24 +293,18 @@ impl<'tcx, T: Key> Key for ty::PseudoCanonicalInput<'tcx, T> {
}
impl Key for Symbol {
type Cache<V> = DefaultCache<Self, V>;
fn default_span(&self, _tcx: TyCtxt<'_>) -> Span {
DUMMY_SP
}
}
impl Key for Option<Symbol> {
type Cache<V> = DefaultCache<Self, V>;
fn default_span(&self, _tcx: TyCtxt<'_>) -> Span {
DUMMY_SP
}
}
impl<'tcx> Key for &'tcx OsStr {
type Cache<V> = DefaultCache<Self, V>;
fn default_span(&self, _tcx: TyCtxt<'_>) -> Span {
DUMMY_SP
}
@ -520,119 +313,54 @@ impl<'tcx> Key for &'tcx OsStr {
/// Canonical query goals correspond to abstract trait operations that
/// are not tied to any crate in particular.
impl<'tcx, T: Clone> Key for CanonicalQueryInput<'tcx, T> {
type Cache<V> = DefaultCache<Self, V>;
fn default_span(&self, _tcx: TyCtxt<'_>) -> Span {
DUMMY_SP
}
}
impl<'tcx, T: Clone> Key for (CanonicalQueryInput<'tcx, T>, bool) {
type Cache<V> = DefaultCache<Self, V>;
fn default_span(&self, _tcx: TyCtxt<'_>) -> Span {
DUMMY_SP
}
}
impl Key for (Symbol, u32, u32) {
type Cache<V> = DefaultCache<Self, V>;
fn default_span(&self, _tcx: TyCtxt<'_>) -> Span {
DUMMY_SP
}
}
impl<'tcx> Key for (DefId, Ty<'tcx>, GenericArgsRef<'tcx>, ty::ParamEnv<'tcx>) {
type Cache<V> = DefaultCache<Self, V>;
fn default_span(&self, _tcx: TyCtxt<'_>) -> Span {
DUMMY_SP
}
}
impl<'tcx> Key for (Ty<'tcx>, rustc_abi::VariantIdx) {
type Cache<V> = DefaultCache<Self, V>;
fn default_span(&self, _tcx: TyCtxt<'_>) -> Span {
DUMMY_SP
}
}
impl<'tcx> Key for (ty::Predicate<'tcx>, traits::WellFormedLoc) {
type Cache<V> = DefaultCache<Self, V>;
fn default_span(&self, _tcx: TyCtxt<'_>) -> Span {
DUMMY_SP
}
}
impl<'tcx> Key for (ty::PolyFnSig<'tcx>, &'tcx ty::List<Ty<'tcx>>) {
type Cache<V> = DefaultCache<Self, V>;
fn default_span(&self, _: TyCtxt<'_>) -> Span {
DUMMY_SP
}
}
impl<'tcx> Key for (ty::Instance<'tcx>, &'tcx ty::List<Ty<'tcx>>) {
type Cache<V> = DefaultCache<Self, V>;
fn default_span(&self, tcx: TyCtxt<'_>) -> Span {
self.0.default_span(tcx)
}
}
impl<'tcx> Key for ty::Value<'tcx> {
type Cache<V> = DefaultCache<Self, V>;
fn default_span(&self, _: TyCtxt<'_>) -> Span {
DUMMY_SP
}
}
impl Key for HirId {
type Cache<V> = DefaultCache<Self, V>;
fn default_span(&self, tcx: TyCtxt<'_>) -> Span {
tcx.hir_span(*self)
}
#[inline(always)]
fn key_as_def_id(&self) -> Option<DefId> {
None
}
}
impl Key for (LocalDefId, HirId) {
type Cache<V> = DefaultCache<Self, V>;
fn default_span(&self, tcx: TyCtxt<'_>) -> Span {
tcx.hir_span(self.1)
}
#[inline(always)]
fn key_as_def_id(&self) -> Option<DefId> {
Some(self.0.into())
}
}
impl<'tcx> Key for (LocalExpnId, &'tcx TokenStream) {
type Cache<V> = DefaultCache<Self, V>;
fn default_span(&self, _tcx: TyCtxt<'_>) -> Span {
self.0.expn_data().call_site
}
#[inline(always)]
fn key_as_def_id(&self) -> Option<DefId> {
None
}
}
impl<'tcx> Key for (ValidityRequirement, ty::PseudoCanonicalInput<'tcx, Ty<'tcx>>) {
type Cache<V> = DefaultCache<Self, V>;
// Just forward to `Ty<'tcx>`
fn default_span(&self, _: TyCtxt<'_>) -> Span {
@ -648,8 +376,6 @@ impl<'tcx> Key for (ValidityRequirement, ty::PseudoCanonicalInput<'tcx, Ty<'tcx>
}
impl<'tcx> Key for (ty::Instance<'tcx>, CollectionMode) {
type Cache<V> = DefaultCache<Self, V>;
fn default_span(&self, tcx: TyCtxt<'_>) -> Span {
self.0.default_span(tcx)
}

View file

@ -88,7 +88,7 @@ use rustc_index::IndexVec;
use rustc_lint_defs::LintId;
use rustc_macros::rustc_queries;
use rustc_query_system::ich::StableHashingContext;
use rustc_query_system::query::{QueryMode, QueryState};
use rustc_query_system::query::{QueryMode, QueryStackDeferred, QueryState};
use rustc_session::Limits;
use rustc_session::config::{EntryFnType, OptLevel, OutputFilenames, SymbolManglingVersion};
use rustc_session::cstore::{
@ -122,7 +122,7 @@ use crate::mir::mono::{
CodegenUnit, CollectionMode, MonoItem, MonoItemPartitions, NormalizationErrorInMono,
};
use crate::query::erase::{Erase, erase, restore};
use crate::query::plumbing::{CyclePlaceholder, DynamicQuery};
use crate::query::plumbing::CyclePlaceholder;
use crate::traits::query::{
CanonicalAliasGoal, CanonicalDropckOutlivesGoal, CanonicalImpliedOutlivesBoundsGoal,
CanonicalMethodAutoderefStepsGoal, CanonicalPredicateGoal, CanonicalTypeOpAscribeUserTypeGoal,

View file

@ -14,11 +14,14 @@ use crate::dep_graph;
use crate::dep_graph::DepKind;
use crate::query::on_disk_cache::{CacheEncoder, EncodedDepNodeIndex, OnDiskCache};
use crate::query::{
DynamicQueries, ExternProviders, Providers, QueryArenas, QueryCaches, QueryEngine, QueryStates,
ExternProviders, PerQueryVTables, Providers, QueryArenas, QueryCaches, QueryEngine, QueryStates,
};
use crate::ty::TyCtxt;
pub struct DynamicQuery<'tcx, C: QueryCache> {
/// Stores function pointers and other metadata for a particular query.
///
/// Used indirectly by query plumbing in `rustc_query_system`, via a trait.
pub struct QueryVTable<'tcx, C: QueryCache> {
pub name: &'static str,
pub eval_always: bool,
pub dep_kind: DepKind,
@ -62,7 +65,7 @@ pub struct QuerySystem<'tcx> {
pub states: QueryStates<'tcx>,
pub arenas: WorkerLocal<QueryArenas<'tcx>>,
pub caches: QueryCaches<'tcx>,
pub dynamic_queries: DynamicQueries<'tcx>,
pub query_vtables: PerQueryVTables<'tcx>,
/// This provides access to the incremental compilation on-disk cache for query results.
/// Do not access this directly. It is only meant to be used by
@ -418,16 +421,19 @@ macro_rules! define_callbacks {
})*
}
pub struct DynamicQueries<'tcx> {
/// Holds a `QueryVTable` for each query.
///
/// ("Per" just makes this pluralized name more visually distinct.)
pub struct PerQueryVTables<'tcx> {
$(
pub $name: DynamicQuery<'tcx, queries::$name::Storage<'tcx>>,
pub $name: ::rustc_middle::query::plumbing::QueryVTable<'tcx, queries::$name::Storage<'tcx>>,
)*
}
#[derive(Default)]
pub struct QueryStates<'tcx> {
$(
pub $name: QueryState<$($K)*>,
pub $name: QueryState<$($K)*, QueryStackDeferred<'tcx>>,
)*
}

View file

@ -159,9 +159,7 @@ pub macro with_types_for_signature($e:expr) {{
/// Avoids running any queries during prints.
pub macro with_no_queries($e:expr) {{
$crate::ty::print::with_reduced_queries!($crate::ty::print::with_forced_impl_filename_line!(
$crate::ty::print::with_no_trimmed_paths!($crate::ty::print::with_no_visible_paths!(
$crate::ty::print::with_forced_impl_filename_line!($e)
))
$crate::ty::print::with_no_trimmed_paths!($crate::ty::print::with_no_visible_paths!($e))
))
}}

View file

@ -88,7 +88,7 @@ impl<'tcx> Value<TyCtxt<'tcx>> for Representability {
if info.query.dep_kind == dep_kinds::representability
&& let Some(field_id) = info.query.def_id
&& let Some(field_id) = field_id.as_local()
&& let Some(DefKind::Field) = info.query.def_kind
&& let Some(DefKind::Field) = info.query.info.def_kind
{
let parent_id = tcx.parent(field_id.to_def_id());
let item_id = match tcx.def_kind(parent_id) {
@ -224,7 +224,7 @@ impl<'tcx, T> Value<TyCtxt<'tcx>> for Result<T, &'_ ty::layout::LayoutError<'_>>
continue;
};
let frame_span =
frame.query.default_span(cycle[(i + 1) % cycle.len()].span);
frame.query.info.default_span(cycle[(i + 1) % cycle.len()].span);
if frame_span.is_dummy() {
continue;
}

View file

@ -18,6 +18,7 @@ use rustc_const_eval::check_consts::{ConstCx, qualifs};
use rustc_data_structures::assert_matches;
use rustc_data_structures::fx::FxHashSet;
use rustc_hir as hir;
use rustc_hir::def::DefKind;
use rustc_index::{IndexSlice, IndexVec};
use rustc_middle::mir::visit::{MutVisitor, MutatingUseContext, PlaceContext, Visitor};
use rustc_middle::mir::*;
@ -329,6 +330,7 @@ impl<'tcx> Validator<'_, 'tcx> {
if let TempState::Defined { location: loc, .. } = self.temps[local]
&& let Left(statement) = self.body.stmt_at(loc)
&& let Some((_, Rvalue::Use(Operand::Constant(c)))) = statement.kind.as_assign()
&& self.should_evaluate_for_promotion_checks(c.const_)
&& let Some(idx) = c.const_.try_eval_target_usize(self.tcx, self.typing_env)
// Determine the type of the thing we are indexing.
&& let ty::Array(_, len) = place_base.ty(self.body, self.tcx).ty.kind()
@ -483,40 +485,33 @@ impl<'tcx> Validator<'_, 'tcx> {
if lhs_ty.is_integral() {
let sz = lhs_ty.primitive_size(self.tcx);
// Integer division: the RHS must be a non-zero const.
let rhs_val = match rhs {
Operand::Constant(c) => {
c.const_.try_eval_scalar_int(self.tcx, self.typing_env)
}
_ => None,
};
match rhs_val.map(|x| x.to_uint(sz)) {
let rhs_val = if let Operand::Constant(rhs_c) = rhs
&& self.should_evaluate_for_promotion_checks(rhs_c.const_)
&& let Some(rhs_val) =
rhs_c.const_.try_eval_scalar_int(self.tcx, self.typing_env)
// for the zero test, int vs uint does not matter
Some(x) if x != 0 => {} // okay
_ => return Err(Unpromotable), // value not known or 0 -- not okay
}
&& rhs_val.to_uint(sz) != 0
{
rhs_val
} else {
// value not known or 0 -- not okay
return Err(Unpromotable);
};
// Furthermore, for signed division, we also have to exclude `int::MIN /
// -1`.
if lhs_ty.is_signed() {
match rhs_val.map(|x| x.to_int(sz)) {
Some(-1) | None => {
// The RHS is -1 or unknown, so we have to be careful.
// But is the LHS int::MIN?
let lhs_val = match lhs {
Operand::Constant(c) => c
.const_
.try_eval_scalar_int(self.tcx, self.typing_env),
_ => None,
};
let lhs_min = sz.signed_int_min();
match lhs_val.map(|x| x.to_int(sz)) {
// okay
Some(x) if x != lhs_min => {}
// value not known or int::MIN -- not okay
_ => return Err(Unpromotable),
}
}
_ => {}
if lhs_ty.is_signed() && rhs_val.to_int(sz) == -1 {
// The RHS is -1, so we have to be careful. But is the LHS int::MIN?
if let Operand::Constant(lhs_c) = lhs
&& self.should_evaluate_for_promotion_checks(lhs_c.const_)
&& let Some(lhs_val) =
lhs_c.const_.try_eval_scalar_int(self.tcx, self.typing_env)
&& let lhs_min = sz.signed_int_min()
&& lhs_val.to_int(sz) != lhs_min
{
// okay
} else {
// value not known or int::MIN -- not okay
return Err(Unpromotable);
}
}
}
@ -683,6 +678,28 @@ impl<'tcx> Validator<'_, 'tcx> {
// This passed all checks, so let's accept.
Ok(())
}
/// Can we try to evaluate a given constant at this point in compilation? Attempting to evaluate
/// a const block before borrow-checking will result in a query cycle (#150464).
///
/// Returns `true` only for constants whose evaluation here cannot cycle back into
/// borrow-checking the body currently being promoted.
fn should_evaluate_for_promotion_checks(&self, constant: Const<'tcx>) -> bool {
match constant {
// `Const::Ty` is always a `ConstKind::Param` right now and that can never be turned
// into a mir value for promotion
// FIXME(mgca): do we want uses of type_const to be normalized during promotion?
Const::Ty(..) => false,
// Already-evaluated values carry no risk of triggering further queries.
Const::Val(..) => true,
// Evaluating a MIR constant requires borrow-checking it. For inline consts, as of
// #138499, this means borrow-checking its typeck root. Since borrow-checking the
// typeck root requires promoting its constants, trying to evaluate an inline const here
// will result in a query cycle. To avoid the cycle, we can't evaluate const blocks yet.
// Other kinds of unevaluated's can cause query cycles too when they arise from
// self-reference in user code; e.g. evaluating a constant can require evaluating a
// const function that uses that constant, again requiring evaluation of the constant.
// However, this form of cycle renders both the constant and function unusable in
// general, so we don't need to special-case it here.
Const::Unevaluated(uc, _) => self.tcx.def_kind(uc.def) != DefKind::InlineConst,
}
}
}
fn validate_candidates(

View file

@ -2760,9 +2760,13 @@ impl<'a> Parser<'a> {
let (mut cond, _) =
self.parse_expr_res(Restrictions::NO_STRUCT_LITERAL | Restrictions::ALLOW_LET, attrs)?;
CondChecker::new(self, let_chains_policy).visit_expr(&mut cond);
Ok(cond)
let mut checker = CondChecker::new(self, let_chains_policy);
checker.visit_expr(&mut cond);
Ok(if let Some(guar) = checker.found_incorrect_let_chain {
self.mk_expr_err(cond.span, guar)
} else {
cond
})
}
/// Parses a `let $pat = $expr` pseudo-expression.
@ -3484,13 +3488,19 @@ impl<'a> Parser<'a> {
let if_span = self.prev_token.span;
let mut cond = self.parse_match_guard_condition()?;
CondChecker::new(self, LetChainsPolicy::AlwaysAllowed).visit_expr(&mut cond);
let mut checker = CondChecker::new(self, LetChainsPolicy::AlwaysAllowed);
checker.visit_expr(&mut cond);
if has_let_expr(&cond) {
let span = if_span.to(cond.span);
self.psess.gated_spans.gate(sym::if_let_guard, span);
}
Ok(Some(cond))
Ok(Some(if let Some(guar) = checker.found_incorrect_let_chain {
self.mk_expr_err(cond.span, guar)
} else {
cond
}))
}
fn parse_match_arm_pat_and_guard(&mut self) -> PResult<'a, (Pat, Option<Box<Expr>>)> {
@ -3511,13 +3521,23 @@ impl<'a> Parser<'a> {
let ast::PatKind::Paren(subpat) = pat.kind else { unreachable!() };
let ast::PatKind::Guard(_, mut cond) = subpat.kind else { unreachable!() };
self.psess.gated_spans.ungate_last(sym::guard_patterns, cond.span);
CondChecker::new(self, LetChainsPolicy::AlwaysAllowed).visit_expr(&mut cond);
let mut checker = CondChecker::new(self, LetChainsPolicy::AlwaysAllowed);
checker.visit_expr(&mut cond);
let right = self.prev_token.span;
self.dcx().emit_err(errors::ParenthesesInMatchPat {
span: vec![left, right],
sugg: errors::ParenthesesInMatchPatSugg { left, right },
});
Ok((self.mk_pat(span, ast::PatKind::Wild), Some(cond)))
Ok((
self.mk_pat(span, ast::PatKind::Wild),
(if let Some(guar) = checker.found_incorrect_let_chain {
Some(self.mk_expr_err(cond.span, guar))
} else {
Some(cond)
}),
))
} else {
Ok((pat, self.parse_match_arm_guard()?))
}
@ -4208,6 +4228,7 @@ struct CondChecker<'a> {
forbid_let_reason: Option<ForbiddenLetReason>,
missing_let: Option<errors::MaybeMissingLet>,
comparison: Option<errors::MaybeComparison>,
found_incorrect_let_chain: Option<ErrorGuaranteed>,
}
impl<'a> CondChecker<'a> {
@ -4218,6 +4239,7 @@ impl<'a> CondChecker<'a> {
missing_let: None,
comparison: None,
let_chains_policy,
found_incorrect_let_chain: None,
depth: 0,
}
}
@ -4236,12 +4258,19 @@ impl MutVisitor for CondChecker<'_> {
NotSupportedOr(or_span) => {
self.parser.dcx().emit_err(errors::OrInLetChain { span: or_span })
}
_ => self.parser.dcx().emit_err(errors::ExpectedExpressionFoundLet {
span,
reason,
missing_let: self.missing_let,
comparison: self.comparison,
}),
_ => {
let guar =
self.parser.dcx().emit_err(errors::ExpectedExpressionFoundLet {
span,
reason,
missing_let: self.missing_let,
comparison: self.comparison,
});
if let Some(_) = self.missing_let {
self.found_incorrect_let_chain = Some(guar);
}
guar
}
};
*recovered = Recovered::Yes(error);
} else if self.depth > 1 {

View file

@ -758,7 +758,7 @@ impl<'input> Parser<'input> {
}
/// Parses a word starting at the current position. A word is the same as a
/// Rust identifier, except that it can't start with `_` character.
/// Rust identifier or keyword, except that it can't be a bare `_` character.
fn word(&mut self) -> &'input str {
let index = self.input_vec_index;
match self.peek() {

View file

@ -188,10 +188,27 @@ pub enum VariantsShape {
tag: Scalar,
tag_encoding: TagEncoding,
tag_field: usize,
variants: Vec<LayoutShape>,
variants: Vec<VariantFields>,
},
}
#[derive(Clone, Debug, PartialEq, Eq, Hash, Serialize)]
pub struct VariantFields {
/// Offsets for the first byte of each field,
/// ordered to match the source definition order.
/// I.e.: It follows the same order as [super::ty::VariantDef::fields()].
/// This vector does not go in increasing order.
pub offsets: Vec<Size>,
}
impl VariantFields {
/// Returns the field indices sorted by their byte offset (lowest offset first),
/// i.e. the in-memory layout order rather than the source definition order.
/// Fields sharing an offset keep their relative definition order (stable sort).
pub fn fields_by_offset_order(&self) -> Vec<FieldIdx> {
let mut indices = (0..self.offsets.len()).collect::<Vec<_>>();
indices.sort_by_key(|idx| self.offsets[*idx]);
indices
}
}
#[derive(Clone, Debug, PartialEq, Eq, Hash, Serialize)]
pub enum TagEncoding {
/// The tag directly stores the discriminant, but possibly with a smaller layout

View file

@ -11,7 +11,7 @@ use rustc_target::callconv;
use crate::abi::{
AddressSpace, ArgAbi, CallConvention, FieldsShape, FloatLength, FnAbi, IntegerLength,
IntegerType, Layout, LayoutShape, PassMode, Primitive, ReprFlags, ReprOptions, Scalar,
TagEncoding, TyAndLayout, ValueAbi, VariantsShape, WrappingRange,
TagEncoding, TyAndLayout, ValueAbi, VariantFields, VariantsShape, WrappingRange,
};
use crate::compiler_interface::BridgeTys;
use crate::target::MachineSize as Size;
@ -213,7 +213,15 @@ impl<'tcx> Stable<'tcx> for rustc_abi::Variants<rustc_abi::FieldIdx, rustc_abi::
tag: tag.stable(tables, cx),
tag_encoding: tag_encoding.stable(tables, cx),
tag_field: tag_field.stable(tables, cx),
variants: variants.iter().as_slice().stable(tables, cx),
variants: variants
.iter()
.map(|v| match &v.fields {
rustc_abi::FieldsShape::Arbitrary { offsets, .. } => VariantFields {
offsets: offsets.iter().as_slice().stable(tables, cx),
},
_ => panic!("variant layout should be Arbitrary"),
})
.collect(),
}
}
}

View file

@ -12,18 +12,17 @@ use rustc_middle::arena::Arena;
use rustc_middle::dep_graph::{self, DepKind, DepKindVTable, DepNodeIndex};
use rustc_middle::query::erase::{Erase, erase, restore};
use rustc_middle::query::on_disk_cache::{CacheEncoder, EncodedDepNodeIndex, OnDiskCache};
use rustc_middle::query::plumbing::{DynamicQuery, QuerySystem, QuerySystemFns};
use rustc_middle::query::plumbing::{QuerySystem, QuerySystemFns, QueryVTable};
use rustc_middle::query::{
AsLocalKey, DynamicQueries, ExternProviders, Providers, QueryCaches, QueryEngine, QueryStates,
queries,
AsLocalKey, ExternProviders, Providers, QueryCaches, QueryEngine, QueryStates, queries,
};
use rustc_middle::ty::TyCtxt;
use rustc_query_system::Value;
use rustc_query_system::dep_graph::SerializedDepNodeIndex;
use rustc_query_system::ich::StableHashingContext;
use rustc_query_system::query::{
CycleError, CycleErrorHandling, HashResult, QueryCache, QueryConfig, QueryMap, QueryMode,
QueryState, get_query_incr, get_query_non_incr,
CycleError, CycleErrorHandling, HashResult, QueryCache, QueryDispatcher, QueryMap, QueryMode,
QueryStackDeferred, QueryState, get_query_incr, get_query_non_incr,
};
use rustc_span::{ErrorGuaranteed, Span};
@ -37,30 +36,39 @@ pub use crate::plumbing::{QueryCtxt, query_key_hash_verify_all};
mod profiling_support;
pub use self::profiling_support::alloc_self_profile_query_strings;
struct DynamicConfig<
/// Combines a [`QueryVTable`] with some additional compile-time booleans
/// to implement [`QueryDispatcher`], for use by code in [`rustc_query_system`].
///
/// Baking these boolean flags into the type gives a modest but measurable
/// improvement to compiler perf and compiler code size; see
/// <https://github.com/rust-lang/rust/pull/151633>.
struct SemiDynamicQueryDispatcher<
'tcx,
C: QueryCache,
const ANON: bool,
const DEPTH_LIMIT: bool,
const FEEDABLE: bool,
> {
dynamic: &'tcx DynamicQuery<'tcx, C>,
vtable: &'tcx QueryVTable<'tcx, C>,
}
// Manually implement Copy/Clone, because deriving would put trait bounds on the cache type.
impl<'tcx, C: QueryCache, const ANON: bool, const DEPTH_LIMIT: bool, const FEEDABLE: bool> Copy
for DynamicConfig<'tcx, C, ANON, DEPTH_LIMIT, FEEDABLE>
for SemiDynamicQueryDispatcher<'tcx, C, ANON, DEPTH_LIMIT, FEEDABLE>
{
}
impl<'tcx, C: QueryCache, const ANON: bool, const DEPTH_LIMIT: bool, const FEEDABLE: bool> Clone
for DynamicConfig<'tcx, C, ANON, DEPTH_LIMIT, FEEDABLE>
for SemiDynamicQueryDispatcher<'tcx, C, ANON, DEPTH_LIMIT, FEEDABLE>
{
fn clone(&self) -> Self {
*self
}
}
// This is `impl QueryDispatcher for SemiDynamicQueryDispatcher`.
impl<'tcx, C: QueryCache, const ANON: bool, const DEPTH_LIMIT: bool, const FEEDABLE: bool>
QueryConfig<QueryCtxt<'tcx>> for DynamicConfig<'tcx, C, ANON, DEPTH_LIMIT, FEEDABLE>
QueryDispatcher<QueryCtxt<'tcx>>
for SemiDynamicQueryDispatcher<'tcx, C, ANON, DEPTH_LIMIT, FEEDABLE>
where
for<'a> C::Key: HashStable<StableHashingContext<'a>>,
{
@ -70,16 +78,19 @@ where
#[inline(always)]
fn name(self) -> &'static str {
self.dynamic.name
self.vtable.name
}
#[inline(always)]
fn cache_on_disk(self, tcx: TyCtxt<'tcx>, key: &Self::Key) -> bool {
(self.dynamic.cache_on_disk)(tcx, key)
(self.vtable.cache_on_disk)(tcx, key)
}
#[inline(always)]
fn query_state<'a>(self, qcx: QueryCtxt<'tcx>) -> &'a QueryState<Self::Key>
fn query_state<'a>(
self,
qcx: QueryCtxt<'tcx>,
) -> &'a QueryState<Self::Key, QueryStackDeferred<'tcx>>
where
QueryCtxt<'tcx>: 'a,
{
@ -87,8 +98,8 @@ where
// This is just manually doing the subfield referencing through pointer math.
unsafe {
&*(&qcx.tcx.query_system.states as *const QueryStates<'tcx>)
.byte_add(self.dynamic.query_state)
.cast::<QueryState<Self::Key>>()
.byte_add(self.vtable.query_state)
.cast::<QueryState<Self::Key, QueryStackDeferred<'tcx>>>()
}
}
@ -101,19 +112,19 @@ where
// This is just manually doing the subfield referencing through pointer math.
unsafe {
&*(&qcx.tcx.query_system.caches as *const QueryCaches<'tcx>)
.byte_add(self.dynamic.query_cache)
.byte_add(self.vtable.query_cache)
.cast::<Self::Cache>()
}
}
#[inline(always)]
fn execute_query(self, tcx: TyCtxt<'tcx>, key: Self::Key) -> Self::Value {
(self.dynamic.execute_query)(tcx, key)
(self.vtable.execute_query)(tcx, key)
}
#[inline(always)]
fn compute(self, qcx: QueryCtxt<'tcx>, key: Self::Key) -> Self::Value {
(self.dynamic.compute)(qcx.tcx, key)
(self.vtable.compute)(qcx.tcx, key)
}
#[inline(always)]
@ -124,8 +135,8 @@ where
prev_index: SerializedDepNodeIndex,
index: DepNodeIndex,
) -> Option<Self::Value> {
if self.dynamic.can_load_from_disk {
(self.dynamic.try_load_from_disk)(qcx.tcx, key, prev_index, index)
if self.vtable.can_load_from_disk {
(self.vtable.try_load_from_disk)(qcx.tcx, key, prev_index, index)
} else {
None
}
@ -138,7 +149,7 @@ where
key: &Self::Key,
index: SerializedDepNodeIndex,
) -> bool {
(self.dynamic.loadable_from_disk)(qcx.tcx, key, index)
(self.vtable.loadable_from_disk)(qcx.tcx, key, index)
}
fn value_from_cycle_error(
@ -147,12 +158,12 @@ where
cycle_error: &CycleError,
guar: ErrorGuaranteed,
) -> Self::Value {
(self.dynamic.value_from_cycle_error)(tcx, cycle_error, guar)
(self.vtable.value_from_cycle_error)(tcx, cycle_error, guar)
}
#[inline(always)]
fn format_value(self) -> fn(&Self::Value) -> String {
self.dynamic.format_value
self.vtable.format_value
}
#[inline(always)]
@ -162,7 +173,7 @@ where
#[inline(always)]
fn eval_always(self) -> bool {
self.dynamic.eval_always
self.vtable.eval_always
}
#[inline(always)]
@ -177,31 +188,42 @@ where
#[inline(always)]
fn dep_kind(self) -> DepKind {
self.dynamic.dep_kind
self.vtable.dep_kind
}
#[inline(always)]
fn cycle_error_handling(self) -> CycleErrorHandling {
self.dynamic.cycle_error_handling
self.vtable.cycle_error_handling
}
#[inline(always)]
fn hash_result(self) -> HashResult<Self::Value> {
self.dynamic.hash_result
self.vtable.hash_result
}
}
/// This is implemented per query. It allows restoring query values from their erased state
/// and constructing a QueryConfig.
trait QueryConfigRestored<'tcx> {
type RestoredValue;
type Config: QueryConfig<QueryCtxt<'tcx>>;
/// Provides access to vtable-like operations for a query
/// (by creating a [`QueryDispatcher`]),
/// but also keeps track of the "unerased" value type of the query
/// (i.e. the actual result type in the query declaration).
///
/// This trait allows some per-query code to be defined in generic functions
/// with a trait bound, instead of having to be defined inline within a macro
/// expansion.
///
/// There is one macro-generated implementation of this trait for each query,
/// on the type `rustc_query_impl::query_impl::$name::QueryType`.
trait QueryDispatcherUnerased<'tcx> {
type UnerasedValue;
type Dispatcher: QueryDispatcher<QueryCtxt<'tcx>>;
const NAME: &'static &'static str;
fn config(tcx: TyCtxt<'tcx>) -> Self::Config;
fn restore(value: <Self::Config as QueryConfig<QueryCtxt<'tcx>>>::Value)
-> Self::RestoredValue;
fn query_dispatcher(tcx: TyCtxt<'tcx>) -> Self::Dispatcher;
fn restore_val(
value: <Self::Dispatcher as QueryDispatcher<QueryCtxt<'tcx>>>::Value,
) -> Self::UnerasedValue;
}
pub fn query_system<'a>(
@ -214,7 +236,7 @@ pub fn query_system<'a>(
states: Default::default(),
arenas: Default::default(),
caches: Default::default(),
dynamic_queries: dynamic_queries(),
query_vtables: make_query_vtables(),
on_disk_cache,
fns: QuerySystemFns {
engine: engine(incremental),

View file

@ -6,6 +6,7 @@ use std::num::NonZero;
use rustc_data_structures::jobserver::Proxy;
use rustc_data_structures::stable_hasher::{HashStable, StableHasher};
use rustc_data_structures::sync::{DynSend, DynSync};
use rustc_data_structures::unord::UnordMap;
use rustc_hashes::Hash64;
use rustc_hir::limit::Limit;
@ -26,14 +27,14 @@ use rustc_middle::ty::{self, TyCtxt};
use rustc_query_system::dep_graph::{DepNodeParams, HasDepContext};
use rustc_query_system::ich::StableHashingContext;
use rustc_query_system::query::{
QueryCache, QueryConfig, QueryContext, QueryJobId, QueryMap, QuerySideEffect, QueryStackFrame,
force_query,
QueryCache, QueryContext, QueryDispatcher, QueryJobId, QueryMap, QuerySideEffect,
QueryStackDeferred, QueryStackFrame, QueryStackFrameExtra, force_query,
};
use rustc_query_system::{QueryOverflow, QueryOverflowNote};
use rustc_serialize::{Decodable, Encodable};
use rustc_span::def_id::LOCAL_CRATE;
use crate::QueryConfigRestored;
use crate::QueryDispatcherUnerased;
/// Implements [`QueryContext`] for use by [`rustc_query_system`], since that
/// crate does not have direct access to [`TyCtxt`].
@ -59,7 +60,9 @@ impl<'tcx> HasDepContext for QueryCtxt<'tcx> {
}
}
impl QueryContext for QueryCtxt<'_> {
impl<'tcx> QueryContext for QueryCtxt<'tcx> {
type QueryInfo = QueryStackDeferred<'tcx>;
#[inline]
fn jobserver_proxy(&self) -> &Proxy {
&self.tcx.jobserver_proxy
@ -90,7 +93,10 @@ impl QueryContext for QueryCtxt<'_> {
/// Prefer passing `false` to `require_complete` to avoid potential deadlocks,
/// especially when called from within a deadlock handler, unless a
/// complete map is needed and no deadlock is possible at this call site.
fn collect_active_jobs(self, require_complete: bool) -> Result<QueryMap, QueryMap> {
fn collect_active_jobs(
self,
require_complete: bool,
) -> Result<QueryMap<QueryStackDeferred<'tcx>>, QueryMap<QueryStackDeferred<'tcx>>> {
let mut jobs = QueryMap::default();
let mut complete = true;
@ -103,6 +109,13 @@ impl QueryContext for QueryCtxt<'_> {
if complete { Ok(jobs) } else { Err(jobs) }
}
fn lift_query_info(
self,
info: &QueryStackDeferred<'tcx>,
) -> rustc_query_system::query::QueryStackFrameExtra {
info.extract()
}
// Interactions with on_disk_cache
fn load_side_effect(
self,
@ -166,7 +179,10 @@ impl QueryContext for QueryCtxt<'_> {
self.tcx.sess.dcx().emit_fatal(QueryOverflow {
span: info.job.span,
note: QueryOverflowNote { desc: info.query.description, depth },
note: QueryOverflowNote {
desc: self.lift_query_info(&info.query.info).description,
depth,
},
suggested_limit,
crate_name: self.tcx.crate_name(LOCAL_CRATE),
});
@ -303,16 +319,17 @@ macro_rules! should_ever_cache_on_disk {
};
}
pub(crate) fn create_query_frame<
'tcx,
K: Copy + Key + for<'a> HashStable<StableHashingContext<'a>>,
>(
tcx: TyCtxt<'tcx>,
do_describe: fn(TyCtxt<'tcx>, K) -> String,
key: K,
kind: DepKind,
name: &'static str,
) -> QueryStackFrame {
fn create_query_frame_extra<'tcx, K: Key + Copy + 'tcx>(
(tcx, key, kind, name, do_describe): (
TyCtxt<'tcx>,
K,
DepKind,
&'static str,
fn(TyCtxt<'tcx>, K) -> String,
),
) -> QueryStackFrameExtra {
let def_id = key.key_as_def_id();
// If reduced queries are requested, we may be printing a query stack due
// to a panic. Avoid using `default_span` and `def_kind` in that case.
let reduce_queries = with_reduced_queries();
@ -324,46 +341,59 @@ pub(crate) fn create_query_frame<
} else {
description
};
let span = if reduce_queries {
let span = if kind == dep_graph::dep_kinds::def_span || reduce_queries {
// The `def_span` query is used to calculate `default_span`,
// so exit to avoid infinite recursion.
None
} else {
Some(tcx.with_reduced_queries(|| key.default_span(tcx)))
Some(key.default_span(tcx))
};
let def_id = key.key_as_def_id();
let def_kind = if reduce_queries {
let def_kind = if kind == dep_graph::dep_kinds::def_kind || reduce_queries {
// Try to avoid infinite recursion.
None
} else {
def_id
.and_then(|def_id| def_id.as_local())
.map(|def_id| tcx.with_reduced_queries(|| tcx.def_kind(def_id)))
def_id.and_then(|def_id| def_id.as_local()).map(|def_id| tcx.def_kind(def_id))
};
QueryStackFrameExtra::new(description, span, def_kind)
}
pub(crate) fn create_query_frame<
'tcx,
K: Copy + DynSend + DynSync + Key + for<'a> HashStable<StableHashingContext<'a>> + 'tcx,
>(
tcx: TyCtxt<'tcx>,
do_describe: fn(TyCtxt<'tcx>, K) -> String,
key: K,
kind: DepKind,
name: &'static str,
) -> QueryStackFrame<QueryStackDeferred<'tcx>> {
let def_id = key.key_as_def_id();
let hash = || {
tcx.with_stable_hashing_context(|mut hcx| {
let mut hasher = StableHasher::new();
kind.as_usize().hash_stable(&mut hcx, &mut hasher);
key.hash_stable(&mut hcx, &mut hasher);
hasher.finish::<Hash64>()
})
};
let def_id_for_ty_in_cycle = key.def_id_for_ty_in_cycle();
let hash = tcx.with_stable_hashing_context(|mut hcx| {
let mut hasher = StableHasher::new();
kind.as_usize().hash_stable(&mut hcx, &mut hasher);
key.hash_stable(&mut hcx, &mut hasher);
hasher.finish::<Hash64>()
});
let info =
QueryStackDeferred::new((tcx, key, kind, name, do_describe), create_query_frame_extra);
QueryStackFrame::new(description, span, def_id, def_kind, kind, def_id_for_ty_in_cycle, hash)
QueryStackFrame::new(info, kind, hash, def_id, def_id_for_ty_in_cycle)
}
pub(crate) fn encode_query_results<'a, 'tcx, Q>(
query: Q::Config,
query: Q::Dispatcher,
qcx: QueryCtxt<'tcx>,
encoder: &mut CacheEncoder<'a, 'tcx>,
query_result_index: &mut EncodedDepNodeIndex,
) where
Q: super::QueryConfigRestored<'tcx>,
Q::RestoredValue: Encodable<CacheEncoder<'a, 'tcx>>,
Q: QueryDispatcherUnerased<'tcx>,
Q::UnerasedValue: Encodable<CacheEncoder<'a, 'tcx>>,
{
let _timer = qcx.tcx.prof.generic_activity_with_arg("encode_query_results_for", query.name());
@ -378,13 +408,13 @@ pub(crate) fn encode_query_results<'a, 'tcx, Q>(
// Encode the type check tables with the `SerializedDepNodeIndex`
// as tag.
encoder.encode_tagged(dep_node, &Q::restore(*value));
encoder.encode_tagged(dep_node, &Q::restore_val(*value));
}
});
}
pub(crate) fn query_key_hash_verify<'tcx>(
query: impl QueryConfig<QueryCtxt<'tcx>>,
query: impl QueryDispatcher<QueryCtxt<'tcx>>,
qcx: QueryCtxt<'tcx>,
) {
let _timer = qcx.tcx.prof.generic_activity_with_arg("query_key_hash_verify_for", query.name());
@ -412,7 +442,7 @@ pub(crate) fn query_key_hash_verify<'tcx>(
fn try_load_from_on_disk_cache<'tcx, Q>(query: Q, tcx: TyCtxt<'tcx>, dep_node: DepNode)
where
Q: QueryConfig<QueryCtxt<'tcx>>,
Q: QueryDispatcher<QueryCtxt<'tcx>>,
{
debug_assert!(tcx.dep_graph.is_green(&dep_node));
@ -458,7 +488,7 @@ where
fn force_from_dep_node<'tcx, Q>(query: Q, tcx: TyCtxt<'tcx>, dep_node: DepNode) -> bool
where
Q: QueryConfig<QueryCtxt<'tcx>>,
Q: QueryDispatcher<QueryCtxt<'tcx>>,
{
// We must avoid ever having to call `force_from_dep_node()` for a
// `DepNode::codegen_unit`:
@ -491,9 +521,10 @@ pub(crate) fn make_dep_kind_vtable_for_query<'tcx, Q>(
is_eval_always: bool,
) -> DepKindVTable<'tcx>
where
Q: QueryConfigRestored<'tcx>,
Q: QueryDispatcherUnerased<'tcx>,
{
let fingerprint_style = <Q::Config as QueryConfig<QueryCtxt<'tcx>>>::Key::fingerprint_style();
let fingerprint_style =
<Q::Dispatcher as QueryDispatcher<QueryCtxt<'tcx>>>::Key::fingerprint_style();
if is_anon || !fingerprint_style.reconstructible() {
return DepKindVTable {
@ -511,10 +542,10 @@ where
is_eval_always,
fingerprint_style,
force_from_dep_node: Some(|tcx, dep_node, _| {
force_from_dep_node(Q::config(tcx), tcx, dep_node)
force_from_dep_node(Q::query_dispatcher(tcx), tcx, dep_node)
}),
try_load_from_on_disk_cache: Some(|tcx, dep_node| {
try_load_from_on_disk_cache(Q::config(tcx), tcx, dep_node)
try_load_from_on_disk_cache(Q::query_dispatcher(tcx), tcx, dep_node)
}),
name: Q::NAME,
}
@ -583,7 +614,7 @@ macro_rules! define_queries {
#[cfg(debug_assertions)]
let _guard = tracing::span!(tracing::Level::TRACE, stringify!($name), ?key).entered();
get_query_incr(
QueryType::config(tcx),
QueryType::query_dispatcher(tcx),
QueryCtxt::new(tcx),
span,
key,
@ -603,7 +634,7 @@ macro_rules! define_queries {
__mode: QueryMode,
) -> Option<Erase<queries::$name::Value<'tcx>>> {
Some(get_query_non_incr(
QueryType::config(tcx),
QueryType::query_dispatcher(tcx),
QueryCtxt::new(tcx),
span,
key,
@ -611,10 +642,10 @@ macro_rules! define_queries {
}
}
pub(crate) fn dynamic_query<'tcx>()
-> DynamicQuery<'tcx, queries::$name::Storage<'tcx>>
pub(crate) fn make_query_vtable<'tcx>()
-> QueryVTable<'tcx, queries::$name::Storage<'tcx>>
{
DynamicQuery {
QueryVTable {
name: stringify!($name),
eval_always: is_eval_always!([$($modifiers)*]),
dep_kind: dep_graph::dep_kinds::$name,
@ -680,9 +711,9 @@ macro_rules! define_queries {
data: PhantomData<&'tcx ()>
}
impl<'tcx> QueryConfigRestored<'tcx> for QueryType<'tcx> {
type RestoredValue = queries::$name::Value<'tcx>;
type Config = DynamicConfig<
impl<'tcx> QueryDispatcherUnerased<'tcx> for QueryType<'tcx> {
type UnerasedValue = queries::$name::Value<'tcx>;
type Dispatcher = SemiDynamicQueryDispatcher<
'tcx,
queries::$name::Storage<'tcx>,
{ is_anon!([$($modifiers)*]) },
@ -693,21 +724,21 @@ macro_rules! define_queries {
const NAME: &'static &'static str = &stringify!($name);
#[inline(always)]
fn config(tcx: TyCtxt<'tcx>) -> Self::Config {
DynamicConfig {
dynamic: &tcx.query_system.dynamic_queries.$name,
fn query_dispatcher(tcx: TyCtxt<'tcx>) -> Self::Dispatcher {
SemiDynamicQueryDispatcher {
vtable: &tcx.query_system.query_vtables.$name,
}
}
#[inline(always)]
fn restore(value: <Self::Config as QueryConfig<QueryCtxt<'tcx>>>::Value) -> Self::RestoredValue {
fn restore_val(value: <Self::Dispatcher as QueryDispatcher<QueryCtxt<'tcx>>>::Value) -> Self::UnerasedValue {
restore::<queries::$name::Value<'tcx>>(value)
}
}
pub(crate) fn collect_active_jobs<'tcx>(
tcx: TyCtxt<'tcx>,
qmap: &mut QueryMap,
qmap: &mut QueryMap<QueryStackDeferred<'tcx>>,
require_complete: bool,
) -> Option<()> {
let make_query = |tcx, key| {
@ -752,7 +783,7 @@ macro_rules! define_queries {
query_result_index: &mut EncodedDepNodeIndex
) {
$crate::plumbing::encode_query_results::<query_impl::$name::QueryType<'tcx>>(
query_impl::$name::QueryType::config(tcx),
query_impl::$name::QueryType::query_dispatcher(tcx),
QueryCtxt::new(tcx),
encoder,
query_result_index,
@ -762,7 +793,7 @@ macro_rules! define_queries {
pub(crate) fn query_key_hash_verify<'tcx>(tcx: TyCtxt<'tcx>) {
$crate::plumbing::query_key_hash_verify(
query_impl::$name::QueryType::config(tcx),
query_impl::$name::QueryType::query_dispatcher(tcx),
QueryCtxt::new(tcx),
)
}
@ -780,10 +811,10 @@ macro_rules! define_queries {
}
}
pub fn dynamic_queries<'tcx>() -> DynamicQueries<'tcx> {
DynamicQueries {
pub fn make_query_vtables<'tcx>() -> ::rustc_middle::query::PerQueryVTables<'tcx> {
::rustc_middle::query::PerQueryVTables {
$(
$name: query_impl::$name::dynamic_query(),
$name: query_impl::$name::make_query_vtable(),
)*
}
}
@ -791,7 +822,7 @@ macro_rules! define_queries {
// These arrays are used for iteration and can't be indexed by `DepKind`.
const COLLECT_ACTIVE_JOBS: &[
for<'tcx> fn(TyCtxt<'tcx>, &mut QueryMap, bool) -> Option<()>
for<'tcx> fn(TyCtxt<'tcx>, &mut QueryMap<QueryStackDeferred<'tcx>>, bool) -> Option<()>
] =
&[$(query_impl::$name::collect_active_jobs),*];

View file

@ -1,11 +1,10 @@
//! Query configuration and description traits.
use std::fmt::Debug;
use std::hash::Hash;
use rustc_data_structures::fingerprint::Fingerprint;
use rustc_span::ErrorGuaranteed;
use super::QueryStackFrameExtra;
use crate::dep_graph::{DepKind, DepNode, DepNodeParams, SerializedDepNodeIndex};
use crate::ich::StableHashingContext;
use crate::query::caches::QueryCache;
@ -13,7 +12,15 @@ use crate::query::{CycleError, CycleErrorHandling, DepNodeIndex, QueryContext, Q
pub type HashResult<V> = Option<fn(&mut StableHashingContext<'_>, &V) -> Fingerprint>;
pub trait QueryConfig<Qcx: QueryContext>: Copy {
/// Trait that can be used as a vtable for a single query, providing operations
/// and metadata for that query.
///
/// Implemented by `rustc_query_impl::SemiDynamicQueryDispatcher`, which
/// mostly delegates to `rustc_middle::query::plumbing::QueryVTable`.
/// Those types are not visible from this `rustc_query_system` crate.
///
/// "Dispatcher" should be understood as a near-synonym of "vtable".
pub trait QueryDispatcher<Qcx: QueryContext>: Copy {
fn name(self) -> &'static str;
// `Key` and `Value` are `Copy` instead of `Clone` to ensure copying them stays cheap,
@ -26,7 +33,7 @@ pub trait QueryConfig<Qcx: QueryContext>: Copy {
fn format_value(self) -> fn(&Self::Value) -> String;
// Don't use this method to access query results, instead use the methods on TyCtxt
fn query_state<'a>(self, tcx: Qcx) -> &'a QueryState<Self::Key>
fn query_state<'a>(self, tcx: Qcx) -> &'a QueryState<Self::Key, Qcx::QueryInfo>
where
Qcx: 'a;
@ -56,7 +63,7 @@ pub trait QueryConfig<Qcx: QueryContext>: Copy {
fn value_from_cycle_error(
self,
tcx: Qcx::DepContext,
cycle_error: &CycleError,
cycle_error: &CycleError<QueryStackFrameExtra>,
guar: ErrorGuaranteed,
) -> Self::Value;

View file

@ -1,3 +1,4 @@
use std::fmt::Debug;
use std::hash::Hash;
use std::io::Write;
use std::iter;
@ -11,6 +12,7 @@ use rustc_hir::def::DefKind;
use rustc_session::Session;
use rustc_span::{DUMMY_SP, Span};
use super::QueryStackFrameExtra;
use crate::dep_graph::DepContext;
use crate::error::CycleStack;
use crate::query::plumbing::CycleError;
@ -18,45 +20,54 @@ use crate::query::{QueryContext, QueryStackFrame};
/// Represents a span and a query key.
#[derive(Clone, Debug)]
pub struct QueryInfo {
pub struct QueryInfo<I> {
/// The span corresponding to the reason for which this query was required.
pub span: Span,
pub query: QueryStackFrame,
pub query: QueryStackFrame<I>,
}
pub type QueryMap = FxHashMap<QueryJobId, QueryJobInfo>;
impl<I> QueryInfo<I> {
pub(crate) fn lift<Qcx: QueryContext<QueryInfo = I>>(
&self,
qcx: Qcx,
) -> QueryInfo<QueryStackFrameExtra> {
QueryInfo { span: self.span, query: self.query.lift(qcx) }
}
}
pub type QueryMap<I> = FxHashMap<QueryJobId, QueryJobInfo<I>>;
/// A value uniquely identifying an active query job.
#[derive(Copy, Clone, Eq, PartialEq, Hash, Debug)]
pub struct QueryJobId(pub NonZero<u64>);
impl QueryJobId {
fn query(self, map: &QueryMap) -> QueryStackFrame {
fn query<I: Clone>(self, map: &QueryMap<I>) -> QueryStackFrame<I> {
map.get(&self).unwrap().query.clone()
}
fn span(self, map: &QueryMap) -> Span {
fn span<I>(self, map: &QueryMap<I>) -> Span {
map.get(&self).unwrap().job.span
}
fn parent(self, map: &QueryMap) -> Option<QueryJobId> {
fn parent<I>(self, map: &QueryMap<I>) -> Option<QueryJobId> {
map.get(&self).unwrap().job.parent
}
fn latch(self, map: &QueryMap) -> Option<&QueryLatch> {
fn latch<I>(self, map: &QueryMap<I>) -> Option<&QueryLatch<I>> {
map.get(&self).unwrap().job.latch.as_ref()
}
}
#[derive(Clone, Debug)]
pub struct QueryJobInfo {
pub query: QueryStackFrame,
pub job: QueryJob,
pub struct QueryJobInfo<I> {
pub query: QueryStackFrame<I>,
pub job: QueryJob<I>,
}
/// Represents an active query job.
#[derive(Debug)]
pub struct QueryJob {
pub struct QueryJob<I> {
pub id: QueryJobId,
/// The span corresponding to the reason for which this query was required.
@ -66,23 +77,23 @@ pub struct QueryJob {
pub parent: Option<QueryJobId>,
/// The latch that is used to wait on this job.
latch: Option<QueryLatch>,
latch: Option<QueryLatch<I>>,
}
impl Clone for QueryJob {
impl<I> Clone for QueryJob<I> {
fn clone(&self) -> Self {
Self { id: self.id, span: self.span, parent: self.parent, latch: self.latch.clone() }
}
}
impl QueryJob {
impl<I> QueryJob<I> {
/// Creates a new query job.
#[inline]
pub fn new(id: QueryJobId, span: Span, parent: Option<QueryJobId>) -> Self {
QueryJob { id, span, parent, latch: None }
}
pub(super) fn latch(&mut self) -> QueryLatch {
pub(super) fn latch(&mut self) -> QueryLatch<I> {
if self.latch.is_none() {
self.latch = Some(QueryLatch::new());
}
@ -102,12 +113,12 @@ impl QueryJob {
}
impl QueryJobId {
pub(super) fn find_cycle_in_stack(
pub(super) fn find_cycle_in_stack<I: Clone>(
&self,
query_map: QueryMap,
query_map: QueryMap<I>,
current_job: &Option<QueryJobId>,
span: Span,
) -> CycleError {
) -> CycleError<I> {
// Find the waitee amongst `current_job` parents
let mut cycle = Vec::new();
let mut current_job = Option::clone(current_job);
@ -141,7 +152,7 @@ impl QueryJobId {
#[cold]
#[inline(never)]
pub fn find_dep_kind_root(&self, query_map: QueryMap) -> (QueryJobInfo, usize) {
pub fn find_dep_kind_root<I: Clone>(&self, query_map: QueryMap<I>) -> (QueryJobInfo<I>, usize) {
let mut depth = 1;
let info = query_map.get(&self).unwrap();
let dep_kind = info.query.dep_kind;
@ -161,31 +172,31 @@ impl QueryJobId {
}
#[derive(Debug)]
struct QueryWaiter {
struct QueryWaiter<I> {
query: Option<QueryJobId>,
condvar: Condvar,
span: Span,
cycle: Mutex<Option<CycleError>>,
cycle: Mutex<Option<CycleError<I>>>,
}
#[derive(Debug)]
struct QueryLatchInfo {
struct QueryLatchInfo<I> {
complete: bool,
waiters: Vec<Arc<QueryWaiter>>,
waiters: Vec<Arc<QueryWaiter<I>>>,
}
#[derive(Debug)]
pub(super) struct QueryLatch {
info: Arc<Mutex<QueryLatchInfo>>,
pub(super) struct QueryLatch<I> {
info: Arc<Mutex<QueryLatchInfo<I>>>,
}
impl Clone for QueryLatch {
impl<I> Clone for QueryLatch<I> {
fn clone(&self) -> Self {
Self { info: Arc::clone(&self.info) }
}
}
impl QueryLatch {
impl<I> QueryLatch<I> {
fn new() -> Self {
QueryLatch {
info: Arc::new(Mutex::new(QueryLatchInfo { complete: false, waiters: Vec::new() })),
@ -198,7 +209,7 @@ impl QueryLatch {
qcx: impl QueryContext,
query: Option<QueryJobId>,
span: Span,
) -> Result<(), CycleError> {
) -> Result<(), CycleError<I>> {
let waiter =
Arc::new(QueryWaiter { query, span, cycle: Mutex::new(None), condvar: Condvar::new() });
self.wait_on_inner(qcx, &waiter);
@ -213,7 +224,7 @@ impl QueryLatch {
}
/// Awaits the caller on this latch by blocking the current thread.
fn wait_on_inner(&self, qcx: impl QueryContext, waiter: &Arc<QueryWaiter>) {
fn wait_on_inner(&self, qcx: impl QueryContext, waiter: &Arc<QueryWaiter<I>>) {
let mut info = self.info.lock();
if !info.complete {
// We push the waiter on to the `waiters` list. It can be accessed inside
@ -249,7 +260,7 @@ impl QueryLatch {
/// Removes a single waiter from the list of waiters.
/// This is used to break query cycles.
fn extract_waiter(&self, waiter: usize) -> Arc<QueryWaiter> {
fn extract_waiter(&self, waiter: usize) -> Arc<QueryWaiter<I>> {
let mut info = self.info.lock();
debug_assert!(!info.complete);
// Remove the waiter from the list of waiters
@ -269,7 +280,11 @@ type Waiter = (QueryJobId, usize);
/// For visits of resumable waiters it returns Some(Some(Waiter)) which has the
/// required information to resume the waiter.
/// If all `visit` calls returns None, this function also returns None.
fn visit_waiters<F>(query_map: &QueryMap, query: QueryJobId, mut visit: F) -> Option<Option<Waiter>>
fn visit_waiters<I, F>(
query_map: &QueryMap<I>,
query: QueryJobId,
mut visit: F,
) -> Option<Option<Waiter>>
where
F: FnMut(Span, QueryJobId) -> Option<Option<Waiter>>,
{
@ -299,8 +314,8 @@ where
/// `span` is the reason for the `query` to execute. This is initially DUMMY_SP.
/// If a cycle is detected, this initial value is replaced with the span causing
/// the cycle.
fn cycle_check(
query_map: &QueryMap,
fn cycle_check<I>(
query_map: &QueryMap<I>,
query: QueryJobId,
span: Span,
stack: &mut Vec<(Span, QueryJobId)>,
@ -339,8 +354,8 @@ fn cycle_check(
/// Finds out if there's a path to the compiler root (aka. code which isn't in a query)
/// from `query` without going through any of the queries in `visited`.
/// This is achieved with a depth first search.
fn connected_to_root(
query_map: &QueryMap,
fn connected_to_root<I>(
query_map: &QueryMap<I>,
query: QueryJobId,
visited: &mut FxHashSet<QueryJobId>,
) -> bool {
@ -361,7 +376,7 @@ fn connected_to_root(
}
// Deterministically pick an query from a list
fn pick_query<'a, T, F>(query_map: &QueryMap, queries: &'a [T], f: F) -> &'a T
fn pick_query<'a, I: Clone, T, F>(query_map: &QueryMap<I>, queries: &'a [T], f: F) -> &'a T
where
F: Fn(&T) -> (Span, QueryJobId),
{
@ -386,10 +401,10 @@ where
/// the function return true.
/// If a cycle was not found, the starting query is removed from `jobs` and
/// the function returns false.
fn remove_cycle(
query_map: &QueryMap,
fn remove_cycle<I: Clone>(
query_map: &QueryMap<I>,
jobs: &mut Vec<QueryJobId>,
wakelist: &mut Vec<Arc<QueryWaiter>>,
wakelist: &mut Vec<Arc<QueryWaiter<I>>>,
) -> bool {
let mut visited = FxHashSet::default();
let mut stack = Vec::new();
@ -490,7 +505,10 @@ fn remove_cycle(
/// uses a query latch and then resuming that waiter.
/// There may be multiple cycles involved in a deadlock, so this searches
/// all active queries for cycles before finally resuming all the waiters at once.
pub fn break_query_cycles(query_map: QueryMap, registry: &rustc_thread_pool::Registry) {
pub fn break_query_cycles<I: Clone + Debug>(
query_map: QueryMap<I>,
registry: &rustc_thread_pool::Registry,
) {
let mut wakelist = Vec::new();
// It is OK per the comments:
// - https://github.com/rust-lang/rust/pull/131200#issuecomment-2798854932
@ -541,7 +559,7 @@ pub fn report_cycle<'a>(
) -> Diag<'a> {
assert!(!stack.is_empty());
let span = stack[0].query.default_span(stack[1 % stack.len()].span);
let span = stack[0].query.info.default_span(stack[1 % stack.len()].span);
let mut cycle_stack = Vec::new();
@ -550,31 +568,31 @@ pub fn report_cycle<'a>(
for i in 1..stack.len() {
let query = &stack[i].query;
let span = query.default_span(stack[(i + 1) % stack.len()].span);
cycle_stack.push(CycleStack { span, desc: query.description.to_owned() });
let span = query.info.default_span(stack[(i + 1) % stack.len()].span);
cycle_stack.push(CycleStack { span, desc: query.info.description.to_owned() });
}
let mut cycle_usage = None;
if let Some((span, ref query)) = *usage {
cycle_usage = Some(crate::error::CycleUsage {
span: query.default_span(span),
usage: query.description.to_string(),
span: query.info.default_span(span),
usage: query.info.description.to_string(),
});
}
let alias = if stack.iter().all(|entry| matches!(entry.query.def_kind, Some(DefKind::TyAlias)))
{
Some(crate::error::Alias::Ty)
} else if stack.iter().all(|entry| entry.query.def_kind == Some(DefKind::TraitAlias)) {
Some(crate::error::Alias::Trait)
} else {
None
};
let alias =
if stack.iter().all(|entry| matches!(entry.query.info.def_kind, Some(DefKind::TyAlias))) {
Some(crate::error::Alias::Ty)
} else if stack.iter().all(|entry| entry.query.info.def_kind == Some(DefKind::TraitAlias)) {
Some(crate::error::Alias::Trait)
} else {
None
};
let cycle_diag = crate::error::Cycle {
span,
cycle_stack,
stack_bottom: stack[0].query.description.to_owned(),
stack_bottom: stack[0].query.info.description.to_owned(),
alias,
cycle_usage,
stack_count,
@ -610,11 +628,12 @@ pub fn print_query_stack<Qcx: QueryContext>(
let Some(query_info) = query_map.get(&query) else {
break;
};
let query_extra = qcx.lift_query_info(&query_info.query.info);
if Some(count_printed) < limit_frames || limit_frames.is_none() {
// Only print to stderr as many stack frames as `num_frames` when present.
dcx.struct_failure_note(format!(
"#{} [{:?}] {}",
count_printed, query_info.query.dep_kind, query_info.query.description
count_printed, query_info.query.dep_kind, query_extra.description
))
.with_span(query_info.job.span)
.emit();
@ -627,7 +646,7 @@ pub fn print_query_stack<Qcx: QueryContext>(
"#{} [{}] {}",
count_total,
qcx.dep_context().dep_kind_vtable(query_info.query.dep_kind).name,
query_info.query.description
query_extra.description
);
}

View file

@ -1,4 +1,10 @@
use std::fmt::Debug;
use std::marker::PhantomData;
use std::mem::transmute;
use std::sync::Arc;
use rustc_data_structures::jobserver::Proxy;
use rustc_data_structures::sync::{DynSend, DynSync};
use rustc_errors::DiagInner;
use rustc_hashes::Hash64;
use rustc_hir::def::DefKind;
@ -7,7 +13,7 @@ use rustc_span::Span;
use rustc_span::def_id::DefId;
pub use self::caches::{DefIdCache, DefaultCache, QueryCache, SingleCache, VecCache};
pub use self::config::{HashResult, QueryConfig};
pub use self::dispatcher::{HashResult, QueryDispatcher};
pub use self::job::{
QueryInfo, QueryJob, QueryJobId, QueryJobInfo, QueryMap, break_query_cycles, print_query_stack,
report_cycle,
@ -16,7 +22,7 @@ pub use self::plumbing::*;
use crate::dep_graph::{DepKind, DepNodeIndex, HasDepContext, SerializedDepNodeIndex};
mod caches;
mod config;
mod dispatcher;
mod job;
mod plumbing;
@ -36,31 +42,59 @@ pub enum CycleErrorHandling {
///
/// This is mostly used in case of cycles for error reporting.
#[derive(Clone, Debug)]
pub struct QueryStackFrame {
pub description: String,
span: Option<Span>,
pub def_id: Option<DefId>,
pub def_kind: Option<DefKind>,
/// A def-id that is extracted from a `Ty` in a query key
pub def_id_for_ty_in_cycle: Option<DefId>,
pub struct QueryStackFrame<I> {
/// This field initially stores a `QueryStackDeferred` during collection,
/// but can later be changed to `QueryStackFrameExtra` containing concrete information
/// by calling `lift`. This is done so that collecting query does not need to invoke
/// queries, instead `lift` will call queries in a more appropriate location.
pub info: I,
pub dep_kind: DepKind,
/// This hash is used to deterministically pick
/// a query to remove cycles in the parallel compiler.
hash: Hash64,
pub def_id: Option<DefId>,
/// A def-id that is extracted from a `Ty` in a query key
pub def_id_for_ty_in_cycle: Option<DefId>,
}
impl QueryStackFrame {
impl<I> QueryStackFrame<I> {
#[inline]
pub fn new(
description: String,
span: Option<Span>,
def_id: Option<DefId>,
def_kind: Option<DefKind>,
info: I,
dep_kind: DepKind,
hash: impl FnOnce() -> Hash64,
def_id: Option<DefId>,
def_id_for_ty_in_cycle: Option<DefId>,
hash: Hash64,
) -> Self {
Self { description, span, def_id, def_kind, def_id_for_ty_in_cycle, dep_kind, hash }
Self { info, def_id, dep_kind, hash: hash(), def_id_for_ty_in_cycle }
}
fn lift<Qcx: QueryContext<QueryInfo = I>>(
&self,
qcx: Qcx,
) -> QueryStackFrame<QueryStackFrameExtra> {
QueryStackFrame {
info: qcx.lift_query_info(&self.info),
dep_kind: self.dep_kind,
hash: self.hash,
def_id: self.def_id,
def_id_for_ty_in_cycle: self.def_id_for_ty_in_cycle,
}
}
}
#[derive(Clone, Debug)]
pub struct QueryStackFrameExtra {
pub description: String,
span: Option<Span>,
pub def_kind: Option<DefKind>,
}
impl QueryStackFrameExtra {
#[inline]
pub fn new(description: String, span: Option<Span>, def_kind: Option<DefKind>) -> Self {
Self { description, span, def_kind }
}
// FIXME(eddyb) Get more valid `Span`s on queries.
@ -73,6 +107,40 @@ impl QueryStackFrame {
}
}
/// Track a 'side effect' for a particular query.
/// This is used to hold a closure which can create `QueryStackFrameExtra`.
#[derive(Clone)]
pub struct QueryStackDeferred<'tcx> {
_dummy: PhantomData<&'tcx ()>,
// `extract` may contain references to 'tcx, but we can't tell drop checking that it won't
// access it in the destructor.
extract: Arc<dyn Fn() -> QueryStackFrameExtra + DynSync + DynSend>,
}
impl<'tcx> QueryStackDeferred<'tcx> {
pub fn new<C: Copy + DynSync + DynSend + 'tcx>(
context: C,
extract: fn(C) -> QueryStackFrameExtra,
) -> Self {
let extract: Arc<dyn Fn() -> QueryStackFrameExtra + DynSync + DynSend + 'tcx> =
Arc::new(move || extract(context));
// SAFETY: The `extract` closure does not access 'tcx in its destructor as the only
// captured variable is `context` which is Copy and cannot have a destructor.
Self { _dummy: PhantomData, extract: unsafe { transmute(extract) } }
}
pub fn extract(&self) -> QueryStackFrameExtra {
(self.extract)()
}
}
impl<'tcx> Debug for QueryStackDeferred<'tcx> {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
f.write_str("QueryStackDeferred")
}
}
/// Tracks 'side effects' for a particular query.
/// This struct is saved to disk along with the query result,
/// and loaded from disk if we mark the query as green.
@ -92,6 +160,8 @@ pub enum QuerySideEffect {
}
pub trait QueryContext: HasDepContext {
type QueryInfo: Clone;
/// Gets a jobserver reference which is used to release then acquire
/// a token while waiting on a query.
fn jobserver_proxy(&self) -> &Proxy;
@ -101,7 +171,12 @@ pub trait QueryContext: HasDepContext {
/// Get the query information from the TLS context.
fn current_query_job(self) -> Option<QueryJobId>;
fn collect_active_jobs(self, require_complete: bool) -> Result<QueryMap, QueryMap>;
fn collect_active_jobs(
self,
require_complete: bool,
) -> Result<QueryMap<Self::QueryInfo>, QueryMap<Self::QueryInfo>>;
fn lift_query_info(self, info: &Self::QueryInfo) -> QueryStackFrameExtra;
/// Load a side effect associated to the node in the previous session.
fn load_side_effect(

View file

@ -18,7 +18,7 @@ use rustc_errors::{Diag, FatalError, StashKey};
use rustc_span::{DUMMY_SP, Span};
use tracing::instrument;
use super::QueryConfig;
use super::{QueryDispatcher, QueryStackFrameExtra};
use crate::dep_graph::{DepContext, DepGraphData, DepNode, DepNodeIndex, DepNodeParams};
use crate::ich::StableHashingContext;
use crate::query::caches::QueryCache;
@ -32,23 +32,23 @@ fn equivalent_key<K: Eq, V>(k: &K) -> impl Fn(&(K, V)) -> bool + '_ {
move |x| x.0 == *k
}
pub struct QueryState<K> {
active: Sharded<hashbrown::HashTable<(K, QueryResult)>>,
pub struct QueryState<K, I> {
active: Sharded<hashbrown::HashTable<(K, QueryResult<I>)>>,
}
/// Indicates the state of a query for a given key in a query map.
enum QueryResult {
enum QueryResult<I> {
/// An already executing query. The query job can be used to await for its completion.
Started(QueryJob),
Started(QueryJob<I>),
/// The query panicked. Queries trying to wait on this will raise a fatal error which will
/// silently panic.
Poisoned,
}
impl QueryResult {
impl<I> QueryResult<I> {
/// Unwraps the query job expecting that it has started.
fn expect_job(self) -> QueryJob {
fn expect_job(self) -> QueryJob<I> {
match self {
Self::Started(job) => job,
Self::Poisoned => {
@ -58,7 +58,7 @@ impl QueryResult {
}
}
impl<K> QueryState<K>
impl<K, I> QueryState<K, I>
where
K: Eq + Hash + Copy + Debug,
{
@ -69,13 +69,13 @@ where
pub fn collect_active_jobs<Qcx: Copy>(
&self,
qcx: Qcx,
make_query: fn(Qcx, K) -> QueryStackFrame,
jobs: &mut QueryMap,
make_query: fn(Qcx, K) -> QueryStackFrame<I>,
jobs: &mut QueryMap<I>,
require_complete: bool,
) -> Option<()> {
let mut active = Vec::new();
let mut collect = |iter: LockGuard<'_, HashTable<(K, QueryResult)>>| {
let mut collect = |iter: LockGuard<'_, HashTable<(K, QueryResult<I>)>>| {
for (k, v) in iter.iter() {
if let QueryResult::Started(ref job) = *v {
active.push((*k, job.clone()));
@ -106,19 +106,19 @@ where
}
}
impl<K> Default for QueryState<K> {
fn default() -> QueryState<K> {
impl<K, I> Default for QueryState<K, I> {
fn default() -> QueryState<K, I> {
QueryState { active: Default::default() }
}
}
/// A type representing the responsibility to execute the job in the `job` field.
/// This will poison the relevant query if dropped.
struct JobOwner<'tcx, K>
struct JobOwner<'tcx, K, I>
where
K: Eq + Hash + Copy,
{
state: &'tcx QueryState<K>,
state: &'tcx QueryState<K, I>,
key: K,
}
@ -126,7 +126,7 @@ where
#[inline(never)]
fn mk_cycle<Q, Qcx>(query: Q, qcx: Qcx, cycle_error: CycleError) -> Q::Value
where
Q: QueryConfig<Qcx>,
Q: QueryDispatcher<Qcx>,
Qcx: QueryContext,
{
let error = report_cycle(qcx.dep_context().sess(), &cycle_error);
@ -140,7 +140,7 @@ fn handle_cycle_error<Q, Qcx>(
error: Diag<'_>,
) -> Q::Value
where
Q: QueryConfig<Qcx>,
Q: QueryDispatcher<Qcx>,
Qcx: QueryContext,
{
match query.cycle_error_handling() {
@ -159,7 +159,7 @@ where
}
CycleErrorHandling::Stash => {
let guar = if let Some(root) = cycle_error.cycle.first()
&& let Some(span) = root.query.span
&& let Some(span) = root.query.info.span
{
error.stash(span, StashKey::Cycle).unwrap()
} else {
@ -170,7 +170,7 @@ where
}
}
impl<'tcx, K> JobOwner<'tcx, K>
impl<'tcx, K, I> JobOwner<'tcx, K, I>
where
K: Eq + Hash + Copy,
{
@ -207,7 +207,7 @@ where
}
}
impl<'tcx, K> Drop for JobOwner<'tcx, K>
impl<'tcx, K, I> Drop for JobOwner<'tcx, K, I>
where
K: Eq + Hash + Copy,
{
@ -235,10 +235,19 @@ where
}
#[derive(Clone, Debug)]
pub struct CycleError {
pub struct CycleError<I = QueryStackFrameExtra> {
/// The query and related span that uses the cycle.
pub usage: Option<(Span, QueryStackFrame)>,
pub cycle: Vec<QueryInfo>,
pub usage: Option<(Span, QueryStackFrame<I>)>,
pub cycle: Vec<QueryInfo<I>>,
}
impl<I> CycleError<I> {
fn lift<Qcx: QueryContext<QueryInfo = I>>(&self, qcx: Qcx) -> CycleError<QueryStackFrameExtra> {
CycleError {
usage: self.usage.as_ref().map(|(span, frame)| (*span, frame.lift(qcx))),
cycle: self.cycle.iter().map(|info| info.lift(qcx)).collect(),
}
}
}
/// Checks whether there is already a value for this key in the in-memory
@ -270,15 +279,15 @@ fn cycle_error<Q, Qcx>(
span: Span,
) -> (Q::Value, Option<DepNodeIndex>)
where
Q: QueryConfig<Qcx>,
Q: QueryDispatcher<Qcx>,
Qcx: QueryContext,
{
// Ensure there was no errors collecting all active jobs.
// We need the complete map to ensure we find a cycle to break.
let query_map = qcx.collect_active_jobs(false).expect("failed to collect active queries");
let query_map = qcx.collect_active_jobs(false).ok().expect("failed to collect active queries");
let error = try_execute.find_cycle_in_stack(query_map, &qcx.current_query_job(), span);
(mk_cycle(query, qcx, error), None)
(mk_cycle(query, qcx, error.lift(qcx)), None)
}
#[inline(always)]
@ -287,11 +296,11 @@ fn wait_for_query<Q, Qcx>(
qcx: Qcx,
span: Span,
key: Q::Key,
latch: QueryLatch,
latch: QueryLatch<Qcx::QueryInfo>,
current: Option<QueryJobId>,
) -> (Q::Value, Option<DepNodeIndex>)
where
Q: QueryConfig<Qcx>,
Q: QueryDispatcher<Qcx>,
Qcx: QueryContext,
{
// For parallel queries, we'll block and wait until the query running
@ -327,7 +336,7 @@ where
(v, Some(index))
}
Err(cycle) => (mk_cycle(query, qcx, cycle), None),
Err(cycle) => (mk_cycle(query, qcx, cycle.lift(qcx)), None),
}
}
@ -340,7 +349,7 @@ fn try_execute_query<Q, Qcx, const INCR: bool>(
dep_node: Option<DepNode>,
) -> (Q::Value, Option<DepNodeIndex>)
where
Q: QueryConfig<Qcx>,
Q: QueryDispatcher<Qcx>,
Qcx: QueryContext,
{
let state = query.query_state(qcx);
@ -405,14 +414,14 @@ where
fn execute_job<Q, Qcx, const INCR: bool>(
query: Q,
qcx: Qcx,
state: &QueryState<Q::Key>,
state: &QueryState<Q::Key, Qcx::QueryInfo>,
key: Q::Key,
key_hash: u64,
id: QueryJobId,
dep_node: Option<DepNode>,
) -> (Q::Value, Option<DepNodeIndex>)
where
Q: QueryConfig<Qcx>,
Q: QueryDispatcher<Qcx>,
Qcx: QueryContext,
{
// Use `JobOwner` so the query will be poisoned if executing it panics.
@ -482,7 +491,7 @@ fn execute_job_non_incr<Q, Qcx>(
job_id: QueryJobId,
) -> (Q::Value, DepNodeIndex)
where
Q: QueryConfig<Qcx>,
Q: QueryDispatcher<Qcx>,
Qcx: QueryContext,
{
debug_assert!(!qcx.dep_context().dep_graph().is_fully_enabled());
@ -521,7 +530,7 @@ fn execute_job_incr<Q, Qcx>(
job_id: QueryJobId,
) -> (Q::Value, DepNodeIndex)
where
Q: QueryConfig<Qcx>,
Q: QueryDispatcher<Qcx>,
Qcx: QueryContext,
{
if !query.anon() && !query.eval_always() {
@ -576,7 +585,7 @@ fn try_load_from_disk_and_cache_in_memory<Q, Qcx>(
dep_node: &DepNode,
) -> Option<(Q::Value, DepNodeIndex)>
where
Q: QueryConfig<Qcx>,
Q: QueryDispatcher<Qcx>,
Qcx: QueryContext,
{
// Note this function can be called concurrently from the same query
@ -762,7 +771,7 @@ fn ensure_must_run<Q, Qcx>(
check_cache: bool,
) -> (bool, Option<DepNode>)
where
Q: QueryConfig<Qcx>,
Q: QueryDispatcher<Qcx>,
Qcx: QueryContext,
{
if query.eval_always() {
@ -810,7 +819,7 @@ pub enum QueryMode {
#[inline(always)]
pub fn get_query_non_incr<Q, Qcx>(query: Q, qcx: Qcx, span: Span, key: Q::Key) -> Q::Value
where
Q: QueryConfig<Qcx>,
Q: QueryDispatcher<Qcx>,
Qcx: QueryContext,
{
debug_assert!(!qcx.dep_context().dep_graph().is_fully_enabled());
@ -827,7 +836,7 @@ pub fn get_query_incr<Q, Qcx>(
mode: QueryMode,
) -> Option<Q::Value>
where
Q: QueryConfig<Qcx>,
Q: QueryDispatcher<Qcx>,
Qcx: QueryContext,
{
debug_assert!(qcx.dep_context().dep_graph().is_fully_enabled());
@ -853,7 +862,7 @@ where
pub fn force_query<Q, Qcx>(query: Q, qcx: Qcx, key: Q::Key, dep_node: DepNode)
where
Q: QueryConfig<Qcx>,
Q: QueryDispatcher<Qcx>,
Qcx: QueryContext,
{
// We may be concurrently trying both execute and force a query.

View file

@ -26,7 +26,7 @@ use rustc_middle::metadata::{ModChild, Reexport};
use rustc_middle::ty::{Feed, Visibility};
use rustc_middle::{bug, span_bug};
use rustc_span::hygiene::{ExpnId, LocalExpnId, MacroKind};
use rustc_span::{Ident, Macros20NormalizedIdent, Span, Symbol, kw, sym};
use rustc_span::{Ident, Span, Symbol, kw, sym};
use thin_vec::ThinVec;
use tracing::debug;
@ -36,9 +36,9 @@ use crate::imports::{ImportData, ImportKind};
use crate::macros::{MacroRulesDecl, MacroRulesScope, MacroRulesScopeRef};
use crate::ref_mut::CmCell;
use crate::{
BindingKey, Decl, DeclData, DeclKind, ExternPreludeEntry, Finalize, MacroData, Module,
ModuleKind, ModuleOrUniformRoot, ParentScope, PathResult, ResolutionError, Resolver, Segment,
Used, VisResolutionError, errors,
BindingKey, Decl, DeclData, DeclKind, ExternPreludeEntry, Finalize, IdentKey, MacroData,
Module, ModuleKind, ModuleOrUniformRoot, ParentScope, PathResult, ResolutionError, Resolver,
Segment, Used, VisResolutionError, errors,
};
type Res = def::Res<NodeId>;
@ -48,12 +48,15 @@ impl<'ra, 'tcx> Resolver<'ra, 'tcx> {
/// and report an error in case of a collision.
pub(crate) fn plant_decl_into_local_module(
&mut self,
ident: Macros20NormalizedIdent,
ident: IdentKey,
orig_ident_span: Span,
ns: Namespace,
decl: Decl<'ra>,
) {
if let Err(old_decl) = self.try_plant_decl_into_local_module(ident, ns, decl, false) {
self.report_conflict(ident.0, ns, old_decl, decl);
if let Err(old_decl) =
self.try_plant_decl_into_local_module(ident, orig_ident_span, ns, decl, false)
{
self.report_conflict(ident, ns, old_decl, decl);
}
}
@ -61,7 +64,7 @@ impl<'ra, 'tcx> Resolver<'ra, 'tcx> {
fn define_local(
&mut self,
parent: Module<'ra>,
ident: Ident,
orig_ident: Ident,
ns: Namespace,
res: Res,
vis: Visibility,
@ -69,15 +72,16 @@ impl<'ra, 'tcx> Resolver<'ra, 'tcx> {
expn_id: LocalExpnId,
) {
let decl = self.arenas.new_def_decl(res, vis.to_def_id(), span, expn_id, Some(parent));
let ident = Macros20NormalizedIdent::new(ident);
self.plant_decl_into_local_module(ident, ns, decl);
let ident = IdentKey::new(orig_ident);
self.plant_decl_into_local_module(ident, orig_ident.span, ns, decl);
}
/// Create a name definitinon from the given components, and put it into the extern module.
fn define_extern(
&self,
parent: Module<'ra>,
ident: Macros20NormalizedIdent,
ident: IdentKey,
orig_ident_span: Span,
ns: Namespace,
child_index: usize,
res: Res,
@ -102,7 +106,7 @@ impl<'ra, 'tcx> Resolver<'ra, 'tcx> {
let key =
BindingKey::new_disambiguated(ident, ns, || (child_index + 1).try_into().unwrap()); // 0 indicates no underscore
if self
.resolution_or_default(parent, key)
.resolution_or_default(parent, key, orig_ident_span)
.borrow_mut_unchecked()
.non_glob_decl
.replace(decl)
@ -279,8 +283,8 @@ impl<'ra, 'tcx> Resolver<'ra, 'tcx> {
.unwrap_or_else(|| res.def_id()),
)
};
let ModChild { ident, res, vis, ref reexport_chain } = *child;
let ident = Macros20NormalizedIdent::new(ident);
let ModChild { ident: orig_ident, res, vis, ref reexport_chain } = *child;
let ident = IdentKey::new(orig_ident);
let span = child_span(self, reexport_chain, res);
let res = res.expect_non_local();
let expansion = parent_scope.expansion;
@ -293,7 +297,18 @@ impl<'ra, 'tcx> Resolver<'ra, 'tcx> {
// Record primary definitions.
let define_extern = |ns| {
self.define_extern(parent, ident, ns, child_index, res, vis, span, expansion, ambig)
self.define_extern(
parent,
ident,
orig_ident.span,
ns,
child_index,
res,
vis,
span,
expansion,
ambig,
)
};
match res {
Res::Def(
@ -533,8 +548,8 @@ impl<'a, 'ra, 'tcx> BuildReducedGraphVisitor<'a, 'ra, 'tcx> {
if target.name != kw::Underscore {
self.r.per_ns(|this, ns| {
if !type_ns_only || ns == TypeNS {
let key = BindingKey::new(Macros20NormalizedIdent::new(target), ns);
this.resolution_or_default(current_module, key)
let key = BindingKey::new(IdentKey::new(target), ns);
this.resolution_or_default(current_module, key, target.span)
.borrow_mut(this)
.single_imports
.insert(import);
@ -974,7 +989,7 @@ impl<'a, 'ra, 'tcx> BuildReducedGraphVisitor<'a, 'ra, 'tcx> {
&mut self,
orig_name: Option<Symbol>,
item: &Item,
ident: Ident,
orig_ident: Ident,
local_def_id: LocalDefId,
vis: Visibility,
) {
@ -983,7 +998,7 @@ impl<'a, 'ra, 'tcx> BuildReducedGraphVisitor<'a, 'ra, 'tcx> {
let parent = parent_scope.module;
let expansion = parent_scope.expansion;
let (used, module, decl) = if orig_name.is_none() && ident.name == kw::SelfLower {
let (used, module, decl) = if orig_name.is_none() && orig_ident.name == kw::SelfLower {
self.r.dcx().emit_err(errors::ExternCrateSelfRequiresRenaming { span: sp });
return;
} else if orig_name == Some(kw::SelfLower) {
@ -1008,7 +1023,7 @@ impl<'a, 'ra, 'tcx> BuildReducedGraphVisitor<'a, 'ra, 'tcx> {
})
.unwrap_or((true, None, self.r.dummy_decl));
let import = self.r.arenas.alloc_import(ImportData {
kind: ImportKind::ExternCrate { source: orig_name, target: ident, id: item.id },
kind: ImportKind::ExternCrate { source: orig_name, target: orig_ident, id: item.id },
root_id: item.id,
parent_scope,
imported_module: CmCell::new(module),
@ -1026,7 +1041,7 @@ impl<'a, 'ra, 'tcx> BuildReducedGraphVisitor<'a, 'ra, 'tcx> {
}
self.r.potentially_unused_imports.push(import);
let import_decl = self.r.new_import_decl(decl, import);
let ident = Macros20NormalizedIdent::new(ident);
let ident = IdentKey::new(orig_ident);
if ident.name != kw::Underscore && parent == self.r.graph_root {
// FIXME: this error is technically unnecessary now when extern prelude is split into
// two scopes, remove it with lang team approval.
@ -1045,20 +1060,20 @@ impl<'a, 'ra, 'tcx> BuildReducedGraphVisitor<'a, 'ra, 'tcx> {
Entry::Occupied(mut occupied) => {
let entry = occupied.get_mut();
if entry.item_decl.is_some() {
let msg = format!("extern crate `{ident}` already in extern prelude");
let msg = format!("extern crate `{orig_ident}` already in extern prelude");
self.r.tcx.dcx().span_delayed_bug(item.span, msg);
} else {
entry.item_decl = Some((import_decl, orig_name.is_some()));
entry.item_decl = Some((import_decl, orig_ident.span, orig_name.is_some()));
}
entry
}
Entry::Vacant(vacant) => vacant.insert(ExternPreludeEntry {
item_decl: Some((import_decl, true)),
item_decl: Some((import_decl, orig_ident.span, true)),
flag_decl: None,
}),
};
}
self.r.plant_decl_into_local_module(ident, TypeNS, import_decl);
self.r.plant_decl_into_local_module(ident, orig_ident.span, TypeNS, import_decl);
}
/// Constructs the reduced graph for one foreign item.
@ -1159,7 +1174,7 @@ impl<'a, 'ra, 'tcx> BuildReducedGraphVisitor<'a, 'ra, 'tcx> {
if let Some(span) = import_all {
let import = macro_use_import(self, span, false);
self.r.potentially_unused_imports.push(import);
module.for_each_child_mut(self, |this, ident, ns, binding| {
module.for_each_child_mut(self, |this, ident, _, ns, binding| {
if ns == MacroNS {
let import =
if this.r.is_accessible_from(binding.vis(), this.parent_scope.module) {
@ -1270,7 +1285,7 @@ impl<'a, 'ra, 'tcx> BuildReducedGraphVisitor<'a, 'ra, 'tcx> {
let expansion = parent_scope.expansion;
let feed = self.r.feed(item.id);
let def_id = feed.key();
let (res, ident, span, macro_rules) = match &item.kind {
let (res, orig_ident, span, macro_rules) = match &item.kind {
ItemKind::MacroDef(ident, def) => {
(self.res(def_id), *ident, item.span, def.macro_rules)
}
@ -1293,8 +1308,8 @@ impl<'a, 'ra, 'tcx> BuildReducedGraphVisitor<'a, 'ra, 'tcx> {
self.r.local_macro_def_scopes.insert(def_id, parent_scope.module);
if macro_rules {
let ident = Macros20NormalizedIdent::new(ident);
self.r.macro_names.insert(ident.0);
let ident = IdentKey::new(orig_ident);
self.r.macro_names.insert(ident);
let is_macro_export = ast::attr::contains_name(&item.attrs, sym::macro_export);
let vis = if is_macro_export {
Visibility::Public
@ -1326,10 +1341,10 @@ impl<'a, 'ra, 'tcx> BuildReducedGraphVisitor<'a, 'ra, 'tcx> {
});
self.r.import_use_map.insert(import, Used::Other);
let import_decl = self.r.new_import_decl(decl, import);
self.r.plant_decl_into_local_module(ident, MacroNS, import_decl);
self.r.plant_decl_into_local_module(ident, orig_ident.span, MacroNS, import_decl);
} else {
self.r.check_reserved_macro_name(ident.0, res);
self.insert_unused_macro(ident.0, def_id, item.id);
self.r.check_reserved_macro_name(ident.name, orig_ident.span, res);
self.insert_unused_macro(orig_ident, def_id, item.id);
}
self.r.feed_visibility(feed, vis);
let scope = self.r.arenas.alloc_macro_rules_scope(MacroRulesScope::Def(
@ -1337,6 +1352,7 @@ impl<'a, 'ra, 'tcx> BuildReducedGraphVisitor<'a, 'ra, 'tcx> {
parent_macro_rules_scope: parent_scope.macro_rules,
decl,
ident,
orig_ident_span: orig_ident.span,
}),
));
self.r.macro_rules_scopes.insert(def_id, scope);
@ -1352,9 +1368,9 @@ impl<'a, 'ra, 'tcx> BuildReducedGraphVisitor<'a, 'ra, 'tcx> {
_ => self.resolve_visibility(&item.vis),
};
if !vis.is_public() {
self.insert_unused_macro(ident, def_id, item.id);
self.insert_unused_macro(orig_ident, def_id, item.id);
}
self.r.define_local(module, ident, MacroNS, res, vis, span, expansion);
self.r.define_local(module, orig_ident, MacroNS, res, vis, span, expansion);
self.r.feed_visibility(feed, vis);
self.parent_scope.macro_rules
}
@ -1496,7 +1512,7 @@ impl<'a, 'ra, 'tcx> Visitor<'a> for BuildReducedGraphVisitor<'a, 'ra, 'tcx> {
{
// Don't add underscore names, they cannot be looked up anyway.
let impl_def_id = self.r.tcx.local_parent(local_def_id);
let key = BindingKey::new(Macros20NormalizedIdent::new(ident), ns);
let key = BindingKey::new(IdentKey::new(ident), ns);
self.r.impl_binding_keys.entry(impl_def_id).or_default().insert(key);
}

View file

@ -33,10 +33,10 @@ use rustc_session::lint::BuiltinLintDiag;
use rustc_session::lint::builtin::{
MACRO_USE_EXTERN_CRATE, UNUSED_EXTERN_CRATES, UNUSED_IMPORTS, UNUSED_QUALIFICATIONS,
};
use rustc_span::{DUMMY_SP, Ident, Macros20NormalizedIdent, Span, kw};
use rustc_span::{DUMMY_SP, Ident, Span, kw};
use crate::imports::{Import, ImportKind};
use crate::{DeclKind, LateDecl, Resolver, module_to_string};
use crate::{DeclKind, IdentKey, LateDecl, Resolver, module_to_string};
struct UnusedImport {
use_tree: ast::UseTree,
@ -203,7 +203,7 @@ impl<'a, 'ra, 'tcx> UnusedImportCheckVisitor<'a, 'ra, 'tcx> {
if self
.r
.extern_prelude
.get(&Macros20NormalizedIdent::new(extern_crate.ident))
.get(&IdentKey::new(extern_crate.ident))
.is_none_or(|entry| entry.introduced_by_item())
{
continue;

View file

@ -32,9 +32,7 @@ use rustc_span::edit_distance::find_best_match_for_name;
use rustc_span::edition::Edition;
use rustc_span::hygiene::MacroKind;
use rustc_span::source_map::{SourceMap, Spanned};
use rustc_span::{
BytePos, DUMMY_SP, Ident, Macros20NormalizedIdent, Span, Symbol, SyntaxContext, kw, sym,
};
use rustc_span::{BytePos, DUMMY_SP, Ident, Span, Symbol, SyntaxContext, kw, sym};
use thin_vec::{ThinVec, thin_vec};
use tracing::{debug, instrument};
@ -47,10 +45,10 @@ use crate::imports::{Import, ImportKind};
use crate::late::{DiagMetadata, PatternSource, Rib};
use crate::{
AmbiguityError, AmbiguityKind, AmbiguityWarning, BindingError, BindingKey, Decl, DeclKind,
Finalize, ForwardGenericParamBanReason, HasGenericParams, LateDecl, MacroRulesScope, Module,
ModuleKind, ModuleOrUniformRoot, ParentScope, PathResult, PrivacyError, ResolutionError,
Resolver, Scope, ScopeSet, Segment, UseError, Used, VisResolutionError, errors as errs,
path_names_to_string,
Finalize, ForwardGenericParamBanReason, HasGenericParams, IdentKey, LateDecl, MacroRulesScope,
Module, ModuleKind, ModuleOrUniformRoot, ParentScope, PathResult, PrivacyError,
ResolutionError, Resolver, Scope, ScopeSet, Segment, UseError, Used, VisResolutionError,
errors as errs, path_names_to_string,
};
type Res = def::Res<ast::NodeId>;
@ -81,24 +79,14 @@ pub(crate) struct TypoSuggestion {
}
impl TypoSuggestion {
pub(crate) fn typo_from_ident(ident: Ident, res: Res) -> TypoSuggestion {
Self {
candidate: ident.name,
span: Some(ident.span),
res,
target: SuggestionTarget::SimilarlyNamed,
}
pub(crate) fn new(candidate: Symbol, span: Span, res: Res) -> TypoSuggestion {
Self { candidate, span: Some(span), res, target: SuggestionTarget::SimilarlyNamed }
}
pub(crate) fn typo_from_name(candidate: Symbol, res: Res) -> TypoSuggestion {
Self { candidate, span: None, res, target: SuggestionTarget::SimilarlyNamed }
}
pub(crate) fn single_item_from_ident(ident: Ident, res: Res) -> TypoSuggestion {
Self {
candidate: ident.name,
span: Some(ident.span),
res,
target: SuggestionTarget::SingleItem,
}
pub(crate) fn single_item(candidate: Symbol, span: Span, res: Res) -> TypoSuggestion {
Self { candidate, span: Some(span), res, target: SuggestionTarget::SingleItem }
}
}
@ -212,7 +200,7 @@ impl<'ra, 'tcx> Resolver<'ra, 'tcx> {
pub(crate) fn report_conflict(
&mut self,
ident: Ident,
ident: IdentKey,
ns: Namespace,
old_binding: Decl<'ra>,
new_binding: Decl<'ra>,
@ -324,10 +312,8 @@ impl<'ra, 'tcx> Resolver<'ra, 'tcx> {
// Check if the target of the use for both bindings is the same.
let duplicate = new_binding.res().opt_def_id() == old_binding.res().opt_def_id();
let has_dummy_span = new_binding.span.is_dummy() || old_binding.span.is_dummy();
let from_item = self
.extern_prelude
.get(&Macros20NormalizedIdent::new(ident))
.is_none_or(|entry| entry.introduced_by_item());
let from_item =
self.extern_prelude.get(&ident).is_none_or(|entry| entry.introduced_by_item());
// Only suggest removing an import if both bindings are to the same def, if both spans
// aren't dummy spans. Further, if both bindings are imports, then the ident must have
// been introduced by an item.
@ -531,10 +517,10 @@ impl<'ra, 'tcx> Resolver<'ra, 'tcx> {
filter_fn: &impl Fn(Res) -> bool,
ctxt: Option<SyntaxContext>,
) {
module.for_each_child(self, |_this, ident, _ns, binding| {
module.for_each_child(self, |_this, ident, orig_ident_span, _ns, binding| {
let res = binding.res();
if filter_fn(res) && ctxt.is_none_or(|ctxt| ctxt == ident.span.ctxt()) {
names.push(TypoSuggestion::typo_from_ident(ident.0, res));
if filter_fn(res) && ctxt.is_none_or(|ctxt| ctxt == *ident.ctxt) {
names.push(TypoSuggestion::new(ident.name, orig_ident_span, res));
}
});
}
@ -1187,11 +1173,11 @@ impl<'ra, 'tcx> Resolver<'ra, 'tcx> {
let res = Res::NonMacroAttr(NonMacroAttrKind::DeriveHelper);
if filter_fn(res) {
suggestions.extend(
this.helper_attrs
.get(&expn_id)
.into_iter()
.flatten()
.map(|(ident, _)| TypoSuggestion::typo_from_ident(ident.0, res)),
this.helper_attrs.get(&expn_id).into_iter().flatten().map(
|&(ident, orig_ident_span, _)| {
TypoSuggestion::new(ident.name, orig_ident_span, res)
},
),
);
}
}
@ -1202,8 +1188,11 @@ impl<'ra, 'tcx> Resolver<'ra, 'tcx> {
if let MacroRulesScope::Def(macro_rules_def) = macro_rules_scope.get() {
let res = macro_rules_def.decl.res();
if filter_fn(res) {
suggestions
.push(TypoSuggestion::typo_from_ident(macro_rules_def.ident.0, res))
suggestions.push(TypoSuggestion::new(
macro_rules_def.ident.name,
macro_rules_def.orig_ident_span,
res,
))
}
}
}
@ -1233,9 +1222,9 @@ impl<'ra, 'tcx> Resolver<'ra, 'tcx> {
}
Scope::ExternPreludeItems => {
// Add idents from both item and flag scopes.
suggestions.extend(this.extern_prelude.keys().filter_map(|ident| {
suggestions.extend(this.extern_prelude.iter().filter_map(|(ident, entry)| {
let res = Res::Def(DefKind::Mod, CRATE_DEF_ID.to_def_id());
filter_fn(res).then_some(TypoSuggestion::typo_from_ident(ident.0, res))
filter_fn(res).then_some(TypoSuggestion::new(ident.name, entry.span(), res))
}));
}
Scope::ExternPreludeFlags => {}
@ -1244,7 +1233,7 @@ impl<'ra, 'tcx> Resolver<'ra, 'tcx> {
suggestions.extend(
this.registered_tools
.iter()
.map(|ident| TypoSuggestion::typo_from_ident(*ident, res)),
.map(|ident| TypoSuggestion::new(ident.name, ident.span, res)),
);
}
Scope::StdLibPrelude => {
@ -1329,7 +1318,7 @@ impl<'ra, 'tcx> Resolver<'ra, 'tcx> {
}
{
let in_module_is_extern = !in_module.def_id().is_local();
in_module.for_each_child(self, |this, ident, ns, name_binding| {
in_module.for_each_child(self, |this, ident, orig_ident_span, ns, name_binding| {
// Avoid non-importable candidates.
if name_binding.is_assoc_item()
&& !this.tcx.features().import_trait_associated_functions()
@ -1382,7 +1371,7 @@ impl<'ra, 'tcx> Resolver<'ra, 'tcx> {
if ident.name == lookup_ident.name
&& ns == namespace
&& in_module != parent_scope.module
&& !ident.span.normalize_to_macros_2_0().from_expansion()
&& ident.ctxt.is_root()
&& filter_fn(res)
{
// create the path
@ -1395,7 +1384,7 @@ impl<'ra, 'tcx> Resolver<'ra, 'tcx> {
};
segms.append(&mut path_segments.clone());
segms.push(ast::PathSegment::from_ident(ident.0));
segms.push(ast::PathSegment::from_ident(ident.orig(orig_ident_span)));
let path = Path { span: name_binding.span, segments: segms, tokens: None };
if child_accessible
@ -1468,7 +1457,7 @@ impl<'ra, 'tcx> Resolver<'ra, 'tcx> {
if let Some(def_id) = name_binding.res().module_like_def_id() {
// form the path
let mut path_segments = path_segments.clone();
path_segments.push(ast::PathSegment::from_ident(ident.0));
path_segments.push(ast::PathSegment::from_ident(ident.orig(orig_ident_span)));
let alias_import = if let DeclKind::Import { import, .. } = name_binding.kind
&& let ImportKind::ExternCrate { source: Some(_), .. } = import.kind
@ -1557,8 +1546,8 @@ impl<'ra, 'tcx> Resolver<'ra, 'tcx> {
);
if lookup_ident.span.at_least_rust_2018() {
for &ident in self.extern_prelude.keys() {
if ident.span.from_expansion() {
for (ident, entry) in &self.extern_prelude {
if entry.span().from_expansion() {
// Idents are adjusted to the root context before being
// resolved in the extern prelude, so reporting this to the
// user is no help. This skips the injected
@ -1582,7 +1571,7 @@ impl<'ra, 'tcx> Resolver<'ra, 'tcx> {
self.resolutions(parent_scope.module).borrow().iter().any(
|(key, name_resolution)| {
if key.ns == TypeNS
&& key.ident == ident
&& key.ident == *ident
&& let Some(decl) = name_resolution.borrow().best_decl()
{
match decl.res() {
@ -1601,7 +1590,7 @@ impl<'ra, 'tcx> Resolver<'ra, 'tcx> {
if needs_disambiguation {
crate_path.push(ast::PathSegment::path_root(rustc_span::DUMMY_SP));
}
crate_path.push(ast::PathSegment::from_ident(ident.0));
crate_path.push(ast::PathSegment::from_ident(ident.orig(entry.span())));
suggestions.extend(self.lookup_import_candidates_from_module(
lookup_ident,
@ -1777,7 +1766,7 @@ impl<'ra, 'tcx> Resolver<'ra, 'tcx> {
return;
}
if self.macro_names.contains(&ident.normalize_to_macros_2_0()) {
if self.macro_names.contains(&IdentKey::new(ident)) {
err.subdiagnostic(AddedMacroUse);
return;
}
@ -2007,8 +1996,8 @@ impl<'ra, 'tcx> Resolver<'ra, 'tcx> {
matches!(scope2, Scope::ExternPreludeFlags)
&& self
.extern_prelude
.get(&Macros20NormalizedIdent::new(ident))
.is_some_and(|entry| entry.item_decl.map(|(b, _)| b) == Some(b1))
.get(&IdentKey::new(ident))
.is_some_and(|entry| entry.item_decl.map(|(b, ..)| b) == Some(b1))
};
let (b1, b2, scope1, scope2, swapped) = if b2.span.is_dummy() && !b1.span.is_dummy() {
// We have to print the span-less alternative first, otherwise formatting looks bad.
@ -2914,7 +2903,7 @@ impl<'ra, 'tcx> Resolver<'ra, 'tcx> {
return None;
}
let binding_key = BindingKey::new(Macros20NormalizedIdent::new(ident), MacroNS);
let binding_key = BindingKey::new(IdentKey::new(ident), MacroNS);
let binding = self.resolution(crate_module, binding_key)?.binding()?;
let Res::Def(DefKind::Macro(kinds), _) = binding.res() else {
return None;

View file

@ -96,13 +96,10 @@ impl<'a, 'ra, 'tcx> EffectiveVisibilitiesVisitor<'a, 'ra, 'tcx> {
// is the maximum value among visibilities of declarations corresponding to that def id.
for (decl, eff_vis) in visitor.import_effective_visibilities.iter() {
let DeclKind::Import { import, .. } = decl.kind else { unreachable!() };
if !decl.is_ambiguity_recursive() {
if let Some(node_id) = import.id() {
r.effective_visibilities.update_eff_vis(r.local_def_id(node_id), eff_vis, r.tcx)
}
} else if decl.ambiguity.get().is_some()
&& eff_vis.is_public_at_level(Level::Reexported)
{
if let Some(node_id) = import.id() {
r.effective_visibilities.update_eff_vis(r.local_def_id(node_id), eff_vis, r.tcx)
}
if decl.ambiguity.get().is_some() && eff_vis.is_public_at_level(Level::Reexported) {
exported_ambiguities.insert(*decl);
}
}
@ -123,31 +120,13 @@ impl<'a, 'ra, 'tcx> EffectiveVisibilitiesVisitor<'a, 'ra, 'tcx> {
// Set the given effective visibility level to `Level::Direct` and
// sets the rest of the `use` chain to `Level::Reexported` until
// we hit the actual exported item.
//
// If the binding is ambiguous, put the root ambiguity binding and all reexports
// leading to it into the table. They are used by the `ambiguous_glob_reexports`
// lint. For all bindings added to the table this way `is_ambiguity` returns true.
let is_ambiguity =
|decl: Decl<'ra>, warn: bool| decl.ambiguity.get().is_some() && !warn;
let mut parent_id = ParentId::Def(module_id);
let mut warn_ambiguity = decl.warn_ambiguity.get();
while let DeclKind::Import { source_decl, .. } = decl.kind {
self.update_import(decl, parent_id);
if is_ambiguity(decl, warn_ambiguity) {
// Stop at the root ambiguity, further bindings in the chain should not
// be reexported because the root ambiguity blocks any access to them.
// (Those further bindings are most likely not ambiguities themselves.)
break;
}
parent_id = ParentId::Import(decl);
decl = source_decl;
warn_ambiguity |= source_decl.warn_ambiguity.get();
}
if !is_ambiguity(decl, warn_ambiguity)
&& let Some(def_id) = decl.res().opt_def_id().and_then(|id| id.as_local())
{
if let Some(def_id) = decl.res().opt_def_id().and_then(|id| id.as_local()) {
self.update_def(def_id, decl.vis().expect_local(), parent_id);
}
}

View file

@ -601,7 +601,7 @@ pub(crate) struct ProcMacroDeriveResolutionFallback {
#[label]
pub span: Span,
pub ns_descr: &'static str,
pub ident: Ident,
pub ident: Symbol,
}
#[derive(LintDiagnostic)]
@ -1151,7 +1151,7 @@ pub(crate) struct CannotUseThroughAnImport {
pub(crate) struct NameReservedInAttributeNamespace {
#[primary_span]
pub(crate) span: Span,
pub(crate) ident: Ident,
pub(crate) ident: Symbol,
}
#[derive(Diagnostic)]

View file

@ -10,11 +10,12 @@ use rustc_session::lint::builtin::PROC_MACRO_DERIVE_RESOLUTION_FALLBACK;
use rustc_session::parse::feature_err;
use rustc_span::edition::Edition;
use rustc_span::hygiene::{ExpnId, ExpnKind, LocalExpnId, MacroKind, SyntaxContext};
use rustc_span::{Ident, Macros20NormalizedIdent, Span, kw, sym};
use rustc_span::{Ident, Span, kw, sym};
use smallvec::SmallVec;
use tracing::{debug, instrument};
use crate::errors::{ParamKindInEnumDiscriminant, ParamKindInNonTrivialAnonConst};
use crate::hygiene::Macros20NormalizedSyntaxContext;
use crate::imports::{Import, NameResolution};
use crate::late::{
ConstantHasGenerics, DiagMetadata, NoConstantGenericsReason, PathSource, Rib, RibKind,
@ -22,7 +23,7 @@ use crate::late::{
use crate::macros::{MacroRulesScope, sub_namespace_match};
use crate::{
AmbiguityError, AmbiguityKind, AmbiguityWarning, BindingKey, CmResolver, Decl, DeclKind,
Determinacy, Finalize, ImportKind, LateDecl, Module, ModuleKind, ModuleOrUniformRoot,
Determinacy, Finalize, IdentKey, ImportKind, LateDecl, Module, ModuleKind, ModuleOrUniformRoot,
ParentScope, PathResult, PrivacyError, Res, ResolutionError, Resolver, Scope, ScopeSet,
Segment, Stage, Used, errors,
};
@ -61,7 +62,7 @@ impl<'ra, 'tcx> Resolver<'ra, 'tcx> {
CmResolver<'_, 'ra, 'tcx>,
Scope<'ra>,
UsePrelude,
Span,
Macros20NormalizedSyntaxContext,
) -> ControlFlow<T>,
) -> Option<T> {
// General principles:
@ -127,7 +128,7 @@ impl<'ra, 'tcx> Resolver<'ra, 'tcx> {
TypeNS | ValueNS => Scope::ModuleNonGlobs(module, None),
MacroNS => Scope::DeriveHelpers(parent_scope.expansion),
};
let mut ctxt = orig_ctxt.normalize_to_macros_2_0();
let mut ctxt = Macros20NormalizedSyntaxContext::new(orig_ctxt.ctxt());
let mut use_prelude = !module.no_implicit_prelude;
loop {
@ -198,7 +199,7 @@ impl<'ra, 'tcx> Resolver<'ra, 'tcx> {
Scope::ModuleGlobs(..) if module_only => break,
Scope::ModuleGlobs(..) if module_and_extern_prelude => match ns {
TypeNS => {
ctxt.adjust(ExpnId::root());
ctxt.update_unchecked(|ctxt| ctxt.adjust(ExpnId::root()));
Scope::ExternPreludeItems
}
ValueNS | MacroNS => break,
@ -210,7 +211,7 @@ impl<'ra, 'tcx> Resolver<'ra, 'tcx> {
Scope::ModuleNonGlobs(parent_module, lint_id.or(prev_lint_id))
}
None => {
ctxt.adjust(ExpnId::root());
ctxt.update_unchecked(|ctxt| ctxt.adjust(ExpnId::root()));
match ns {
TypeNS => Scope::ExternPreludeItems,
ValueNS => Scope::StdLibPrelude,
@ -240,12 +241,12 @@ impl<'ra, 'tcx> Resolver<'ra, 'tcx> {
fn hygienic_lexical_parent(
&self,
module: Module<'ra>,
span: &mut Span,
ctxt: &mut Macros20NormalizedSyntaxContext,
derive_fallback_lint_id: Option<NodeId>,
) -> Option<(Module<'ra>, Option<NodeId>)> {
let ctxt = span.ctxt();
if !module.expansion.outer_expn_is_descendant_of(ctxt) {
return Some((self.expn_def_scope(span.remove_mark()), None));
if !module.expansion.outer_expn_is_descendant_of(**ctxt) {
let expn_id = ctxt.update_unchecked(|ctxt| ctxt.remove_mark());
return Some((self.expn_def_scope(expn_id), None));
}
if let ModuleKind::Block = module.kind {
@ -275,7 +276,7 @@ impl<'ra, 'tcx> Resolver<'ra, 'tcx> {
let ext = &self.get_macro_by_def_id(def_id).ext;
if ext.builtin_name.is_none()
&& ext.macro_kinds() == MacroKinds::DERIVE
&& parent.expansion.outer_expn_is_descendant_of(ctxt)
&& parent.expansion.outer_expn_is_descendant_of(**ctxt)
{
return Some((parent, derive_fallback_lint_id));
}
@ -439,11 +440,10 @@ impl<'ra, 'tcx> Resolver<'ra, 'tcx> {
orig_ident.span,
derive_fallback_lint_id,
|mut this, scope, use_prelude, ctxt| {
let ident = Ident::new(orig_ident.name, ctxt);
// The passed `ctxt` is already normalized, so avoid expensive double normalization.
let ident = Macros20NormalizedIdent(ident);
let ident = IdentKey { name: orig_ident.name, ctxt };
let res = match this.reborrow().resolve_ident_in_scope(
ident,
orig_ident.span,
ns,
scope,
use_prelude,
@ -515,7 +515,8 @@ impl<'ra, 'tcx> Resolver<'ra, 'tcx> {
fn resolve_ident_in_scope<'r>(
mut self: CmResolver<'r, 'ra, 'tcx>,
ident: Macros20NormalizedIdent,
ident: IdentKey,
orig_ident_span: Span,
ns: Namespace,
scope: Scope<'ra>,
use_prelude: UsePrelude,
@ -531,7 +532,7 @@ impl<'ra, 'tcx> Resolver<'ra, 'tcx> {
if let Some(decl) = self
.helper_attrs
.get(&expn_id)
.and_then(|attrs| attrs.iter().rfind(|(i, _)| ident == *i).map(|(_, d)| *d))
.and_then(|attrs| attrs.iter().rfind(|(i, ..)| ident == *i).map(|(.., d)| *d))
{
Ok(decl)
} else {
@ -587,6 +588,7 @@ impl<'ra, 'tcx> Resolver<'ra, 'tcx> {
let decl = self.reborrow().resolve_ident_in_module_non_globs_unadjusted(
module,
ident,
orig_ident_span,
ns,
adjusted_parent_scope,
if matches!(scope_set, ScopeSet::Module(..)) {
@ -604,11 +606,11 @@ impl<'ra, 'tcx> Resolver<'ra, 'tcx> {
self.get_mut().lint_buffer.buffer_lint(
PROC_MACRO_DERIVE_RESOLUTION_FALLBACK,
lint_id,
ident.span,
orig_ident_span,
errors::ProcMacroDeriveResolutionFallback {
span: ident.span,
span: orig_ident_span,
ns_descr: ns.descr(),
ident: ident.0,
ident: ident.name,
},
);
}
@ -637,6 +639,7 @@ impl<'ra, 'tcx> Resolver<'ra, 'tcx> {
let binding = self.reborrow().resolve_ident_in_module_globs_unadjusted(
module,
ident,
orig_ident_span,
ns,
adjusted_parent_scope,
if matches!(scope_set, ScopeSet::Module(..)) {
@ -654,11 +657,11 @@ impl<'ra, 'tcx> Resolver<'ra, 'tcx> {
self.get_mut().lint_buffer.buffer_lint(
PROC_MACRO_DERIVE_RESOLUTION_FALLBACK,
lint_id,
ident.span,
orig_ident_span,
errors::ProcMacroDeriveResolutionFallback {
span: ident.span,
span: orig_ident_span,
ns_descr: ns.descr(),
ident: ident.0,
ident: ident.name,
},
);
}
@ -683,7 +686,11 @@ impl<'ra, 'tcx> Resolver<'ra, 'tcx> {
None => Err(Determinacy::Determined),
},
Scope::ExternPreludeItems => {
match self.reborrow().extern_prelude_get_item(ident, finalize.is_some()) {
match self.reborrow().extern_prelude_get_item(
ident,
orig_ident_span,
finalize.is_some(),
) {
Some(decl) => Ok(decl),
None => Err(Determinacy::determined(
self.graph_root.unexpanded_invocations.borrow().is_empty(),
@ -691,7 +698,7 @@ impl<'ra, 'tcx> Resolver<'ra, 'tcx> {
}
}
Scope::ExternPreludeFlags => {
match self.extern_prelude_get_flag(ident, finalize.is_some()) {
match self.extern_prelude_get_flag(ident, orig_ident_span, finalize.is_some()) {
Some(decl) => Ok(decl),
None => Err(Determinacy::Determined),
}
@ -704,7 +711,7 @@ impl<'ra, 'tcx> Resolver<'ra, 'tcx> {
let mut result = Err(Determinacy::Determined);
if let Some(prelude) = self.prelude
&& let Ok(decl) = self.reborrow().resolve_ident_in_scope_set(
ident.0,
ident.orig(orig_ident_span.with_ctxt(*ident.ctxt)),
ScopeSet::Module(ns, prelude),
parent_scope,
None,
@ -723,26 +730,26 @@ impl<'ra, 'tcx> Resolver<'ra, 'tcx> {
Some(decl) => {
if matches!(ident.name, sym::f16)
&& !self.tcx.features().f16()
&& !ident.span.allows_unstable(sym::f16)
&& !orig_ident_span.allows_unstable(sym::f16)
&& finalize.is_some()
{
feature_err(
self.tcx.sess,
sym::f16,
ident.span,
orig_ident_span,
"the type `f16` is unstable",
)
.emit();
}
if matches!(ident.name, sym::f128)
&& !self.tcx.features().f128()
&& !ident.span.allows_unstable(sym::f128)
&& !orig_ident_span.allows_unstable(sym::f128)
&& finalize.is_some()
{
feature_err(
self.tcx.sess,
sym::f128,
ident.span,
orig_ident_span,
"the type `f128` is unstable",
)
.emit();
@ -1001,7 +1008,8 @@ impl<'ra, 'tcx> Resolver<'ra, 'tcx> {
fn resolve_ident_in_module_non_globs_unadjusted<'r>(
mut self: CmResolver<'r, 'ra, 'tcx>,
module: Module<'ra>,
ident: Macros20NormalizedIdent,
ident: IdentKey,
orig_ident_span: Span,
ns: Namespace,
parent_scope: &ParentScope<'ra>,
shadowing: Shadowing,
@ -1016,7 +1024,7 @@ impl<'ra, 'tcx> Resolver<'ra, 'tcx> {
// doesn't need to be mutable. It will fail when there is a cycle of imports, and without
// the exclusive access infinite recursion will crash the compiler with stack overflow.
let resolution = &*self
.resolution_or_default(module, key)
.resolution_or_default(module, key, orig_ident_span)
.try_borrow_mut_unchecked()
.map_err(|_| ControlFlow::Continue(Determined))?;
@ -1024,7 +1032,8 @@ impl<'ra, 'tcx> Resolver<'ra, 'tcx> {
if let Some(finalize) = finalize {
return self.get_mut().finalize_module_binding(
ident.0,
ident,
orig_ident_span,
binding,
parent_scope,
module,
@ -1064,7 +1073,8 @@ impl<'ra, 'tcx> Resolver<'ra, 'tcx> {
fn resolve_ident_in_module_globs_unadjusted<'r>(
mut self: CmResolver<'r, 'ra, 'tcx>,
module: Module<'ra>,
ident: Macros20NormalizedIdent,
ident: IdentKey,
orig_ident_span: Span,
ns: Namespace,
parent_scope: &ParentScope<'ra>,
shadowing: Shadowing,
@ -1077,7 +1087,7 @@ impl<'ra, 'tcx> Resolver<'ra, 'tcx> {
// doesn't need to be mutable. It will fail when there is a cycle of imports, and without
// the exclusive access infinite recursion will crash the compiler with stack overflow.
let resolution = &*self
.resolution_or_default(module, key)
.resolution_or_default(module, key, orig_ident_span)
.try_borrow_mut_unchecked()
.map_err(|_| ControlFlow::Continue(Determined))?;
@ -1085,7 +1095,8 @@ impl<'ra, 'tcx> Resolver<'ra, 'tcx> {
if let Some(finalize) = finalize {
return self.get_mut().finalize_module_binding(
ident.0,
ident,
orig_ident_span,
binding,
parent_scope,
module,
@ -1154,8 +1165,8 @@ impl<'ra, 'tcx> Resolver<'ra, 'tcx> {
None => return Err(ControlFlow::Continue(Undetermined)),
};
let tmp_parent_scope;
let (mut adjusted_parent_scope, mut ident) = (parent_scope, ident);
match ident.0.span.glob_adjust(module.expansion, glob_import.span) {
let (mut adjusted_parent_scope, mut ctxt) = (parent_scope, *ident.ctxt);
match ctxt.glob_adjust(module.expansion, glob_import.span) {
Some(Some(def)) => {
tmp_parent_scope =
ParentScope { module: self.expn_def_scope(def), ..*parent_scope };
@ -1165,7 +1176,7 @@ impl<'ra, 'tcx> Resolver<'ra, 'tcx> {
None => continue,
};
let result = self.reborrow().resolve_ident_in_scope_set(
ident.0,
ident.orig(orig_ident_span.with_ctxt(ctxt)),
ScopeSet::Module(ns, module),
adjusted_parent_scope,
None,
@ -1191,7 +1202,8 @@ impl<'ra, 'tcx> Resolver<'ra, 'tcx> {
fn finalize_module_binding(
&mut self,
ident: Ident,
ident: IdentKey,
orig_ident_span: Span,
binding: Option<Decl<'ra>>,
parent_scope: &ParentScope<'ra>,
module: Module<'ra>,
@ -1204,6 +1216,7 @@ impl<'ra, 'tcx> Resolver<'ra, 'tcx> {
return Err(ControlFlow::Continue(Determined));
};
let ident = ident.orig(orig_ident_span);
if !self.is_accessible_from(binding.vis(), parent_scope.module) {
if report_private {
self.privacy_errors.push(PrivacyError {

View file

@ -20,7 +20,7 @@ use rustc_session::lint::builtin::{
use rustc_session::parse::feature_err;
use rustc_span::edit_distance::find_best_match_for_name;
use rustc_span::hygiene::LocalExpnId;
use rustc_span::{Ident, Macros20NormalizedIdent, Span, Symbol, kw, sym};
use rustc_span::{Ident, Span, Symbol, kw, sym};
use tracing::debug;
use crate::Namespace::{self, *};
@ -33,8 +33,8 @@ use crate::errors::{
use crate::ref_mut::CmCell;
use crate::{
AmbiguityError, BindingKey, CmResolver, Decl, DeclData, DeclKind, Determinacy, Finalize,
ImportSuggestion, Module, ModuleOrUniformRoot, ParentScope, PathResult, PerNS, ResolutionError,
Resolver, ScopeSet, Segment, Used, module_to_string, names_to_string,
IdentKey, ImportSuggestion, Module, ModuleOrUniformRoot, ParentScope, PathResult, PerNS,
ResolutionError, Resolver, ScopeSet, Segment, Used, module_to_string, names_to_string,
};
type Res = def::Res<NodeId>;
@ -239,18 +239,23 @@ impl<'ra> ImportData<'ra> {
}
/// Records information about the resolution of a name in a namespace of a module.
#[derive(Clone, Default, Debug)]
#[derive(Clone, Debug)]
pub(crate) struct NameResolution<'ra> {
/// Single imports that may define the name in the namespace.
/// Imports are arena-allocated, so it's ok to use pointers as keys.
pub single_imports: FxIndexSet<Import<'ra>>,
/// The non-glob declaration for this name, if it is known to exist.
pub non_glob_decl: Option<Decl<'ra>>,
pub non_glob_decl: Option<Decl<'ra>> = None,
/// The glob declaration for this name, if it is known to exist.
pub glob_decl: Option<Decl<'ra>>,
pub glob_decl: Option<Decl<'ra>> = None,
pub orig_ident_span: Span,
}
impl<'ra> NameResolution<'ra> {
pub(crate) fn new(orig_ident_span: Span) -> Self {
NameResolution { single_imports: FxIndexSet::default(), orig_ident_span, .. }
}
/// Returns the binding for the name if it is known or None if it not known.
pub(crate) fn binding(&self) -> Option<Decl<'ra>> {
self.best_decl().and_then(|binding| {
@ -417,14 +422,15 @@ impl<'ra, 'tcx> Resolver<'ra, 'tcx> {
/// and return existing declaration if there is a collision.
pub(crate) fn try_plant_decl_into_local_module(
&mut self,
ident: Macros20NormalizedIdent,
ident: IdentKey,
orig_ident_span: Span,
ns: Namespace,
decl: Decl<'ra>,
warn_ambiguity: bool,
) -> Result<(), Decl<'ra>> {
let module = decl.parent_module.unwrap();
let res = decl.res();
self.check_reserved_macro_name(ident.0, res);
self.check_reserved_macro_name(ident.name, orig_ident_span, res);
// Even if underscore names cannot be looked up, we still need to add them to modules,
// because they can be fetched by glob imports from those modules, and bring traits
// into scope both directly and through glob imports.
@ -432,46 +438,52 @@ impl<'ra, 'tcx> Resolver<'ra, 'tcx> {
module.underscore_disambiguator.update_unchecked(|d| d + 1);
module.underscore_disambiguator.get()
});
self.update_local_resolution(module, key, warn_ambiguity, |this, resolution| {
if let Some(old_decl) = resolution.best_decl() {
assert_ne!(decl, old_decl);
assert!(!decl.warn_ambiguity.get());
if res == Res::Err && old_decl.res() != Res::Err {
// Do not override real declarations with `Res::Err`s from error recovery.
return Ok(());
}
match (old_decl.is_glob_import(), decl.is_glob_import()) {
(true, true) => {
resolution.glob_decl =
Some(this.select_glob_decl(old_decl, decl, warn_ambiguity));
self.update_local_resolution(
module,
key,
orig_ident_span,
warn_ambiguity,
|this, resolution| {
if let Some(old_decl) = resolution.best_decl() {
assert_ne!(decl, old_decl);
assert!(!decl.warn_ambiguity.get());
if res == Res::Err && old_decl.res() != Res::Err {
// Do not override real declarations with `Res::Err`s from error recovery.
return Ok(());
}
(old_glob @ true, false) | (old_glob @ false, true) => {
let (glob_decl, non_glob_decl) =
if old_glob { (old_decl, decl) } else { (decl, old_decl) };
resolution.non_glob_decl = Some(non_glob_decl);
if let Some(old_glob_decl) = resolution.glob_decl
&& old_glob_decl != glob_decl
{
match (old_decl.is_glob_import(), decl.is_glob_import()) {
(true, true) => {
resolution.glob_decl =
Some(this.select_glob_decl(old_glob_decl, glob_decl, false));
} else {
resolution.glob_decl = Some(glob_decl);
Some(this.select_glob_decl(old_decl, decl, warn_ambiguity));
}
(old_glob @ true, false) | (old_glob @ false, true) => {
let (glob_decl, non_glob_decl) =
if old_glob { (old_decl, decl) } else { (decl, old_decl) };
resolution.non_glob_decl = Some(non_glob_decl);
if let Some(old_glob_decl) = resolution.glob_decl
&& old_glob_decl != glob_decl
{
resolution.glob_decl =
Some(this.select_glob_decl(old_glob_decl, glob_decl, false));
} else {
resolution.glob_decl = Some(glob_decl);
}
}
(false, false) => {
return Err(old_decl);
}
}
(false, false) => {
return Err(old_decl);
} else {
if decl.is_glob_import() {
resolution.glob_decl = Some(decl);
} else {
resolution.non_glob_decl = Some(decl);
}
}
} else {
if decl.is_glob_import() {
resolution.glob_decl = Some(decl);
} else {
resolution.non_glob_decl = Some(decl);
}
}
Ok(())
})
Ok(())
},
)
}
// Use `f` to mutate the resolution of the name in the module.
@ -480,6 +492,7 @@ impl<'ra, 'tcx> Resolver<'ra, 'tcx> {
&mut self,
module: Module<'ra>,
key: BindingKey,
orig_ident_span: Span,
warn_ambiguity: bool,
f: F,
) -> T
@ -489,7 +502,9 @@ impl<'ra, 'tcx> Resolver<'ra, 'tcx> {
// Ensure that `resolution` isn't borrowed when defining in the module's glob importers,
// during which the resolution might end up getting re-defined via a glob cycle.
let (binding, t, warn_ambiguity) = {
let resolution = &mut *self.resolution_or_default(module, key).borrow_mut_unchecked();
let resolution = &mut *self
.resolution_or_default(module, key, orig_ident_span)
.borrow_mut_unchecked();
let old_decl = resolution.binding();
let t = f(self, resolution);
@ -510,7 +525,10 @@ impl<'ra, 'tcx> Resolver<'ra, 'tcx> {
// Define or update `binding` in `module`s glob importers.
for import in glob_importers.iter() {
let mut ident = key.ident;
let scope = match ident.0.span.reverse_glob_adjust(module.expansion, import.span) {
let scope = match ident
.ctxt
.update_unchecked(|ctxt| ctxt.reverse_glob_adjust(module.expansion, import.span))
{
Some(Some(def)) => self.expn_def_scope(def),
Some(None) => import.parent_scope.module,
None => continue,
@ -519,6 +537,7 @@ impl<'ra, 'tcx> Resolver<'ra, 'tcx> {
let import_decl = self.new_import_decl(binding, *import);
let _ = self.try_plant_decl_into_local_module(
ident,
orig_ident_span,
key.ns,
import_decl,
warn_ambiguity,
@ -540,14 +559,26 @@ impl<'ra, 'tcx> Resolver<'ra, 'tcx> {
let dummy_decl = self.new_import_decl(dummy_decl, import);
self.per_ns(|this, ns| {
let module = import.parent_scope.module;
let ident = Macros20NormalizedIdent::new(target);
let _ = this.try_plant_decl_into_local_module(ident, ns, dummy_decl, false);
let ident = IdentKey::new(target);
let _ = this.try_plant_decl_into_local_module(
ident,
target.span,
ns,
dummy_decl,
false,
);
// Don't remove underscores from `single_imports`, they were never added.
if target.name != kw::Underscore {
let key = BindingKey::new(ident, ns);
this.update_local_resolution(module, key, false, |_, resolution| {
resolution.single_imports.swap_remove(&import);
})
this.update_local_resolution(
module,
key,
target.span,
false,
|_, resolution| {
resolution.single_imports.swap_remove(&import);
},
)
}
});
self.record_use(target, dummy_decl, Used::Other);
@ -687,7 +718,7 @@ impl<'ra, 'tcx> Resolver<'ra, 'tcx> {
import.root_id,
import.root_span,
BuiltinLintDiag::AmbiguousGlobReexports {
name: key.ident.to_string(),
name: key.ident.name.to_string(),
namespace: key.ns.descr().to_string(),
first_reexport_span: import.root_span,
duplicate_reexport_span: amb_binding.span,
@ -922,7 +953,8 @@ impl<'ra, 'tcx> Resolver<'ra, 'tcx> {
// We need the `target`, `source` can be extracted.
let import_decl = this.new_import_decl(binding, import);
this.get_mut_unchecked().plant_decl_into_local_module(
Macros20NormalizedIdent::new(target),
IdentKey::new(target),
target.span,
ns,
import_decl,
);
@ -931,10 +963,11 @@ impl<'ra, 'tcx> Resolver<'ra, 'tcx> {
Err(Determinacy::Determined) => {
// Don't remove underscores from `single_imports`, they were never added.
if target.name != kw::Underscore {
let key = BindingKey::new(Macros20NormalizedIdent::new(target), ns);
let key = BindingKey::new(IdentKey::new(target), ns);
this.get_mut_unchecked().update_local_resolution(
parent,
key,
target.span,
false,
|_, resolution| {
resolution.single_imports.swap_remove(&import);
@ -1531,15 +1564,19 @@ impl<'ra, 'tcx> Resolver<'ra, 'tcx> {
.borrow()
.iter()
.filter_map(|(key, resolution)| {
resolution.borrow().binding().map(|binding| (*key, binding))
let resolution = resolution.borrow();
resolution.binding().map(|binding| (*key, binding, resolution.orig_ident_span))
})
.collect::<Vec<_>>();
for (mut key, binding) in bindings {
let scope = match key.ident.0.span.reverse_glob_adjust(module.expansion, import.span) {
Some(Some(def)) => self.expn_def_scope(def),
Some(None) => import.parent_scope.module,
None => continue,
};
for (mut key, binding, orig_ident_span) in bindings {
let scope =
match key.ident.ctxt.update_unchecked(|ctxt| {
ctxt.reverse_glob_adjust(module.expansion, import.span)
}) {
Some(Some(def)) => self.expn_def_scope(def),
Some(None) => import.parent_scope.module,
None => continue,
};
if self.is_accessible_from(binding.vis(), scope) {
let import_decl = self.new_import_decl(binding, import);
let warn_ambiguity = self
@ -1548,6 +1585,7 @@ impl<'ra, 'tcx> Resolver<'ra, 'tcx> {
.is_some_and(|binding| binding.warn_ambiguity_recursive());
let _ = self.try_plant_decl_into_local_module(
key.ident,
orig_ident_span,
key.ns,
import_decl,
warn_ambiguity,
@ -1575,19 +1613,16 @@ impl<'ra, 'tcx> Resolver<'ra, 'tcx> {
let mut children = Vec::new();
let mut ambig_children = Vec::new();
module.for_each_child(self, |this, ident, _, binding| {
module.for_each_child(self, |this, ident, orig_ident_span, _, binding| {
let res = binding.res().expect_non_local();
if res != def::Res::Err {
let child = |reexport_chain| ModChild {
ident: ident.0,
res,
vis: binding.vis(),
reexport_chain,
};
let ident = ident.orig(orig_ident_span);
let child =
|reexport_chain| ModChild { ident, res, vis: binding.vis(), reexport_chain };
if let Some((ambig_binding1, ambig_binding2)) = binding.descent_to_ambiguity() {
let main = child(ambig_binding1.reexport_chain(this));
let second = ModChild {
ident: ident.0,
ident,
res: ambig_binding2.res().expect_non_local(),
vis: ambig_binding2.vis(),
reexport_chain: ambig_binding2.reexport_chain(this),

View file

@ -37,15 +37,15 @@ use rustc_session::config::{CrateType, ResolveDocLinks};
use rustc_session::lint;
use rustc_session::parse::feature_err;
use rustc_span::source_map::{Spanned, respan};
use rustc_span::{BytePos, DUMMY_SP, Ident, Macros20NormalizedIdent, Span, Symbol, kw, sym};
use rustc_span::{BytePos, DUMMY_SP, Ident, Span, Symbol, kw, sym};
use smallvec::{SmallVec, smallvec};
use thin_vec::ThinVec;
use tracing::{debug, instrument, trace};
use crate::{
BindingError, BindingKey, Decl, Finalize, LateDecl, Module, ModuleOrUniformRoot, ParentScope,
PathResult, ResolutionError, Resolver, Segment, Stage, TyCtxt, UseError, Used, errors,
path_names_to_string, rustdoc,
BindingError, BindingKey, Decl, Finalize, IdentKey, LateDecl, Module, ModuleOrUniformRoot,
ParentScope, PathResult, ResolutionError, Resolver, Segment, Stage, TyCtxt, UseError, Used,
errors, path_names_to_string, rustdoc,
};
mod diagnostics;
@ -3650,7 +3650,7 @@ impl<'a, 'ast, 'ra, 'tcx> LateResolutionVisitor<'a, 'ast, 'ra, 'tcx> {
return;
};
ident.span.normalize_to_macros_2_0_and_adjust(module.expansion);
let key = BindingKey::new(Macros20NormalizedIdent::new(ident), ns);
let key = BindingKey::new(IdentKey::new(ident), ns);
let mut decl = self.r.resolution(module, key).and_then(|r| r.best_decl());
debug!(?decl);
if decl.is_none() {
@ -3661,7 +3661,7 @@ impl<'a, 'ast, 'ra, 'tcx> LateResolutionVisitor<'a, 'ast, 'ra, 'tcx> {
TypeNS => ValueNS,
_ => ns,
};
let key = BindingKey::new(Macros20NormalizedIdent::new(ident), ns);
let key = BindingKey::new(IdentKey::new(ident), ns);
decl = self.r.resolution(module, key).and_then(|r| r.best_decl());
debug!(?decl);
}

View file

@ -1620,22 +1620,24 @@ impl<'ast, 'ra, 'tcx> LateResolutionVisitor<'_, 'ast, 'ra, 'tcx> {
if let PathResult::Module(ModuleOrUniformRoot::Module(module)) =
self.resolve_path(mod_path, None, None, *source)
{
let targets: Vec<_> =
self.r
.resolutions(module)
.borrow()
.iter()
.filter_map(|(key, resolution)| {
resolution.borrow().best_decl().map(|binding| binding.res()).and_then(
|res| if filter_fn(res) { Some((*key, res)) } else { None },
)
let targets: Vec<_> = self
.r
.resolutions(module)
.borrow()
.iter()
.filter_map(|(key, resolution)| {
let resolution = resolution.borrow();
resolution.best_decl().map(|binding| binding.res()).and_then(|res| {
if filter_fn(res) {
Some((key.ident.name, resolution.orig_ident_span, res))
} else {
None
}
})
.collect();
if let [target] = targets.as_slice() {
return Some(TypoSuggestion::single_item_from_ident(
target.0.ident.0,
target.1,
));
})
.collect();
if let &[(name, orig_ident_span, res)] = targets.as_slice() {
return Some(TypoSuggestion::single_item(name, orig_ident_span, res));
}
}
}
@ -2662,7 +2664,7 @@ impl<'ast, 'ra, 'tcx> LateResolutionVisitor<'_, 'ast, 'ra, 'tcx> {
// Locals and type parameters
for (ident, &res) in &rib.bindings {
if filter_fn(res) && ident.span.ctxt() == rib_ctxt {
names.push(TypoSuggestion::typo_from_ident(*ident, res));
names.push(TypoSuggestion::new(ident.name, ident.span, res));
}
}
@ -2824,7 +2826,7 @@ impl<'ast, 'ra, 'tcx> LateResolutionVisitor<'_, 'ast, 'ra, 'tcx> {
break;
}
in_module.for_each_child(self.r, |r, ident, _, name_binding| {
in_module.for_each_child(self.r, |r, ident, orig_ident_span, _, name_binding| {
// abort if the module is already found or if name_binding is private external
if result.is_some() || !name_binding.vis().is_visible_locally() {
return;
@ -2832,7 +2834,7 @@ impl<'ast, 'ra, 'tcx> LateResolutionVisitor<'_, 'ast, 'ra, 'tcx> {
if let Some(module_def_id) = name_binding.res().module_like_def_id() {
// form the path
let mut path_segments = path_segments.clone();
path_segments.push(ast::PathSegment::from_ident(ident.0));
path_segments.push(ast::PathSegment::from_ident(ident.orig(orig_ident_span)));
let doc_visible = doc_visible
&& (module_def_id.is_local() || !r.tcx.is_doc_hidden(module_def_id));
if module_def_id == def_id {
@ -2868,10 +2870,10 @@ impl<'ast, 'ra, 'tcx> LateResolutionVisitor<'_, 'ast, 'ra, 'tcx> {
fn collect_enum_ctors(&self, def_id: DefId) -> Option<Vec<(Path, DefId, CtorKind)>> {
self.find_module(def_id).map(|(enum_module, enum_import_suggestion)| {
let mut variants = Vec::new();
enum_module.for_each_child(self.r, |_, ident, _, name_binding| {
enum_module.for_each_child(self.r, |_, ident, orig_ident_span, _, name_binding| {
if let Res::Def(DefKind::Ctor(CtorOf::Variant, kind), def_id) = name_binding.res() {
let mut segms = enum_import_suggestion.path.segments.clone();
segms.push(ast::PathSegment::from_ident(ident.0));
segms.push(ast::PathSegment::from_ident(ident.orig(orig_ident_span)));
let path = Path { span: name_binding.span, segments: segms, tokens: None };
variants.push((path, def_id, kind));
}

View file

@ -33,6 +33,7 @@ use std::sync::Arc;
use diagnostics::{ImportSuggestion, LabelSuggestion, Suggestion};
use effective_visibilities::EffectiveVisibilitiesVisitor;
use errors::{ParamKindInEnumDiscriminant, ParamKindInNonTrivialAnonConst};
use hygiene::Macros20NormalizedSyntaxContext;
use imports::{Import, ImportData, ImportKind, NameResolution, PendingDecl};
use late::{
ForwardGenericParamBanReason, HasGenericParams, PathSource, PatternSource,
@ -75,7 +76,7 @@ use rustc_query_system::ich::StableHashingContext;
use rustc_session::config::CrateType;
use rustc_session::lint::builtin::PRIVATE_MACRO_USE;
use rustc_span::hygiene::{ExpnId, LocalExpnId, MacroKind, SyntaxContext, Transparency};
use rustc_span::{DUMMY_SP, Ident, Macros20NormalizedIdent, Span, Symbol, kw, sym};
use rustc_span::{DUMMY_SP, Ident, Span, Symbol, kw, sym};
use smallvec::{SmallVec, smallvec};
use tracing::debug;
@ -552,6 +553,40 @@ impl ModuleKind {
}
}
/// Combination of a symbol and its macros 2.0 normalized hygiene context.
/// Used as a key in various kinds of name containers, including modules (as a part of slightly
/// larger `BindingKey`) and preludes.
///
/// Often passed around together with `orig_ident_span: Span`, which is an unnormalized span
/// of the original `Ident` from which `IdentKey` was obtained. This span is not used in map keys,
/// but used in a number of other scenarios - diagnostics, edition checks, `allow_unstable` checks
/// and similar. This is required because macros 2.0 normalization is lossy and the normalized
/// spans / syntax contexts no longer contain parts of macro backtraces, while the original span
/// contains everything.
#[derive(Clone, Copy, PartialEq, Eq, Hash, Debug)]
struct IdentKey {
name: Symbol,
ctxt: Macros20NormalizedSyntaxContext,
}
impl IdentKey {
#[inline]
fn new(ident: Ident) -> IdentKey {
IdentKey { name: ident.name, ctxt: Macros20NormalizedSyntaxContext::new(ident.span.ctxt()) }
}
#[inline]
fn with_root_ctxt(name: Symbol) -> Self {
let ctxt = Macros20NormalizedSyntaxContext::new_unchecked(SyntaxContext::root());
IdentKey { name, ctxt }
}
#[inline]
fn orig(self, orig_ident_span: Span) -> Ident {
Ident::new(self.name, orig_ident_span)
}
}
/// A key that identifies a binding in a given `Module`.
///
/// Multiple bindings in the same module can have the same key (in a valid
@ -560,7 +595,7 @@ impl ModuleKind {
struct BindingKey {
/// The identifier for the binding, always the `normalize_to_macros_2_0` version of the
/// identifier.
ident: Macros20NormalizedIdent,
ident: IdentKey,
ns: Namespace,
/// When we add an underscore binding (with ident `_`) to some module, this field has
/// a non-zero value that uniquely identifies this binding in that module.
@ -571,12 +606,12 @@ struct BindingKey {
}
impl BindingKey {
fn new(ident: Macros20NormalizedIdent, ns: Namespace) -> Self {
fn new(ident: IdentKey, ns: Namespace) -> Self {
BindingKey { ident, ns, disambiguator: 0 }
}
fn new_disambiguated(
ident: Macros20NormalizedIdent,
ident: IdentKey,
ns: Namespace,
disambiguator: impl FnOnce() -> u32,
) -> BindingKey {
@ -623,16 +658,7 @@ struct ModuleData<'ra> {
/// Used to memoize the traits in this module for faster searches through all traits in scope.
traits: CmRefCell<
Option<
Box<
[(
Macros20NormalizedIdent,
Decl<'ra>,
Option<Module<'ra>>,
bool, /* lint ambiguous */
)],
>,
>,
Option<Box<[(Symbol, Decl<'ra>, Option<Module<'ra>>, bool /* lint ambiguous */)]>>,
>,
/// Span of the module itself. Used for error reporting.
@ -699,11 +725,12 @@ impl<'ra> Module<'ra> {
fn for_each_child<'tcx, R: AsRef<Resolver<'ra, 'tcx>>>(
self,
resolver: &R,
mut f: impl FnMut(&R, Macros20NormalizedIdent, Namespace, Decl<'ra>),
mut f: impl FnMut(&R, IdentKey, Span, Namespace, Decl<'ra>),
) {
for (key, name_resolution) in resolver.as_ref().resolutions(self).borrow().iter() {
if let Some(decl) = name_resolution.borrow().best_decl() {
f(resolver, key.ident, key.ns, decl);
let name_resolution = name_resolution.borrow();
if let Some(decl) = name_resolution.best_decl() {
f(resolver, key.ident, name_resolution.orig_ident_span, key.ns, decl);
}
}
}
@ -711,11 +738,12 @@ impl<'ra> Module<'ra> {
fn for_each_child_mut<'tcx, R: AsMut<Resolver<'ra, 'tcx>>>(
self,
resolver: &mut R,
mut f: impl FnMut(&mut R, Macros20NormalizedIdent, Namespace, Decl<'ra>),
mut f: impl FnMut(&mut R, IdentKey, Span, Namespace, Decl<'ra>),
) {
for (key, name_resolution) in resolver.as_mut().resolutions(self).borrow().iter() {
if let Some(decl) = name_resolution.borrow().best_decl() {
f(resolver, key.ident, key.ns, decl);
let name_resolution = name_resolution.borrow();
if let Some(decl) = name_resolution.best_decl() {
f(resolver, key.ident, name_resolution.orig_ident_span, key.ns, decl);
}
}
}
@ -725,13 +753,13 @@ impl<'ra> Module<'ra> {
let mut traits = self.traits.borrow_mut(resolver.as_ref());
if traits.is_none() {
let mut collected_traits = Vec::new();
self.for_each_child(resolver, |r, name, ns, binding| {
self.for_each_child(resolver, |r, ident, _, ns, binding| {
if ns != TypeNS {
return;
}
if let Res::Def(DefKind::Trait | DefKind::TraitAlias, def_id) = binding.res() {
collected_traits.push((
name,
ident.name,
binding,
r.as_ref().get_module(def_id),
binding.is_ambiguity_recursive(),
@ -1081,14 +1109,14 @@ struct ExternPreludeEntry<'ra> {
/// Name declaration from an `extern crate` item.
/// The boolean flag is true is `item_decl` is non-redundant, happens either when
/// `flag_decl` is `None`, or when `extern crate` introducing `item_decl` used renaming.
item_decl: Option<(Decl<'ra>, /* introduced by item */ bool)>,
item_decl: Option<(Decl<'ra>, Span, /* introduced by item */ bool)>,
/// Name declaration from an `--extern` flag, lazily populated on first use.
flag_decl: Option<CacheCell<(PendingDecl<'ra>, /* finalized */ bool)>>,
}
impl ExternPreludeEntry<'_> {
fn introduced_by_item(&self) -> bool {
matches!(self.item_decl, Some((_, true)))
matches!(self.item_decl, Some((.., true)))
}
fn flag() -> Self {
@ -1097,11 +1125,18 @@ impl ExternPreludeEntry<'_> {
flag_decl: Some(CacheCell::new((PendingDecl::Pending, false))),
}
}
/// Span of the `extern crate` item that introduced this prelude entry,
/// or `DUMMY_SP` when the entry comes only from an `--extern` flag
/// (i.e. `item_decl` is `None`).
fn span(&self) -> Span {
match self.item_decl {
Some((_, span, _)) => span,
None => DUMMY_SP,
}
}
}
struct DeriveData {
resolutions: Vec<DeriveResolution>,
helper_attrs: Vec<(usize, Macros20NormalizedIdent)>,
helper_attrs: Vec<(usize, IdentKey, Span)>,
has_derive_copy: bool,
}
@ -1137,7 +1172,7 @@ pub struct Resolver<'ra, 'tcx> {
assert_speculative: bool,
prelude: Option<Module<'ra>> = None,
extern_prelude: FxIndexMap<Macros20NormalizedIdent, ExternPreludeEntry<'ra>>,
extern_prelude: FxIndexMap<IdentKey, ExternPreludeEntry<'ra>>,
/// N.B., this is used only for better diagnostics, not name resolution itself.
field_names: LocalDefIdMap<Vec<Ident>> = Default::default(),
@ -1228,8 +1263,8 @@ pub struct Resolver<'ra, 'tcx> {
dummy_decl: Decl<'ra>,
builtin_type_decls: FxHashMap<Symbol, Decl<'ra>>,
builtin_attr_decls: FxHashMap<Symbol, Decl<'ra>>,
registered_tool_decls: FxHashMap<Ident, Decl<'ra>>,
macro_names: FxHashSet<Ident> = default::fx_hash_set(),
registered_tool_decls: FxHashMap<IdentKey, Decl<'ra>>,
macro_names: FxHashSet<IdentKey> = default::fx_hash_set(),
builtin_macros: FxHashMap<Symbol, SyntaxExtensionKind> = default::fx_hash_map(),
registered_tools: &'tcx RegisteredTools,
macro_use_prelude: FxIndexMap<Symbol, Decl<'ra>>,
@ -1265,7 +1300,7 @@ pub struct Resolver<'ra, 'tcx> {
/// `macro_rules` scopes produced by `macro_rules` item definitions.
macro_rules_scopes: FxHashMap<LocalDefId, MacroRulesScopeRef<'ra>> = default::fx_hash_map(),
/// Helper attributes that are in scope for the given expansion.
helper_attrs: FxHashMap<LocalExpnId, Vec<(Macros20NormalizedIdent, Decl<'ra>)>> = default::fx_hash_map(),
helper_attrs: FxHashMap<LocalExpnId, Vec<(IdentKey, Span, Decl<'ra>)>> = default::fx_hash_map(),
/// Ready or in-progress results of resolving paths inside the `#[derive(...)]` attribute
/// with the given `ExpnId`.
derive_data: FxHashMap<LocalExpnId, DeriveData> = default::fx_hash_map(),
@ -1409,8 +1444,11 @@ impl<'ra> ResolverArenas<'ra> {
fn alloc_import(&'ra self, import: ImportData<'ra>) -> Import<'ra> {
Interned::new_unchecked(self.imports.alloc(import))
}
fn alloc_name_resolution(&'ra self) -> &'ra CmRefCell<NameResolution<'ra>> {
self.name_resolutions.alloc(Default::default())
fn alloc_name_resolution(
&'ra self,
orig_ident_span: Span,
) -> &'ra CmRefCell<NameResolution<'ra>> {
self.name_resolutions.alloc(CmRefCell::new(NameResolution::new(orig_ident_span)))
}
fn alloc_macro_rules_scope(&'ra self, scope: MacroRulesScope<'ra>) -> MacroRulesScopeRef<'ra> {
self.dropless.alloc(CacheCell::new(scope))
@ -1580,7 +1618,7 @@ impl<'ra, 'tcx> Resolver<'ra, 'tcx> {
&& let name = Symbol::intern(name)
&& name.can_be_raw()
{
let ident = Macros20NormalizedIdent::with_dummy_span(name);
let ident = IdentKey::with_root_ctxt(name);
Some((ident, ExternPreludeEntry::flag()))
} else {
None
@ -1589,10 +1627,10 @@ impl<'ra, 'tcx> Resolver<'ra, 'tcx> {
.collect();
if !attr::contains_name(attrs, sym::no_core) {
let ident = Macros20NormalizedIdent::with_dummy_span(sym::core);
let ident = IdentKey::with_root_ctxt(sym::core);
extern_prelude.insert(ident, ExternPreludeEntry::flag());
if !attr::contains_name(attrs, sym::no_std) {
let ident = Macros20NormalizedIdent::with_dummy_span(sym::std);
let ident = IdentKey::with_root_ctxt(sym::std);
extern_prelude.insert(ident, ExternPreludeEntry::flag());
}
}
@ -1637,10 +1675,10 @@ impl<'ra, 'tcx> Resolver<'ra, 'tcx> {
.collect(),
registered_tool_decls: registered_tools
.iter()
.map(|ident| {
.map(|&ident| {
let res = Res::ToolMod;
let decl = arenas.new_pub_def_decl(res, ident.span, LocalExpnId::ROOT);
(*ident, decl)
(IdentKey::new(ident), decl)
})
.collect(),
registered_tools,
@ -1940,7 +1978,7 @@ impl<'ra, 'tcx> Resolver<'ra, 'tcx> {
{
if self.trait_may_have_item(trait_module, assoc_item) {
let def_id = trait_binding.res().def_id();
let import_ids = self.find_transitive_imports(&trait_binding.kind, trait_name.0);
let import_ids = self.find_transitive_imports(&trait_binding.kind, trait_name);
found_traits.push(TraitCandidate { def_id, import_ids, lint_ambiguous });
}
}
@ -1969,7 +2007,7 @@ impl<'ra, 'tcx> Resolver<'ra, 'tcx> {
fn find_transitive_imports(
&mut self,
mut kind: &DeclKind<'_>,
trait_name: Ident,
trait_name: Symbol,
) -> SmallVec<[LocalDefId; 1]> {
let mut import_ids = smallvec![];
while let DeclKind::Import { import, source_decl, .. } = kind {
@ -2004,11 +2042,12 @@ impl<'ra, 'tcx> Resolver<'ra, 'tcx> {
&self,
module: Module<'ra>,
key: BindingKey,
orig_ident_span: Span,
) -> &'ra CmRefCell<NameResolution<'ra>> {
self.resolutions(module)
.borrow_mut_unchecked()
.entry(key)
.or_insert_with(|| self.arenas.alloc_name_resolution())
.or_insert_with(|| self.arenas.alloc_name_resolution(orig_ident_span))
}
/// Test if AmbiguityError ambi is any identical to any one inside ambiguity_errors
@ -2082,8 +2121,9 @@ impl<'ra, 'tcx> Resolver<'ra, 'tcx> {
// Avoid marking `extern crate` items that refer to a name from extern prelude,
// but not introduce it, as used if they are accessed from lexical scope.
if used == Used::Scope
&& let Some(entry) = self.extern_prelude.get(&Macros20NormalizedIdent::new(ident))
&& entry.item_decl == Some((used_decl, false))
&& let Some(entry) = self.extern_prelude.get(&IdentKey::new(ident))
&& let Some((item_decl, _, false)) = entry.item_decl
&& item_decl == used_decl
{
return;
}
@ -2094,7 +2134,7 @@ impl<'ra, 'tcx> Resolver<'ra, 'tcx> {
if let Some(id) = import.id() {
self.used_imports.insert(id);
}
self.add_to_glob_map(import, ident);
self.add_to_glob_map(import, ident.name);
self.record_use_inner(
ident,
source_decl,
@ -2105,10 +2145,10 @@ impl<'ra, 'tcx> Resolver<'ra, 'tcx> {
}
#[inline]
fn add_to_glob_map(&mut self, import: Import<'_>, ident: Ident) {
fn add_to_glob_map(&mut self, import: Import<'_>, name: Symbol) {
if let ImportKind::Glob { id, .. } = import.kind {
let def_id = self.local_def_id(id);
self.glob_map.entry(def_id).or_default().insert(ident.name);
self.glob_map.entry(def_id).or_default().insert(name);
}
}
@ -2230,13 +2270,14 @@ impl<'ra, 'tcx> Resolver<'ra, 'tcx> {
fn extern_prelude_get_item<'r>(
mut self: CmResolver<'r, 'ra, 'tcx>,
ident: Macros20NormalizedIdent,
ident: IdentKey,
orig_ident_span: Span,
finalize: bool,
) -> Option<Decl<'ra>> {
let entry = self.extern_prelude.get(&ident);
entry.and_then(|entry| entry.item_decl).map(|(decl, _)| {
entry.and_then(|entry| entry.item_decl).map(|(decl, ..)| {
if finalize {
self.get_mut().record_use(ident.0, decl, Used::Scope);
self.get_mut().record_use(ident.orig(orig_ident_span), decl, Used::Scope);
}
decl
})
@ -2244,7 +2285,8 @@ impl<'ra, 'tcx> Resolver<'ra, 'tcx> {
fn extern_prelude_get_flag(
&self,
ident: Macros20NormalizedIdent,
ident: IdentKey,
orig_ident_span: Span,
finalize: bool,
) -> Option<Decl<'ra>> {
let entry = self.extern_prelude.get(&ident);
@ -2253,14 +2295,18 @@ impl<'ra, 'tcx> Resolver<'ra, 'tcx> {
let decl = match pending_decl {
PendingDecl::Ready(decl) => {
if finalize && !finalized {
self.cstore_mut().process_path_extern(self.tcx, ident.name, ident.span);
self.cstore_mut().process_path_extern(
self.tcx,
ident.name,
orig_ident_span,
);
}
decl
}
PendingDecl::Pending => {
debug_assert!(!finalized);
let crate_id = if finalize {
self.cstore_mut().process_path_extern(self.tcx, ident.name, ident.span)
self.cstore_mut().process_path_extern(self.tcx, ident.name, orig_ident_span)
} else {
self.cstore_mut().maybe_process_path_extern(self.tcx, ident.name)
};
@ -2675,3 +2721,40 @@ mod ref_mut {
}
}
}
mod hygiene {
//! Helpers for keeping macros-2.0-normalized syntax contexts well-typed,
//! so normalized and non-normalized contexts cannot be mixed up by accident.
use rustc_span::SyntaxContext;
/// A newtype around `SyntaxContext` that can only keep contexts produced by
/// [SyntaxContext::normalize_to_macros_2_0].
#[derive(Clone, Copy, PartialEq, Eq, Hash, Debug)]
pub(crate) struct Macros20NormalizedSyntaxContext(SyntaxContext);
impl Macros20NormalizedSyntaxContext {
/// Normalizes `ctxt` before wrapping it, so the invariant always holds.
#[inline]
pub(crate) fn new(ctxt: SyntaxContext) -> Macros20NormalizedSyntaxContext {
Macros20NormalizedSyntaxContext(ctxt.normalize_to_macros_2_0())
}
/// Wraps `ctxt` without normalizing; the caller must pass an
/// already-normalized context (checked only via `debug_assert`).
#[inline]
pub(crate) fn new_unchecked(ctxt: SyntaxContext) -> Macros20NormalizedSyntaxContext {
debug_assert_eq!(ctxt, ctxt.normalize_to_macros_2_0());
Macros20NormalizedSyntaxContext(ctxt)
}
/// Mutates the inner context in place and returns the closure's result.
/// The passed closure must preserve the context's normalized-ness
/// (checked only via `debug_assert` after the call).
#[inline]
pub(crate) fn update_unchecked<R>(&mut self, f: impl FnOnce(&mut SyntaxContext) -> R) -> R {
let ret = f(&mut self.0);
debug_assert_eq!(self.0, self.0.normalize_to_macros_2_0());
ret
}
}
/// Read-only access to the wrapped `SyntaxContext` (e.g. `ctxt.outer_expn()`
/// instead of `ctxt.0.outer_expn()`). Mutation still goes through
/// `update_unchecked` so the invariant cannot be bypassed.
impl std::ops::Deref for Macros20NormalizedSyntaxContext {
type Target = SyntaxContext;
fn deref(&self) -> &Self::Target {
&self.0
}
}
}

View file

@ -29,16 +29,17 @@ use rustc_session::parse::feature_err;
use rustc_span::edit_distance::find_best_match_for_name;
use rustc_span::edition::Edition;
use rustc_span::hygiene::{self, AstPass, ExpnData, ExpnKind, LocalExpnId, MacroKind};
use rustc_span::{DUMMY_SP, Ident, Macros20NormalizedIdent, Span, Symbol, kw, sym};
use rustc_span::{DUMMY_SP, Ident, Span, Symbol, kw, sym};
use crate::Namespace::*;
use crate::errors::{
self, AddAsNonDerive, CannotDetermineMacroResolution, CannotFindIdentInThisScope,
MacroExpectedFound, RemoveSurroundingDerive,
};
use crate::hygiene::Macros20NormalizedSyntaxContext;
use crate::imports::Import;
use crate::{
BindingKey, CacheCell, CmResolver, Decl, DeclKind, DeriveData, Determinacy, Finalize,
BindingKey, CacheCell, CmResolver, Decl, DeclKind, DeriveData, Determinacy, Finalize, IdentKey,
InvocationParent, MacroData, ModuleKind, ModuleOrUniformRoot, ParentScope, PathResult,
ResolutionError, Resolver, ScopeSet, Segment, Used,
};
@ -52,7 +53,8 @@ pub(crate) struct MacroRulesDecl<'ra> {
pub(crate) decl: Decl<'ra>,
/// `macro_rules` scope into which the `macro_rules` item was planted.
pub(crate) parent_macro_rules_scope: MacroRulesScopeRef<'ra>,
pub(crate) ident: Macros20NormalizedIdent,
pub(crate) ident: IdentKey,
pub(crate) orig_ident_span: Span,
}
/// The scope introduced by a `macro_rules!` macro.
@ -412,9 +414,12 @@ impl<'ra, 'tcx> ResolverExpand for Resolver<'ra, 'tcx> {
Ok((Some(ext), _)) => {
if !ext.helper_attrs.is_empty() {
let span = resolution.path.segments.last().unwrap().ident.span;
entry.helper_attrs.extend(ext.helper_attrs.iter().map(|name| {
(i, Macros20NormalizedIdent::new(Ident::new(*name, span)))
}));
let ctxt = Macros20NormalizedSyntaxContext::new(span.ctxt());
entry.helper_attrs.extend(
ext.helper_attrs
.iter()
.map(|&name| (i, IdentKey { name, ctxt }, span)),
);
}
entry.has_derive_copy |= ext.builtin_name == Some(sym::Copy);
ext
@ -430,13 +435,14 @@ impl<'ra, 'tcx> ResolverExpand for Resolver<'ra, 'tcx> {
}
}
// Sort helpers in a stable way independent from the derive resolution order.
entry.helper_attrs.sort_by_key(|(i, _)| *i);
entry.helper_attrs.sort_by_key(|(i, ..)| *i);
let helper_attrs = entry
.helper_attrs
.iter()
.map(|(_, ident)| {
.map(|&(_, ident, orig_ident_span)| {
let res = Res::NonMacroAttr(NonMacroAttrKind::DeriveHelper);
(*ident, self.arenas.new_pub_def_decl(res, ident.span, expn_id))
let decl = self.arenas.new_pub_def_decl(res, orig_ident_span, expn_id);
(ident, orig_ident_span, decl)
})
.collect();
self.helper_attrs.insert(expn_id, helper_attrs);
@ -532,14 +538,14 @@ impl<'ra, 'tcx> ResolverExpand for Resolver<'ra, 'tcx> {
}
let mut idents = Vec::new();
target_trait.for_each_child(self, |this, ident, ns, _binding| {
target_trait.for_each_child(self, |this, ident, orig_ident_span, ns, _binding| {
// FIXME: Adjust hygiene for idents from globs, like for glob imports.
if let Some(overriding_keys) = this.impl_binding_keys.get(&impl_def_id)
&& overriding_keys.contains(&BindingKey::new(ident, ns))
{
// The name is overridden, do not produce it from the glob delegation.
} else {
idents.push((ident.0, None));
idents.push((ident.orig(orig_ident_span), None));
}
});
Ok(idents)
@ -1145,14 +1151,13 @@ impl<'ra, 'tcx> Resolver<'ra, 'tcx> {
}
}
pub(crate) fn check_reserved_macro_name(&self, ident: Ident, res: Res) {
pub(crate) fn check_reserved_macro_name(&self, name: Symbol, span: Span, res: Res) {
// Reserve some names that are not quite covered by the general check
// performed on `Resolver::builtin_attrs`.
if ident.name == sym::cfg || ident.name == sym::cfg_attr {
if name == sym::cfg || name == sym::cfg_attr {
let macro_kinds = self.get_macro(res).map(|macro_data| macro_data.ext.macro_kinds());
if macro_kinds.is_some() && sub_namespace_match(macro_kinds, Some(MacroKind::Attr)) {
self.dcx()
.emit_err(errors::NameReservedInAttributeNamespace { span: ident.span, ident });
self.dcx().emit_err(errors::NameReservedInAttributeNamespace { span, ident: name });
}
}
}

View file

@ -837,11 +837,7 @@ impl SyntaxContext {
/// ```
/// This returns `None` if the context cannot be glob-adjusted.
/// Otherwise, it returns the scope to use when privacy checking (see `adjust` for details).
pub(crate) fn glob_adjust(
&mut self,
expn_id: ExpnId,
glob_span: Span,
) -> Option<Option<ExpnId>> {
pub fn glob_adjust(&mut self, expn_id: ExpnId, glob_span: Span) -> Option<Option<ExpnId>> {
HygieneData::with(|data| {
let mut scope = None;
let mut glob_ctxt = data.normalize_to_macros_2_0(glob_span.ctxt());
@ -865,7 +861,7 @@ impl SyntaxContext {
/// assert!(self.glob_adjust(expansion, glob_ctxt) == Some(privacy_checking_scope));
/// }
/// ```
pub(crate) fn reverse_glob_adjust(
pub fn reverse_glob_adjust(
&mut self,
expn_id: ExpnId,
glob_span: Span,

View file

@ -63,8 +63,7 @@ pub use span_encoding::{DUMMY_SP, Span};
pub mod symbol;
pub use symbol::{
ByteSymbol, Ident, MacroRulesNormalizedIdent, Macros20NormalizedIdent, STDLIB_STABLE_CRATES,
Symbol, kw, sym,
ByteSymbol, Ident, MacroRulesNormalizedIdent, STDLIB_STABLE_CRATES, Symbol, kw, sym,
};
mod analyze_source_file;
@ -1312,30 +1311,6 @@ impl Span {
mark
}
#[inline]
pub fn glob_adjust(&mut self, expn_id: ExpnId, glob_span: Span) -> Option<Option<ExpnId>> {
let mut mark = None;
*self = self.map_ctxt(|mut ctxt| {
mark = ctxt.glob_adjust(expn_id, glob_span);
ctxt
});
mark
}
#[inline]
pub fn reverse_glob_adjust(
&mut self,
expn_id: ExpnId,
glob_span: Span,
) -> Option<Option<ExpnId>> {
let mut mark = None;
*self = self.map_ctxt(|mut ctxt| {
mark = ctxt.reverse_glob_adjust(expn_id, glob_span);
ctxt
});
mark
}
#[inline]
pub fn normalize_to_macros_2_0(self) -> Span {
self.map_ctxt(|ctxt| ctxt.normalize_to_macros_2_0())

View file

@ -3,7 +3,6 @@
//! type, and vice versa.
use std::hash::{Hash, Hasher};
use std::ops::Deref;
use std::{fmt, str};
use rustc_arena::DroplessArena;
@ -186,6 +185,7 @@ symbols! {
AtomicU64,
AtomicU128,
AtomicUsize,
AutoTrait,
BTreeEntry,
BTreeMap,
BTreeSet,
@ -231,6 +231,7 @@ symbols! {
Display,
DoubleEndedIterator,
Duration,
DynTrait,
Encodable,
Encoder,
Enumerate,
@ -1297,6 +1298,7 @@ symbols! {
io_stdout,
irrefutable_let_patterns,
is,
is_auto,
is_val_statically_known,
isa_attribute,
isize,
@ -1750,6 +1752,7 @@ symbols! {
precise_capturing_in_traits,
precise_pointer_size_matching,
precision,
predicates,
pref_align_of,
prefetch_read_data,
prefetch_read_instruction,
@ -2297,6 +2300,7 @@ symbols! {
trace_macros,
track_caller,
trait_alias,
trait_ty,
trait_upcasting,
transmute,
transmute_generic_consts,
@ -2748,7 +2752,7 @@ impl fmt::Display for IdentPrinter {
}
}
/// An newtype around `Ident` that calls [Ident::normalize_to_macro_rules] on
/// A newtype around `Ident` that calls [Ident::normalize_to_macro_rules] on
/// construction for "local variable hygiene" comparisons.
///
/// Use this type when you need to compare identifiers according to macro_rules hygiene.
@ -2775,48 +2779,6 @@ impl fmt::Display for MacroRulesNormalizedIdent {
}
}
/// An newtype around `Ident` that calls [Ident::normalize_to_macros_2_0] on
/// construction for "item hygiene" comparisons.
///
/// Identifiers with same string value become same if they came from the same macro 2.0 macro
/// (e.g., `macro` item, but not `macro_rules` item) and stay different if they came from
/// different macro 2.0 macros.
#[derive(Copy, Clone, Eq, PartialEq, Hash)]
pub struct Macros20NormalizedIdent(pub Ident);
impl Macros20NormalizedIdent {
#[inline]
pub fn new(ident: Ident) -> Self {
Macros20NormalizedIdent(ident.normalize_to_macros_2_0())
}
// dummy_span does not need to be normalized, so we can use `Ident` directly
pub fn with_dummy_span(name: Symbol) -> Self {
Macros20NormalizedIdent(Ident::with_dummy_span(name))
}
}
impl fmt::Debug for Macros20NormalizedIdent {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
fmt::Debug::fmt(&self.0, f)
}
}
impl fmt::Display for Macros20NormalizedIdent {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
fmt::Display::fmt(&self.0, f)
}
}
/// By impl Deref, we can access the wrapped Ident as if it were a normal Ident
/// such as `norm_ident.name` instead of `norm_ident.0.name`.
impl Deref for Macros20NormalizedIdent {
type Target = Ident;
fn deref(&self) -> &Self::Target {
&self.0
}
}
/// An interned UTF-8 string.
///
/// Internally, a `Symbol` is implemented as an index, and all operations

View file

@ -1709,6 +1709,8 @@ supported_targets! {
("aarch64-unknown-none-softfloat", aarch64_unknown_none_softfloat),
("aarch64_be-unknown-none-softfloat", aarch64_be_unknown_none_softfloat),
("aarch64-unknown-nuttx", aarch64_unknown_nuttx),
("aarch64v8r-unknown-none", aarch64v8r_unknown_none),
("aarch64v8r-unknown-none-softfloat", aarch64v8r_unknown_none_softfloat),
("x86_64-fortanix-unknown-sgx", x86_64_fortanix_unknown_sgx),

View file

@ -0,0 +1,37 @@
use crate::spec::{
Arch, Cc, LinkerFlavor, Lld, PanicStrategy, RelocModel, SanitizerSet, StackProbeType, Target,
TargetMetadata, TargetOptions,
};
/// Target definition for `aarch64v8r-unknown-none`: bare-metal Armv8-R
/// AArch64 with hardware floating point (tier 3, no std, no host tools).
pub(crate) fn target() -> Target {
let opts = TargetOptions {
// based off the aarch64-unknown-none target at time of addition
linker_flavor: LinkerFlavor::Gnu(Cc::No, Lld::Yes),
linker: Some("rust-lld".into()),
supported_sanitizers: SanitizerSet::KCFI | SanitizerSet::KERNELADDRESS,
// Bare metal: no dynamic loader, so everything is statically located.
relocation_model: RelocModel::Static,
disable_redzone: true,
max_atomic_width: Some(128),
stack_probes: StackProbeType::Inline,
panic_strategy: PanicStrategy::Abort,
default_uwtable: true,
// deviations from aarch64-unknown-none: `+v8a` -> `+v8r`; `+v8r` implies `+neon`
features: "+v8r,+strict-align".into(),
..Default::default()
};
Target {
// NOTE(review): LLVM triple stays the generic `aarch64-unknown-none`;
// the v8r distinction is carried by the `+v8r` feature above.
llvm_target: "aarch64-unknown-none".into(),
metadata: TargetMetadata {
description: Some("Bare Armv8-R AArch64, hardfloat".into()),
tier: Some(3),
host_tools: Some(false),
std: Some(false),
},
pointer_width: 64,
// $ clang-21 -S -emit-llvm -target aarch64 -mcpu=cortex-r82 stub.c
data_layout: "e-m:e-p270:32:32-p271:32:32-p272:64:64-i8:8:32-i16:16:32-i64:64-i128:128-n32:64-S128-Fn32".into(),
arch: Arch::AArch64,
options: opts,
}
}

View file

@ -0,0 +1,36 @@
use crate::spec::{
Abi, Arch, Cc, LinkerFlavor, Lld, PanicStrategy, RelocModel, SanitizerSet, StackProbeType,
Target, TargetMetadata, TargetOptions,
};
/// Target definition for `aarch64v8r-unknown-none-softfloat`: bare-metal
/// Armv8-R AArch64 without hardware floating point / NEON (tier 3, no std).
pub(crate) fn target() -> Target {
let opts = TargetOptions {
// Softfloat ABI: floating-point values are passed in integer registers.
abi: Abi::SoftFloat,
linker_flavor: LinkerFlavor::Gnu(Cc::No, Lld::Yes),
linker: Some("rust-lld".into()),
// Bare metal: no dynamic loader, so everything is statically located.
relocation_model: RelocModel::Static,
disable_redzone: true,
max_atomic_width: Some(128),
supported_sanitizers: SanitizerSet::KCFI | SanitizerSet::KERNELADDRESS,
stack_probes: StackProbeType::Inline,
panic_strategy: PanicStrategy::Abort,
default_uwtable: true,
// deviations from aarch64-unknown-none: `+v8a` -> `+v8r`
// (`-neon` also disables the FP/SIMD unit for the softfloat variant)
features: "+v8r,+strict-align,-neon".into(),
..Default::default()
};
Target {
// NOTE(review): LLVM triple stays the generic `aarch64-unknown-none`;
// the v8r distinction is carried by the `+v8r` feature above.
llvm_target: "aarch64-unknown-none".into(),
metadata: TargetMetadata {
description: Some("Bare Armv8-R AArch64, softfloat".into()),
tier: Some(3),
host_tools: Some(false),
std: Some(false),
},
pointer_width: 64,
data_layout: "e-m:e-p270:32:32-p271:32:32-p272:64:64-i8:8:32-i16:16:32-i64:64-i128:128-n32:64-S128-Fn32".into(),
arch: Arch::AArch64,
options: opts,
}
}

View file

@ -19,6 +19,8 @@ pub(crate) fn target() -> Target {
pre_link_args,
post_link_args,
relocation_model: RelocModel::Pic,
crt_static_respected: true,
crt_static_default: true,
panic_strategy: PanicStrategy::Unwind,
no_default_libraries: false,
families: cvs!["unix", "wasm"],

View file

@ -274,9 +274,9 @@ dependencies = [
[[package]]
name = "rustc-demangle"
version = "0.1.26"
version = "0.1.27"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "56f7d92ca342cea22a06f2121d944b4fd82af56988c270852495420f961d4ace"
checksum = "b50b8869d9fc858ce7266cce0194bd74df58b9d0e3f6df3a9fc8eb470d95c09d"
dependencies = [
"rustc-std-workspace-core",
]
@ -346,7 +346,7 @@ dependencies = [
"vex-sdk",
"wasi 0.11.1+wasi-snapshot-preview1",
"wasi 0.14.4+wasi-0.2.4",
"windows-targets 0.0.0",
"windows-link 0.0.0",
]
[[package]]
@ -427,6 +427,10 @@ dependencies = [
"wit-bindgen",
]
[[package]]
name = "windows-link"
version = "0.0.0"
[[package]]
name = "windows-link"
version = "0.2.1"
@ -439,20 +443,16 @@ version = "0.60.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f2f500e4d28234f72040990ec9d39e3a6b950f9f22d3dba18416c35882612bcb"
dependencies = [
"windows-targets 0.53.5",
"windows-targets",
]
[[package]]
name = "windows-targets"
version = "0.0.0"
[[package]]
name = "windows-targets"
version = "0.53.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "4945f9f551b88e0d65f3db0bc25c33b8acea4d9e41163edf90dcd0b19f9069f3"
dependencies = [
"windows-link",
"windows-link 0.2.1",
"windows_aarch64_gnullvm",
"windows_aarch64_msvc",
"windows_i686_gnu",

View file

@ -12,7 +12,7 @@ members = [
exclude = [
# stdarch has its own Cargo workspace
"stdarch",
"windows_targets"
"windows_link"
]
[profile.release.package.compiler_builtins]

View file

@ -21,7 +21,6 @@ compiler_builtins = { path = "../compiler-builtins/compiler-builtins", features
[features]
compiler-builtins-mem = ['compiler_builtins/mem']
compiler-builtins-c = ["compiler_builtins/c"]
compiler-builtins-no-f16-f128 = ["compiler_builtins/no-f16-f128"]
# Choose algorithms that are optimized for binary size instead of runtime performance
optimize_for_size = ["core/optimize_for_size"]

View file

@ -143,7 +143,11 @@ impl<T, A: Allocator> Drain<'_, T, A> {
let new_tail_start = tail_start + additional;
unsafe {
deque.wrap_copy(tail_start, new_tail_start, self.tail_len);
deque.wrap_copy(
deque.to_physical_idx(tail_start),
deque.to_physical_idx(new_tail_start),
self.tail_len,
);
}
self.drain_len += additional;
}

View file

@ -136,9 +136,10 @@
//! padding specified by fill/alignment will be used to take up the required
//! space (see below).
//!
//! The value for the width can also be provided as a [`usize`] in the list of
//! parameters by adding a postfix `$`, indicating that the second argument is
//! a [`usize`] specifying the width.
//! The width can also be provided dynamically by referencing another argument
//! with a `$` suffix. Use `{:N$}` to reference the Nth positional argument
//! (where N is an integer), or `{:name$}` to reference a named argument. The
//! referenced argument must be of type [`usize`].
//!
//! Referring to an argument with the dollar syntax does not affect the "next
//! argument" counter, so it's usually a good idea to refer to arguments by
@ -236,7 +237,8 @@
//!
//! 2. An integer or name followed by dollar sign `.N$`:
//!
//! use format *argument* `N` (which must be a `usize`) as the precision.
//! use the value of format *argument* `N` (which must be a `usize`) as the precision.
//! An integer refers to a positional argument, and a name refers to a named argument.
//!
//! 3. An asterisk `.*`:
//!
@ -363,7 +365,10 @@
//! - `ws` is any character for which [`char::is_whitespace`] returns `true`, has no semantic
//! meaning and is completely optional,
//! - `integer` is a decimal integer that may contain leading zeroes and must fit into an `usize` and
//! - `identifier` is an `IDENTIFIER_OR_KEYWORD` (not an `IDENTIFIER`) as defined by the [Rust language reference](https://doc.rust-lang.org/reference/identifiers.html).
//! - `identifier` is an `IDENTIFIER_OR_KEYWORD` (not an `IDENTIFIER`) as
//! defined by the [Rust language
//! reference](https://doc.rust-lang.org/reference/identifiers.html), except
//! for a bare `_`.
//!
//! # Formatting traits
//!

View file

@ -3,9 +3,7 @@ use alloc::rc::Rc;
use alloc::sync::Arc;
use core::assert_matches;
use core::ffi::{CStr, FromBytesUntilNulError, c_char};
#[allow(deprecated)]
use core::hash::SipHasher13 as DefaultHasher;
use core::hash::{Hash, Hasher};
use core::hash::{Hash, Hasher, SipHasher13 as DefaultHasher};
#[test]
fn c_to_rust() {
@ -57,11 +55,9 @@ fn equal_hash() {
let ptr = data.as_ptr() as *const c_char;
let cstr: &'static CStr = unsafe { CStr::from_ptr(ptr) };
#[allow(deprecated)]
let mut s = DefaultHasher::new();
cstr.hash(&mut s);
let cstr_hash = s.finish();
#[allow(deprecated)]
let mut s = DefaultHasher::new();
CString::new(&data[..data.len() - 1]).unwrap().hash(&mut s);
let cstring_hash = s.finish();

View file

@ -2336,3 +2336,14 @@ fn test_splice_forget() {
std::mem::forget(v.splice(2..4, a));
assert_eq!(v, &[1, 2]);
}
#[test]
// Regression test: splice into a `VecDeque` whose contents wrap around the
// internal ring buffer (`push_front` on a fresh deque places the element at
// the buffer's end, so the subsequent `push_back` wraps). The tail copy in
// `Drain` must use physical indices for this to produce `[7, 8, 9]`.
fn test_splice_wrapping() {
let mut vec = VecDeque::with_capacity(10);
vec.push_front(7u8);
vec.push_back(9);
vec.splice(1..1, [8]);
assert_eq!(Vec::from(vec), [7, 8, 9]);
}

@ -1 +1 @@
Subproject commit b65ab935fb2e0d59dba8966ffca09c9cc5a5f57c
Subproject commit 28ec93b503bf0410745bc3d571bf3dc1caac3019

View file

@ -47,10 +47,6 @@ c = ["dep:cc"]
# the generic versions on all platforms.
no-asm = []
# Workaround for codegen backends which haven't yet implemented `f16` and
# `f128` support. Disabled any intrinsics which use those types.
no-f16-f128 = []
# Flag this library as the unstable compiler-builtins lib
compiler-builtins = []

View file

@ -33,7 +33,6 @@ utest-macros = { git = "https://github.com/japaric/utest" }
default = ["mangled-names"]
c = ["compiler_builtins/c"]
no-asm = ["compiler_builtins/no-asm"]
no-f16-f128 = ["compiler_builtins/no-f16-f128"]
mem = ["compiler_builtins/mem"]
mangled-names = ["compiler_builtins/mangled-names"]
# Skip tests that rely on f128 symbols being available on the system

View file

@ -36,8 +36,6 @@ else
"${test_builtins[@]}" --features c --release
"${test_builtins[@]}" --features no-asm
"${test_builtins[@]}" --features no-asm --release
"${test_builtins[@]}" --features no-f16-f128
"${test_builtins[@]}" --features no-f16-f128 --release
"${test_builtins[@]}" --benches
"${test_builtins[@]}" --benches --release
@ -63,8 +61,6 @@ symcheck+=(-- build-and-check)
"${symcheck[@]}" "$target" -- -p compiler_builtins --features c --release
"${symcheck[@]}" "$target" -- -p compiler_builtins --features no-asm
"${symcheck[@]}" "$target" -- -p compiler_builtins --features no-asm --release
"${symcheck[@]}" "$target" -- -p compiler_builtins --features no-f16-f128
"${symcheck[@]}" "$target" -- -p compiler_builtins --features no-f16-f128 --release
run_intrinsics_test() {
build_args=(--verbose --manifest-path builtins-test-intrinsics/Cargo.toml)

View file

@ -45,10 +45,6 @@ c = ["dep:cc"]
# the generic versions on all platforms.
no-asm = []
# Workaround for codegen backends which haven't yet implemented `f16` and
# `f128` support. Disabled any intrinsics which use those types.
no-f16-f128 = []
# Flag this library as the unstable compiler-builtins lib
compiler-builtins = []

View file

@ -95,16 +95,13 @@ pub fn configure_aliases(target: &Target) {
* * https://github.com/rust-lang/rustc_codegen_cranelift/blob/c713ffab3c6e28ab4b4dd4e392330f786ea657ad/src/lib.rs#L196-L226
*/
// If the feature is set, disable both of these types.
let no_f16_f128 = target.cargo_features.iter().any(|s| s == "no-f16-f128");
println!("cargo::rustc-check-cfg=cfg(f16_enabled)");
if target.reliable_f16 && !no_f16_f128 {
if target.reliable_f16 {
println!("cargo::rustc-cfg=f16_enabled");
}
println!("cargo::rustc-check-cfg=cfg(f128_enabled)");
if target.reliable_f128 && !no_f16_f128 {
if target.reliable_f128 {
println!("cargo::rustc-cfg=f128_enabled");
}
}

View file

@ -143,16 +143,13 @@ fn emit_f16_f128_cfg(cfg: &Config) {
/* See the compiler-builtins configure file for info about the meaning of these options */
// If the feature is set, disable both of these types.
let no_f16_f128 = cfg.cargo_features.iter().any(|s| s == "no-f16-f128");
println!("cargo:rustc-check-cfg=cfg(f16_enabled)");
if cfg.reliable_f16 && !no_f16_f128 {
if cfg.reliable_f16 {
println!("cargo:rustc-cfg=f16_enabled");
}
println!("cargo:rustc-check-cfg=cfg(f128_enabled)");
if cfg.reliable_f128 && !no_f16_f128 {
if cfg.reliable_f128 {
println!("cargo:rustc-cfg=f128_enabled");
}
}

View file

@ -13,8 +13,14 @@ macro_rules! impl_general_format {
($($t:ident)*) => {
$(impl GeneralFormat for $t {
fn already_rounded_value_should_use_exponential(&self) -> bool {
// `max_abs` rounds to infinity for `f16`. This is fine to save us from a more
// complex macro, it just means a positive-exponent `f16` will never print as
// scientific notation by default (reasonably, the max is 65504.0).
#[allow(overflowing_literals)]
let max_abs = 1e+16;
let abs = $t::abs(*self);
(abs != 0.0 && abs < 1e-4) || abs >= 1e+16
(abs != 0.0 && abs < 1e-4) || abs >= max_abs
}
})*
}

View file

@ -87,7 +87,6 @@
#[allow(deprecated)]
pub use self::sip::SipHasher;
#[unstable(feature = "hashmap_internals", issue = "none")]
#[allow(deprecated)]
#[doc(hidden)]
pub use self::sip::SipHasher13;
use crate::{fmt, marker};

View file

@ -11,8 +11,11 @@ use crate::{cmp, ptr};
/// (e.g., `collections::HashMap` uses it by default).
///
/// See: <https://github.com/veorq/SipHash>
#[unstable(feature = "hashmap_internals", issue = "none")]
#[deprecated(since = "1.13.0", note = "use `std::hash::DefaultHasher` instead")]
#[unstable(
feature = "hashmap_internals",
issue = "none",
reason = "use `std::hash::DefaultHasher` instead"
)]
#[derive(Debug, Clone, Default)]
#[doc(hidden)]
pub struct SipHasher13 {
@ -23,7 +26,6 @@ pub struct SipHasher13 {
///
/// See: <https://github.com/veorq/SipHash>
#[unstable(feature = "hashmap_internals", issue = "none")]
#[deprecated(since = "1.13.0", note = "use `std::hash::DefaultHasher` instead")]
#[derive(Debug, Clone, Default)]
struct SipHasher24 {
hasher: Hasher<Sip24Rounds>,
@ -137,8 +139,7 @@ unsafe fn u8to64_le(buf: &[u8], start: usize, len: usize) -> u64 {
out |= (unsafe { *buf.get_unchecked(start + i) } as u64) << (i * 8);
i += 1;
}
//FIXME(fee1-dead): use debug_assert_eq
debug_assert!(i == len);
debug_assert_eq!(i, len);
out
}
@ -167,7 +168,6 @@ impl SipHasher13 {
#[inline]
#[unstable(feature = "hashmap_internals", issue = "none")]
#[rustc_const_unstable(feature = "const_default", issue = "143894")]
#[deprecated(since = "1.13.0", note = "use `std::hash::DefaultHasher` instead")]
pub const fn new() -> SipHasher13 {
SipHasher13::new_with_keys(0, 0)
}
@ -176,7 +176,6 @@ impl SipHasher13 {
#[inline]
#[unstable(feature = "hashmap_internals", issue = "none")]
#[rustc_const_unstable(feature = "const_default", issue = "143894")]
#[deprecated(since = "1.13.0", note = "use `std::hash::DefaultHasher` instead")]
pub const fn new_with_keys(key0: u64, key1: u64) -> SipHasher13 {
SipHasher13 { hasher: Hasher::new_with_keys(key0, key1) }
}

View file

@ -182,6 +182,7 @@
#![feature(staged_api)]
#![feature(stmt_expr_attributes)]
#![feature(strict_provenance_lints)]
#![feature(target_feature_inline_always)]
#![feature(trait_alias)]
#![feature(transparent_unions)]
#![feature(try_blocks)]

View file

@ -47,6 +47,8 @@ pub enum TypeKind {
Array(Array),
/// Slices.
Slice(Slice),
/// Dynamic Traits.
DynTrait(DynTrait),
/// Primitive boolean type.
Bool(Bool),
/// Primitive character type.
@ -105,6 +107,36 @@ pub struct Slice {
pub element_ty: TypeId,
}
/// Compile-time type information about dynamic traits.
/// FIXME(#146922): Add super traits and generics
#[derive(Debug)]
#[non_exhaustive]
#[unstable(feature = "type_info", issue = "146922")]
pub struct DynTrait {
/// The predicates of a dynamic trait.
pub predicates: &'static [DynTraitPredicate],
}
/// Compile-time type information about a dynamic trait predicate.
#[derive(Debug)]
#[non_exhaustive]
#[unstable(feature = "type_info", issue = "146922")]
pub struct DynTraitPredicate {
/// The type of the trait as a dynamic trait type.
pub trait_ty: Trait,
}
/// Compile-time type information about a trait.
#[derive(Debug)]
#[non_exhaustive]
#[unstable(feature = "type_info", issue = "146922")]
pub struct Trait {
/// The TypeId of the trait as a dynamic type
pub ty: TypeId,
/// Whether the trait is an auto trait
pub is_auto: bool,
}
/// Compile-time type information about `bool`.
#[derive(Debug)]
#[non_exhaustive]

View file

@ -1,12 +1,12 @@
//! Implementations for `uN::gather_bits` and `uN::scatter_bits`
//! Implementations for `uN::extract_bits` and `uN::deposit_bits`
//!
//! For the purposes of this implementation, the operations can be thought
//! of as operating on the input bits as a list, starting from the least
//! significant bit. Gathering is like `Vec::retain` that deletes bits
//! where the mask has a zero. Scattering is like doing the inverse by
//! inserting the zeros that gathering would delete.
//! significant bit. Extraction is like `Vec::retain` that deletes bits
//! where the mask has a zero. Deposition is like doing the inverse by
//! inserting the zeros that extraction would delete.
//!
//! Key observation: Each bit that is gathered/scattered needs to be
//! Key observation: Each extracted or deposited bit needs to be
//! shifted by the count of zeros up to the corresponding mask bit.
//!
//! With that in mind, the general idea is to decompose the operation into
@ -14,7 +14,7 @@
//! of the bits by `n = 1 << stage`. The masks for each stage are computed
//! via prefix counts of zeros in the mask.
//!
//! # Gathering
//! # Extraction
//!
//! Consider the input as a sequence of runs of data (bitstrings A,B,C,...),
//! split by fixed-width groups of zeros ('.'), initially at width `n = 1`.
@ -36,9 +36,9 @@
//! ........abbbcccccddeghh
//! ```
//!
//! # Scattering
//! # Deposition
//!
//! For `scatter_bits`, the stages are reversed. We start with a single run of
//! For `deposit_bits`, the stages are reversed. We start with a single run of
//! data in the low bits. Each stage then splits each run of data in two by
//! shifting part of it left by `n`, which is halved each stage.
//! ```text
@ -100,7 +100,7 @@ macro_rules! uint_impl {
}
#[inline(always)]
pub(in super::super) const fn gather_impl(mut x: $U, sparse: $U) -> $U {
pub(in super::super) const fn extract_impl(mut x: $U, sparse: $U) -> $U {
let masks = prepare(sparse);
x &= sparse;
let mut stage = 0;
@ -131,7 +131,7 @@ macro_rules! uint_impl {
x
}
#[inline(always)]
pub(in super::super) const fn scatter_impl(mut x: $U, sparse: $U) -> $U {
pub(in super::super) const fn deposit_impl(mut x: $U, sparse: $U) -> $U {
let masks = prepare(sparse);
let mut stage = STAGES;
while stage > 0 {

View file

@ -507,15 +507,15 @@ macro_rules! uint_impl {
/// #![feature(uint_gather_scatter_bits)]
#[doc = concat!("let n: ", stringify!($SelfT), " = 0b1011_1100;")]
///
/// assert_eq!(n.gather_bits(0b0010_0100), 0b0000_0011);
/// assert_eq!(n.gather_bits(0xF0), 0b0000_1011);
/// assert_eq!(n.extract_bits(0b0010_0100), 0b0000_0011);
/// assert_eq!(n.extract_bits(0xF0), 0b0000_1011);
/// ```
#[unstable(feature = "uint_gather_scatter_bits", issue = "149069")]
#[must_use = "this returns the result of the operation, \
without modifying the original"]
#[inline]
pub const fn gather_bits(self, mask: Self) -> Self {
crate::num::int_bits::$ActualT::gather_impl(self as $ActualT, mask as $ActualT) as $SelfT
pub const fn extract_bits(self, mask: Self) -> Self {
crate::num::int_bits::$ActualT::extract_impl(self as $ActualT, mask as $ActualT) as $SelfT
}
/// Returns an integer with the least significant bits of `self`
@ -524,15 +524,15 @@ macro_rules! uint_impl {
/// #![feature(uint_gather_scatter_bits)]
#[doc = concat!("let n: ", stringify!($SelfT), " = 0b1010_1101;")]
///
/// assert_eq!(n.scatter_bits(0b0101_0101), 0b0101_0001);
/// assert_eq!(n.scatter_bits(0xF0), 0b1101_0000);
/// assert_eq!(n.deposit_bits(0b0101_0101), 0b0101_0001);
/// assert_eq!(n.deposit_bits(0xF0), 0b1101_0000);
/// ```
#[unstable(feature = "uint_gather_scatter_bits", issue = "149069")]
#[must_use = "this returns the result of the operation, \
without modifying the original"]
#[inline]
pub const fn scatter_bits(self, mask: Self) -> Self {
crate::num::int_bits::$ActualT::scatter_impl(self as $ActualT, mask as $ActualT) as $SelfT
pub const fn deposit_bits(self, mask: Self) -> Self {
crate::num::int_bits::$ActualT::deposit_impl(self as $ActualT, mask as $ActualT) as $SelfT
}
/// Reverses the order of bits in the integer. The least significant bit becomes the most significant bit,

View file

@ -1,6 +1,7 @@
//! OS-specific functionality.
#![unstable(feature = "darwin_objc", issue = "145496")]
#![allow(missing_docs)]
#[cfg(all(
doc,

View file

@ -1354,7 +1354,7 @@ impl<T, E> Result<T, E> {
/// let s: String = only_good_news().into_ok();
/// println!("{s}");
/// ```
#[unstable(feature = "unwrap_infallible", reason = "newly added", issue = "61695")]
#[unstable(feature = "unwrap_infallible", issue = "61695")]
#[inline]
#[rustc_allow_const_fn_unstable(const_precise_live_drops)]
#[rustc_const_unstable(feature = "const_convert", issue = "143773")]
@ -1391,7 +1391,7 @@ impl<T, E> Result<T, E> {
/// let error: String = only_bad_news().into_err();
/// println!("{error}");
/// ```
#[unstable(feature = "unwrap_infallible", reason = "newly added", issue = "61695")]
#[unstable(feature = "unwrap_infallible", issue = "61695")]
#[inline]
#[rustc_allow_const_fn_unstable(const_precise_live_drops)]
#[rustc_const_unstable(feature = "const_convert", issue = "143773")]

View file

@ -62,6 +62,25 @@ impl [u8] {
return false;
}
#[cfg(all(target_arch = "x86_64", target_feature = "sse2"))]
{
const CHUNK_SIZE: usize = 16;
// The following function has two invariants:
// 1. The slice lengths must be equal, which we checked above.
// 2. The slice lengths must greater than or equal to N, which this
// if-statement is checking.
if self.len() >= CHUNK_SIZE {
return self.eq_ignore_ascii_case_chunks::<CHUNK_SIZE>(other);
}
}
self.eq_ignore_ascii_case_simple(other)
}
/// ASCII case-insensitive equality check without chunk-at-a-time
/// optimization.
#[inline]
const fn eq_ignore_ascii_case_simple(&self, other: &[u8]) -> bool {
// FIXME(const-hack): This implementation can be reverted when
// `core::iter::zip` is allowed in const. The original implementation:
// self.len() == other.len() && iter::zip(self, other).all(|(a, b)| a.eq_ignore_ascii_case(b))
@ -80,6 +99,65 @@ impl [u8] {
true
}
/// Optimized version of `eq_ignore_ascii_case` to process chunks at a time.
///
/// Platforms that have SIMD instructions may benefit from this
/// implementation over `eq_ignore_ascii_case_simple`.
///
/// # Invariants
///
/// The caller must guarantee that the slices are equal in length, and the
/// slice lengths are greater than or equal to `N` bytes.
#[cfg(all(target_arch = "x86_64", target_feature = "sse2"))]
#[inline]
const fn eq_ignore_ascii_case_chunks<const N: usize>(&self, other: &[u8]) -> bool {
// FIXME(const-hack): The while-loops that follow should be replaced by
// for-loops when available in const.
let (self_chunks, self_rem) = self.as_chunks::<N>();
let (other_chunks, _) = other.as_chunks::<N>();
// Branchless check to encourage auto-vectorization
#[inline(always)]
const fn eq_ignore_ascii_inner<const L: usize>(lhs: &[u8; L], rhs: &[u8; L]) -> bool {
let mut equal_ascii = true;
let mut j = 0;
while j < L {
equal_ascii &= lhs[j].eq_ignore_ascii_case(&rhs[j]);
j += 1;
}
equal_ascii
}
// Process the chunks, returning early if an inequality is found
let mut i = 0;
while i < self_chunks.len() && i < other_chunks.len() {
if !eq_ignore_ascii_inner(&self_chunks[i], &other_chunks[i]) {
return false;
}
i += 1;
}
// Check the length invariant which is necessary for the tail-handling
// logic to be correct. This should have been upheld by the caller,
// otherwise lengths less than N will compare as true without any
// checking.
debug_assert!(self.len() >= N);
// If there are remaining tails, load the last N bytes in the slices to
// avoid falling back to per-byte checking.
if !self_rem.is_empty() {
if let (Some(a_rem), Some(b_rem)) = (self.last_chunk::<N>(), other.last_chunk::<N>()) {
if !eq_ignore_ascii_inner(a_rem, b_rem) {
return false;
}
}
}
true
}
/// Converts this slice to its ASCII upper case equivalent in-place.
///
/// ASCII letters 'a' to 'z' are mapped to 'A' to 'Z',

View file

@ -4,6 +4,7 @@ use super::{from_raw_parts, memchr};
use crate::ascii;
use crate::cmp::{self, BytewiseEq, Ordering};
use crate::intrinsics::compare_bytes;
use crate::mem::SizedTypeProperties;
use crate::num::NonZero;
use crate::ops::ControlFlow;
@ -15,7 +16,14 @@ where
{
#[inline]
fn eq(&self, other: &[U]) -> bool {
SlicePartialEq::equal(self, other)
let len = self.len();
if len == other.len() {
// SAFETY: Just checked that they're the same length, and the pointers
// come from references-to-slices so they're guaranteed readable.
unsafe { SlicePartialEq::equal_same_length(self.as_ptr(), other.as_ptr(), len) }
} else {
false
}
}
}
@ -95,12 +103,14 @@ impl<T: PartialOrd> PartialOrd for [T] {
// intermediate trait for specialization of slice's PartialEq
#[rustc_const_unstable(feature = "const_cmp", issue = "143800")]
const trait SlicePartialEq<B> {
fn equal(&self, other: &[B]) -> bool;
/// # Safety
/// `lhs` and `rhs` are both readable for `len` elements
unsafe fn equal_same_length(lhs: *const Self, rhs: *const B, len: usize) -> bool;
}
// Generic slice equality
#[rustc_const_unstable(feature = "const_cmp", issue = "143800")]
impl<A, B> const SlicePartialEq<B> for [A]
impl<A, B> const SlicePartialEq<B> for A
where
A: [const] PartialEq<B>,
{
@ -109,19 +119,15 @@ where
// such as in `<str as PartialEq>::eq`.
// The codegen backend can still inline it later if needed.
#[rustc_no_mir_inline]
default fn equal(&self, other: &[B]) -> bool {
if self.len() != other.len() {
return false;
}
default unsafe fn equal_same_length(lhs: *const Self, rhs: *const B, len: usize) -> bool {
// Implemented as explicit indexing rather
// than zipped iterators for performance reasons.
// See PR https://github.com/rust-lang/rust/pull/116846
// FIXME(const_hack): make this a `for idx in 0..self.len()` loop.
// FIXME(const_hack): make this a `for idx in 0..len` loop.
let mut idx = 0;
while idx < self.len() {
// bound checks are optimized away
if self[idx] != other[idx] {
while idx < len {
// SAFETY: idx < len, so both are in-bounds and readable
if unsafe { *lhs.add(idx) != *rhs.add(idx) } {
return false;
}
idx += 1;
@ -134,30 +140,18 @@ where
// When each element can be compared byte-wise, we can compare all the bytes
// from the whole size in one call to the intrinsics.
#[rustc_const_unstable(feature = "const_cmp", issue = "143800")]
impl<A, B> const SlicePartialEq<B> for [A]
impl<A, B> const SlicePartialEq<B> for A
where
A: [const] BytewiseEq<B>,
{
// This is usually a pretty good backend inlining candidate because the
// intrinsic tends to just be `memcmp`. However, as of 2025-12 letting
// MIR inline this makes reuse worse because it means that, for example,
// `String::eq` doesn't inline, whereas by keeping this from inling all
// the wrappers until the call to this disappear. If the heuristics have
// changed and this is no longer fruitful, though, please do remove it.
// In the mean time, it's fine to not inline it in MIR because the backend
// will still inline it if it things it's important to do so.
#[rustc_no_mir_inline]
#[inline]
fn equal(&self, other: &[B]) -> bool {
if self.len() != other.len() {
return false;
}
// SAFETY: `self` and `other` are references and are thus guaranteed to be valid.
// The two slices have been checked to have the same size above.
unsafe fn equal_same_length(lhs: *const Self, rhs: *const B, len: usize) -> bool {
// SAFETY: by our precondition, `lhs` and `rhs` are guaranteed to be valid
// for reading `len` values, which also means the size is guaranteed
// not to overflow because it exists in memory;
unsafe {
let size = size_of_val(self);
compare_bytes(self.as_ptr() as *const u8, other.as_ptr() as *const u8, size) == 0
let size = crate::intrinsics::unchecked_mul(len, Self::SIZE);
compare_bytes(lhs as _, rhs as _, size) == 0
}
}
}

View file

@ -2520,7 +2520,7 @@ impl<T> [T] {
/// )));
/// assert_eq!(s.split_once(|&x| x == 0), None);
/// ```
#[unstable(feature = "slice_split_once", reason = "newly added", issue = "112811")]
#[unstable(feature = "slice_split_once", issue = "112811")]
#[inline]
pub fn split_once<F>(&self, pred: F) -> Option<(&[T], &[T])>
where
@ -2548,7 +2548,7 @@ impl<T> [T] {
/// )));
/// assert_eq!(s.rsplit_once(|&x| x == 0), None);
/// ```
#[unstable(feature = "slice_split_once", reason = "newly added", issue = "112811")]
#[unstable(feature = "slice_split_once", issue = "112811")]
#[inline]
pub fn rsplit_once<F>(&self, pred: F) -> Option<(&[T], &[T])>
where

View file

@ -50,8 +50,8 @@ macro_rules! bench_mask_kind {
($mask_kind:ident, $mask:expr) => {
mod $mask_kind {
use super::{Data, ITERATIONS, U};
bench_template!(U::gather_bits, gather_bits, $mask);
bench_template!(U::scatter_bits, scatter_bits, $mask);
bench_template!(U::extract_bits, extract_bits, $mask);
bench_template!(U::deposit_bits, deposit_bits, $mask);
}
};
}

View file

@ -5,6 +5,7 @@ use test::{Bencher, black_box};
mod char_count;
mod corpora;
mod debug;
mod eq_ignore_ascii_case;
mod iter;
#[bench]

View file

@ -0,0 +1,45 @@
use test::{Bencher, black_box};
use super::corpora::*;
#[bench]
fn bench_str_under_8_bytes_eq(b: &mut Bencher) {
let s = black_box("foo");
let other = black_box("foo");
b.iter(|| assert!(s.eq_ignore_ascii_case(other)))
}
#[bench]
fn bench_str_of_8_bytes_eq(b: &mut Bencher) {
let s = black_box(en::TINY);
let other = black_box(en::TINY);
b.iter(|| assert!(s.eq_ignore_ascii_case(other)))
}
#[bench]
fn bench_str_17_bytes_eq(b: &mut Bencher) {
let s = black_box(&en::SMALL[..17]);
let other = black_box(&en::SMALL[..17]);
b.iter(|| assert!(s.eq_ignore_ascii_case(other)))
}
#[bench]
fn bench_str_31_bytes_eq(b: &mut Bencher) {
let s = black_box(&en::SMALL[..31]);
let other = black_box(&en::SMALL[..31]);
b.iter(|| assert!(s.eq_ignore_ascii_case(other)))
}
#[bench]
fn bench_medium_str_eq(b: &mut Bencher) {
let s = black_box(en::MEDIUM);
let other = black_box(en::MEDIUM);
b.iter(|| assert!(s.eq_ignore_ascii_case(other)))
}
#[bench]
fn bench_large_str_eq(b: &mut Bencher) {
let s = black_box(en::LARGE);
let other = black_box(en::LARGE);
b.iter(|| assert!(s.eq_ignore_ascii_case(other)))
}

View file

@ -163,3 +163,132 @@ fn test_pointers() {
_ => unreachable!(),
}
}
#[test]
fn test_dynamic_traits() {
use std::collections::HashSet;
use std::mem::type_info::DynTraitPredicate;
trait A<T> {}
trait B<const CONST_NUM: i32> {
type Foo;
}
trait FooTrait<'a, 'b, const CONST_NUM: i32> {}
trait ProjectorTrait<'a, 'b> {}
fn preds_of<T: ?Sized + 'static>() -> &'static [DynTraitPredicate] {
match const { Type::of::<T>() }.kind {
TypeKind::DynTrait(d) => d.predicates,
_ => unreachable!(),
}
}
fn pred<'a>(preds: &'a [DynTraitPredicate], want: TypeId) -> &'a DynTraitPredicate {
preds
.iter()
.find(|p| p.trait_ty.ty == want)
.unwrap_or_else(|| panic!("missing predicate for {want:?}"))
}
fn assert_typeid_set_eq(actual: &[TypeId], expected: &[TypeId]) {
let actual_set: HashSet<TypeId> = actual.iter().copied().collect();
let expected_set: HashSet<TypeId> = expected.iter().copied().collect();
assert_eq!(actual.len(), actual_set.len(), "duplicates present: {actual:?}");
assert_eq!(
actual_set, expected_set,
"unexpected ids.\nactual: {actual:?}\nexpected: {expected:?}"
);
}
fn assert_predicates_exact(preds: &[DynTraitPredicate], expected_pred_ids: &[TypeId]) {
let actual_pred_ids: Vec<TypeId> = preds.iter().map(|p| p.trait_ty.ty).collect();
assert_typeid_set_eq(&actual_pred_ids, expected_pred_ids);
}
// dyn Send
{
let preds = preds_of::<dyn Send>();
assert_predicates_exact(preds, &[TypeId::of::<dyn Send>()]);
let p = pred(preds, TypeId::of::<dyn Send>());
assert!(p.trait_ty.is_auto);
}
// dyn A<i32>
{
let preds = preds_of::<dyn A<i32>>();
assert_predicates_exact(preds, &[TypeId::of::<dyn A<i32>>()]);
let p = pred(preds, TypeId::of::<dyn A<i32>>());
assert!(!p.trait_ty.is_auto);
}
// dyn B<5, Foo = i32>
{
let preds = preds_of::<dyn B<5, Foo = i32>>();
assert_predicates_exact(preds, &[TypeId::of::<dyn B<5, Foo = i32>>()]);
let e = pred(preds, TypeId::of::<dyn B<5, Foo = i32>>());
assert!(!e.trait_ty.is_auto);
}
// dyn for<'a> FooTrait<'a, 'a, 7>
{
let preds = preds_of::<dyn for<'a> FooTrait<'a, 'a, 7>>();
assert_predicates_exact(preds, &[TypeId::of::<dyn for<'a> FooTrait<'a, 'a, 7>>()]);
let foo = pred(preds, TypeId::of::<dyn for<'a> FooTrait<'a, 'a, 7>>());
assert!(!foo.trait_ty.is_auto);
}
// dyn FooTrait<'static, 'static, 7>
{
let preds = preds_of::<dyn FooTrait<'static, 'static, 7>>();
assert_predicates_exact(preds, &[TypeId::of::<dyn FooTrait<'static, 'static, 7>>()]);
let foo = pred(preds, TypeId::of::<dyn FooTrait<'static, 'static, 7>>());
assert!(!foo.trait_ty.is_auto);
}
// dyn for<'a, 'b> FooTrait<'a, 'b, 7>
{
let preds = preds_of::<dyn for<'a, 'b> FooTrait<'a, 'b, 7>>();
assert_predicates_exact(preds, &[TypeId::of::<dyn for<'a, 'b> FooTrait<'a, 'b, 7>>()]);
let foo = pred(preds, TypeId::of::<dyn for<'a, 'b> FooTrait<'a, 'b, 7>>());
assert!(!foo.trait_ty.is_auto);
}
// dyn for<'a, 'b> ProjectorTrait<'a, 'b>
{
let preds = preds_of::<dyn for<'a, 'b> ProjectorTrait<'a, 'b>>();
assert_predicates_exact(preds, &[TypeId::of::<dyn for<'a, 'b> ProjectorTrait<'a, 'b>>()]);
let proj = pred(preds, TypeId::of::<dyn for<'a, 'b> ProjectorTrait<'a, 'b>>());
assert!(!proj.trait_ty.is_auto);
}
// dyn for<'a> FooTrait<'a, 'a, 7> + Send + Sync
{
let preds = preds_of::<dyn for<'a> FooTrait<'a, 'a, 7> + Send + Sync>();
assert_predicates_exact(
preds,
&[
TypeId::of::<dyn for<'a> FooTrait<'a, 'a, 7>>(),
TypeId::of::<dyn Send>(),
TypeId::of::<dyn Sync>(),
],
);
let foo = pred(preds, TypeId::of::<dyn for<'a> FooTrait<'a, 'a, 7>>());
assert!(!foo.trait_ty.is_auto);
let send = pred(preds, TypeId::of::<dyn Send>());
assert!(send.trait_ty.is_auto);
let sync = pred(preds, TypeId::of::<dyn Sync>());
assert!(sync.trait_ty.is_auto);
}
}

View file

@ -127,50 +127,50 @@ macro_rules! uint_module {
assert_eq_const_safe!($T: _1.swap_bytes(), _1);
}
fn test_gather_bits() {
assert_eq_const_safe!($T: $T::gather_bits(0b1010_0101, 0b0000_0011), 0b_0001);
assert_eq_const_safe!($T: $T::gather_bits(0b1010_0101, 0b0000_0110), 0b_0010);
assert_eq_const_safe!($T: $T::gather_bits(0b1010_0101, 0b0000_1100), 0b_0001);
assert_eq_const_safe!($T: $T::gather_bits(0b1010_0101, 0b0001_1000), 0b_0000);
assert_eq_const_safe!($T: $T::gather_bits(0b1010_0101, 0b0011_0000), 0b_0010);
assert_eq_const_safe!($T: $T::gather_bits(0b1010_0101, 0b0110_0000), 0b_0001);
assert_eq_const_safe!($T: $T::gather_bits(0b1010_0101, 0b1100_0000), 0b_0010);
fn test_extract_bits() {
assert_eq_const_safe!($T: $T::extract_bits(0b1010_0101, 0b0000_0011), 0b_0001);
assert_eq_const_safe!($T: $T::extract_bits(0b1010_0101, 0b0000_0110), 0b_0010);
assert_eq_const_safe!($T: $T::extract_bits(0b1010_0101, 0b0000_1100), 0b_0001);
assert_eq_const_safe!($T: $T::extract_bits(0b1010_0101, 0b0001_1000), 0b_0000);
assert_eq_const_safe!($T: $T::extract_bits(0b1010_0101, 0b0011_0000), 0b_0010);
assert_eq_const_safe!($T: $T::extract_bits(0b1010_0101, 0b0110_0000), 0b_0001);
assert_eq_const_safe!($T: $T::extract_bits(0b1010_0101, 0b1100_0000), 0b_0010);
assert_eq_const_safe!($T: A.gather_bits(_0), 0);
assert_eq_const_safe!($T: B.gather_bits(_0), 0);
assert_eq_const_safe!($T: C.gather_bits(_0), 0);
assert_eq_const_safe!($T: _0.gather_bits(A), 0);
assert_eq_const_safe!($T: _0.gather_bits(B), 0);
assert_eq_const_safe!($T: _0.gather_bits(C), 0);
assert_eq_const_safe!($T: A.extract_bits(_0), 0);
assert_eq_const_safe!($T: B.extract_bits(_0), 0);
assert_eq_const_safe!($T: C.extract_bits(_0), 0);
assert_eq_const_safe!($T: _0.extract_bits(A), 0);
assert_eq_const_safe!($T: _0.extract_bits(B), 0);
assert_eq_const_safe!($T: _0.extract_bits(C), 0);
assert_eq_const_safe!($T: A.gather_bits(_1), A);
assert_eq_const_safe!($T: B.gather_bits(_1), B);
assert_eq_const_safe!($T: C.gather_bits(_1), C);
assert_eq_const_safe!($T: _1.gather_bits(0b0010_0001), 0b0000_0011);
assert_eq_const_safe!($T: _1.gather_bits(0b0010_1100), 0b0000_0111);
assert_eq_const_safe!($T: _1.gather_bits(0b0111_1001), 0b0001_1111);
assert_eq_const_safe!($T: A.extract_bits(_1), A);
assert_eq_const_safe!($T: B.extract_bits(_1), B);
assert_eq_const_safe!($T: C.extract_bits(_1), C);
assert_eq_const_safe!($T: _1.extract_bits(0b0010_0001), 0b0000_0011);
assert_eq_const_safe!($T: _1.extract_bits(0b0010_1100), 0b0000_0111);
assert_eq_const_safe!($T: _1.extract_bits(0b0111_1001), 0b0001_1111);
}
fn test_scatter_bits() {
assert_eq_const_safe!($T: $T::scatter_bits(0b1111, 0b1001_0110), 0b1001_0110);
assert_eq_const_safe!($T: $T::scatter_bits(0b0001, 0b1001_0110), 0b0000_0010);
assert_eq_const_safe!($T: $T::scatter_bits(0b0010, 0b1001_0110), 0b0000_0100);
assert_eq_const_safe!($T: $T::scatter_bits(0b0100, 0b1001_0110), 0b0001_0000);
assert_eq_const_safe!($T: $T::scatter_bits(0b1000, 0b1001_0110), 0b1000_0000);
fn test_deposit_bits() {
assert_eq_const_safe!($T: $T::deposit_bits(0b1111, 0b1001_0110), 0b1001_0110);
assert_eq_const_safe!($T: $T::deposit_bits(0b0001, 0b1001_0110), 0b0000_0010);
assert_eq_const_safe!($T: $T::deposit_bits(0b0010, 0b1001_0110), 0b0000_0100);
assert_eq_const_safe!($T: $T::deposit_bits(0b0100, 0b1001_0110), 0b0001_0000);
assert_eq_const_safe!($T: $T::deposit_bits(0b1000, 0b1001_0110), 0b1000_0000);
assert_eq_const_safe!($T: A.scatter_bits(_0), 0);
assert_eq_const_safe!($T: B.scatter_bits(_0), 0);
assert_eq_const_safe!($T: C.scatter_bits(_0), 0);
assert_eq_const_safe!($T: _0.scatter_bits(A), 0);
assert_eq_const_safe!($T: _0.scatter_bits(B), 0);
assert_eq_const_safe!($T: _0.scatter_bits(C), 0);
assert_eq_const_safe!($T: A.deposit_bits(_0), 0);
assert_eq_const_safe!($T: B.deposit_bits(_0), 0);
assert_eq_const_safe!($T: C.deposit_bits(_0), 0);
assert_eq_const_safe!($T: _0.deposit_bits(A), 0);
assert_eq_const_safe!($T: _0.deposit_bits(B), 0);
assert_eq_const_safe!($T: _0.deposit_bits(C), 0);
assert_eq_const_safe!($T: A.scatter_bits(_1), A);
assert_eq_const_safe!($T: B.scatter_bits(_1), B);
assert_eq_const_safe!($T: C.scatter_bits(_1), C);
assert_eq_const_safe!($T: _1.scatter_bits(A), A);
assert_eq_const_safe!($T: _1.scatter_bits(B), B);
assert_eq_const_safe!($T: _1.scatter_bits(C), C);
assert_eq_const_safe!($T: A.deposit_bits(_1), A);
assert_eq_const_safe!($T: B.deposit_bits(_1), B);
assert_eq_const_safe!($T: C.deposit_bits(_1), C);
assert_eq_const_safe!($T: _1.deposit_bits(A), A);
assert_eq_const_safe!($T: _1.deposit_bits(B), B);
assert_eq_const_safe!($T: _1.deposit_bits(C), C);
}
fn test_reverse_bits() {
@ -389,7 +389,7 @@ macro_rules! uint_module {
#[cfg(not(miri))] // Miri is too slow
#[test]
fn test_lots_of_gather_scatter() {
fn test_lots_of_extract_deposit() {
// Generate a handful of bit patterns to use as inputs
let xs = {
let mut xs = vec![];
@ -414,7 +414,7 @@ macro_rules! uint_module {
for sparse in sparse_masks {
// Collect the set bits to sequential low bits
let dense = sparse.gather_bits(sparse);
let dense = sparse.extract_bits(sparse);
let count = sparse.count_ones();
assert_eq!(count, dense.count_ones());
assert_eq!(count, dense.trailing_ones());
@ -424,27 +424,27 @@ macro_rules! uint_module {
let mut bit = 1 as $T;
for _ in 0..count {
let lowest_one = t.isolate_lowest_one();
assert_eq!(lowest_one, bit.scatter_bits(sparse));
assert_eq!(bit, lowest_one.gather_bits(sparse));
assert_eq!(lowest_one, bit.deposit_bits(sparse));
assert_eq!(bit, lowest_one.extract_bits(sparse));
t ^= lowest_one;
bit <<= 1;
}
// Other bits are ignored
assert_eq!(0, bit.wrapping_neg().scatter_bits(sparse));
assert_eq!(0, (!sparse).gather_bits(sparse));
assert_eq!(0, bit.wrapping_neg().deposit_bits(sparse));
assert_eq!(0, (!sparse).extract_bits(sparse));
for &x in &xs {
// Gather bits from `x & sparse` to `dense`
let dx = x.gather_bits(sparse);
let dx = x.extract_bits(sparse);
assert_eq!(dx & !dense, 0);
// Scatter bits from `x & dense` to `sparse`
let sx = x.scatter_bits(sparse);
let sx = x.deposit_bits(sparse);
assert_eq!(sx & !sparse, 0);
// The other recovers the input (within the mask)
assert_eq!(dx.scatter_bits(sparse), x & sparse);
assert_eq!(sx.gather_bits(sparse), x & dense);
assert_eq!(dx.deposit_bits(sparse), x & sparse);
assert_eq!(sx.extract_bits(sparse), x & dense);
}
}
}

View file

@ -30,19 +30,19 @@ impl Drop for TokenStream {
}
impl<S> Encode<S> for TokenStream {
fn encode(self, w: &mut Writer, s: &mut S) {
fn encode(self, w: &mut Buffer, s: &mut S) {
mem::ManuallyDrop::new(self).handle.encode(w, s);
}
}
impl<S> Encode<S> for &TokenStream {
fn encode(self, w: &mut Writer, s: &mut S) {
fn encode(self, w: &mut Buffer, s: &mut S) {
self.handle.encode(w, s);
}
}
impl<S> Decode<'_, '_, S> for TokenStream {
fn decode(r: &mut Reader<'_>, s: &mut S) -> Self {
fn decode(r: &mut &[u8], s: &mut S) -> Self {
TokenStream { handle: handle::Handle::decode(r, s) }
}
}
@ -56,23 +56,17 @@ impl !Send for Span {}
impl !Sync for Span {}
impl<S> Encode<S> for Span {
fn encode(self, w: &mut Writer, s: &mut S) {
fn encode(self, w: &mut Buffer, s: &mut S) {
self.handle.encode(w, s);
}
}
impl<S> Decode<'_, '_, S> for Span {
fn decode(r: &mut Reader<'_>, s: &mut S) -> Self {
fn decode(r: &mut &[u8], s: &mut S) -> Self {
Span { handle: handle::Handle::decode(r, s) }
}
}
// FIXME(eddyb) generate these impls by pattern-matching on the
// names of methods - also could use the presence of `fn drop`
// to distinguish between 'owned and 'interned, above.
// Alternatively, special "modes" could be listed of types in with_api
// instead of pattern matching on methods, here and in server decl.
impl Clone for TokenStream {
fn clone(&self) -> Self {
Methods::ts_clone(self)
@ -104,10 +98,7 @@ pub(crate) use super::symbol::Symbol;
macro_rules! define_client_side {
(
Methods {
$(fn $method:ident($($arg:ident: $arg_ty:ty),* $(,)?) $(-> $ret_ty:ty)*;)*
},
$($name:ident),* $(,)?
$(fn $method:ident($($arg:ident: $arg_ty:ty),* $(,)?) $(-> $ret_ty:ty)*;)*
) => {
impl Methods {
$(pub(crate) fn $method($($arg: $arg_ty),*) $(-> $ret_ty)? {
@ -115,7 +106,7 @@ macro_rules! define_client_side {
let mut buf = bridge.cached_buffer.take();
buf.clear();
api_tags::Method::$method.encode(&mut buf, &mut ());
ApiTags::$method.encode(&mut buf, &mut ());
$($arg.encode(&mut buf, &mut ());)*
buf = bridge.dispatch.call(buf);
@ -130,7 +121,7 @@ macro_rules! define_client_side {
}
}
}
with_api!(self, self, define_client_side);
with_api!(self, define_client_side);
struct Bridge<'a> {
/// Reusable buffer (only `clear`-ed, never shrunk), primarily

View file

@ -13,92 +13,76 @@ use std::ops::{Bound, Range};
use std::sync::Once;
use std::{fmt, marker, mem, panic, thread};
use crate::{Delimiter, Level, Spacing};
use crate::{Delimiter, Level};
/// Higher-order macro describing the server RPC API, allowing automatic
/// generation of type-safe Rust APIs, both client-side and server-side.
///
/// `with_api!(MySelf, my_self, my_macro)` expands to:
/// `with_api!(MySelf, my_macro)` expands to:
/// ```rust,ignore (pseudo-code)
/// my_macro! {
/// Methods {
/// // ...
/// fn lit_character(ch: char) -> MySelf::Literal;
/// // ...
/// fn lit_span(my_self: &MySelf::Literal) -> MySelf::Span;
/// fn lit_set_span(my_self: &mut MySelf::Literal, span: MySelf::Span);
/// },
/// Literal,
/// Span,
/// fn lit_character(ch: char) -> MySelf::Literal;
/// fn lit_span(lit: &MySelf::Literal) -> MySelf::Span;
/// fn lit_set_span(lit: &mut MySelf::Literal, span: MySelf::Span);
/// // ...
/// }
/// ```
///
/// The first two arguments serve to customize the arguments names
/// and argument/return types, to enable several different usecases:
///
/// If `my_self` is just `self`, then each `fn` signature can be used
/// as-is for a method. If it's anything else (`self_` in practice),
/// then the signatures don't have a special `self` argument, and
/// can, therefore, have a different one introduced.
/// The first argument serves to customize the argument/return types,
/// to enable several different usecases:
///
/// If `MySelf` is just `Self`, then the types are only valid inside
/// a trait or a trait impl, where the trait has associated types
/// for each of the API types. If non-associated types are desired,
/// a module name (`self` in practice) can be used instead of `Self`.
macro_rules! with_api {
($S:ident, $self:ident, $m:ident) => {
($S:ident, $m:ident) => {
$m! {
Methods {
fn injected_env_var(var: &str) -> Option<String>;
fn track_env_var(var: &str, value: Option<&str>);
fn track_path(path: &str);
fn literal_from_str(s: &str) -> Result<Literal<$S::Span, $S::Symbol>, ()>;
fn emit_diagnostic(diagnostic: Diagnostic<$S::Span>);
fn injected_env_var(var: &str) -> Option<String>;
fn track_env_var(var: &str, value: Option<&str>);
fn track_path(path: &str);
fn literal_from_str(s: &str) -> Result<Literal<$S::Span, $S::Symbol>, ()>;
fn emit_diagnostic(diagnostic: Diagnostic<$S::Span>);
fn ts_drop(stream: $S::TokenStream);
fn ts_clone(stream: &$S::TokenStream) -> $S::TokenStream;
fn ts_is_empty(stream: &$S::TokenStream) -> bool;
fn ts_expand_expr(stream: &$S::TokenStream) -> Result<$S::TokenStream, ()>;
fn ts_from_str(src: &str) -> $S::TokenStream;
fn ts_to_string(stream: &$S::TokenStream) -> String;
fn ts_from_token_tree(
tree: TokenTree<$S::TokenStream, $S::Span, $S::Symbol>,
) -> $S::TokenStream;
fn ts_concat_trees(
base: Option<$S::TokenStream>,
trees: Vec<TokenTree<$S::TokenStream, $S::Span, $S::Symbol>>,
) -> $S::TokenStream;
fn ts_concat_streams(
base: Option<$S::TokenStream>,
streams: Vec<$S::TokenStream>,
) -> $S::TokenStream;
fn ts_into_trees(
stream: $S::TokenStream
) -> Vec<TokenTree<$S::TokenStream, $S::Span, $S::Symbol>>;
fn ts_drop(stream: $S::TokenStream);
fn ts_clone(stream: &$S::TokenStream) -> $S::TokenStream;
fn ts_is_empty(stream: &$S::TokenStream) -> bool;
fn ts_expand_expr(stream: &$S::TokenStream) -> Result<$S::TokenStream, ()>;
fn ts_from_str(src: &str) -> $S::TokenStream;
fn ts_to_string(stream: &$S::TokenStream) -> String;
fn ts_from_token_tree(
tree: TokenTree<$S::TokenStream, $S::Span, $S::Symbol>,
) -> $S::TokenStream;
fn ts_concat_trees(
base: Option<$S::TokenStream>,
trees: Vec<TokenTree<$S::TokenStream, $S::Span, $S::Symbol>>,
) -> $S::TokenStream;
fn ts_concat_streams(
base: Option<$S::TokenStream>,
streams: Vec<$S::TokenStream>,
) -> $S::TokenStream;
fn ts_into_trees(
stream: $S::TokenStream
) -> Vec<TokenTree<$S::TokenStream, $S::Span, $S::Symbol>>;
fn span_debug(span: $S::Span) -> String;
fn span_parent(span: $S::Span) -> Option<$S::Span>;
fn span_source(span: $S::Span) -> $S::Span;
fn span_byte_range(span: $S::Span) -> Range<usize>;
fn span_start(span: $S::Span) -> $S::Span;
fn span_end(span: $S::Span) -> $S::Span;
fn span_line(span: $S::Span) -> usize;
fn span_column(span: $S::Span) -> usize;
fn span_file(span: $S::Span) -> String;
fn span_local_file(span: $S::Span) -> Option<String>;
fn span_join(span: $S::Span, other: $S::Span) -> Option<$S::Span>;
fn span_subspan(span: $S::Span, start: Bound<usize>, end: Bound<usize>) -> Option<$S::Span>;
fn span_resolved_at(span: $S::Span, at: $S::Span) -> $S::Span;
fn span_source_text(span: $S::Span) -> Option<String>;
fn span_save_span(span: $S::Span) -> usize;
fn span_recover_proc_macro_span(id: usize) -> $S::Span;
fn span_debug(span: $S::Span) -> String;
fn span_parent(span: $S::Span) -> Option<$S::Span>;
fn span_source(span: $S::Span) -> $S::Span;
fn span_byte_range(span: $S::Span) -> Range<usize>;
fn span_start(span: $S::Span) -> $S::Span;
fn span_end(span: $S::Span) -> $S::Span;
fn span_line(span: $S::Span) -> usize;
fn span_column(span: $S::Span) -> usize;
fn span_file(span: $S::Span) -> String;
fn span_local_file(span: $S::Span) -> Option<String>;
fn span_join(span: $S::Span, other: $S::Span) -> Option<$S::Span>;
fn span_subspan(span: $S::Span, start: Bound<usize>, end: Bound<usize>) -> Option<$S::Span>;
fn span_resolved_at(span: $S::Span, at: $S::Span) -> $S::Span;
fn span_source_text(span: $S::Span) -> Option<String>;
fn span_save_span(span: $S::Span) -> usize;
fn span_recover_proc_macro_span(id: usize) -> $S::Span;
fn symbol_normalize_and_validate_ident(string: &str) -> Result<$S::Symbol, ()>;
},
TokenStream,
Span,
Symbol,
fn symbol_normalize_and_validate_ident(string: &str) -> Result<$S::Symbol, ()>;
}
};
}
@ -129,7 +113,7 @@ mod symbol;
use buffer::Buffer;
pub use rpc::PanicMessage;
use rpc::{Decode, Encode, Reader, Writer};
use rpc::{Decode, Encode};
/// Configuration for establishing an active connection between a server and a
/// client. The server creates the bridge config (`run_server` in `server.rs`),
@ -151,26 +135,18 @@ pub struct BridgeConfig<'a> {
impl !Send for BridgeConfig<'_> {}
impl !Sync for BridgeConfig<'_> {}
#[forbid(unsafe_code)]
#[allow(non_camel_case_types)]
mod api_tags {
use super::rpc::{Decode, Encode, Reader, Writer};
macro_rules! declare_tags {
(
Methods {
$(fn $method:ident($($arg:ident: $arg_ty:ty),* $(,)?) $(-> $ret_ty:ty)*;)*
},
$($name:ident),* $(,)?
) => {
pub(super) enum Method {
$($method),*
}
rpc_encode_decode!(enum Method { $($method),* });
macro_rules! declare_tags {
(
$(fn $method:ident($($arg:ident: $arg_ty:ty),* $(,)?) $(-> $ret_ty:ty)*;)*
) => {
#[allow(non_camel_case_types)]
pub(super) enum ApiTags {
$($method),*
}
rpc_encode_decode!(enum ApiTags { $($method),* });
}
with_api!(self, self, declare_tags);
}
with_api!(self, declare_tags);
/// Helper to wrap associated types to allow trait impl dispatch.
/// That is, normally a pair of impls for `T::Foo` and `T::Bar`
@ -179,11 +155,6 @@ mod api_tags {
trait Mark {
type Unmarked;
fn mark(unmarked: Self::Unmarked) -> Self;
}
/// Unwrap types wrapped by `Mark::mark` (see `Mark` for details).
trait Unmark {
type Unmarked;
fn unmark(self) -> Self::Unmarked;
}
@ -198,25 +169,19 @@ impl<T, M> Mark for Marked<T, M> {
fn mark(unmarked: Self::Unmarked) -> Self {
Marked { value: unmarked, _marker: marker::PhantomData }
}
}
impl<T, M> Unmark for Marked<T, M> {
type Unmarked = T;
fn unmark(self) -> Self::Unmarked {
self.value
}
}
impl<'a, T, M> Unmark for &'a Marked<T, M> {
impl<'a, T, M> Mark for &'a Marked<T, M> {
type Unmarked = &'a T;
fn mark(_: Self::Unmarked) -> Self {
unreachable!()
}
fn unmark(self) -> Self::Unmarked {
&self.value
}
}
impl<'a, T, M> Unmark for &'a mut Marked<T, M> {
type Unmarked = &'a mut T;
fn unmark(self) -> Self::Unmarked {
&mut self.value
}
}
impl<T: Mark> Mark for Vec<T> {
type Unmarked = Vec<T::Unmarked>;
@ -224,9 +189,6 @@ impl<T: Mark> Mark for Vec<T> {
// Should be a no-op due to std's in-place collect optimizations.
unmarked.into_iter().map(T::mark).collect()
}
}
impl<T: Unmark> Unmark for Vec<T> {
type Unmarked = Vec<T::Unmarked>;
fn unmark(self) -> Self::Unmarked {
// Should be a no-op due to std's in-place collect optimizations.
self.into_iter().map(T::unmark).collect()
@ -241,9 +203,6 @@ macro_rules! mark_noop {
fn mark(unmarked: Self::Unmarked) -> Self {
unmarked
}
}
impl Unmark for $ty {
type Unmarked = Self;
fn unmark(self) -> Self::Unmarked {
self
}
@ -254,8 +213,6 @@ macro_rules! mark_noop {
mark_noop! {
(),
bool,
char,
&'_ [u8],
&'_ str,
String,
u8,
@ -263,7 +220,6 @@ mark_noop! {
Delimiter,
LitKind,
Level,
Spacing,
}
rpc_encode_decode!(
@ -282,12 +238,6 @@ rpc_encode_decode!(
Help,
}
);
rpc_encode_decode!(
enum Spacing {
Alone,
Joint,
}
);
#[derive(Copy, Clone, Eq, PartialEq, Debug)]
pub enum LitKind {
@ -333,13 +283,9 @@ macro_rules! mark_compound {
$($field: Mark::mark(unmarked.$field)),*
}
}
}
impl<$($T: Unmark),+> Unmark for $name <$($T),+> {
type Unmarked = $name <$($T::Unmarked),+>;
fn unmark(self) -> Self::Unmarked {
$name {
$($field: Unmark::unmark(self.$field)),*
$($field: Mark::unmark(self.$field)),*
}
}
}
@ -354,14 +300,10 @@ macro_rules! mark_compound {
})*
}
}
}
impl<$($T: Unmark),+> Unmark for $name <$($T),+> {
type Unmarked = $name <$($T::Unmarked),+>;
fn unmark(self) -> Self::Unmarked {
match self {
$($name::$variant $(($field))? => {
$name::$variant $((Unmark::unmark($field)))?
$name::$variant $((Mark::unmark($field)))?
})*
}
}

View file

@ -4,28 +4,26 @@ use std::any::Any;
use std::io::Write;
use std::num::NonZero;
pub(super) type Writer = super::buffer::Buffer;
use super::buffer::Buffer;
pub(super) trait Encode<S>: Sized {
fn encode(self, w: &mut Writer, s: &mut S);
fn encode(self, w: &mut Buffer, s: &mut S);
}
pub(super) type Reader<'a> = &'a [u8];
pub(super) trait Decode<'a, 's, S>: Sized {
fn decode(r: &mut Reader<'a>, s: &'s mut S) -> Self;
fn decode(r: &mut &'a [u8], s: &'s mut S) -> Self;
}
macro_rules! rpc_encode_decode {
(le $ty:ty) => {
impl<S> Encode<S> for $ty {
fn encode(self, w: &mut Writer, _: &mut S) {
fn encode(self, w: &mut Buffer, _: &mut S) {
w.extend_from_array(&self.to_le_bytes());
}
}
impl<S> Decode<'_, '_, S> for $ty {
fn decode(r: &mut Reader<'_>, _: &mut S) -> Self {
fn decode(r: &mut &[u8], _: &mut S) -> Self {
const N: usize = size_of::<$ty>();
let mut bytes = [0; N];
@ -38,7 +36,7 @@ macro_rules! rpc_encode_decode {
};
(struct $name:ident $(<$($T:ident),+>)? { $($field:ident),* $(,)? }) => {
impl<S, $($($T: Encode<S>),+)?> Encode<S> for $name $(<$($T),+>)? {
fn encode(self, w: &mut Writer, s: &mut S) {
fn encode(self, w: &mut Buffer, s: &mut S) {
$(self.$field.encode(w, s);)*
}
}
@ -46,7 +44,7 @@ macro_rules! rpc_encode_decode {
impl<'a, S, $($($T: for<'s> Decode<'a, 's, S>),+)?> Decode<'a, '_, S>
for $name $(<$($T),+>)?
{
fn decode(r: &mut Reader<'a>, s: &mut S) -> Self {
fn decode(r: &mut &'a [u8], s: &mut S) -> Self {
$name {
$($field: Decode::decode(r, s)),*
}
@ -55,10 +53,12 @@ macro_rules! rpc_encode_decode {
};
(enum $name:ident $(<$($T:ident),+>)? { $($variant:ident $(($field:ident))*),* $(,)? }) => {
impl<S, $($($T: Encode<S>),+)?> Encode<S> for $name $(<$($T),+>)? {
fn encode(self, w: &mut Writer, s: &mut S) {
fn encode(self, w: &mut Buffer, s: &mut S) {
// HACK(eddyb): `Tag` enum duplicated between the
// two impls as there's no other place to stash it.
#[repr(u8)] enum Tag { $($variant),* }
#[allow(non_camel_case_types)]
#[repr(u8)]
enum Tag { $($variant),* }
match self {
$($name::$variant $(($field))* => {
@ -72,10 +72,10 @@ macro_rules! rpc_encode_decode {
impl<'a, S, $($($T: for<'s> Decode<'a, 's, S>),+)?> Decode<'a, '_, S>
for $name $(<$($T),+>)?
{
fn decode(r: &mut Reader<'a>, s: &mut S) -> Self {
fn decode(r: &mut &'a [u8], s: &mut S) -> Self {
// HACK(eddyb): `Tag` enum duplicated between the
// two impls as there's no other place to stash it.
#[allow(non_upper_case_globals)]
#[allow(non_upper_case_globals, non_camel_case_types)]
mod tag {
#[repr(u8)] enum Tag { $($variant),* }
@ -95,21 +95,21 @@ macro_rules! rpc_encode_decode {
}
impl<S> Encode<S> for () {
fn encode(self, _: &mut Writer, _: &mut S) {}
fn encode(self, _: &mut Buffer, _: &mut S) {}
}
impl<S> Decode<'_, '_, S> for () {
fn decode(_: &mut Reader<'_>, _: &mut S) -> Self {}
fn decode(_: &mut &[u8], _: &mut S) -> Self {}
}
impl<S> Encode<S> for u8 {
fn encode(self, w: &mut Writer, _: &mut S) {
fn encode(self, w: &mut Buffer, _: &mut S) {
w.push(self);
}
}
impl<S> Decode<'_, '_, S> for u8 {
fn decode(r: &mut Reader<'_>, _: &mut S) -> Self {
fn decode(r: &mut &[u8], _: &mut S) -> Self {
let x = r[0];
*r = &r[1..];
x
@ -120,13 +120,13 @@ rpc_encode_decode!(le u32);
rpc_encode_decode!(le usize);
impl<S> Encode<S> for bool {
fn encode(self, w: &mut Writer, s: &mut S) {
fn encode(self, w: &mut Buffer, s: &mut S) {
(self as u8).encode(w, s);
}
}
impl<S> Decode<'_, '_, S> for bool {
fn decode(r: &mut Reader<'_>, s: &mut S) -> Self {
fn decode(r: &mut &[u8], s: &mut S) -> Self {
match u8::decode(r, s) {
0 => false,
1 => true,
@ -135,32 +135,20 @@ impl<S> Decode<'_, '_, S> for bool {
}
}
impl<S> Encode<S> for char {
fn encode(self, w: &mut Writer, s: &mut S) {
(self as u32).encode(w, s);
}
}
impl<S> Decode<'_, '_, S> for char {
fn decode(r: &mut Reader<'_>, s: &mut S) -> Self {
char::from_u32(u32::decode(r, s)).unwrap()
}
}
impl<S> Encode<S> for NonZero<u32> {
fn encode(self, w: &mut Writer, s: &mut S) {
fn encode(self, w: &mut Buffer, s: &mut S) {
self.get().encode(w, s);
}
}
impl<S> Decode<'_, '_, S> for NonZero<u32> {
fn decode(r: &mut Reader<'_>, s: &mut S) -> Self {
fn decode(r: &mut &[u8], s: &mut S) -> Self {
Self::new(u32::decode(r, s)).unwrap()
}
}
impl<S, A: Encode<S>, B: Encode<S>> Encode<S> for (A, B) {
fn encode(self, w: &mut Writer, s: &mut S) {
fn encode(self, w: &mut Buffer, s: &mut S) {
self.0.encode(w, s);
self.1.encode(w, s);
}
@ -169,53 +157,42 @@ impl<S, A: Encode<S>, B: Encode<S>> Encode<S> for (A, B) {
impl<'a, S, A: for<'s> Decode<'a, 's, S>, B: for<'s> Decode<'a, 's, S>> Decode<'a, '_, S>
for (A, B)
{
fn decode(r: &mut Reader<'a>, s: &mut S) -> Self {
fn decode(r: &mut &'a [u8], s: &mut S) -> Self {
(Decode::decode(r, s), Decode::decode(r, s))
}
}
impl<S> Encode<S> for &[u8] {
fn encode(self, w: &mut Writer, s: &mut S) {
self.len().encode(w, s);
w.write_all(self).unwrap();
}
}
impl<'a, S> Decode<'a, '_, S> for &'a [u8] {
fn decode(r: &mut Reader<'a>, s: &mut S) -> Self {
let len = usize::decode(r, s);
let xs = &r[..len];
*r = &r[len..];
xs
}
}
impl<S> Encode<S> for &str {
fn encode(self, w: &mut Writer, s: &mut S) {
self.as_bytes().encode(w, s);
fn encode(self, w: &mut Buffer, s: &mut S) {
let bytes = self.as_bytes();
bytes.len().encode(w, s);
w.write_all(bytes).unwrap();
}
}
impl<'a, S> Decode<'a, '_, S> for &'a str {
fn decode(r: &mut Reader<'a>, s: &mut S) -> Self {
str::from_utf8(<&[u8]>::decode(r, s)).unwrap()
fn decode(r: &mut &'a [u8], s: &mut S) -> Self {
let len = usize::decode(r, s);
let xs = &r[..len];
*r = &r[len..];
str::from_utf8(xs).unwrap()
}
}
impl<S> Encode<S> for String {
fn encode(self, w: &mut Writer, s: &mut S) {
fn encode(self, w: &mut Buffer, s: &mut S) {
self[..].encode(w, s);
}
}
impl<S> Decode<'_, '_, S> for String {
fn decode(r: &mut Reader<'_>, s: &mut S) -> Self {
fn decode(r: &mut &[u8], s: &mut S) -> Self {
<&str>::decode(r, s).to_string()
}
}
impl<S, T: Encode<S>> Encode<S> for Vec<T> {
fn encode(self, w: &mut Writer, s: &mut S) {
fn encode(self, w: &mut Buffer, s: &mut S) {
self.len().encode(w, s);
for x in self {
x.encode(w, s);
@ -224,7 +201,7 @@ impl<S, T: Encode<S>> Encode<S> for Vec<T> {
}
impl<'a, S, T: for<'s> Decode<'a, 's, S>> Decode<'a, '_, S> for Vec<T> {
fn decode(r: &mut Reader<'a>, s: &mut S) -> Self {
fn decode(r: &mut &'a [u8], s: &mut S) -> Self {
let len = usize::decode(r, s);
let mut vec = Vec::with_capacity(len);
for _ in 0..len {
@ -278,13 +255,13 @@ impl PanicMessage {
}
impl<S> Encode<S> for PanicMessage {
fn encode(self, w: &mut Writer, s: &mut S) {
fn encode(self, w: &mut Buffer, s: &mut S) {
self.as_str().encode(w, s);
}
}
impl<S> Decode<'_, '_, S> for PanicMessage {
fn decode(r: &mut Reader<'_>, s: &mut S) -> Self {
fn decode(r: &mut &[u8], s: &mut S) -> Self {
match Option::<String>::decode(r, s) {
Some(s) => PanicMessage::String(s),
None => PanicMessage::Unknown,

View file

@ -5,12 +5,12 @@ use std::marker::PhantomData;
use super::*;
pub(super) struct HandleStore<S: Types> {
token_stream: handle::OwnedStore<Marked<S::TokenStream, client::TokenStream>>,
span: handle::InternedStore<Marked<S::Span, client::Span>>,
pub(super) struct HandleStore<S: Server> {
token_stream: handle::OwnedStore<MarkedTokenStream<S>>,
span: handle::InternedStore<MarkedSpan<S>>,
}
impl<S: Types> HandleStore<S> {
impl<S: Server> HandleStore<S> {
fn new(handle_counters: &'static client::HandleCounters) -> Self {
HandleStore {
token_stream: handle::OwnedStore::new(&handle_counters.token_stream),
@ -19,52 +19,54 @@ impl<S: Types> HandleStore<S> {
}
}
impl<S: Types> Encode<HandleStore<S>> for Marked<S::TokenStream, client::TokenStream> {
fn encode(self, w: &mut Writer, s: &mut HandleStore<S>) {
pub(super) type MarkedTokenStream<S> = Marked<<S as Server>::TokenStream, client::TokenStream>;
pub(super) type MarkedSpan<S> = Marked<<S as Server>::Span, client::Span>;
pub(super) type MarkedSymbol<S> = Marked<<S as Server>::Symbol, client::Symbol>;
impl<S: Server> Encode<HandleStore<S>> for MarkedTokenStream<S> {
fn encode(self, w: &mut Buffer, s: &mut HandleStore<S>) {
s.token_stream.alloc(self).encode(w, s);
}
}
impl<S: Types> Decode<'_, '_, HandleStore<S>> for Marked<S::TokenStream, client::TokenStream> {
fn decode(r: &mut Reader<'_>, s: &mut HandleStore<S>) -> Self {
impl<S: Server> Decode<'_, '_, HandleStore<S>> for MarkedTokenStream<S> {
fn decode(r: &mut &[u8], s: &mut HandleStore<S>) -> Self {
s.token_stream.take(handle::Handle::decode(r, &mut ()))
}
}
impl<'s, S: Types> Decode<'_, 's, HandleStore<S>>
for &'s Marked<S::TokenStream, client::TokenStream>
{
fn decode(r: &mut Reader<'_>, s: &'s mut HandleStore<S>) -> Self {
impl<'s, S: Server> Decode<'_, 's, HandleStore<S>> for &'s MarkedTokenStream<S> {
fn decode(r: &mut &[u8], s: &'s mut HandleStore<S>) -> Self {
&s.token_stream[handle::Handle::decode(r, &mut ())]
}
}
impl<S: Types> Encode<HandleStore<S>> for Marked<S::Span, client::Span> {
fn encode(self, w: &mut Writer, s: &mut HandleStore<S>) {
impl<S: Server> Encode<HandleStore<S>> for MarkedSpan<S> {
fn encode(self, w: &mut Buffer, s: &mut HandleStore<S>) {
s.span.alloc(self).encode(w, s);
}
}
impl<S: Types> Decode<'_, '_, HandleStore<S>> for Marked<S::Span, client::Span> {
fn decode(r: &mut Reader<'_>, s: &mut HandleStore<S>) -> Self {
impl<S: Server> Decode<'_, '_, HandleStore<S>> for MarkedSpan<S> {
fn decode(r: &mut &[u8], s: &mut HandleStore<S>) -> Self {
s.span.copy(handle::Handle::decode(r, &mut ()))
}
}
pub trait Types {
type TokenStream: 'static + Clone;
type Span: 'static + Copy + Eq + Hash;
type Symbol: 'static;
struct Dispatcher<S: Server> {
handle_store: HandleStore<S>,
server: S,
}
macro_rules! declare_server_traits {
macro_rules! define_server_dispatcher_impl {
(
Methods {
$(fn $method:ident($($arg:ident: $arg_ty:ty),* $(,)?) $(-> $ret_ty:ty)*;)*
},
$($name:ident),* $(,)?
$(fn $method:ident($($arg:ident: $arg_ty:ty),* $(,)?) $(-> $ret_ty:ty)*;)*
) => {
pub trait Server: Types {
pub trait Server {
type TokenStream: 'static + Clone;
type Span: 'static + Copy + Eq + Hash;
type Symbol: 'static;
fn globals(&mut self) -> ExpnGlobals<Self::Span>;
/// Intern a symbol received from RPC
@ -75,39 +77,28 @@ macro_rules! declare_server_traits {
$(fn $method(&mut self, $($arg: $arg_ty),*) $(-> $ret_ty)?;)*
}
}
}
with_api!(Self, self_, declare_server_traits);
struct Dispatcher<S: Types> {
handle_store: HandleStore<S>,
server: S,
}
macro_rules! define_dispatcher_impl {
(
Methods {
$(fn $method:ident($($arg:ident: $arg_ty:ty),* $(,)?) $(-> $ret_ty:ty)*;)*
},
$($name:ident),* $(,)?
) => {
// FIXME(eddyb) `pub` only for `ExecutionStrategy` below.
pub trait DispatcherTrait {
// HACK(eddyb) these are here to allow `Self::$name` to work below.
$(type $name;)*
type TokenStream;
type Span;
type Symbol;
fn dispatch(&mut self, buf: Buffer) -> Buffer;
}
impl<S: Server> DispatcherTrait for Dispatcher<S> {
$(type $name = Marked<S::$name, client::$name>;)*
type TokenStream = MarkedTokenStream<S>;
type Span = MarkedSpan<S>;
type Symbol = MarkedSymbol<S>;
fn dispatch(&mut self, mut buf: Buffer) -> Buffer {
let Dispatcher { handle_store, server } = self;
let mut reader = &buf[..];
match api_tags::Method::decode(&mut reader, &mut ()) {
$(api_tags::Method::$method => {
match ApiTags::decode(&mut reader, &mut ()) {
$(ApiTags::$method => {
let mut call_method = || {
$(let $arg = <$arg_ty>::decode(&mut reader, handle_store).unmark();)*
let r = server.$method($($arg),*);
@ -136,7 +127,7 @@ macro_rules! define_dispatcher_impl {
}
}
}
with_api!(Self, self_, define_dispatcher_impl);
with_api!(Self, define_server_dispatcher_impl);
pub trait ExecutionStrategy {
fn run_bridge_and_client(
@ -303,7 +294,7 @@ fn run_server<
let globals = dispatcher.server.globals();
let mut buf = Buffer::new();
(<ExpnGlobals<Marked<S::Span, client::Span>> as Mark>::mark(globals), input)
(<ExpnGlobals<MarkedSpan<S>> as Mark>::mark(globals), input)
.encode(&mut buf, &mut dispatcher.handle_store);
buf = strategy.run_bridge_and_client(&mut dispatcher, buf, run_client, force_show_panics);
@ -328,13 +319,11 @@ impl client::Client<crate::TokenStream, crate::TokenStream> {
strategy,
handle_counters,
server,
<Marked<S::TokenStream, client::TokenStream>>::mark(input),
<MarkedTokenStream<S>>::mark(input),
run,
force_show_panics,
)
.map(|s| {
<Option<Marked<S::TokenStream, client::TokenStream>>>::unmark(s).unwrap_or_default()
})
.map(|s| <Option<MarkedTokenStream<S>>>::unmark(s).unwrap_or_default())
}
}
@ -356,15 +345,10 @@ impl client::Client<(crate::TokenStream, crate::TokenStream), crate::TokenStream
strategy,
handle_counters,
server,
(
<Marked<S::TokenStream, client::TokenStream>>::mark(input),
<Marked<S::TokenStream, client::TokenStream>>::mark(input2),
),
(<MarkedTokenStream<S>>::mark(input), <MarkedTokenStream<S>>::mark(input2)),
run,
force_show_panics,
)
.map(|s| {
<Option<Marked<S::TokenStream, client::TokenStream>>>::unmark(s).unwrap_or_default()
})
.map(|s| <Option<MarkedTokenStream<S>>>::unmark(s).unwrap_or_default())
}
}

View file

@ -94,25 +94,25 @@ impl fmt::Display for Symbol {
}
impl<S> Encode<S> for Symbol {
fn encode(self, w: &mut Writer, s: &mut S) {
fn encode(self, w: &mut Buffer, s: &mut S) {
self.with(|sym| sym.encode(w, s))
}
}
impl<S: server::Server> Decode<'_, '_, server::HandleStore<S>> for Marked<S::Symbol, Symbol> {
fn decode(r: &mut Reader<'_>, s: &mut server::HandleStore<S>) -> Self {
impl<S: server::Server> Decode<'_, '_, server::HandleStore<S>> for server::MarkedSymbol<S> {
fn decode(r: &mut &[u8], s: &mut server::HandleStore<S>) -> Self {
Mark::mark(S::intern_symbol(<&str>::decode(r, s)))
}
}
impl<S: server::Server> Encode<server::HandleStore<S>> for Marked<S::Symbol, Symbol> {
fn encode(self, w: &mut Writer, s: &mut server::HandleStore<S>) {
impl<S: server::Server> Encode<server::HandleStore<S>> for server::MarkedSymbol<S> {
fn encode(self, w: &mut Buffer, s: &mut server::HandleStore<S>) {
S::with_symbol_string(&self.unmark(), |sym| sym.encode(w, s))
}
}
impl<S> Decode<'_, '_, S> for Symbol {
fn decode(r: &mut Reader<'_>, s: &mut S) -> Self {
fn decode(r: &mut &[u8], s: &mut S) -> Self {
Symbol::new(<&str>::decode(r, s))
}
}

View file

@ -26,7 +26,7 @@ hashbrown = { version = "0.16.1", default-features = false, features = [
std_detect = { path = "../std_detect", public = true }
# Dependencies of the `backtrace` crate
rustc-demangle = { version = "0.1.24", features = ['rustc-dep-of-std'] }
rustc-demangle = { version = "0.1.27", features = ['rustc-dep-of-std'] }
[target.'cfg(not(all(windows, target_env = "msvc", not(target_vendor = "uwp"))))'.dependencies]
miniz_oxide = { version = "0.8.0", optional = true, default-features = false }
@ -55,8 +55,8 @@ object = { version = "0.37.1", default-features = false, optional = true, featur
'archive',
] }
[target.'cfg(any(windows, target_os = "cygwin"))'.dependencies.windows-targets]
path = "../windows_targets"
[target.'cfg(any(windows, target_os = "cygwin"))'.dependencies.windows-link]
path = "../windows_link"
[dev-dependencies]
rand = { version = "0.9.0", default-features = false, features = ["alloc"] }
@ -115,7 +115,6 @@ backtrace-trace-only = []
panic-unwind = ["dep:panic_unwind"]
compiler-builtins-c = ["alloc/compiler-builtins-c"]
compiler-builtins-mem = ["alloc/compiler-builtins-mem"]
compiler-builtins-no-f16-f128 = ["alloc/compiler-builtins-no-f16-f128"]
llvm-libunwind = ["unwind/llvm-libunwind"]
system-llvm-libunwind = ["unwind/system-llvm-libunwind"]
@ -130,7 +129,7 @@ llvm_enzyme = ["core/llvm_enzyme"]
# Enable using raw-dylib for Windows imports.
# This will eventually be the default.
windows_raw_dylib = ["windows-targets/windows_raw_dylib"]
windows_raw_dylib = ["windows-link/windows_raw_dylib"]
[package.metadata.fortanix-sgx]
# Maximum possible number of threads when testing

View file

@ -7,7 +7,6 @@
//!
//! [`collections`]: crate::collections
#[allow(deprecated)]
use super::{BuildHasher, Hasher, SipHasher13};
use crate::cell::Cell;
use crate::fmt;
@ -81,7 +80,6 @@ impl RandomState {
impl BuildHasher for RandomState {
type Hasher = DefaultHasher;
#[inline]
#[allow(deprecated)]
fn build_hasher(&self) -> DefaultHasher {
DefaultHasher(SipHasher13::new_with_keys(self.k0, self.k1))
}
@ -91,7 +89,6 @@ impl BuildHasher for RandomState {
///
/// The internal algorithm is not specified, and so it and its hashes should
/// not be relied upon over releases.
#[allow(deprecated)]
#[derive(Clone, Debug)]
#[stable(feature = "hashmap_build_hasher", since = "1.7.0")]
pub struct DefaultHasher(SipHasher13);
@ -104,7 +101,6 @@ impl DefaultHasher {
/// instances created through `new` or `default`.
#[stable(feature = "hashmap_default_hasher", since = "1.13.0")]
#[inline]
#[allow(deprecated)]
#[rustc_const_unstable(feature = "const_default", issue = "143894")]
#[must_use]
pub const fn new() -> DefaultHasher {

View file

@ -309,6 +309,7 @@
#![feature(staged_api)]
#![feature(stmt_expr_attributes)]
#![feature(strict_provenance_lints)]
#![feature(target_feature_inline_always)]
#![feature(thread_local)]
#![feature(try_blocks)]
#![feature(try_trait_v2)]

View file

@ -20,7 +20,7 @@ const HEAP_ZERO_MEMORY: u32 = 0x00000008;
// always return the same handle, which remains valid for the entire lifetime of the process.
//
// See https://docs.microsoft.com/windows/win32/api/heapapi/nf-heapapi-getprocessheap
windows_targets::link!("kernel32.dll" "system" fn GetProcessHeap() -> c::HANDLE);
windows_link::link!("kernel32.dll" "system" fn GetProcessHeap() -> c::HANDLE);
// Allocate a block of `dwBytes` bytes of memory from a given heap `hHeap`.
// The allocated memory may be uninitialized, or zeroed if `dwFlags` is
@ -36,7 +36,7 @@ windows_targets::link!("kernel32.dll" "system" fn GetProcessHeap() -> c::HANDLE)
// Note that `dwBytes` is allowed to be zero, contrary to some other allocators.
//
// See https://docs.microsoft.com/windows/win32/api/heapapi/nf-heapapi-heapalloc
windows_targets::link!("kernel32.dll" "system" fn HeapAlloc(hheap: c::HANDLE, dwflags: u32, dwbytes: usize) -> *mut c_void);
windows_link::link!("kernel32.dll" "system" fn HeapAlloc(hheap: c::HANDLE, dwflags: u32, dwbytes: usize) -> *mut c_void);
// Reallocate a block of memory behind a given pointer `lpMem` from a given heap `hHeap`,
// to a block of at least `dwBytes` bytes, either shrinking the block in place,
@ -57,7 +57,7 @@ windows_targets::link!("kernel32.dll" "system" fn HeapAlloc(hheap: c::HANDLE, dw
// Note that `dwBytes` is allowed to be zero, contrary to some other allocators.
//
// See https://docs.microsoft.com/windows/win32/api/heapapi/nf-heapapi-heaprealloc
windows_targets::link!("kernel32.dll" "system" fn HeapReAlloc(
windows_link::link!("kernel32.dll" "system" fn HeapReAlloc(
hheap: c::HANDLE,
dwflags : u32,
lpmem: *const c_void,
@ -78,7 +78,7 @@ windows_targets::link!("kernel32.dll" "system" fn HeapReAlloc(
// Note that `lpMem` is allowed to be null, which will not cause the operation to fail.
//
// See https://docs.microsoft.com/windows/win32/api/heapapi/nf-heapapi-heapfree
windows_targets::link!("kernel32.dll" "system" fn HeapFree(hheap: c::HANDLE, dwflags: u32, lpmem: *const c_void) -> c::BOOL);
windows_link::link!("kernel32.dll" "system" fn HeapFree(hheap: c::HANDLE, dwflags: u32, lpmem: *const c_void) -> c::BOOL);
fn get_process_heap() -> *mut c_void {
// SAFETY: GetProcessHeap simply returns a valid handle or NULL so is always safe to call.

View file

@ -580,7 +580,8 @@ mod uefi_fs {
use crate::path::Path;
use crate::ptr::NonNull;
use crate::sys::pal::helpers::{self, UefiBox};
use crate::sys::time::{self, SystemTime};
use crate::sys::pal::system_time;
use crate::sys::time::SystemTime;
pub(crate) struct File {
protocol: NonNull<file::Protocol>,
@ -879,7 +880,7 @@ mod uefi_fs {
/// conversion to SystemTime, we use the current time to get the timezone in such cases.
pub(crate) fn uefi_to_systemtime(mut time: r_efi::efi::Time) -> Option<SystemTime> {
time.timezone = if time.timezone == r_efi::efi::UNSPECIFIED_TIMEZONE {
time::system_time_internal::now().timezone
system_time::now().timezone
} else {
time.timezone
};
@ -888,7 +889,7 @@ mod uefi_fs {
/// Convert to UEFI Time with the current timezone.
pub(crate) fn systemtime_to_uefi(time: SystemTime) -> r_efi::efi::Time {
let now = time::system_time_internal::now();
let now = system_time::now();
time.to_uefi_loose(now.timezone, now.daylight)
}

View file

@ -1822,7 +1822,7 @@ impl File {
_ => {
#[cfg(all(target_os = "linux", target_env = "gnu", target_pointer_width = "32", not(target_arch = "riscv32")))]
{
use crate::sys::{time::__timespec64, weak::weak};
use crate::sys::pal::{time::__timespec64, weak::weak};
// Added in glibc 2.34
weak!(
@ -2258,7 +2258,7 @@ fn set_times_impl(p: &CStr, times: FileTimes, follow_symlinks: bool) -> io::Resu
let flags = if follow_symlinks { 0 } else { libc::AT_SYMLINK_NOFOLLOW };
#[cfg(all(target_os = "linux", target_env = "gnu", target_pointer_width = "32", not(target_arch = "riscv32")))]
{
use crate::sys::{time::__timespec64, weak::weak};
use crate::sys::pal::{time::__timespec64, weak::weak};
// Added in glibc 2.34
weak!(

View file

@ -26,6 +26,7 @@ pub mod stdio;
pub mod sync;
pub mod thread;
pub mod thread_local;
pub mod time;
// FIXME(117276): remove this, move feature implementations into individual
// submodules.

Some files were not shown because too many files have changed in this diff Show more