Auto merge of #150277 - JonathanBrouwer:rollup-gvmdw36, r=JonathanBrouwer

Rollup of 4 pull requests

Successful merges:

 - rust-lang/rust#150098 (remove `legacy_const_generic_args` cache)
 - rust-lang/rust#150155 (fix ICE when {{root}} appears in import suggestions)
 - rust-lang/rust#150267 (`rust-analyzer` subtree update)
 - rust-lang/rust#150274 (Fix typo)

r? `@ghost`
`@rustbot` modify labels: rollup
This commit is contained in:
bors 2025-12-22 17:03:07 +00:00
commit 04813e4de8
197 changed files with 8007 additions and 4399 deletions

View file

@ -1810,7 +1810,7 @@ pub enum ExprKind {
/// or a `gen` block (`gen move { ... }`).
///
/// The span is the "decl", which is the header before the body `{ }`
/// including the `asyng`/`gen` keywords and possibly `move`.
/// including the `async`/`gen` keywords and possibly `move`.
Gen(CaptureBy, Box<Block>, GenBlockKind, Span),
/// An await expression (`my_future.await`). Span is of await keyword.
Await(Box<Expr>, Span),

View file

@ -114,7 +114,8 @@ impl<'hir> LoweringContext<'_, 'hir> {
}
ExprKind::Tup(elts) => hir::ExprKind::Tup(self.lower_exprs(elts)),
ExprKind::Call(f, args) => {
if let Some(legacy_args) = self.resolver.legacy_const_generic_args(f) {
if let Some(legacy_args) = self.resolver.legacy_const_generic_args(f, self.tcx)
{
self.lower_legacy_const_generics((**f).clone(), args.clone(), &legacy_args)
} else {
let f = self.lower_expr(f);

View file

@ -47,13 +47,14 @@ use rustc_data_structures::stable_hasher::{HashStable, StableHasher};
use rustc_data_structures::sync::spawn;
use rustc_data_structures::tagged_ptr::TaggedRef;
use rustc_errors::{DiagArgFromDisplay, DiagCtxtHandle};
use rustc_hir::attrs::AttributeKind;
use rustc_hir::def::{DefKind, LifetimeRes, Namespace, PartialRes, PerNS, Res};
use rustc_hir::def_id::{CRATE_DEF_ID, LOCAL_CRATE, LocalDefId};
use rustc_hir::definitions::{DefPathData, DisambiguatorState};
use rustc_hir::lints::DelayedLint;
use rustc_hir::{
self as hir, AngleBrackets, ConstArg, GenericArg, HirId, ItemLocalMap, LifetimeSource,
LifetimeSyntax, ParamName, Target, TraitCandidate,
LifetimeSyntax, ParamName, Target, TraitCandidate, find_attr,
};
use rustc_index::{Idx, IndexSlice, IndexVec};
use rustc_macros::extension;
@ -236,7 +237,7 @@ impl SpanLowerer {
#[extension(trait ResolverAstLoweringExt)]
impl ResolverAstLowering {
fn legacy_const_generic_args(&self, expr: &Expr) -> Option<Vec<usize>> {
fn legacy_const_generic_args(&self, expr: &Expr, tcx: TyCtxt<'_>) -> Option<Vec<usize>> {
let ExprKind::Path(None, path) = &expr.kind else {
return None;
};
@ -256,11 +257,12 @@ impl ResolverAstLowering {
return None;
}
if let Some(v) = self.legacy_const_generic_args.get(&def_id) {
return v.clone();
}
None
find_attr!(
// we can use parsed attrs here since for other crates they're already available
tcx.get_all_attrs(def_id),
AttributeKind::RustcLegacyConstGenerics{fn_indexes,..} => fn_indexes
)
.map(|fn_indexes| fn_indexes.iter().map(|(num, _)| *num).collect())
}
fn get_partial_res(&self, id: NodeId) -> Option<PartialRes> {

View file

@ -31,7 +31,7 @@ use rustc_ast::AttrVec;
use rustc_ast::expand::typetree::{FncTree, Kind, Type, TypeTree};
use rustc_ast::node_id::NodeMap;
pub use rustc_ast_ir::{Movability, Mutability, try_visit};
use rustc_data_structures::fx::{FxHashMap, FxHashSet, FxIndexMap, FxIndexSet};
use rustc_data_structures::fx::{FxHashSet, FxIndexMap, FxIndexSet};
use rustc_data_structures::intern::Interned;
use rustc_data_structures::stable_hasher::{HashStable, StableHasher};
use rustc_data_structures::steal::Steal;
@ -196,7 +196,6 @@ pub struct ResolverGlobalCtxt {
/// This struct is meant to be consumed by lowering.
#[derive(Debug)]
pub struct ResolverAstLowering {
pub legacy_const_generic_args: FxHashMap<DefId, Option<Vec<usize>>>,
/// Resolutions for nodes that have a single resolution.
pub partial_res_map: NodeMap<hir::def::PartialRes>,
/// Resolutions for import nodes, which have multiple resolutions in different namespaces.

View file

@ -2223,7 +2223,11 @@ impl<'ra, 'tcx> Resolver<'ra, 'tcx> {
match binding.kind {
NameBindingKind::Import { import, .. } => {
for segment in import.module_path.iter().skip(1) {
path.push(segment.ident);
// Don't include `{{root}}` in suggestions - it's an internal symbol
// that should never be shown to users.
if segment.ident.name != kw::PathRoot {
path.push(segment.ident);
}
}
sugg_paths.push((
path.iter().cloned().chain(std::iter::once(ident)).collect::<Vec<_>>(),

View file

@ -1271,7 +1271,6 @@ pub struct Resolver<'ra, 'tcx> {
/// and how the `impl Trait` fragments were introduced.
invocation_parents: FxHashMap<LocalExpnId, InvocationParent>,
legacy_const_generic_args: FxHashMap<DefId, Option<Vec<usize>>>,
/// Amount of lifetime parameters for each item in the crate.
item_generics_num_lifetimes: FxHashMap<LocalDefId, usize>,
delegation_fn_sigs: LocalDefIdMap<DelegationFnSig>,
@ -1676,7 +1675,6 @@ impl<'ra, 'tcx> Resolver<'ra, 'tcx> {
node_id_to_def_id,
disambiguator: DisambiguatorState::new(),
placeholder_field_indices: Default::default(),
legacy_const_generic_args: Default::default(),
invocation_parents,
item_generics_num_lifetimes: Default::default(),
trait_impls: Default::default(),
@ -1807,7 +1805,6 @@ impl<'ra, 'tcx> Resolver<'ra, 'tcx> {
stripped_cfg_items,
};
let ast_lowering = ty::ResolverAstLowering {
legacy_const_generic_args: self.legacy_const_generic_args,
partial_res_map: self.partial_res_map,
import_res_map: self.import_res_map,
label_res_map: self.label_res_map,
@ -2416,15 +2413,12 @@ impl<'ra, 'tcx> Resolver<'ra, 'tcx> {
return None;
}
let indexes = find_attr!(
find_attr!(
// we can use parsed attrs here since for other crates they're already available
self.tcx.get_all_attrs(def_id),
AttributeKind::RustcLegacyConstGenerics{fn_indexes,..} => fn_indexes
)
.map(|fn_indexes| fn_indexes.iter().map(|(num, _)| *num).collect());
self.legacy_const_generic_args.insert(def_id, indexes.clone());
indexes
.map(|fn_indexes| fn_indexes.iter().map(|(num, _)| *num).collect())
}
fn resolve_main(&mut self) {

View file

@ -1,3 +1,8 @@
> [!IMPORTANT]
> We have enacted a feature freeze for IDE assists to cope with the PR backlog and to allow us to prepare for the rowan transition!
> If you submit a PR that **adds** new ide-assists, chances are very high that we will just close it on this basis alone until we have the capacity to deal with them again.
# Contributing to rust-analyzer
Thank you for your interest in contributing to rust-analyzer! There are many ways to contribute
@ -28,3 +33,11 @@ possibility of someone putting a lot of work into a feature that is then going t
it out of scope (be it due to generally not fitting in with rust-analyzer, or just not having the
maintenance capacity). If there already is a feature issue open but it is not clear whether it is
considered accepted feel free to just drop a comment and ask!
## Use of AI tools
AI tool use is not discouraged on the rust-analyzer codebase, as long as it meets our quality standards.
We kindly ask you to disclose usage of AI tools in your contributions.
If you used them without disclosing it, we may reject your contribution on that basis alone, under the assumption that you likely did not review your own submission (so why should we?).
We may still reject AI-assisted contributions if we deem the quality of the contribution unsatisfactory, so as to reduce the impact on the team's review budget.

View file

@ -788,6 +788,7 @@ dependencies = [
"itertools 0.14.0",
"ra-ap-rustc_type_ir",
"rustc-hash 2.1.1",
"serde_json",
"smallvec",
"span",
"stdx",
@ -901,6 +902,8 @@ dependencies = [
"rustc_apfloat",
"salsa",
"salsa-macros",
"serde",
"serde_derive",
"smallvec",
"span",
"stdx",
@ -1214,7 +1217,9 @@ version = "0.0.0"
dependencies = [
"dashmap",
"hashbrown 0.14.5",
"rayon",
"rustc-hash 2.1.1",
"smallvec",
"triomphe",
]
@ -2040,9 +2045,9 @@ checksum = "69cdb34c158ceb288df11e18b4bd39de994f6657d83847bdffdbd7f346754b0f"
[[package]]
name = "ra-ap-rustc_abi"
version = "0.139.0"
version = "0.143.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ce480c45c05462cf6b700468118201b00132613a968a1849da5f7a555c0f1db9"
checksum = "1d49dbe5d570793b3c3227972a6ac85fc3e830f09b32c3cb3b68cfceebad3b0a"
dependencies = [
"bitflags 2.9.4",
"ra-ap-rustc_hashes",
@ -2052,34 +2057,33 @@ dependencies = [
[[package]]
name = "ra-ap-rustc_ast_ir"
version = "0.139.0"
version = "0.143.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "453da2376de406d740ca28412a31ae3d5a6039cd45698c1c2fb01b577dff64ae"
checksum = "cd0956db62c264a899d15667993cbbd2e8f0b02108712217e2579c61ac30b94b"
[[package]]
name = "ra-ap-rustc_hashes"
version = "0.139.0"
version = "0.143.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "bf411a55deaa3ea348594c8273fb2d1200265bf87b881b40c62b32f75caf8323"
checksum = "7df512084c24f4c96c8cc9a59cbd264301efbc8913d3759b065398024af316c9"
dependencies = [
"rustc-stable-hash",
]
[[package]]
name = "ra-ap-rustc_index"
version = "0.139.0"
version = "0.143.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1d0dd4cf1417ea8a809e9e7bf296c6ce6e05b75b043483872d1bd2951a08142c"
checksum = "bca3a49a928d38ba7927605e5909b6abe77d09ff359e4695c070c3f91d69cc8a"
dependencies = [
"ra-ap-rustc_index_macros",
"smallvec",
]
[[package]]
name = "ra-ap-rustc_index_macros"
version = "0.139.0"
version = "0.143.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a1b0d218fb91f8969716a962142c722d88b3cd3fd1f7ef03093261bf37e85dfd"
checksum = "4463e908a62c64c2a65c1966c2f4995d0e1f8b7dfc85a8b8de2562edf3d89070"
dependencies = [
"proc-macro2",
"quote",
@ -2088,9 +2092,9 @@ dependencies = [
[[package]]
name = "ra-ap-rustc_lexer"
version = "0.139.0"
version = "0.143.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5ec7c26e92c44d5433b29cf661faf0027e263b70a411d0f28996bd67e3bdb57e"
checksum = "228e01e1b237adb4bd8793487e1c37019c1e526a8f93716d99602301be267056"
dependencies = [
"memchr",
"unicode-properties",
@ -2099,9 +2103,9 @@ dependencies = [
[[package]]
name = "ra-ap-rustc_next_trait_solver"
version = "0.139.0"
version = "0.143.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "029686fdbc8a058cf3d81ad157e1cdc81a37b9de0400289ccb86a62465484313"
checksum = "10d6f91143011d474bb844d268b0784c6a4c6db57743558b83f5ad34511627f1"
dependencies = [
"derive-where",
"ra-ap-rustc_index",
@ -2112,9 +2116,9 @@ dependencies = [
[[package]]
name = "ra-ap-rustc_parse_format"
version = "0.139.0"
version = "0.143.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "509d279f1e87acc33476da3fbd05a6054e9ffeb4427cb38ba01b9d2656aec268"
checksum = "37fa8effbc436c0ddd9d7b1421aa3cccf8b94566c841c4e4aa3e09063b8f423f"
dependencies = [
"ra-ap-rustc_lexer",
"rustc-literal-escaper 0.0.5",
@ -2122,9 +2126,9 @@ dependencies = [
[[package]]
name = "ra-ap-rustc_pattern_analysis"
version = "0.139.0"
version = "0.143.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9bb2c9930854314b03bd7aab060a14bca6f194b76381a4c309e3905ec3a02bbc"
checksum = "883c843fc27847ad03b8e772dd4a2d2728af4333a6d6821a22dfcfe7136dff3e"
dependencies = [
"ra-ap-rustc_index",
"rustc-hash 2.1.1",
@ -2135,9 +2139,9 @@ dependencies = [
[[package]]
name = "ra-ap-rustc_type_ir"
version = "0.139.0"
version = "0.143.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0e4a92a3e4dbdebb0d4c9caceb52eff45c4df784d21fb2da90dac50e218f95c0"
checksum = "a86e33c46b2b261a173b23f207461a514812a8b2d2d7935bbc685f733eacce10"
dependencies = [
"arrayvec",
"bitflags 2.9.4",
@ -2155,9 +2159,9 @@ dependencies = [
[[package]]
name = "ra-ap-rustc_type_ir_macros"
version = "0.139.0"
version = "0.143.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ca368eca2472367f2e6fdfb431c8342e99d848e4ce89cb20dd3b3bdcc43cbc28"
checksum = "15034c2fcaa5cf302aea6db20eda0f71fffeb0b372d6073cc50f940e974a2a47"
dependencies = [
"proc-macro2",
"quote",
@ -2445,9 +2449,9 @@ checksum = "28d3b2b1366ec20994f1fd18c3c594f05c5dd4bc44d8bb0c1c632c8d6829481f"
[[package]]
name = "salsa"
version = "0.24.0"
version = "0.25.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "27956164373aeec733ac24ff1736de8541234e3a8e7e6f916b28175b5752af3b"
checksum = "e2e2aa2fca57727371eeafc975acc8e6f4c52f8166a78035543f6ee1c74c2dcc"
dependencies = [
"boxcar",
"crossbeam-queue",
@ -2470,15 +2474,15 @@ dependencies = [
[[package]]
name = "salsa-macro-rules"
version = "0.24.0"
version = "0.25.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6ca3b9d6e47c08b5de4b218e0c5f7ec910b51bce6314e651c8e7b9d154d174da"
checksum = "1bfc2a1e7bf06964105515451d728f2422dedc3a112383324a00b191a5c397a3"
[[package]]
name = "salsa-macros"
version = "0.24.0"
version = "0.25.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6337b62f2968be6b8afa30017d7564ecbde6832ada47ed2261fb14d0fd402ff4"
checksum = "3d844c1aa34946da46af683b5c27ec1088a3d9d84a2b837a108223fd830220e1"
dependencies = [
"proc-macro2",
"quote",

View file

@ -86,14 +86,14 @@ vfs-notify = { path = "./crates/vfs-notify", version = "0.0.0" }
vfs = { path = "./crates/vfs", version = "0.0.0" }
edition = { path = "./crates/edition", version = "0.0.0" }
ra-ap-rustc_lexer = { version = "0.139", default-features = false }
ra-ap-rustc_parse_format = { version = "0.139", default-features = false }
ra-ap-rustc_index = { version = "0.139", default-features = false }
ra-ap-rustc_abi = { version = "0.139", default-features = false }
ra-ap-rustc_pattern_analysis = { version = "0.139", default-features = false }
ra-ap-rustc_ast_ir = { version = "0.139", default-features = false }
ra-ap-rustc_type_ir = { version = "0.139", default-features = false }
ra-ap-rustc_next_trait_solver = { version = "0.139", default-features = false }
ra-ap-rustc_lexer = { version = "0.143", default-features = false }
ra-ap-rustc_parse_format = { version = "0.143", default-features = false }
ra-ap-rustc_index = { version = "0.143", default-features = false }
ra-ap-rustc_abi = { version = "0.143", default-features = false }
ra-ap-rustc_pattern_analysis = { version = "0.143", default-features = false }
ra-ap-rustc_ast_ir = { version = "0.143", default-features = false }
ra-ap-rustc_type_ir = { version = "0.143", default-features = false }
ra-ap-rustc_next_trait_solver = { version = "0.143", default-features = false }
# local crates that aren't published to crates.io. These should not have versions.
@ -135,13 +135,13 @@ rayon = "1.10.0"
rowan = "=0.15.17"
# Ideally we'd not enable the macros feature but unfortunately the `tracked` attribute does not work
# on impls without it
salsa = { version = "0.24.0", default-features = false, features = [
salsa = { version = "0.25.2", default-features = false, features = [
"rayon",
"salsa_unstable",
"macros",
"inventory",
] }
salsa-macros = "0.24.0"
salsa-macros = "0.25.2"
semver = "1.0.26"
serde = { version = "1.0.219" }
serde_derive = { version = "1.0.219" }

View file

@ -351,6 +351,8 @@ pub struct CrateData<Id> {
/// declared in source via `extern crate test`.
pub dependencies: Vec<Dependency<Id>>,
pub origin: CrateOrigin,
/// Extra crate-level attributes, including the surrounding `#![]`.
pub crate_attrs: Box<[Box<str>]>,
pub is_proc_macro: bool,
/// The working directory to run proc-macros in invoked in the context of this crate.
/// This is the workspace root of the cargo workspace for workspace members, the crate manifest
@ -465,7 +467,7 @@ impl Crate {
/// including the crate itself.
///
/// **Warning**: do not use this query in `hir-*` crates! It kills incrementality across crate metadata modifications.
pub fn transitive_deps(self, db: &dyn salsa::Database) -> Box<[Crate]> {
pub fn transitive_deps(self, db: &dyn salsa::Database) -> Vec<Crate> {
// There is a bit of duplication here and in `CrateGraphBuilder` in the same method, but it's not terrible
// and removing that is a bit difficult.
let mut worklist = vec![self];
@ -480,7 +482,7 @@ impl Crate {
worklist.extend(krate.data(db).dependencies.iter().map(|dep| dep.crate_id));
}
deps.into_boxed_slice()
deps
}
/// Returns all transitive reverse dependencies of the given crate,
@ -530,6 +532,7 @@ impl CrateGraphBuilder {
mut potential_cfg_options: Option<CfgOptions>,
mut env: Env,
origin: CrateOrigin,
crate_attrs: Vec<String>,
is_proc_macro: bool,
proc_macro_cwd: Arc<AbsPathBuf>,
ws_data: Arc<CrateWorkspaceData>,
@ -539,12 +542,17 @@ impl CrateGraphBuilder {
if let Some(potential_cfg_options) = &mut potential_cfg_options {
potential_cfg_options.shrink_to_fit();
}
let crate_attrs: Vec<_> = crate_attrs
.into_iter()
.map(|raw_attr| format!("#![{raw_attr}]").into_boxed_str())
.collect();
self.arena.alloc(CrateBuilder {
basic: CrateData {
root_file_id,
edition,
dependencies: Vec::new(),
origin,
crate_attrs: crate_attrs.into_boxed_slice(),
is_proc_macro,
proc_macro_cwd,
},
@ -648,6 +656,7 @@ impl CrateGraphBuilder {
edition: krate.basic.edition,
is_proc_macro: krate.basic.is_proc_macro,
origin: krate.basic.origin.clone(),
crate_attrs: krate.basic.crate_attrs.clone(),
root_file_id: krate.basic.root_file_id,
proc_macro_cwd: krate.basic.proc_macro_cwd.clone(),
};
@ -975,6 +984,7 @@ mod tests {
Default::default(),
Env::default(),
CrateOrigin::Local { repo: None, name: None },
Vec::new(),
false,
Arc::new(AbsPathBuf::assert_utf8(std::env::current_dir().unwrap())),
empty_ws_data(),
@ -988,6 +998,7 @@ mod tests {
Default::default(),
Env::default(),
CrateOrigin::Local { repo: None, name: None },
Vec::new(),
false,
Arc::new(AbsPathBuf::assert_utf8(std::env::current_dir().unwrap())),
empty_ws_data(),
@ -1001,6 +1012,7 @@ mod tests {
Default::default(),
Env::default(),
CrateOrigin::Local { repo: None, name: None },
Vec::new(),
false,
Arc::new(AbsPathBuf::assert_utf8(std::env::current_dir().unwrap())),
empty_ws_data(),
@ -1034,6 +1046,7 @@ mod tests {
Default::default(),
Env::default(),
CrateOrigin::Local { repo: None, name: None },
Vec::new(),
false,
Arc::new(AbsPathBuf::assert_utf8(std::env::current_dir().unwrap())),
empty_ws_data(),
@ -1047,6 +1060,7 @@ mod tests {
Default::default(),
Env::default(),
CrateOrigin::Local { repo: None, name: None },
Vec::new(),
false,
Arc::new(AbsPathBuf::assert_utf8(std::env::current_dir().unwrap())),
empty_ws_data(),
@ -1075,6 +1089,7 @@ mod tests {
Default::default(),
Env::default(),
CrateOrigin::Local { repo: None, name: None },
Vec::new(),
false,
Arc::new(AbsPathBuf::assert_utf8(std::env::current_dir().unwrap())),
empty_ws_data(),
@ -1088,6 +1103,7 @@ mod tests {
Default::default(),
Env::default(),
CrateOrigin::Local { repo: None, name: None },
Vec::new(),
false,
Arc::new(AbsPathBuf::assert_utf8(std::env::current_dir().unwrap())),
empty_ws_data(),
@ -1101,6 +1117,7 @@ mod tests {
Default::default(),
Env::default(),
CrateOrigin::Local { repo: None, name: None },
Vec::new(),
false,
Arc::new(AbsPathBuf::assert_utf8(std::env::current_dir().unwrap())),
empty_ws_data(),
@ -1129,6 +1146,7 @@ mod tests {
Default::default(),
Env::default(),
CrateOrigin::Local { repo: None, name: None },
Vec::new(),
false,
Arc::new(AbsPathBuf::assert_utf8(std::env::current_dir().unwrap())),
empty_ws_data(),
@ -1142,6 +1160,7 @@ mod tests {
Default::default(),
Env::default(),
CrateOrigin::Local { repo: None, name: None },
Vec::new(),
false,
Arc::new(AbsPathBuf::assert_utf8(std::env::current_dir().unwrap())),
empty_ws_data(),

View file

@ -56,6 +56,36 @@ pub enum CfgExpr {
Not(Box<CfgExpr>),
}
impl fmt::Display for CfgExpr {
    /// Renders the expression back in `cfg` syntax, e.g. `all(unix, not(test))`.
    /// The `Invalid` variant renders as the literal word `invalid`.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        // Writes `name(a, b, …)` for the list-shaped variants. Shared by the
        // `All` and `Any` arms so the comma-separator logic lives in one place
        // instead of being duplicated per variant.
        fn fmt_list(f: &mut fmt::Formatter<'_>, name: &str, exprs: &[CfgExpr]) -> fmt::Result {
            write!(f, "{name}(")?;
            for (i, expr) in exprs.iter().enumerate() {
                if i > 0 {
                    write!(f, ", ")?;
                }
                expr.fmt(f)?;
            }
            write!(f, ")")
        }
        match self {
            CfgExpr::Atom(atom) => atom.fmt(f),
            CfgExpr::All(exprs) => fmt_list(f, "all", exprs),
            CfgExpr::Any(exprs) => fmt_list(f, "any", exprs),
            CfgExpr::Not(expr) => write!(f, "not({expr})"),
            CfgExpr::Invalid => write!(f, "invalid"),
        }
    }
}
impl From<CfgAtom> for CfgExpr {
fn from(atom: CfgAtom) -> Self {
CfgExpr::Atom(atom)

View file

@ -39,7 +39,7 @@ use rustc_abi::ReprOptions;
use rustc_hash::FxHashSet;
use smallvec::SmallVec;
use syntax::{
AstNode, AstToken, NodeOrToken, SmolStr, SyntaxNode, SyntaxToken, T,
AstNode, AstToken, NodeOrToken, SmolStr, SourceFile, SyntaxNode, SyntaxToken, T,
ast::{self, AttrDocCommentIter, HasAttrs, IsString, TokenTreeChildren},
};
use tt::{TextRange, TextSize};
@ -292,35 +292,69 @@ bitflags::bitflags! {
}
}
/// Parses the extra crate-level attributes of `krate` — the ones injected via
/// `CrateData::crate_attrs`, each already wrapped in `#![...]` — into a
/// synthetic `SourceFile`.
///
/// Returns `None` when there are no extra attributes, or when the concatenated
/// attribute text fails to parse. In the error case, each attribute that a
/// parse error falls into is logged via `tracing` and the whole set is
/// discarded.
pub fn parse_extra_crate_attrs(db: &dyn DefDatabase, krate: Crate) -> Option<SourceFile> {
    let crate_data = krate.data(db);
    let crate_attrs = &crate_data.crate_attrs;
    if crate_attrs.is_empty() {
        return None;
    }
    // All attributes are already enclosed in `#![]`.
    let combined = crate_attrs.concat();
    let p = SourceFile::parse(&combined, crate_data.edition);
    let errs = p.errors();
    if !errs.is_empty() {
        let base_msg = "Failed to parse extra crate-level attribute";
        let crate_name =
            krate.extra_data(db).display_name.as_ref().map_or("{unknown}", |name| name.as_str());
        // Walk the attributes in source order, attributing each parse error to
        // the attribute whose byte range it starts in.
        // NOTE(review): this relies on `errors()` being ordered by position in
        // the source — confirm against the parser's contract.
        let mut errs = errs.iter().peekable();
        let mut offset = TextSize::from(0);
        for raw_attr in crate_attrs {
            let attr_end = offset + TextSize::of(&**raw_attr);
            // Consume all errors starting before this attribute's end; any hit
            // means this particular attribute is malformed, so log it.
            if errs.peeking_take_while(|e| e.range().start() < attr_end).count() > 0 {
                tracing::error!("{base_msg} {raw_attr} for crate {crate_name}");
            }
            offset = attr_end
        }
        return None;
    }
    Some(p.tree())
}
fn attrs_source(
db: &dyn DefDatabase,
owner: AttrDefId,
) -> (InFile<ast::AnyHasAttrs>, Option<InFile<ast::Module>>, Crate) {
) -> (InFile<ast::AnyHasAttrs>, Option<InFile<ast::Module>>, Option<SourceFile>, Crate) {
let (owner, krate) = match owner {
AttrDefId::ModuleId(id) => {
let def_map = id.def_map(db);
let (definition, declaration) = match def_map[id].origin {
let krate = def_map.krate();
let (definition, declaration, extra_crate_attrs) = match def_map[id].origin {
ModuleOrigin::CrateRoot { definition } => {
let file = db.parse(definition).tree();
(InFile::new(definition.into(), ast::AnyHasAttrs::from(file)), None)
let definition_source = db.parse(definition).tree();
let definition = InFile::new(definition.into(), definition_source.into());
let extra_crate_attrs = parse_extra_crate_attrs(db, krate);
(definition, None, extra_crate_attrs)
}
ModuleOrigin::File { declaration, declaration_tree_id, definition, .. } => {
let definition_source = db.parse(definition).tree();
let definition = InFile::new(definition.into(), definition_source.into());
let declaration = InFile::new(declaration_tree_id.file_id(), declaration);
let declaration = declaration.with_value(declaration.to_node(db));
let definition_source = db.parse(definition).tree();
(InFile::new(definition.into(), definition_source.into()), Some(declaration))
(definition, Some(declaration), None)
}
ModuleOrigin::Inline { definition_tree_id, definition } => {
let definition = InFile::new(definition_tree_id.file_id(), definition);
let definition = definition.with_value(definition.to_node(db).into());
(definition, None)
(definition, None, None)
}
ModuleOrigin::BlockExpr { block, .. } => {
let definition = block.to_node(db);
(block.with_value(definition.into()), None)
(block.with_value(definition.into()), None, None)
}
};
return (definition, declaration, def_map.krate());
return (definition, declaration, extra_crate_attrs, krate);
}
AttrDefId::AdtId(AdtId::StructId(it)) => attrs_from_ast_id_loc(db, it),
AttrDefId::AdtId(AdtId::UnionId(it)) => attrs_from_ast_id_loc(db, it),
@ -339,7 +373,7 @@ fn attrs_source(
AttrDefId::ExternCrateId(it) => attrs_from_ast_id_loc(db, it),
AttrDefId::UseId(it) => attrs_from_ast_id_loc(db, it),
};
(owner, None, krate)
(owner, None, None, krate)
}
fn collect_attrs<BreakValue>(
@ -347,14 +381,15 @@ fn collect_attrs<BreakValue>(
owner: AttrDefId,
mut callback: impl FnMut(Meta) -> ControlFlow<BreakValue>,
) -> Option<BreakValue> {
let (source, outer_mod_decl, krate) = attrs_source(db, owner);
let (source, outer_mod_decl, extra_crate_attrs, krate) = attrs_source(db, owner);
let extra_attrs = extra_crate_attrs
.into_iter()
.flat_map(|src| src.attrs())
.chain(outer_mod_decl.into_iter().flat_map(|it| it.value.attrs()));
let mut cfg_options = None;
expand_cfg_attr(
outer_mod_decl
.into_iter()
.flat_map(|it| it.value.attrs())
.chain(ast::attrs_including_inner(&source.value)),
extra_attrs.chain(ast::attrs_including_inner(&source.value)),
|| cfg_options.get_or_insert_with(|| krate.cfg_options(db)),
move |meta, _, _, _| callback(meta),
)
@ -1013,10 +1048,12 @@ impl AttrFlags {
pub fn doc_html_root_url(db: &dyn DefDatabase, krate: Crate) -> Option<SmolStr> {
let root_file_id = krate.root_file_id(db);
let syntax = db.parse(root_file_id).tree();
let extra_crate_attrs =
parse_extra_crate_attrs(db, krate).into_iter().flat_map(|src| src.attrs());
let mut cfg_options = None;
expand_cfg_attr(
syntax.attrs(),
extra_crate_attrs.chain(syntax.attrs()),
|| cfg_options.get_or_insert(krate.cfg_options(db)),
|attr, _, _, _| {
if let Meta::TokenTree { path, tt } = attr
@ -1231,8 +1268,11 @@ impl AttrFlags {
// We LRU this query because it is only used by IDE.
#[salsa::tracked(returns(ref), lru = 250)]
pub fn docs(db: &dyn DefDatabase, owner: AttrDefId) -> Option<Box<Docs>> {
let (source, outer_mod_decl, krate) = attrs_source(db, owner);
let (source, outer_mod_decl, _extra_crate_attrs, krate) = attrs_source(db, owner);
let inner_attrs_node = source.value.inner_attributes_node();
// Note: we don't have to pass down `_extra_crate_attrs` here, since `extract_docs`
// does not handle crate-level attributes related to docs.
// See: https://doc.rust-lang.org/rustdoc/write-documentation/the-doc-attribute.html#at-the-crate-level
extract_docs(&|| krate.cfg_options(db), source, outer_mod_decl, inner_attrs_node)
}
@ -1480,8 +1520,9 @@ mod tests {
use test_fixture::WithFixture;
use tt::{TextRange, TextSize};
use crate::attrs::IsInnerDoc;
use crate::{attrs::Docs, test_db::TestDB};
use crate::AttrDefId;
use crate::attrs::{AttrFlags, Docs, IsInnerDoc};
use crate::test_db::TestDB;
#[test]
fn docs() {
@ -1617,4 +1658,15 @@ mod tests {
Some((in_file(range(263, 265)), IsInnerDoc::Yes))
);
}
#[test]
fn crate_attrs() {
    // `crate-attr:` fixture metadata injects extra crate-level attributes;
    // they must surface in the crate root module's `AttrFlags`.
    let fixture = r#"
//- /lib.rs crate:foo crate-attr:no_std crate-attr:cfg(target_arch="x86")
"#;
    let (db, file_id) = TestDB::with_single_file(fixture);
    let module = db.module_for_file(file_id.file_id(&db));
    let attrs = AttrFlags::query(&db, AttrDefId::ModuleId(module));
    assert!(attrs.contains(AttrFlags::IS_NO_STD | AttrFlags::HAS_CFG));
}
}

View file

@ -44,6 +44,7 @@ use std::{
};
use ast::{AstNode, StructKind};
use cfg::CfgOptions;
use hir_expand::{
ExpandTo, HirFileId,
mod_path::{ModPath, PathKind},
@ -52,13 +53,17 @@ use hir_expand::{
use intern::Interned;
use la_arena::{Idx, RawIdx};
use rustc_hash::FxHashMap;
use span::{AstIdNode, Edition, FileAstId, SyntaxContext};
use span::{
AstIdNode, Edition, FileAstId, NO_DOWNMAP_ERASED_FILE_AST_ID_MARKER, Span, SpanAnchor,
SyntaxContext,
};
use stdx::never;
use syntax::{SyntaxKind, ast, match_ast};
use syntax::{SourceFile, SyntaxKind, ast, match_ast};
use thin_vec::ThinVec;
use triomphe::Arc;
use tt::TextRange;
use crate::{BlockId, Lookup, db::DefDatabase};
use crate::{BlockId, Lookup, attrs::parse_extra_crate_attrs, db::DefDatabase};
pub(crate) use crate::item_tree::{
attrs::*,
@ -88,6 +93,33 @@ impl fmt::Debug for RawVisibilityId {
}
}
/// Lowers the already-parsed extra crate attributes (`crate_attrs_as_src`,
/// as produced by `parse_extra_crate_attrs`) into an `AttrsOrCfg`.
///
/// The attribute text is synthetic — it has no backing node in any real file —
/// so spans are fabricated by `FakeSpanMap` rather than looked up in a real
/// span map.
fn lower_extra_crate_attrs<'a>(
    db: &dyn DefDatabase,
    crate_attrs_as_src: SourceFile,
    file_id: span::EditionedFileId,
    cfg_options: &dyn Fn() -> &'a CfgOptions,
) -> AttrsOrCfg {
    // Maps every text range to a span anchored at `file_id` with the
    // no-downmap marker ast id (there is no real AST node to downmap to) and
    // the root syntax context for the file's edition.
    #[derive(Copy, Clone)]
    struct FakeSpanMap {
        file_id: span::EditionedFileId,
    }
    impl syntax_bridge::SpanMapper<Span> for FakeSpanMap {
        fn span_for(&self, range: TextRange) -> Span {
            Span {
                range,
                anchor: SpanAnchor {
                    file_id: self.file_id,
                    ast_id: NO_DOWNMAP_ERASED_FILE_AST_ID_MARKER,
                },
                ctx: SyntaxContext::root(self.file_id.edition()),
            }
        }
    }
    let span_map = FakeSpanMap { file_id };
    AttrsOrCfg::lower(db, &crate_attrs_as_src, cfg_options, span_map)
}
#[salsa_macros::tracked(returns(deref))]
pub(crate) fn file_item_tree_query(db: &dyn DefDatabase, file_id: HirFileId) -> Arc<ItemTree> {
let _p = tracing::info_span!("file_item_tree_query", ?file_id).entered();
@ -98,7 +130,19 @@ pub(crate) fn file_item_tree_query(db: &dyn DefDatabase, file_id: HirFileId) ->
let mut item_tree = match_ast! {
match syntax {
ast::SourceFile(file) => {
let top_attrs = ctx.lower_attrs(&file);
let krate = file_id.krate(db);
let root_file_id = krate.root_file_id(db);
let extra_top_attrs = (file_id == root_file_id).then(|| {
parse_extra_crate_attrs(db, krate).map(|crate_attrs| {
let file_id = root_file_id.editioned_file_id(db);
lower_extra_crate_attrs(db, crate_attrs, file_id, &|| ctx.cfg_options())
})
}).flatten();
let top_attrs = match extra_top_attrs {
Some(attrs @ AttrsOrCfg::Enabled { .. }) => attrs.merge(ctx.lower_attrs(&file)),
Some(attrs @ AttrsOrCfg::CfgDisabled(_)) => attrs,
None => ctx.lower_attrs(&file)
};
let mut item_tree = ctx.lower_module_items(&file);
item_tree.top_attrs = top_attrs;
item_tree

View file

@ -16,9 +16,9 @@ use hir_expand::{
attrs::{Attr, AttrId, AttrInput, Meta, collect_item_tree_attrs},
mod_path::ModPath,
name::Name,
span_map::SpanMapRef,
};
use intern::{Interned, Symbol, sym};
use span::Span;
use syntax::{AstNode, T, ast};
use syntax_bridge::DocCommentDesugarMode;
use tt::token_to_literal;
@ -42,12 +42,15 @@ impl Default for AttrsOrCfg {
}
impl AttrsOrCfg {
pub(crate) fn lower<'a>(
pub(crate) fn lower<'a, S>(
db: &dyn DefDatabase,
owner: &dyn ast::HasAttrs,
cfg_options: &dyn Fn() -> &'a CfgOptions,
span_map: SpanMapRef<'_>,
) -> AttrsOrCfg {
span_map: S,
) -> AttrsOrCfg
where
S: syntax_bridge::SpanMapper<Span> + Copy,
{
let mut attrs = Vec::new();
let result =
collect_item_tree_attrs::<Infallible>(owner, cfg_options, |meta, container, _, _| {
@ -55,17 +58,17 @@ impl AttrsOrCfg {
// tracking.
let (span, path_range, input) = match meta {
Meta::NamedKeyValue { path_range, name: _, value } => {
let span = span_map.span_for_range(path_range);
let span = span_map.span_for(path_range);
let input = value.map(|value| {
Box::new(AttrInput::Literal(token_to_literal(
value.text(),
span_map.span_for_range(value.text_range()),
span_map.span_for(value.text_range()),
)))
});
(span, path_range, input)
}
Meta::TokenTree { path, tt } => {
let span = span_map.span_for_range(path.range);
let span = span_map.span_for(path.range);
let tt = syntax_bridge::syntax_node_to_token_tree(
tt.syntax(),
span_map,
@ -76,7 +79,7 @@ impl AttrsOrCfg {
(span, path.range, input)
}
Meta::Path { path } => {
let span = span_map.span_for_range(path.range);
let span = span_map.span_for(path.range);
(span, path.range, None)
}
};
@ -90,7 +93,7 @@ impl AttrsOrCfg {
.filter(|it| it.kind().is_any_identifier());
ModPath::from_tokens(
db,
&mut |range| span_map.span_for_range(range).ctx,
&mut |range| span_map.span_for(range).ctx,
is_abs,
segments,
)
@ -107,6 +110,44 @@ impl AttrsOrCfg {
None => AttrsOrCfg::Enabled { attrs },
}
}
/// Merges two `AttrsOrCfg`s, assuming `self` is placed before `other` in the
/// source code. The operation follows these rules:
///
/// - If `self` and `other` are both `AttrsOrCfg::Enabled`, the result is a new
///   `AttrsOrCfg::Enabled`. It contains the concatenation of `self`'s
///   attributes followed by `other`'s.
/// - If `self` is `AttrsOrCfg::Enabled` but `other` is
///   `AttrsOrCfg::CfgDisabled`, the result is a new `AttrsOrCfg::CfgDisabled`.
///   It contains the concatenation of `self`'s attributes followed by
///   `other`'s.
/// - If `self` is `AttrsOrCfg::CfgDisabled`, return `self` as-is.
///
/// The rationale is that attribute collection is sequential and
/// order-sensitive. This operation preserves those semantics when combining
/// attributes from two different sources. `AttrsOrCfg::CfgDisabled` marks a
/// point where collection stops due to a false `#![cfg(...)]` condition. It
/// acts as a "breakpoint": attributes beyond it are not collected. Therefore,
/// when merging, an `AttrsOrCfg::CfgDisabled` on the left-hand side
/// short-circuits the operation, while an `AttrsOrCfg::CfgDisabled` on the
/// right-hand side preserves all attributes collected up to that point.
///
/// Note that this operation is neither commutative nor associative.
pub(crate) fn merge(self, other: AttrsOrCfg) -> AttrsOrCfg {
    match (self, other) {
        (AttrsOrCfg::Enabled { attrs }, AttrsOrCfg::Enabled { attrs: other_attrs }) => {
            // Left attrs first, then right — source order is preserved.
            let mut v = attrs.0.into_vec();
            v.extend(other_attrs.0);
            AttrsOrCfg::Enabled { attrs: AttrsOwned(v.into_boxed_slice()) }
        }
        (AttrsOrCfg::Enabled { attrs }, AttrsOrCfg::CfgDisabled(mut other)) => {
            // Keep the right-hand side's disabled `cfg` payload, but prepend
            // the attributes collected before the breakpoint.
            let other_attrs = &mut other.1;
            let mut v = attrs.0.into_vec();
            v.extend(std::mem::take(&mut other_attrs.0));
            other_attrs.0 = v.into_boxed_slice();
            AttrsOrCfg::CfgDisabled(other)
        }
        // A disabled left-hand side short-circuits: nothing after it counts.
        (this @ AttrsOrCfg::CfgDisabled(_), _) => this,
    }
}
}
#[derive(Debug, PartialEq, Eq)]

View file

@ -86,9 +86,9 @@ impl Printer<'_> {
}
fn print_attrs(&mut self, attrs: &AttrsOrCfg, inner: bool, separated_by: &str) {
let AttrsOrCfg::Enabled { attrs } = attrs else {
w!(self, "#[cfg(false)]{separated_by}");
return;
let (cfg_disabled_expr, attrs) = match attrs {
AttrsOrCfg::Enabled { attrs } => (None, attrs),
AttrsOrCfg::CfgDisabled(inner_box) => (Some(&inner_box.0), &inner_box.1),
};
let inner = if inner { "!" } else { "" };
for attr in &*attrs.as_ref() {
@ -101,6 +101,9 @@ impl Printer<'_> {
separated_by,
);
}
if let Some(expr) = cfg_disabled_expr {
w!(self, "#{inner}[cfg({expr})]{separated_by}");
}
}
fn print_attrs_of(&mut self, of: ModItemId, separated_by: &str) {

View file

@ -244,3 +244,45 @@ pub(self) struct S;
"#]],
)
}
#[test]
fn crate_attrs_should_preserve_order() {
check(
r#"
//- /main.rs crate:foo crate-attr:no_std crate-attr:features(f16) crate-attr:crate_type="bin"
"#,
expect![[r##"
#![no_std]
#![features(f16)]
#![crate_type = "bin"]
"##]],
);
}
#[test]
fn crate_attrs_with_disabled_cfg_injected() {
check(
r#"
//- /main.rs crate:foo crate-attr:no_std crate-attr:cfg(false) crate-attr:features(f16,f128) crate-attr:crate_type="bin"
"#,
expect![[r#"
#![no_std]
#![cfg(false)]
"#]],
);
}
#[test]
fn crate_attrs_with_disabled_cfg_in_source() {
check(
r#"
//- /lib.rs crate:foo crate-attr:no_std
#![cfg(false)]
#![no_core]
"#,
expect![[r#"
#![no_std]
#![cfg(false)]
"#]],
);
}

View file

@ -154,16 +154,10 @@ pub(crate) fn crate_notable_traits(db: &dyn DefDatabase, krate: Crate) -> Option
if traits.is_empty() { None } else { Some(traits.into_iter().collect()) }
}
pub enum GenericRequirement {
None,
Minimum(usize),
Exact(usize),
}
macro_rules! language_item_table {
(
$LangItems:ident =>
$( $(#[$attr:meta])* $lang_item:ident, $module:ident :: $name:ident, $method:ident, $target:ident, $generics:expr; )*
$( $(#[$attr:meta])* $lang_item:ident, $module:ident :: $name:ident, $target:ident; )*
) => {
#[allow(non_snake_case)] // FIXME: Should we remove this?
#[derive(Debug, Default, Clone, PartialEq, Eq, Hash)]
@ -226,100 +220,101 @@ macro_rules! language_item_table {
}
language_item_table! { LangItems =>
// Variant name, Name, Getter method name, Target Generic requirements;
Sized, sym::sized, sized_trait, TraitId, GenericRequirement::Exact(0);
MetaSized, sym::meta_sized, sized_trait, TraitId, GenericRequirement::Exact(0);
PointeeSized, sym::pointee_sized, sized_trait, TraitId, GenericRequirement::Exact(0);
Unsize, sym::unsize, unsize_trait, TraitId, GenericRequirement::Minimum(1);
// Variant name, Name, Target;
Sized, sym::sized, TraitId;
MetaSized, sym::meta_sized, TraitId;
PointeeSized, sym::pointee_sized, TraitId;
Unsize, sym::unsize, TraitId;
/// Trait injected by `#[derive(PartialEq)]`, (i.e. "Partial EQ").
StructuralPeq, sym::structural_peq, structural_peq_trait, TraitId, GenericRequirement::None;
StructuralPeq, sym::structural_peq, TraitId;
/// Trait injected by `#[derive(Eq)]`, (i.e. "Total EQ"; no, I will not apologize).
StructuralTeq, sym::structural_teq, structural_teq_trait, TraitId, GenericRequirement::None;
Copy, sym::copy, copy_trait, TraitId, GenericRequirement::Exact(0);
Clone, sym::clone, clone_trait, TraitId, GenericRequirement::None;
Sync, sym::sync, sync_trait, TraitId, GenericRequirement::Exact(0);
DiscriminantKind, sym::discriminant_kind, discriminant_kind_trait, TraitId, GenericRequirement::None;
StructuralTeq, sym::structural_teq, TraitId;
Copy, sym::copy, TraitId;
Clone, sym::clone, TraitId;
TrivialClone, sym::trivial_clone, TraitId;
Sync, sym::sync, TraitId;
DiscriminantKind, sym::discriminant_kind, TraitId;
/// The associated item of the `DiscriminantKind` trait.
Discriminant, sym::discriminant_type, discriminant_type, TypeAliasId, GenericRequirement::None;
Discriminant, sym::discriminant_type, TypeAliasId;
PointeeTrait, sym::pointee_trait, pointee_trait, TraitId, GenericRequirement::None;
Metadata, sym::metadata_type, metadata_type, TypeAliasId, GenericRequirement::None;
DynMetadata, sym::dyn_metadata, dyn_metadata, StructId, GenericRequirement::None;
PointeeTrait, sym::pointee_trait, TraitId;
Metadata, sym::metadata_type, TypeAliasId;
DynMetadata, sym::dyn_metadata, StructId;
Freeze, sym::freeze, freeze_trait, TraitId, GenericRequirement::Exact(0);
Freeze, sym::freeze, TraitId;
FnPtrTrait, sym::fn_ptr_trait, fn_ptr_trait, TraitId, GenericRequirement::Exact(0);
FnPtrAddr, sym::fn_ptr_addr, fn_ptr_addr, FunctionId, GenericRequirement::None;
FnPtrTrait, sym::fn_ptr_trait, TraitId;
FnPtrAddr, sym::fn_ptr_addr, FunctionId;
Drop, sym::drop, drop_trait, TraitId, GenericRequirement::None;
Destruct, sym::destruct, destruct_trait, TraitId, GenericRequirement::None;
Drop, sym::drop, TraitId;
Destruct, sym::destruct, TraitId;
CoerceUnsized, sym::coerce_unsized, coerce_unsized_trait, TraitId, GenericRequirement::Minimum(1);
DispatchFromDyn, sym::dispatch_from_dyn, dispatch_from_dyn_trait, TraitId, GenericRequirement::Minimum(1);
CoerceUnsized, sym::coerce_unsized, TraitId;
DispatchFromDyn, sym::dispatch_from_dyn, TraitId;
// language items relating to transmutability
TransmuteOpts, sym::transmute_opts, transmute_opts, StructId, GenericRequirement::Exact(0);
TransmuteTrait, sym::transmute_trait, transmute_trait, TraitId, GenericRequirement::Exact(3);
TransmuteOpts, sym::transmute_opts, StructId;
TransmuteTrait, sym::transmute_trait, TraitId;
Add, sym::add, add_trait, TraitId, GenericRequirement::Exact(1);
Sub, sym::sub, sub_trait, TraitId, GenericRequirement::Exact(1);
Mul, sym::mul, mul_trait, TraitId, GenericRequirement::Exact(1);
Div, sym::div, div_trait, TraitId, GenericRequirement::Exact(1);
Rem, sym::rem, rem_trait, TraitId, GenericRequirement::Exact(1);
Neg, sym::neg, neg_trait, TraitId, GenericRequirement::Exact(0);
Not, sym::not, not_trait, TraitId, GenericRequirement::Exact(0);
BitXor, sym::bitxor, bitxor_trait, TraitId, GenericRequirement::Exact(1);
BitAnd, sym::bitand, bitand_trait, TraitId, GenericRequirement::Exact(1);
BitOr, sym::bitor, bitor_trait, TraitId, GenericRequirement::Exact(1);
Shl, sym::shl, shl_trait, TraitId, GenericRequirement::Exact(1);
Shr, sym::shr, shr_trait, TraitId, GenericRequirement::Exact(1);
AddAssign, sym::add_assign, add_assign_trait, TraitId, GenericRequirement::Exact(1);
SubAssign, sym::sub_assign, sub_assign_trait, TraitId, GenericRequirement::Exact(1);
MulAssign, sym::mul_assign, mul_assign_trait, TraitId, GenericRequirement::Exact(1);
DivAssign, sym::div_assign, div_assign_trait, TraitId, GenericRequirement::Exact(1);
RemAssign, sym::rem_assign, rem_assign_trait, TraitId, GenericRequirement::Exact(1);
BitXorAssign, sym::bitxor_assign, bitxor_assign_trait, TraitId, GenericRequirement::Exact(1);
BitAndAssign, sym::bitand_assign, bitand_assign_trait, TraitId, GenericRequirement::Exact(1);
BitOrAssign, sym::bitor_assign, bitor_assign_trait, TraitId, GenericRequirement::Exact(1);
ShlAssign, sym::shl_assign, shl_assign_trait, TraitId, GenericRequirement::Exact(1);
ShrAssign, sym::shr_assign, shr_assign_trait, TraitId, GenericRequirement::Exact(1);
Index, sym::index, index_trait, TraitId, GenericRequirement::Exact(1);
IndexMut, sym::index_mut, index_mut_trait, TraitId, GenericRequirement::Exact(1);
Add, sym::add, TraitId;
Sub, sym::sub, TraitId;
Mul, sym::mul, TraitId;
Div, sym::div, TraitId;
Rem, sym::rem, TraitId;
Neg, sym::neg, TraitId;
Not, sym::not, TraitId;
BitXor, sym::bitxor, TraitId;
BitAnd, sym::bitand, TraitId;
BitOr, sym::bitor, TraitId;
Shl, sym::shl, TraitId;
Shr, sym::shr, TraitId;
AddAssign, sym::add_assign, TraitId;
SubAssign, sym::sub_assign, TraitId;
MulAssign, sym::mul_assign, TraitId;
DivAssign, sym::div_assign, TraitId;
RemAssign, sym::rem_assign, TraitId;
BitXorAssign, sym::bitxor_assign, TraitId;
BitAndAssign, sym::bitand_assign, TraitId;
BitOrAssign, sym::bitor_assign, TraitId;
ShlAssign, sym::shl_assign, TraitId;
ShrAssign, sym::shr_assign, TraitId;
Index, sym::index, TraitId;
IndexMut, sym::index_mut, TraitId;
UnsafeCell, sym::unsafe_cell, unsafe_cell_type, StructId, GenericRequirement::None;
UnsafePinned, sym::unsafe_pinned, unsafe_pinned_type, StructId, GenericRequirement::None;
VaList, sym::va_list, va_list, StructId, GenericRequirement::None;
UnsafeCell, sym::unsafe_cell, StructId;
UnsafePinned, sym::unsafe_pinned, StructId;
VaList, sym::va_list, StructId;
Deref, sym::deref, deref_trait, TraitId, GenericRequirement::Exact(0);
DerefMut, sym::deref_mut, deref_mut_trait, TraitId, GenericRequirement::Exact(0);
DerefTarget, sym::deref_target, deref_target, TypeAliasId, GenericRequirement::None;
Receiver, sym::receiver, receiver_trait, TraitId, GenericRequirement::None;
ReceiverTarget, sym::receiver_target, receiver_target, TypeAliasId, GenericRequirement::None;
Deref, sym::deref, TraitId;
DerefMut, sym::deref_mut, TraitId;
DerefTarget, sym::deref_target, TypeAliasId;
Receiver, sym::receiver, TraitId;
ReceiverTarget, sym::receiver_target, TypeAliasId;
Fn, sym::fn_, fn_trait, TraitId, GenericRequirement::Exact(1);
FnMut, sym::fn_mut, fn_mut_trait, TraitId, GenericRequirement::Exact(1);
FnOnce, sym::fn_once, fn_once_trait, TraitId, GenericRequirement::Exact(1);
AsyncFn, sym::async_fn, async_fn_trait, TraitId, GenericRequirement::Exact(1);
AsyncFnMut, sym::async_fn_mut, async_fn_mut_trait, TraitId, GenericRequirement::Exact(1);
AsyncFnOnce, sym::async_fn_once, async_fn_once_trait, TraitId, GenericRequirement::Exact(1);
Fn, sym::fn_, TraitId;
FnMut, sym::fn_mut, TraitId;
FnOnce, sym::fn_once, TraitId;
AsyncFn, sym::async_fn, TraitId;
AsyncFnMut, sym::async_fn_mut, TraitId;
AsyncFnOnce, sym::async_fn_once, TraitId;
CallRefFuture, sym::call_ref_future, call_ref_future_ty, TypeAliasId, GenericRequirement::None;
CallOnceFuture, sym::call_once_future, call_once_future_ty, TypeAliasId, GenericRequirement::None;
AsyncFnOnceOutput, sym::async_fn_once_output, async_fn_once_output_ty, TypeAliasId, GenericRequirement::None;
CallRefFuture, sym::call_ref_future, TypeAliasId;
CallOnceFuture, sym::call_once_future, TypeAliasId;
AsyncFnOnceOutput, sym::async_fn_once_output, TypeAliasId;
FnOnceOutput, sym::fn_once_output, fn_once_output, TypeAliasId, GenericRequirement::None;
FnOnceOutput, sym::fn_once_output, TypeAliasId;
Future, sym::future_trait, future_trait, TraitId, GenericRequirement::Exact(0);
CoroutineState, sym::coroutine_state, coroutine_state, EnumId, GenericRequirement::None;
Coroutine, sym::coroutine, coroutine_trait, TraitId, GenericRequirement::Minimum(1);
CoroutineReturn, sym::coroutine_return, coroutine_return_ty, TypeAliasId, GenericRequirement::None;
CoroutineYield, sym::coroutine_yield, coroutine_yield_ty, TypeAliasId, GenericRequirement::None;
Unpin, sym::unpin, unpin_trait, TraitId, GenericRequirement::None;
Pin, sym::pin, pin_type, StructId, GenericRequirement::None;
Future, sym::future_trait, TraitId;
CoroutineState, sym::coroutine_state, EnumId;
Coroutine, sym::coroutine, TraitId;
CoroutineReturn, sym::coroutine_return, TypeAliasId;
CoroutineYield, sym::coroutine_yield, TypeAliasId;
Unpin, sym::unpin, TraitId;
Pin, sym::pin, StructId;
PartialEq, sym::eq, eq_trait, TraitId, GenericRequirement::Exact(1);
PartialOrd, sym::partial_ord, partial_ord_trait, TraitId, GenericRequirement::Exact(1);
CVoid, sym::c_void, c_void, EnumId, GenericRequirement::None;
PartialEq, sym::eq, TraitId;
PartialOrd, sym::partial_ord, TraitId;
CVoid, sym::c_void, EnumId;
// A number of panic-related lang items. The `panic` item corresponds to divide-by-zero and
// various panic cases with `match`. The `panic_bounds_check` item is for indexing arrays.
@ -328,107 +323,107 @@ language_item_table! { LangItems =>
// in the sense that a crate is not required to have it defined to use it, but a final product
// is required to define it somewhere. Additionally, there are restrictions on crates that use
// a weak lang item, but do not have it defined.
Panic, sym::panic, panic_fn, FunctionId, GenericRequirement::Exact(0);
PanicNounwind, sym::panic_nounwind, panic_nounwind, FunctionId, GenericRequirement::Exact(0);
PanicFmt, sym::panic_fmt, panic_fmt, FunctionId, GenericRequirement::None;
PanicDisplay, sym::panic_display, panic_display, FunctionId, GenericRequirement::None;
ConstPanicFmt, sym::const_panic_fmt, const_panic_fmt, FunctionId, GenericRequirement::None;
PanicBoundsCheck, sym::panic_bounds_check, panic_bounds_check_fn, FunctionId, GenericRequirement::Exact(0);
PanicMisalignedPointerDereference, sym::panic_misaligned_pointer_dereference, panic_misaligned_pointer_dereference_fn, FunctionId, GenericRequirement::Exact(0);
PanicInfo, sym::panic_info, panic_info, StructId, GenericRequirement::None;
PanicLocation, sym::panic_location, panic_location, StructId, GenericRequirement::None;
PanicImpl, sym::panic_impl, panic_impl, FunctionId, GenericRequirement::None;
PanicCannotUnwind, sym::panic_cannot_unwind, panic_cannot_unwind, FunctionId, GenericRequirement::Exact(0);
PanicNullPointerDereference, sym::panic_null_pointer_dereference, panic_null_pointer_dereference, FunctionId, GenericRequirement::None;
Panic, sym::panic, FunctionId;
PanicNounwind, sym::panic_nounwind, FunctionId;
PanicFmt, sym::panic_fmt, FunctionId;
PanicDisplay, sym::panic_display, FunctionId;
ConstPanicFmt, sym::const_panic_fmt, FunctionId;
PanicBoundsCheck, sym::panic_bounds_check, FunctionId;
PanicMisalignedPointerDereference, sym::panic_misaligned_pointer_dereference, FunctionId;
PanicInfo, sym::panic_info, StructId;
PanicLocation, sym::panic_location, StructId;
PanicImpl, sym::panic_impl, FunctionId;
PanicCannotUnwind, sym::panic_cannot_unwind, FunctionId;
PanicNullPointerDereference, sym::panic_null_pointer_dereference, FunctionId;
/// libstd panic entry point. Necessary for const eval to be able to catch it
BeginPanic, sym::begin_panic, begin_panic_fn, FunctionId, GenericRequirement::None;
BeginPanic, sym::begin_panic, FunctionId;
// Lang items needed for `format_args!()`.
FormatAlignment, sym::format_alignment, format_alignment, EnumId, GenericRequirement::None;
FormatArgument, sym::format_argument, format_argument, StructId, GenericRequirement::None;
FormatArguments, sym::format_arguments, format_arguments, StructId, GenericRequirement::None;
FormatCount, sym::format_count, format_count, EnumId, GenericRequirement::None;
FormatPlaceholder, sym::format_placeholder, format_placeholder, StructId, GenericRequirement::None;
FormatUnsafeArg, sym::format_unsafe_arg, format_unsafe_arg, StructId, GenericRequirement::None;
FormatAlignment, sym::format_alignment, EnumId;
FormatArgument, sym::format_argument, StructId;
FormatArguments, sym::format_arguments, StructId;
FormatCount, sym::format_count, EnumId;
FormatPlaceholder, sym::format_placeholder, StructId;
FormatUnsafeArg, sym::format_unsafe_arg, StructId;
ExchangeMalloc, sym::exchange_malloc, exchange_malloc_fn, FunctionId, GenericRequirement::None;
BoxFree, sym::box_free, box_free_fn, FunctionId, GenericRequirement::Minimum(1);
DropInPlace, sym::drop_in_place, drop_in_place_fn, FunctionId, GenericRequirement::Minimum(1);
AllocLayout, sym::alloc_layout, alloc_layout, StructId, GenericRequirement::None;
ExchangeMalloc, sym::exchange_malloc, FunctionId;
BoxFree, sym::box_free, FunctionId;
DropInPlace, sym::drop_in_place, FunctionId;
AllocLayout, sym::alloc_layout, StructId;
Start, sym::start, start_fn, FunctionId, GenericRequirement::Exact(1);
Start, sym::start, FunctionId;
EhPersonality, sym::eh_personality, eh_personality, FunctionId, GenericRequirement::None;
EhCatchTypeinfo, sym::eh_catch_typeinfo, eh_catch_typeinfo, StaticId, GenericRequirement::None;
EhPersonality, sym::eh_personality, FunctionId;
EhCatchTypeinfo, sym::eh_catch_typeinfo, StaticId;
OwnedBox, sym::owned_box, owned_box, StructId, GenericRequirement::Minimum(1);
OwnedBox, sym::owned_box, StructId;
PhantomData, sym::phantom_data, phantom_data, StructId, GenericRequirement::Exact(1);
PhantomData, sym::phantom_data, StructId;
ManuallyDrop, sym::manually_drop, manually_drop, StructId, GenericRequirement::None;
ManuallyDrop, sym::manually_drop, StructId;
MaybeUninit, sym::maybe_uninit, maybe_uninit, UnionId, GenericRequirement::None;
MaybeUninit, sym::maybe_uninit, UnionId;
/// Align offset for stride != 1; must not panic.
AlignOffset, sym::align_offset, align_offset_fn, FunctionId, GenericRequirement::None;
AlignOffset, sym::align_offset, FunctionId;
Termination, sym::termination, termination, TraitId, GenericRequirement::None;
Termination, sym::termination, TraitId;
Try, sym::Try, try_trait, TraitId, GenericRequirement::None;
Try, sym::Try, TraitId;
Tuple, sym::tuple_trait, tuple_trait, TraitId, GenericRequirement::Exact(0);
Tuple, sym::tuple_trait, TraitId;
SliceLen, sym::slice_len_fn, slice_len_fn, FunctionId, GenericRequirement::None;
SliceLen, sym::slice_len_fn, FunctionId;
// Language items from AST lowering
TryTraitFromResidual, sym::from_residual, from_residual_fn, FunctionId, GenericRequirement::None;
TryTraitFromOutput, sym::from_output, from_output_fn, FunctionId, GenericRequirement::None;
TryTraitBranch, sym::branch, branch_fn, FunctionId, GenericRequirement::None;
TryTraitFromYeet, sym::from_yeet, from_yeet_fn, FunctionId, GenericRequirement::None;
TryTraitFromResidual, sym::from_residual, FunctionId;
TryTraitFromOutput, sym::from_output, FunctionId;
TryTraitBranch, sym::branch, FunctionId;
TryTraitFromYeet, sym::from_yeet, FunctionId;
PointerLike, sym::pointer_like, pointer_like, TraitId, GenericRequirement::Exact(0);
PointerLike, sym::pointer_like, TraitId;
ConstParamTy, sym::const_param_ty, const_param_ty_trait, TraitId, GenericRequirement::Exact(0);
ConstParamTy, sym::const_param_ty, TraitId;
Poll, sym::Poll, poll, EnumId, GenericRequirement::None;
PollReady, sym::Ready, poll_ready_variant, EnumVariantId, GenericRequirement::None;
PollPending, sym::Pending, poll_pending_variant, EnumVariantId, GenericRequirement::None;
Poll, sym::Poll, EnumId;
PollReady, sym::Ready, EnumVariantId;
PollPending, sym::Pending, EnumVariantId;
// FIXME(swatinem): the following lang items are used for async lowering and
// should become obsolete eventually.
ResumeTy, sym::ResumeTy, resume_ty, StructId, GenericRequirement::None;
GetContext, sym::get_context, get_context_fn, FunctionId, GenericRequirement::None;
ResumeTy, sym::ResumeTy, StructId;
GetContext, sym::get_context, FunctionId;
Context, sym::Context, context, StructId, GenericRequirement::None;
FuturePoll, sym::poll, future_poll_fn, FunctionId, GenericRequirement::None;
FutureOutput, sym::future_output, future_output, TypeAliasId, GenericRequirement::None;
Context, sym::Context, StructId;
FuturePoll, sym::poll, FunctionId;
FutureOutput, sym::future_output, TypeAliasId;
Option, sym::Option, option_type, EnumId, GenericRequirement::None;
OptionSome, sym::Some, option_some_variant, EnumVariantId, GenericRequirement::None;
OptionNone, sym::None, option_none_variant, EnumVariantId, GenericRequirement::None;
Option, sym::Option, EnumId;
OptionSome, sym::Some, EnumVariantId;
OptionNone, sym::None, EnumVariantId;
ResultOk, sym::Ok, result_ok_variant, EnumVariantId, GenericRequirement::None;
ResultErr, sym::Err, result_err_variant, EnumVariantId, GenericRequirement::None;
ResultOk, sym::Ok, EnumVariantId;
ResultErr, sym::Err, EnumVariantId;
ControlFlowContinue, sym::Continue, cf_continue_variant, EnumVariantId, GenericRequirement::None;
ControlFlowBreak, sym::Break, cf_break_variant, EnumVariantId, GenericRequirement::None;
ControlFlowContinue, sym::Continue, EnumVariantId;
ControlFlowBreak, sym::Break, EnumVariantId;
IntoFutureIntoFuture, sym::into_future, into_future_fn, FunctionId, GenericRequirement::None;
IntoIterIntoIter, sym::into_iter, into_iter_fn, FunctionId, GenericRequirement::None;
IteratorNext, sym::next, next_fn, FunctionId, GenericRequirement::None;
Iterator, sym::iterator, iterator, TraitId, GenericRequirement::None;
IntoFutureIntoFuture, sym::into_future, FunctionId;
IntoIterIntoIter, sym::into_iter, FunctionId;
IteratorNext, sym::next, FunctionId;
Iterator, sym::iterator, TraitId;
PinNewUnchecked, sym::new_unchecked, new_unchecked_fn, FunctionId, GenericRequirement::None;
PinNewUnchecked, sym::new_unchecked, FunctionId;
RangeFrom, sym::RangeFrom, range_from_struct, StructId, GenericRequirement::None;
RangeFull, sym::RangeFull, range_full_struct, StructId, GenericRequirement::None;
RangeInclusiveStruct, sym::RangeInclusive, range_inclusive_struct, StructId, GenericRequirement::None;
RangeInclusiveNew, sym::range_inclusive_new, range_inclusive_new_method, FunctionId, GenericRequirement::None;
Range, sym::Range, range_struct, StructId, GenericRequirement::None;
RangeToInclusive, sym::RangeToInclusive, range_to_inclusive_struct, StructId, GenericRequirement::None;
RangeTo, sym::RangeTo, range_to_struct, StructId, GenericRequirement::None;
RangeFrom, sym::RangeFrom, StructId;
RangeFull, sym::RangeFull, StructId;
RangeInclusiveStruct, sym::RangeInclusive, StructId;
RangeInclusiveNew, sym::range_inclusive_new, FunctionId;
Range, sym::Range, StructId;
RangeToInclusive, sym::RangeToInclusive, StructId;
RangeTo, sym::RangeTo, StructId;
String, sym::String, string, StructId, GenericRequirement::None;
CStr, sym::CStr, c_str, StructId, GenericRequirement::None;
Ordering, sym::Ordering, ordering, EnumId, GenericRequirement::None;
String, sym::String, StructId;
CStr, sym::CStr, StructId;
Ordering, sym::Ordering, EnumId;
}

View file

@ -108,6 +108,42 @@ fn main() {
);
}
#[test]
fn ty_fragment_followed_by_expr() {
check(
r#"
macro_rules! a {
($t:tt) => {};
}
macro_rules! b {
($t:ty) => {
a!($t);
};
}
fn main() {
b!(&'static str);
}
"#,
expect![[r#"
macro_rules! a {
($t:tt) => {};
}
macro_rules! b {
($t:ty) => {
a!($t);
};
}
fn main() {
a!(&'static str);;
}
"#]],
);
}
#[test]
fn test_winapi_struct() {
// from https://github.com/retep998/winapi-rs/blob/a7ef2bca086aae76cf6c4ce4c2552988ed9798ad/src/macros.rs#L366

View file

@ -19,7 +19,7 @@ use std::{any::TypeId, iter, ops::Range, sync};
use base_db::RootQueryDb;
use expect_test::Expect;
use hir_expand::{
AstId, InFile, MacroCallId, MacroCallKind, MacroKind,
AstId, ExpansionInfo, InFile, MacroCallId, MacroCallKind, MacroKind,
builtin::quote::quote,
db::ExpandDatabase,
proc_macro::{ProcMacro, ProcMacroExpander, ProcMacroExpansionError, ProcMacroKind},
@ -27,7 +27,10 @@ use hir_expand::{
};
use intern::{Symbol, sym};
use itertools::Itertools;
use span::{Edition, ROOT_ERASED_FILE_AST_ID, Span, SpanAnchor, SyntaxContext};
use span::{
Edition, NO_DOWNMAP_ERASED_FILE_AST_ID_MARKER, ROOT_ERASED_FILE_AST_ID, Span, SpanAnchor,
SyntaxContext,
};
use stdx::{format_to, format_to_acc};
use syntax::{
AstNode, AstPtr,
@ -97,37 +100,6 @@ pub fn identity_when_valid(_attr: TokenStream, item: TokenStream) -> TokenStream
},
)];
fn resolve(
db: &dyn DefDatabase,
def_map: &DefMap,
ast_id: AstId<ast::MacroCall>,
ast_ptr: InFile<AstPtr<ast::MacroCall>>,
) -> Option<MacroCallId> {
def_map.modules().find_map(|module| {
for decl in
module.1.scope.declarations().chain(module.1.scope.unnamed_consts().map(Into::into))
{
let body = match decl {
ModuleDefId::FunctionId(it) => it.into(),
ModuleDefId::ConstId(it) => it.into(),
ModuleDefId::StaticId(it) => it.into(),
_ => continue,
};
let (body, sm) = db.body_with_source_map(body);
if let Some(it) =
body.blocks(db).find_map(|block| resolve(db, block.1, ast_id, ast_ptr))
{
return Some(it);
}
if let Some((_, res)) = sm.macro_calls().find(|it| it.0 == ast_ptr) {
return Some(res);
}
}
module.1.scope.macro_invoc(ast_id)
})
}
let db = TestDB::with_files_extra_proc_macros(ra_fixture, extra_proc_macros);
let krate = db.fetch_test_crate();
let def_map = crate_def_map(&db, krate);
@ -144,7 +116,7 @@ pub fn identity_when_valid(_attr: TokenStream, item: TokenStream) -> TokenStream
let ast_id = db.ast_id_map(source.file_id).ast_id(&macro_call_node);
let ast_id = InFile::new(source.file_id, ast_id);
let ptr = InFile::new(source.file_id, AstPtr::new(&macro_call_node));
let macro_call_id = resolve(&db, def_map, ast_id, ptr)
let macro_call_id = resolve_macro_call_id(&db, def_map, ast_id, ptr)
.unwrap_or_else(|| panic!("unable to find semantic macro call {macro_call_node}"));
let expansion_result = db.parse_macro_expansion(macro_call_id);
expansions.push((macro_call_node.clone(), expansion_result));
@ -278,6 +250,38 @@ pub fn identity_when_valid(_attr: TokenStream, item: TokenStream) -> TokenStream
expect.assert_eq(&expanded_text);
}
fn resolve_macro_call_id(
db: &dyn DefDatabase,
def_map: &DefMap,
ast_id: AstId<ast::MacroCall>,
ast_ptr: InFile<AstPtr<ast::MacroCall>>,
) -> Option<MacroCallId> {
def_map.modules().find_map(|module| {
for decl in
module.1.scope.declarations().chain(module.1.scope.unnamed_consts().map(Into::into))
{
let body = match decl {
ModuleDefId::FunctionId(it) => it.into(),
ModuleDefId::ConstId(it) => it.into(),
ModuleDefId::StaticId(it) => it.into(),
_ => continue,
};
let (body, sm) = db.body_with_source_map(body);
if let Some(it) = body
.blocks(db)
.find_map(|block| resolve_macro_call_id(db, block.1, ast_id, ast_ptr))
{
return Some(it);
}
if let Some((_, res)) = sm.macro_calls().find(|it| it.0 == ast_ptr) {
return Some(res);
}
}
module.1.scope.macro_invoc(ast_id)
})
}
fn reindent(indent: IndentLevel, pp: String) -> String {
if !pp.contains('\n') {
return pp;
@ -430,3 +434,47 @@ fn regression_20171() {
Edition::CURRENT
});
}
#[test]
fn no_downmap() {
let fixture = r#"
macro_rules! m {
($func_name:ident) => {
fn $func_name() { todo!() }
};
}
m!(f);
m!(g);
"#;
let (db, file_id) = TestDB::with_single_file(fixture);
let krate = file_id.krate(&db);
let def_map = crate_def_map(&db, krate);
let source = def_map[def_map.root].definition_source(&db);
let source_file = match source.value {
ModuleSource::SourceFile(it) => it,
ModuleSource::Module(_) | ModuleSource::BlockExpr(_) => panic!(),
};
let no_downmap_spans: Vec<_> = source_file
.syntax()
.descendants()
.map(|node| {
let mut span = db.real_span_map(file_id).span_for_range(node.text_range());
span.anchor.ast_id = NO_DOWNMAP_ERASED_FILE_AST_ID_MARKER;
span
})
.collect();
for macro_call_node in source_file.syntax().descendants().filter_map(ast::MacroCall::cast) {
let ast_id = db.ast_id_map(source.file_id).ast_id(&macro_call_node);
let ast_id = InFile::new(source.file_id, ast_id);
let ptr = InFile::new(source.file_id, AstPtr::new(&macro_call_node));
let macro_call_id = resolve_macro_call_id(&db, def_map, ast_id, ptr)
.unwrap_or_else(|| panic!("unable to find semantic macro call {macro_call_node}"));
let expansion_info = ExpansionInfo::new(&db, macro_call_id);
for &span in no_downmap_spans.iter() {
assert!(expansion_info.map_range_down(span).is_none());
assert!(expansion_info.map_range_down_exact(span).is_none());
}
}
}

View file

@ -63,7 +63,7 @@ impl DefMap {
return Ok(ResolvedAttr::Other);
}
}
None => return Err(UnresolvedMacro { path: ast_id.path.as_ref().clone() }),
None => return Err(UnresolvedMacro { path: (*ast_id.path).clone() }),
};
Ok(ResolvedAttr::Macro(attr_macro_as_call_id(
@ -145,7 +145,7 @@ pub(super) fn derive_macro_as_call_id(
) -> Result<(MacroId, MacroDefId, MacroCallId), UnresolvedMacro> {
let (macro_id, def_id) = resolver(&item_attr.path)
.filter(|(_, def_id)| def_id.is_derive())
.ok_or_else(|| UnresolvedMacro { path: item_attr.path.as_ref().clone() })?;
.ok_or_else(|| UnresolvedMacro { path: (*item_attr.path).clone() })?;
let call_id = def_id.make_call(
db,
krate,

View file

@ -1675,7 +1675,7 @@ impl<'db> DefCollector<'db> {
derive_index: *derive_pos as u32,
derive_macro_id: *derive_macro_id,
},
ast_id.path.as_ref().clone(),
(*ast_id.path).clone(),
));
}
// These are diagnosed by `reseed_with_unresolved_attribute`, as that function consumes them
@ -2608,4 +2608,17 @@ foo!(KABOOM);
"#,
);
}
#[test]
fn crate_attrs() {
let fixture = r#"
//- /lib.rs crate:foo crate-attr:recursion_limit="4" crate-attr:no_core crate-attr:no_std crate-attr:feature(register_tool)
"#;
let (db, file_id) = TestDB::with_single_file(fixture);
let def_map = crate_def_map(&db, file_id.krate(&db));
assert_eq!(def_map.recursion_limit(), 4);
assert!(def_map.is_no_core());
assert!(def_map.is_no_std());
assert!(def_map.is_unstable_feature_enabled(&sym::register_tool));
}
}

View file

@ -76,6 +76,7 @@ pub const BAZ: u32 = 0;
None,
Env::default(),
CrateOrigin::Local { repo: None, name: Some(Symbol::intern(crate_name)) },
Vec::new(),
false,
Arc::new(
// FIXME: This is less than ideal
@ -117,6 +118,7 @@ pub const BAZ: u32 = 0;
expect![[r#"
[
"crate_local_def_map",
"file_item_tree_query",
"crate_local_def_map",
]
"#]],

View file

@ -37,7 +37,9 @@ use std::{hash::Hash, ops};
use base_db::Crate;
use either::Either;
use span::{Edition, ErasedFileAstId, FileAstId, Span, SyntaxContext};
use span::{
Edition, ErasedFileAstId, FileAstId, NO_DOWNMAP_ERASED_FILE_AST_ID_MARKER, Span, SyntaxContext,
};
use syntax::{
SyntaxNode, SyntaxToken, TextRange, TextSize,
ast::{self, AstNode},
@ -854,6 +856,10 @@ impl ExpansionInfo {
&self,
span: Span,
) -> Option<InMacroFile<impl Iterator<Item = (SyntaxToken, SyntaxContext)> + '_>> {
if span.anchor.ast_id == NO_DOWNMAP_ERASED_FILE_AST_ID_MARKER {
return None;
}
let tokens = self.exp_map.ranges_with_span_exact(span).flat_map(move |(range, ctx)| {
self.expanded.value.covering_element(range).into_token().zip(Some(ctx))
});
@ -869,6 +875,10 @@ impl ExpansionInfo {
&self,
span: Span,
) -> Option<InMacroFile<impl Iterator<Item = (SyntaxToken, SyntaxContext)> + '_>> {
if span.anchor.ast_id == NO_DOWNMAP_ERASED_FILE_AST_ID_MARKER {
return None;
}
let tokens = self.exp_map.ranges_with_span(span).flat_map(move |(range, ctx)| {
self.expanded.value.covering_element(range).into_token().zip(Some(ctx))
});

View file

@ -197,6 +197,10 @@ impl Name {
pub fn symbol(&self) -> &Symbol {
&self.symbol
}
pub fn is_generated(&self) -> bool {
self.as_str().starts_with("<ra@gennew>")
}
}
struct Display<'a> {

View file

@ -18,6 +18,8 @@ itertools.workspace = true
arrayvec.workspace = true
smallvec.workspace = true
ena = "0.14.3"
serde.workspace = true
serde_derive.workspace = true
either.workspace = true
oorandom = "11.1.5"
tracing = { workspace = true, features = ["attributes"] }

View file

@ -23,8 +23,9 @@ use crate::{
mir::{MirEvalError, MirLowerError},
next_solver::{
Const, ConstBytes, ConstKind, DbInterner, ErrorGuaranteed, GenericArg, GenericArgs,
ParamEnv, Ty, ValueConst,
ParamEnv, StoredConst, StoredGenericArgs, Ty, ValueConst,
},
traits::StoredParamEnvAndCrate,
};
use super::mir::{interpret_mir, lower_to_mir, pad16};
@ -38,12 +39,12 @@ pub fn unknown_const_as_generic<'db>(ty: Ty<'db>) -> GenericArg<'db> {
}
#[derive(Debug, Clone, PartialEq, Eq)]
pub enum ConstEvalError<'db> {
MirLowerError(MirLowerError<'db>),
MirEvalError(MirEvalError<'db>),
pub enum ConstEvalError {
MirLowerError(MirLowerError),
MirEvalError(MirEvalError),
}
impl ConstEvalError<'_> {
impl ConstEvalError {
pub fn pretty_print(
&self,
f: &mut String,
@ -62,8 +63,8 @@ impl ConstEvalError<'_> {
}
}
impl<'db> From<MirLowerError<'db>> for ConstEvalError<'db> {
fn from(value: MirLowerError<'db>) -> Self {
impl From<MirLowerError> for ConstEvalError {
fn from(value: MirLowerError) -> Self {
match value {
MirLowerError::ConstEvalError(_, e) => *e,
_ => ConstEvalError::MirLowerError(value),
@ -71,8 +72,8 @@ impl<'db> From<MirLowerError<'db>> for ConstEvalError<'db> {
}
}
impl<'db> From<MirEvalError<'db>> for ConstEvalError<'db> {
fn from(value: MirEvalError<'db>) -> Self {
impl From<MirEvalError> for ConstEvalError {
fn from(value: MirEvalError) -> Self {
ConstEvalError::MirEvalError(value)
}
}
@ -85,7 +86,8 @@ pub fn intern_const_ref<'a>(
krate: Crate,
) -> Const<'a> {
let interner = DbInterner::new_no_crate(db);
let layout = db.layout_of_ty(ty, ParamEnvAndCrate { param_env: ParamEnv::empty(), krate });
let layout = db
.layout_of_ty(ty.store(), ParamEnvAndCrate { param_env: ParamEnv::empty(), krate }.store());
let kind = match value {
LiteralConstRef::Int(i) => {
// FIXME: We should handle failure of layout better.
@ -180,10 +182,10 @@ pub fn try_const_isize<'db>(db: &'db dyn HirDatabase, c: &Const<'db>) -> Option<
}
}
pub(crate) fn const_eval_discriminant_variant<'db>(
db: &'db dyn HirDatabase,
pub(crate) fn const_eval_discriminant_variant(
db: &dyn HirDatabase,
variant_id: EnumVariantId,
) -> Result<i128, ConstEvalError<'db>> {
) -> Result<i128, ConstEvalError> {
let interner = DbInterner::new_no_crate(db);
let def = variant_id.into();
let body = db.body(def);
@ -206,8 +208,9 @@ pub(crate) fn const_eval_discriminant_variant<'db>(
let mir_body = db.monomorphized_mir_body(
def,
GenericArgs::new_from_iter(interner, []),
ParamEnvAndCrate { param_env: db.trait_environment_for_body(def), krate: def.krate(db) },
GenericArgs::empty(interner).store(),
ParamEnvAndCrate { param_env: db.trait_environment_for_body(def), krate: def.krate(db) }
.store(),
)?;
let c = interpret_mir(db, mir_body, false, None)?.0?;
let c = if is_signed {
@ -233,7 +236,7 @@ pub(crate) fn eval_to_const<'db>(expr: ExprId, ctx: &mut InferenceContext<'_, 'd
}
if has_closure(ctx.body, expr) {
// Type checking closures needs an isolated body (See the above FIXME). Bail out early to prevent panic.
return unknown_const(infer[expr]);
return Const::error(ctx.interner());
}
if let Expr::Path(p) = &ctx.body[expr] {
let mut ctx = TyLoweringContext::new(
@ -252,60 +255,89 @@ pub(crate) fn eval_to_const<'db>(expr: ExprId, ctx: &mut InferenceContext<'_, 'd
{
return result;
}
unknown_const(infer[expr])
Const::error(ctx.interner())
}
pub(crate) fn const_eval_cycle_result<'db>(
_: &'db dyn HirDatabase,
_: ConstId,
_: GenericArgs<'db>,
_: Option<ParamEnvAndCrate<'db>>,
) -> Result<Const<'db>, ConstEvalError<'db>> {
Err(ConstEvalError::MirLowerError(MirLowerError::Loop))
}
pub(crate) fn const_eval_static_cycle_result<'db>(
_: &'db dyn HirDatabase,
_: StaticId,
) -> Result<Const<'db>, ConstEvalError<'db>> {
Err(ConstEvalError::MirLowerError(MirLowerError::Loop))
}
pub(crate) fn const_eval_discriminant_cycle_result<'db>(
_: &'db dyn HirDatabase,
pub(crate) fn const_eval_discriminant_cycle_result(
_: &dyn HirDatabase,
_: salsa::Id,
_: EnumVariantId,
) -> Result<i128, ConstEvalError<'db>> {
) -> Result<i128, ConstEvalError> {
Err(ConstEvalError::MirLowerError(MirLowerError::Loop))
}
pub(crate) fn const_eval_query<'db>(
pub(crate) fn const_eval<'db>(
db: &'db dyn HirDatabase,
def: ConstId,
subst: GenericArgs<'db>,
trait_env: Option<ParamEnvAndCrate<'db>>,
) -> Result<Const<'db>, ConstEvalError<'db>> {
let body = db.monomorphized_mir_body(
def.into(),
subst,
ParamEnvAndCrate { param_env: db.trait_environment(def.into()), krate: def.krate(db) },
)?;
let c = interpret_mir(db, body, false, trait_env)?.0?;
Ok(c)
) -> Result<Const<'db>, ConstEvalError> {
return match const_eval_query(db, def, subst.store(), trait_env.map(|env| env.store())) {
Ok(konst) => Ok(konst.as_ref()),
Err(err) => Err(err.clone()),
};
#[salsa::tracked(returns(ref), cycle_result = const_eval_cycle_result)]
pub(crate) fn const_eval_query<'db>(
db: &'db dyn HirDatabase,
def: ConstId,
subst: StoredGenericArgs,
trait_env: Option<StoredParamEnvAndCrate>,
) -> Result<StoredConst, ConstEvalError> {
let body = db.monomorphized_mir_body(
def.into(),
subst,
ParamEnvAndCrate { param_env: db.trait_environment(def.into()), krate: def.krate(db) }
.store(),
)?;
let c = interpret_mir(db, body, false, trait_env.as_ref().map(|env| env.as_ref()))?.0?;
Ok(c.store())
}
pub(crate) fn const_eval_cycle_result(
_: &dyn HirDatabase,
_: salsa::Id,
_: ConstId,
_: StoredGenericArgs,
_: Option<StoredParamEnvAndCrate>,
) -> Result<StoredConst, ConstEvalError> {
Err(ConstEvalError::MirLowerError(MirLowerError::Loop))
}
}
pub(crate) fn const_eval_static_query<'db>(
pub(crate) fn const_eval_static<'db>(
db: &'db dyn HirDatabase,
def: StaticId,
) -> Result<Const<'db>, ConstEvalError<'db>> {
let interner = DbInterner::new_no_crate(db);
let body = db.monomorphized_mir_body(
def.into(),
GenericArgs::new_from_iter(interner, []),
ParamEnvAndCrate {
param_env: db.trait_environment_for_body(def.into()),
krate: def.krate(db),
},
)?;
let c = interpret_mir(db, body, false, None)?.0?;
Ok(c)
) -> Result<Const<'db>, ConstEvalError> {
return match const_eval_static_query(db, def) {
Ok(konst) => Ok(konst.as_ref()),
Err(err) => Err(err.clone()),
};
#[salsa::tracked(returns(ref), cycle_result = const_eval_static_cycle_result)]
pub(crate) fn const_eval_static_query<'db>(
db: &'db dyn HirDatabase,
def: StaticId,
) -> Result<StoredConst, ConstEvalError> {
let interner = DbInterner::new_no_crate(db);
let body = db.monomorphized_mir_body(
def.into(),
GenericArgs::empty(interner).store(),
ParamEnvAndCrate {
param_env: db.trait_environment_for_body(def.into()),
krate: def.krate(db),
}
.store(),
)?;
let c = interpret_mir(db, body, false, None)?.0?;
Ok(c.store())
}
pub(crate) fn const_eval_static_cycle_result(
_: &dyn HirDatabase,
_: salsa::Id,
_: StaticId,
) -> Result<StoredConst, ConstEvalError> {
Err(ConstEvalError::MirLowerError(MirLowerError::Loop))
}
}

View file

@ -27,7 +27,7 @@ use super::{
mod intrinsics;
fn simplify(e: ConstEvalError<'_>) -> ConstEvalError<'_> {
fn simplify(e: ConstEvalError) -> ConstEvalError {
match e {
ConstEvalError::MirEvalError(MirEvalError::InFunction(e, _)) => {
simplify(ConstEvalError::MirEvalError(*e))
@ -39,7 +39,7 @@ fn simplify(e: ConstEvalError<'_>) -> ConstEvalError<'_> {
#[track_caller]
fn check_fail(
#[rust_analyzer::rust_fixture] ra_fixture: &str,
error: impl FnOnce(ConstEvalError<'_>) -> bool,
error: impl FnOnce(ConstEvalError) -> bool,
) {
let (db, file_id) = TestDB::with_single_file(ra_fixture);
crate::attach_db(&db, || match eval_goal(&db, file_id) {
@ -104,7 +104,7 @@ fn check_answer(
});
}
fn pretty_print_err(e: ConstEvalError<'_>, db: &TestDB) -> String {
fn pretty_print_err(e: ConstEvalError, db: &TestDB) -> String {
let mut err = String::new();
let span_formatter = |file, range| format!("{file:?} {range:?}");
let display_target =
@ -121,7 +121,7 @@ fn pretty_print_err(e: ConstEvalError<'_>, db: &TestDB) -> String {
err
}
fn eval_goal(db: &TestDB, file_id: EditionedFileId) -> Result<Const<'_>, ConstEvalError<'_>> {
fn eval_goal(db: &TestDB, file_id: EditionedFileId) -> Result<Const<'_>, ConstEvalError> {
let _tracing = setup_tracing();
let interner = DbInterner::new_no_crate(db);
let module_id = db.module_for_file(file_id.file_id(db));
@ -142,7 +142,7 @@ fn eval_goal(db: &TestDB, file_id: EditionedFileId) -> Result<Const<'_>, ConstEv
_ => None,
})
.expect("No const named GOAL found in the test");
db.const_eval(const_id, GenericArgs::new_from_iter(interner, []), None)
db.const_eval(const_id, GenericArgs::empty(interner), None)
}
#[test]

View file

@ -19,9 +19,10 @@ use crate::{
lower::{Diagnostics, GenericDefaults},
mir::{BorrowckResult, MirBody, MirLowerError},
next_solver::{
Const, EarlyBinder, GenericArgs, ParamEnv, PolyFnSig, TraitRef, Ty, VariancesOf,
Const, EarlyBinder, GenericArgs, ParamEnv, PolyFnSig, StoredEarlyBinder, StoredGenericArgs,
StoredTy, TraitRef, Ty, VariancesOf,
},
traits::ParamEnvAndCrate,
traits::{ParamEnvAndCrate, StoredParamEnvAndCrate},
};
#[query_group::query_group]
@ -32,60 +33,48 @@ pub trait HirDatabase: DefDatabase + std::fmt::Debug {
// and `monomorphized_mir_body_for_closure` into `monomorphized_mir_body`
#[salsa::invoke(crate::mir::mir_body_query)]
#[salsa::cycle(cycle_result = crate::mir::mir_body_cycle_result)]
fn mir_body<'db>(
&'db self,
def: DefWithBodyId,
) -> Result<Arc<MirBody<'db>>, MirLowerError<'db>>;
fn mir_body(&self, def: DefWithBodyId) -> Result<Arc<MirBody>, MirLowerError>;
#[salsa::invoke(crate::mir::mir_body_for_closure_query)]
fn mir_body_for_closure<'db>(
&'db self,
def: InternedClosureId,
) -> Result<Arc<MirBody<'db>>, MirLowerError<'db>>;
fn mir_body_for_closure(&self, def: InternedClosureId) -> Result<Arc<MirBody>, MirLowerError>;
#[salsa::invoke(crate::mir::monomorphized_mir_body_query)]
#[salsa::cycle(cycle_result = crate::mir::monomorphized_mir_body_cycle_result)]
fn monomorphized_mir_body<'db>(
&'db self,
fn monomorphized_mir_body(
&self,
def: DefWithBodyId,
subst: GenericArgs<'db>,
env: ParamEnvAndCrate<'db>,
) -> Result<Arc<MirBody<'db>>, MirLowerError<'db>>;
subst: StoredGenericArgs,
env: StoredParamEnvAndCrate,
) -> Result<Arc<MirBody>, MirLowerError>;
#[salsa::invoke(crate::mir::monomorphized_mir_body_for_closure_query)]
fn monomorphized_mir_body_for_closure<'db>(
&'db self,
fn monomorphized_mir_body_for_closure(
&self,
def: InternedClosureId,
subst: GenericArgs<'db>,
env: ParamEnvAndCrate<'db>,
) -> Result<Arc<MirBody<'db>>, MirLowerError<'db>>;
subst: StoredGenericArgs,
env: StoredParamEnvAndCrate,
) -> Result<Arc<MirBody>, MirLowerError>;
#[salsa::invoke(crate::mir::borrowck_query)]
#[salsa::lru(2024)]
fn borrowck<'db>(
&'db self,
def: DefWithBodyId,
) -> Result<Arc<[BorrowckResult<'db>]>, MirLowerError<'db>>;
fn borrowck(&self, def: DefWithBodyId) -> Result<Arc<[BorrowckResult]>, MirLowerError>;
#[salsa::invoke(crate::consteval::const_eval_query)]
#[salsa::cycle(cycle_result = crate::consteval::const_eval_cycle_result)]
#[salsa::invoke(crate::consteval::const_eval)]
#[salsa::transparent]
fn const_eval<'db>(
&'db self,
def: ConstId,
subst: GenericArgs<'db>,
trait_env: Option<ParamEnvAndCrate<'db>>,
) -> Result<Const<'db>, ConstEvalError<'db>>;
) -> Result<Const<'db>, ConstEvalError>;
#[salsa::invoke(crate::consteval::const_eval_static_query)]
#[salsa::cycle(cycle_result = crate::consteval::const_eval_static_cycle_result)]
fn const_eval_static<'db>(&'db self, def: StaticId) -> Result<Const<'db>, ConstEvalError<'db>>;
#[salsa::invoke(crate::consteval::const_eval_static)]
#[salsa::transparent]
fn const_eval_static<'db>(&'db self, def: StaticId) -> Result<Const<'db>, ConstEvalError>;
#[salsa::invoke(crate::consteval::const_eval_discriminant_variant)]
#[salsa::cycle(cycle_result = crate::consteval::const_eval_discriminant_cycle_result)]
fn const_eval_discriminant<'db>(
&'db self,
def: EnumVariantId,
) -> Result<i128, ConstEvalError<'db>>;
fn const_eval_discriminant(&self, def: EnumVariantId) -> Result<i128, ConstEvalError>;
#[salsa::invoke(crate::method_resolution::lookup_impl_method_query)]
#[salsa::transparent]
@ -100,19 +89,19 @@ pub trait HirDatabase: DefDatabase + std::fmt::Debug {
#[salsa::invoke(crate::layout::layout_of_adt_query)]
#[salsa::cycle(cycle_result = crate::layout::layout_of_adt_cycle_result)]
fn layout_of_adt<'db>(
&'db self,
fn layout_of_adt(
&self,
def: AdtId,
args: GenericArgs<'db>,
trait_env: ParamEnvAndCrate<'db>,
args: StoredGenericArgs,
trait_env: StoredParamEnvAndCrate,
) -> Result<Arc<Layout>, LayoutError>;
#[salsa::invoke(crate::layout::layout_of_ty_query)]
#[salsa::cycle(cycle_result = crate::layout::layout_of_ty_cycle_result)]
fn layout_of_ty<'db>(
&'db self,
ty: Ty<'db>,
env: ParamEnvAndCrate<'db>,
fn layout_of_ty(
&self,
ty: StoredTy,
env: StoredParamEnvAndCrate,
) -> Result<Arc<Layout>, LayoutError>;
#[salsa::invoke(crate::layout::target_data_layout_query)]
@ -125,8 +114,8 @@ pub trait HirDatabase: DefDatabase + std::fmt::Debug {
#[salsa::transparent]
fn ty<'db>(&'db self, def: TyDefId) -> EarlyBinder<'db, Ty<'db>>;
#[salsa::invoke(crate::lower::type_for_type_alias_with_diagnostics_query)]
#[salsa::cycle(cycle_result = crate::lower::type_for_type_alias_with_diagnostics_cycle_result)]
#[salsa::invoke(crate::lower::type_for_type_alias_with_diagnostics)]
#[salsa::transparent]
fn type_for_type_alias_with_diagnostics<'db>(
&'db self,
def: TypeAliasId,
@ -134,11 +123,12 @@ pub trait HirDatabase: DefDatabase + std::fmt::Debug {
/// Returns the type of the value of the given constant, or `None` if the `ValueTyDefId` is
/// a `StructId` or `EnumVariantId` with a record constructor.
#[salsa::invoke(crate::lower::value_ty_query)]
#[salsa::invoke(crate::lower::value_ty)]
#[salsa::transparent]
fn value_ty<'db>(&'db self, def: ValueTyDefId) -> Option<EarlyBinder<'db, Ty<'db>>>;
#[salsa::invoke(crate::lower::impl_self_ty_with_diagnostics_query)]
#[salsa::cycle(cycle_result = crate::lower::impl_self_ty_with_diagnostics_cycle_result)]
#[salsa::invoke(crate::lower::impl_self_ty_with_diagnostics)]
#[salsa::transparent]
fn impl_self_ty_with_diagnostics<'db>(
&'db self,
def: ImplId,
@ -148,9 +138,8 @@ pub trait HirDatabase: DefDatabase + std::fmt::Debug {
#[salsa::transparent]
fn impl_self_ty<'db>(&'db self, def: ImplId) -> EarlyBinder<'db, Ty<'db>>;
// FIXME: Make this a non-interned query.
#[salsa::invoke_interned(crate::lower::const_param_ty_with_diagnostics_query)]
#[salsa::cycle(cycle_result = crate::lower::const_param_ty_with_diagnostics_cycle_result)]
#[salsa::invoke(crate::lower::const_param_ty_with_diagnostics)]
#[salsa::transparent]
fn const_param_ty_with_diagnostics<'db>(&'db self, def: ConstParamId)
-> (Ty<'db>, Diagnostics);
@ -158,7 +147,8 @@ pub trait HirDatabase: DefDatabase + std::fmt::Debug {
#[salsa::transparent]
fn const_param_ty_ns<'db>(&'db self, def: ConstParamId) -> Ty<'db>;
#[salsa::invoke(crate::lower::impl_trait_with_diagnostics_query)]
#[salsa::invoke(crate::lower::impl_trait_with_diagnostics)]
#[salsa::transparent]
fn impl_trait_with_diagnostics<'db>(
&'db self,
def: ImplId,
@ -169,19 +159,18 @@ pub trait HirDatabase: DefDatabase + std::fmt::Debug {
fn impl_trait<'db>(&'db self, def: ImplId) -> Option<EarlyBinder<'db, TraitRef<'db>>>;
#[salsa::invoke(crate::lower::field_types_with_diagnostics_query)]
fn field_types_with_diagnostics<'db>(
&'db self,
#[salsa::transparent]
fn field_types_with_diagnostics(
&self,
var: VariantId,
) -> (Arc<ArenaMap<LocalFieldId, EarlyBinder<'db, Ty<'db>>>>, Diagnostics);
) -> &(ArenaMap<LocalFieldId, StoredEarlyBinder<StoredTy>>, Diagnostics);
#[salsa::invoke(crate::lower::field_types_query)]
#[salsa::transparent]
fn field_types<'db>(
&'db self,
var: VariantId,
) -> Arc<ArenaMap<LocalFieldId, EarlyBinder<'db, Ty<'db>>>>;
fn field_types(&self, var: VariantId) -> &ArenaMap<LocalFieldId, StoredEarlyBinder<StoredTy>>;
#[salsa::invoke(crate::lower::callable_item_signature_query)]
#[salsa::invoke(crate::lower::callable_item_signature)]
#[salsa::transparent]
fn callable_item_signature<'db>(
&'db self,
def: CallableDefId,
@ -191,26 +180,27 @@ pub trait HirDatabase: DefDatabase + std::fmt::Debug {
#[salsa::transparent]
fn trait_environment_for_body<'db>(&'db self, def: DefWithBodyId) -> ParamEnv<'db>;
#[salsa::invoke(crate::lower::trait_environment_query)]
#[salsa::invoke(crate::lower::trait_environment)]
#[salsa::transparent]
fn trait_environment<'db>(&'db self, def: GenericDefId) -> ParamEnv<'db>;
#[salsa::invoke(crate::lower::generic_defaults_with_diagnostics_query)]
#[salsa::cycle(cycle_result = crate::lower::generic_defaults_with_diagnostics_cycle_result)]
fn generic_defaults_with_diagnostics<'db>(
&'db self,
fn generic_defaults_with_diagnostics(
&self,
def: GenericDefId,
) -> (GenericDefaults<'db>, Diagnostics);
) -> (GenericDefaults, Diagnostics);
/// This returns an empty list if no parameter has default.
///
/// The binders of the returned defaults are only up to (not including) this parameter.
#[salsa::invoke(crate::lower::generic_defaults_query)]
#[salsa::transparent]
fn generic_defaults<'db>(&'db self, def: GenericDefId) -> GenericDefaults<'db>;
fn generic_defaults(&self, def: GenericDefId) -> GenericDefaults;
// Interned IDs for solver integration
#[salsa::interned]
fn intern_impl_trait_id(&self, id: ImplTraitId<'_>) -> InternedOpaqueTyId;
fn intern_impl_trait_id(&self, id: ImplTraitId) -> InternedOpaqueTyId;
#[salsa::interned]
fn intern_closure(&self, id: InternedClosure) -> InternedClosureId;
@ -219,11 +209,7 @@ pub trait HirDatabase: DefDatabase + std::fmt::Debug {
fn intern_coroutine(&self, id: InternedCoroutine) -> InternedCoroutineId;
#[salsa::invoke(crate::variance::variances_of)]
#[salsa::cycle(
// cycle_fn = crate::variance::variances_of_cycle_fn,
// cycle_initial = crate::variance::variances_of_cycle_initial,
cycle_result = crate::variance::variances_of_cycle_initial,
)]
#[salsa::transparent]
fn variances_of<'db>(&'db self, def: GenericDefId) -> VariancesOf<'db>;
}
@ -248,7 +234,7 @@ pub struct InternedConstParamId {
#[salsa_macros::interned(no_lifetime, debug, revisions = usize::MAX)]
#[derive(PartialOrd, Ord)]
pub struct InternedOpaqueTyId {
pub loc: ImplTraitId<'db>,
pub loc: ImplTraitId,
}
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, PartialOrd, Ord)]

View file

@ -99,7 +99,7 @@ impl BodyValidationDiagnostic {
struct ExprValidator<'db> {
owner: DefWithBodyId,
body: Arc<Body>,
infer: &'db InferenceResult<'db>,
infer: &'db InferenceResult,
env: ParamEnv<'db>,
diagnostics: Vec<BodyValidationDiagnostic>,
validate_lints: bool,
@ -313,7 +313,7 @@ impl<'db> ExprValidator<'db> {
);
value_or_partial.is_none_or(|v| !matches!(v, ValueNs::StaticId(_)))
}
Expr::Field { expr, .. } => match self.infer.type_of_expr[*expr].kind() {
Expr::Field { expr, .. } => match self.infer.expr_ty(*expr).kind() {
TyKind::Adt(adt, ..) if matches!(adt.def_id().0, AdtId::UnionId(_)) => false,
_ => self.is_known_valid_scrutinee(*expr),
},
@ -554,7 +554,7 @@ impl<'db> FilterMapNextChecker<'db> {
pub fn record_literal_missing_fields(
db: &dyn HirDatabase,
infer: &InferenceResult<'_>,
infer: &InferenceResult,
id: ExprId,
expr: &Expr,
) -> Option<(VariantId, Vec<LocalFieldId>, /*exhaustive*/ bool)> {
@ -584,7 +584,7 @@ pub fn record_literal_missing_fields(
pub fn record_pattern_missing_fields(
db: &dyn HirDatabase,
infer: &InferenceResult<'_>,
infer: &InferenceResult,
id: PatId,
pat: &Pat,
) -> Option<(VariantId, Vec<LocalFieldId>, /*exhaustive*/ bool)> {
@ -612,8 +612,8 @@ pub fn record_pattern_missing_fields(
Some((variant_def, missed_fields, exhaustive))
}
fn types_of_subpatterns_do_match(pat: PatId, body: &Body, infer: &InferenceResult<'_>) -> bool {
fn walk(pat: PatId, body: &Body, infer: &InferenceResult<'_>, has_type_mismatches: &mut bool) {
fn types_of_subpatterns_do_match(pat: PatId, body: &Body, infer: &InferenceResult) -> bool {
fn walk(pat: PatId, body: &Body, infer: &InferenceResult, has_type_mismatches: &mut bool) {
match infer.type_mismatch_for_pat(pat) {
Some(_) => *has_type_mismatches = true,
None if *has_type_mismatches => (),

View file

@ -16,7 +16,7 @@ use hir_def::{
item_tree::FieldsShape,
};
use hir_expand::name::Name;
use rustc_type_ir::inherent::{IntoKind, SliceLike};
use rustc_type_ir::inherent::IntoKind;
use span::Edition;
use stdx::{always, never, variance::PhantomCovariantLifetime};
@ -96,7 +96,7 @@ pub(crate) enum PatKind<'db> {
pub(crate) struct PatCtxt<'a, 'db> {
db: &'db dyn HirDatabase,
infer: &'a InferenceResult<'db>,
infer: &'db InferenceResult,
body: &'a Body,
pub(crate) errors: Vec<PatternError>,
}
@ -104,7 +104,7 @@ pub(crate) struct PatCtxt<'a, 'db> {
impl<'a, 'db> PatCtxt<'a, 'db> {
pub(crate) fn new(
db: &'db dyn HirDatabase,
infer: &'a InferenceResult<'db>,
infer: &'db InferenceResult,
body: &'a Body,
) -> Self {
Self { db, infer, body, errors: Vec::new() }
@ -119,12 +119,15 @@ impl<'a, 'db> PatCtxt<'a, 'db> {
let unadjusted_pat = self.lower_pattern_unadjusted(pat);
self.infer.pat_adjustments.get(&pat).map(|it| &**it).unwrap_or_default().iter().rev().fold(
unadjusted_pat,
|subpattern, ref_ty| Pat { ty: *ref_ty, kind: Box::new(PatKind::Deref { subpattern }) },
|subpattern, ref_ty| Pat {
ty: ref_ty.as_ref(),
kind: Box::new(PatKind::Deref { subpattern }),
},
)
}
fn lower_pattern_unadjusted(&mut self, pat: PatId) -> Pat<'db> {
let mut ty = self.infer[pat];
let mut ty = self.infer.pat_ty(pat);
let variant = self.infer.variant_resolution_for_pat(pat);
let kind = match self.body[pat] {
@ -151,7 +154,7 @@ impl<'a, 'db> PatCtxt<'a, 'db> {
hir_def::hir::Pat::Bind { id, subpat, .. } => {
let bm = self.infer.binding_modes[pat];
ty = self.infer[id];
ty = self.infer.binding_ty(id);
let name = &self.body[id].name;
match (bm, ty.kind()) {
(BindingMode::Ref(_), TyKind::Ref(_, rty, _)) => ty = rty,
@ -273,7 +276,7 @@ impl<'a, 'db> PatCtxt<'a, 'db> {
}
fn lower_path(&mut self, pat: PatId, _path: &Path) -> Pat<'db> {
let ty = self.infer[pat];
let ty = self.infer.pat_ty(pat);
let pat_from_kind = |kind| Pat { ty, kind: Box::new(kind) };

View file

@ -11,7 +11,7 @@ use rustc_pattern_analysis::{
constructor::{Constructor, ConstructorSet, VariantVisibility},
usefulness::{PlaceValidity, UsefulnessReport, compute_match_usefulness},
};
use rustc_type_ir::inherent::{AdtDef, IntoKind, SliceLike};
use rustc_type_ir::inherent::{AdtDef, IntoKind};
use smallvec::{SmallVec, smallvec};
use stdx::never;
@ -150,7 +150,7 @@ impl<'a, 'db> MatchCheckCtx<'a, 'db> {
let fields_len = variant.fields(self.db).fields().len() as u32;
(0..fields_len).map(|idx| LocalFieldId::from_raw(idx.into())).map(move |fid| {
let ty = field_tys[fid].instantiate(self.infcx.interner, substs);
let ty = field_tys[fid].get().instantiate(self.infcx.interner, substs);
let ty = self
.infcx
.at(&ObligationCause::dummy(), self.env)

View file

@ -97,9 +97,9 @@ enum UnsafeDiagnostic {
DeprecatedSafe2024 { node: ExprId, inside_unsafe_block: InsideUnsafeBlock },
}
pub fn unsafe_operations_for_body<'db>(
db: &'db dyn HirDatabase,
infer: &InferenceResult<'db>,
pub fn unsafe_operations_for_body(
db: &dyn HirDatabase,
infer: &InferenceResult,
def: DefWithBodyId,
body: &Body,
callback: &mut dyn FnMut(ExprOrPatId),
@ -116,9 +116,9 @@ pub fn unsafe_operations_for_body<'db>(
}
}
pub fn unsafe_operations<'db>(
db: &'db dyn HirDatabase,
infer: &InferenceResult<'db>,
pub fn unsafe_operations(
db: &dyn HirDatabase,
infer: &InferenceResult,
def: DefWithBodyId,
body: &Body,
current: ExprId,
@ -136,7 +136,7 @@ pub fn unsafe_operations<'db>(
struct UnsafeVisitor<'db> {
db: &'db dyn HirDatabase,
infer: &'db InferenceResult<'db>,
infer: &'db InferenceResult,
body: &'db Body,
resolver: Resolver<'db>,
def: DefWithBodyId,
@ -155,7 +155,7 @@ struct UnsafeVisitor<'db> {
impl<'db> UnsafeVisitor<'db> {
fn new(
db: &'db dyn HirDatabase,
infer: &'db InferenceResult<'db>,
infer: &'db InferenceResult,
body: &'db Body,
def: DefWithBodyId,
unsafe_expr_cb: &'db mut dyn FnMut(UnsafeDiagnostic),
@ -260,7 +260,7 @@ impl<'db> UnsafeVisitor<'db> {
match pat {
Pat::Record { .. } => {
if let Some((AdtId::UnionId(_), _)) = self.infer[current].as_adt() {
if let Some((AdtId::UnionId(_), _)) = self.infer.pat_ty(current).as_adt() {
let old_inside_union_destructure =
mem::replace(&mut self.inside_union_destructure, true);
self.body.walk_pats_shallow(current, |pat| self.walk_pat(pat));
@ -286,7 +286,7 @@ impl<'db> UnsafeVisitor<'db> {
let inside_assignment = mem::replace(&mut self.inside_assignment, false);
match expr {
&Expr::Call { callee, .. } => {
let callee = self.infer[callee];
let callee = self.infer.expr_ty(callee);
if let TyKind::FnDef(CallableIdWrapper(CallableDefId::FunctionId(func)), _) =
callee.kind()
{
@ -341,7 +341,7 @@ impl<'db> UnsafeVisitor<'db> {
}
}
Expr::UnaryOp { expr, op: UnaryOp::Deref } => {
if let TyKind::RawPtr(..) = self.infer[*expr].kind() {
if let TyKind::RawPtr(..) = self.infer.expr_ty(*expr).kind() {
self.on_unsafe_op(current.into(), UnsafetyReason::RawPtrDeref);
}
}

View file

@ -38,7 +38,7 @@ use rustc_hash::FxHashSet;
use rustc_type_ir::{
AliasTyKind, BoundVarIndexKind, CoroutineArgsParts, CoroutineClosureArgsParts, RegionKind,
Upcast,
inherent::{AdtDef, GenericArgs as _, IntoKind, SliceLike, Term as _, Ty as _, Tys as _},
inherent::{AdtDef, GenericArgs as _, IntoKind, Term as _, Ty as _, Tys as _},
};
use smallvec::SmallVec;
use span::Edition;
@ -52,9 +52,9 @@ use crate::{
lower::GenericPredicates,
mir::pad16,
next_solver::{
AliasTy, Clause, ClauseKind, Const, ConstKind, DbInterner, EarlyBinder,
ExistentialPredicate, FnSig, GenericArg, GenericArgs, ParamEnv, PolyFnSig, Region,
SolverDefId, Term, TraitRef, Ty, TyKind, TypingMode,
AliasTy, Clause, ClauseKind, Const, ConstKind, DbInterner, ExistentialPredicate, FnSig,
GenericArg, GenericArgKind, GenericArgs, ParamEnv, PolyFnSig, Region, SolverDefId,
StoredEarlyBinder, StoredTy, Term, TermKind, TraitRef, Ty, TyKind, TypingMode,
abi::Safety,
infer::{DbInternerInferExt, traits::ObligationCause},
},
@ -602,7 +602,7 @@ impl<'db, T: HirDisplay<'db>> HirDisplay<'db> for &T {
impl<'db, T: HirDisplay<'db> + Internable> HirDisplay<'db> for Interned<T> {
fn hir_fmt(&self, f: &mut HirFormatter<'_, 'db>) -> Result {
HirDisplay::hir_fmt(self.as_ref(), f)
HirDisplay::hir_fmt(&**self, f)
}
}
@ -664,10 +664,10 @@ fn write_projection<'db>(f: &mut HirFormatter<'_, 'db>, alias: &AliasTy<'db>) ->
impl<'db> HirDisplay<'db> for GenericArg<'db> {
fn hir_fmt(&self, f: &mut HirFormatter<'_, 'db>) -> Result {
match self {
GenericArg::Ty(ty) => ty.hir_fmt(f),
GenericArg::Lifetime(lt) => lt.hir_fmt(f),
GenericArg::Const(c) => c.hir_fmt(f),
match self.kind() {
GenericArgKind::Type(ty) => ty.hir_fmt(f),
GenericArgKind::Lifetime(lt) => lt.hir_fmt(f),
GenericArgKind::Const(c) => c.hir_fmt(f),
}
}
}
@ -790,7 +790,7 @@ fn render_const_scalar_inner<'db>(
TyKind::Slice(ty) => {
let addr = usize::from_le_bytes(b[0..b.len() / 2].try_into().unwrap());
let count = usize::from_le_bytes(b[b.len() / 2..].try_into().unwrap());
let Ok(layout) = f.db.layout_of_ty(ty, param_env) else {
let Ok(layout) = f.db.layout_of_ty(ty.store(), param_env.store()) else {
return f.write_str("<layout-error>");
};
let size_one = layout.size.bytes_usize();
@ -824,7 +824,7 @@ fn render_const_scalar_inner<'db>(
let Ok(t) = memory_map.vtable_ty(ty_id) else {
return f.write_str("<ty-missing-in-vtable-map>");
};
let Ok(layout) = f.db.layout_of_ty(t, param_env) else {
let Ok(layout) = f.db.layout_of_ty(t.store(), param_env.store()) else {
return f.write_str("<layout-error>");
};
let size = layout.size.bytes_usize();
@ -854,7 +854,7 @@ fn render_const_scalar_inner<'db>(
return f.write_str("<layout-error>");
}
});
let Ok(layout) = f.db.layout_of_ty(t, param_env) else {
let Ok(layout) = f.db.layout_of_ty(t.store(), param_env.store()) else {
return f.write_str("<layout-error>");
};
let size = layout.size.bytes_usize();
@ -866,7 +866,7 @@ fn render_const_scalar_inner<'db>(
}
},
TyKind::Tuple(tys) => {
let Ok(layout) = f.db.layout_of_ty(ty, param_env) else {
let Ok(layout) = f.db.layout_of_ty(ty.store(), param_env.store()) else {
return f.write_str("<layout-error>");
};
f.write_str("(")?;
@ -878,7 +878,7 @@ fn render_const_scalar_inner<'db>(
f.write_str(", ")?;
}
let offset = layout.fields.offset(id).bytes_usize();
let Ok(layout) = f.db.layout_of_ty(ty, param_env) else {
let Ok(layout) = f.db.layout_of_ty(ty.store(), param_env.store()) else {
f.write_str("<layout-error>")?;
continue;
};
@ -889,7 +889,7 @@ fn render_const_scalar_inner<'db>(
}
TyKind::Adt(def, args) => {
let def = def.def_id().0;
let Ok(layout) = f.db.layout_of_adt(def, args, param_env) else {
let Ok(layout) = f.db.layout_of_adt(def, args.store(), param_env.store()) else {
return f.write_str("<layout-error>");
};
match def {
@ -900,7 +900,7 @@ fn render_const_scalar_inner<'db>(
render_variant_after_name(
s.fields(f.db),
f,
&field_types,
field_types,
f.db.trait_environment(def.into()),
&layout,
args,
@ -932,7 +932,7 @@ fn render_const_scalar_inner<'db>(
render_variant_after_name(
var_id.fields(f.db),
f,
&field_types,
field_types,
f.db.trait_environment(def.into()),
var_layout,
args,
@ -952,7 +952,7 @@ fn render_const_scalar_inner<'db>(
let Some(len) = consteval::try_const_usize(f.db, len) else {
return f.write_str("<unknown-array-len>");
};
let Ok(layout) = f.db.layout_of_ty(ty, param_env) else {
let Ok(layout) = f.db.layout_of_ty(ty.store(), param_env.store()) else {
return f.write_str("<layout-error>");
};
let size_one = layout.size.bytes_usize();
@ -992,7 +992,7 @@ fn render_const_scalar_inner<'db>(
fn render_variant_after_name<'db>(
data: &VariantFields,
f: &mut HirFormatter<'_, 'db>,
field_types: &ArenaMap<LocalFieldId, EarlyBinder<'db, Ty<'db>>>,
field_types: &'db ArenaMap<LocalFieldId, StoredEarlyBinder<StoredTy>>,
param_env: ParamEnv<'db>,
layout: &Layout,
args: GenericArgs<'db>,
@ -1004,8 +1004,8 @@ fn render_variant_after_name<'db>(
FieldsShape::Record | FieldsShape::Tuple => {
let render_field = |f: &mut HirFormatter<'_, 'db>, id: LocalFieldId| {
let offset = layout.fields.offset(u32::from(id.into_raw()) as usize).bytes_usize();
let ty = field_types[id].instantiate(f.interner, args);
let Ok(layout) = f.db.layout_of_ty(ty, param_env) else {
let ty = field_types[id].get().instantiate(f.interner, args);
let Ok(layout) = f.db.layout_of_ty(ty.store(), param_env.store()) else {
return f.write_str("<layout-error>");
};
let size = layout.size.bytes_usize();
@ -1223,7 +1223,7 @@ impl<'db> HirDisplay<'db> for Ty<'db> {
};
f.end_location_link();
if args.len() > 0 {
if !args.is_empty() {
let generic_def_id = GenericDefId::from_callable(db, def);
let generics = generics(db, generic_def_id);
let (parent_len, self_param, type_, const_, impl_, lifetime) =
@ -1459,7 +1459,7 @@ impl<'db> HirDisplay<'db> for Ty<'db> {
};
let coroutine_sig = coroutine_sig.skip_binder();
let coroutine_inputs = coroutine_sig.inputs();
let TyKind::Tuple(coroutine_inputs) = coroutine_inputs.as_slice()[1].kind() else {
let TyKind::Tuple(coroutine_inputs) = coroutine_inputs[1].kind() else {
unreachable!("invalid coroutine closure signature");
};
let TyKind::Tuple(coroutine_output) = coroutine_sig.output().kind() else {
@ -1787,9 +1787,9 @@ impl<'db> HirDisplay<'db> for PolyFnSig<'db> {
impl<'db> HirDisplay<'db> for Term<'db> {
fn hir_fmt(&self, f: &mut HirFormatter<'_, 'db>) -> Result {
match self {
Term::Ty(it) => it.hir_fmt(f),
Term::Const(it) => it.hir_fmt(f),
match self.kind() {
TermKind::Ty(it) => it.hir_fmt(f),
TermKind::Const(it) => it.hir_fmt(f),
}
}
}
@ -1942,7 +1942,7 @@ fn write_bounds_like_dyn_trait<'db>(
let own_args = projection.projection_term.own_args(f.interner);
if !own_args.is_empty() {
write!(f, "<")?;
hir_fmt_generic_arguments(f, own_args.as_slice(), None)?;
hir_fmt_generic_arguments(f, own_args, None)?;
write!(f, ">")?;
}
write!(f, " = ")?;

View file

@ -2,7 +2,7 @@
use hir_def::{AdtId, signatures::StructFlags};
use rustc_hash::FxHashSet;
use rustc_type_ir::inherent::{AdtDef, IntoKind, SliceLike};
use rustc_type_ir::inherent::{AdtDef, IntoKind};
use stdx::never;
use crate::{
@ -85,7 +85,7 @@ fn has_drop_glue_impl<'db>(
.map(|(_, field_ty)| {
has_drop_glue_impl(
infcx,
field_ty.instantiate(infcx.interner, subst),
field_ty.get().instantiate(infcx.interner, subst),
env,
visited,
)
@ -105,7 +105,7 @@ fn has_drop_glue_impl<'db>(
.map(|(_, field_ty)| {
has_drop_glue_impl(
infcx,
field_ty.instantiate(infcx.interner, subst),
field_ty.get().instantiate(infcx.interner, subst),
env,
visited,
)

View file

@ -10,8 +10,7 @@ use hir_def::{
use rustc_hash::FxHashSet;
use rustc_type_ir::{
AliasTyKind, ClauseKind, PredicatePolarity, TypeSuperVisitable as _, TypeVisitable as _,
Upcast, elaborate,
inherent::{IntoKind, SliceLike},
Upcast, elaborate, inherent::IntoKind,
};
use smallvec::SmallVec;
@ -329,13 +328,9 @@ where
}
let sig = db.callable_item_signature(func.into());
if sig
.skip_binder()
.inputs()
.iter()
.skip(1)
.any(|ty| contains_illegal_self_type_reference(db, trait_, &ty, AllowSelfProjection::Yes))
{
if sig.skip_binder().inputs().iter().skip(1).any(|ty| {
contains_illegal_self_type_reference(db, trait_, ty.skip_binder(), AllowSelfProjection::Yes)
}) {
cb(MethodViolationCode::ReferencesSelfInput)?;
}
@ -412,11 +407,11 @@ fn receiver_is_dispatchable<'db>(
// `self: Self` can't be dispatched on, but this is already considered dyn-compatible
// See rustc's comment on https://github.com/rust-lang/rust/blob/3f121b9461cce02a703a0e7e450568849dfaa074/compiler/rustc_trait_selection/src/traits/object_safety.rs#L433-L437
if sig.inputs().iter().next().is_some_and(|p| p.skip_binder() == self_param_ty) {
if sig.inputs().iter().next().is_some_and(|p| *p.skip_binder() == self_param_ty) {
return true;
}
let Some(&receiver_ty) = sig.inputs().skip_binder().as_slice().first() else {
let Some(&receiver_ty) = sig.inputs().skip_binder().first() else {
return false;
};

View file

@ -28,7 +28,7 @@ mod path;
mod place_op;
pub(crate) mod unify;
use std::{cell::OnceCell, convert::identity, iter, ops::Index};
use std::{cell::OnceCell, convert::identity, iter};
use base_db::Crate;
use either::Either;
@ -47,14 +47,12 @@ use hir_expand::{mod_path::ModPath, name::Name};
use indexmap::IndexSet;
use intern::sym;
use la_arena::ArenaMap;
use macros::{TypeFoldable, TypeVisitable};
use rustc_ast_ir::Mutability;
use rustc_hash::{FxHashMap, FxHashSet};
use rustc_type_ir::{
AliasTyKind, TypeFoldable,
inherent::{AdtDef, IntoKind, Region as _, SliceLike, Ty as _},
inherent::{AdtDef, IntoKind, Ty as _},
};
use salsa::Update;
use span::Edition;
use stdx::never;
use thin_vec::ThinVec;
@ -74,10 +72,10 @@ use crate::{
method_resolution::{CandidateId, MethodResolutionUnstableFeatures},
mir::MirSpan,
next_solver::{
AliasTy, Const, DbInterner, ErrorGuaranteed, GenericArg, GenericArgs, Region, Ty, TyKind,
Tys,
AliasTy, Const, DbInterner, ErrorGuaranteed, GenericArg, GenericArgs, Region,
StoredGenericArgs, StoredTy, StoredTys, Ty, TyKind, Tys,
abi::Safety,
infer::{InferCtxt, traits::ObligationCause},
infer::{InferCtxt, ObligationInspector, traits::ObligationCause},
},
traits::FnTrait,
utils::TargetFeatureIsSafeInTarget,
@ -95,12 +93,24 @@ use cast::{CastCheck, CastError};
pub(crate) use closure::analysis::{CaptureKind, CapturedItem, CapturedItemWithoutTy};
/// The entry point of type inference.
fn infer_query(db: &dyn HirDatabase, def: DefWithBodyId) -> InferenceResult<'_> {
fn infer_query(db: &dyn HirDatabase, def: DefWithBodyId) -> InferenceResult {
infer_query_with_inspect(db, def, None)
}
pub fn infer_query_with_inspect<'db>(
db: &'db dyn HirDatabase,
def: DefWithBodyId,
inspect: Option<ObligationInspector<'db>>,
) -> InferenceResult {
let _p = tracing::info_span!("infer_query").entered();
let resolver = def.resolver(db);
let body = db.body(def);
let mut ctx = InferenceContext::new(db, def, &body, resolver);
if let Some(inspect) = inspect {
ctx.table.infer_ctxt.attach_obligation_inspector(inspect);
}
match def {
DefWithBodyId::FunctionId(f) => {
ctx.collect_fn(f);
@ -110,23 +120,23 @@ fn infer_query(db: &dyn HirDatabase, def: DefWithBodyId) -> InferenceResult<'_>
DefWithBodyId::VariantId(v) => {
ctx.return_ty = match EnumSignature::variant_body_type(db, v.lookup(db).parent) {
hir_def::layout::IntegerType::Pointer(signed) => match signed {
true => ctx.types.isize,
false => ctx.types.usize,
true => ctx.types.types.isize,
false => ctx.types.types.usize,
},
hir_def::layout::IntegerType::Fixed(size, signed) => match signed {
true => match size {
Integer::I8 => ctx.types.i8,
Integer::I16 => ctx.types.i16,
Integer::I32 => ctx.types.i32,
Integer::I64 => ctx.types.i64,
Integer::I128 => ctx.types.i128,
Integer::I8 => ctx.types.types.i8,
Integer::I16 => ctx.types.types.i16,
Integer::I32 => ctx.types.types.i32,
Integer::I64 => ctx.types.types.i64,
Integer::I128 => ctx.types.types.i128,
},
false => match size {
Integer::I8 => ctx.types.u8,
Integer::I16 => ctx.types.u16,
Integer::I32 => ctx.types.u32,
Integer::I64 => ctx.types.u64,
Integer::I128 => ctx.types.u128,
Integer::I8 => ctx.types.types.u8,
Integer::I16 => ctx.types.types.u16,
Integer::I32 => ctx.types.types.u32,
Integer::I64 => ctx.types.types.u64,
Integer::I128 => ctx.types.types.u128,
},
},
};
@ -162,7 +172,7 @@ fn infer_query(db: &dyn HirDatabase, def: DefWithBodyId) -> InferenceResult<'_>
ctx.resolve_all()
}
fn infer_cycle_result(db: &dyn HirDatabase, _: DefWithBodyId) -> InferenceResult<'_> {
fn infer_cycle_result(db: &dyn HirDatabase, _: salsa::Id, _: DefWithBodyId) -> InferenceResult {
InferenceResult {
has_errors: true,
..InferenceResult::new(Ty::new_error(DbInterner::new_no_crate(db), ErrorGuaranteed))
@ -196,8 +206,8 @@ pub enum InferenceTyDiagnosticSource {
Signature,
}
#[derive(Debug, PartialEq, Eq, Clone, Update)]
pub enum InferenceDiagnostic<'db> {
#[derive(Debug, PartialEq, Eq, Clone)]
pub enum InferenceDiagnostic {
NoSuchField {
field: ExprOrPatId,
private: Option<LocalFieldId>,
@ -213,16 +223,16 @@ pub enum InferenceDiagnostic<'db> {
},
UnresolvedField {
expr: ExprId,
receiver: Ty<'db>,
receiver: StoredTy,
name: Name,
method_with_same_name_exists: bool,
},
UnresolvedMethodCall {
expr: ExprId,
receiver: Ty<'db>,
receiver: StoredTy,
name: Name,
/// Contains the type the field resolves to
field_with_same_name: Option<Ty<'db>>,
field_with_same_name: Option<StoredTy>,
assoc_func_with_same_name: Option<FunctionId>,
},
UnresolvedAssocItem {
@ -249,21 +259,21 @@ pub enum InferenceDiagnostic<'db> {
},
ExpectedFunction {
call_expr: ExprId,
found: Ty<'db>,
found: StoredTy,
},
TypedHole {
expr: ExprId,
expected: Ty<'db>,
expected: StoredTy,
},
CastToUnsized {
expr: ExprId,
cast_ty: Ty<'db>,
cast_ty: StoredTy,
},
InvalidCast {
expr: ExprId,
error: CastError,
expr_ty: Ty<'db>,
cast_ty: Ty<'db>,
expr_ty: StoredTy,
cast_ty: StoredTy,
},
TyDiagnostic {
source: InferenceTyDiagnosticSource,
@ -290,10 +300,10 @@ pub enum InferenceDiagnostic<'db> {
}
/// A mismatch between an expected and an inferred type.
#[derive(Clone, PartialEq, Eq, Debug, Hash, Update)]
pub struct TypeMismatch<'db> {
pub expected: Ty<'db>,
pub actual: Ty<'db>,
#[derive(Clone, PartialEq, Eq, Debug, Hash)]
pub struct TypeMismatch {
pub expected: StoredTy,
pub actual: StoredTy,
}
/// Represents coercing a value to a different type of value.
@ -336,20 +346,23 @@ pub struct TypeMismatch<'db> {
/// At some point, of course, `Box` should move out of the compiler, in which
/// case this is analogous to transforming a struct. E.g., Box<[i32; 4]> ->
/// Box<[i32]> is an `Adjust::Unsize` with the target `Box<[i32]>`.
#[derive(Clone, Debug, PartialEq, Eq, Hash, TypeVisitable, TypeFoldable, Update)]
pub struct Adjustment<'db> {
#[type_visitable(ignore)]
#[type_foldable(identity)]
#[derive(Clone, Debug, PartialEq, Eq, Hash)]
pub struct Adjustment {
pub kind: Adjust,
pub target: Ty<'db>,
pub target: StoredTy,
}
impl<'db> Adjustment<'db> {
pub fn borrow(interner: DbInterner<'db>, m: Mutability, ty: Ty<'db>, lt: Region<'db>) -> Self {
impl Adjustment {
pub fn borrow<'db>(
interner: DbInterner<'db>,
m: Mutability,
ty: Ty<'db>,
lt: Region<'db>,
) -> Self {
let ty = Ty::new_ref(interner, lt, ty, m);
Adjustment {
kind: Adjust::Borrow(AutoBorrow::Ref(AutoBorrowMutability::new(m, AllowTwoPhase::No))),
target: ty,
target: ty.store(),
}
}
}
@ -473,56 +486,47 @@ pub enum PointerCast {
/// When you add a field that stores types (including `Substitution` and the like), don't forget
/// `resolve_completely()`'ing them in `InferenceContext::resolve_all()`. Inference variables must
/// not appear in the final inference result.
#[derive(Clone, PartialEq, Eq, Debug, Update)]
pub struct InferenceResult<'db> {
#[derive(Clone, PartialEq, Eq, Debug)]
pub struct InferenceResult {
/// For each method call expr, records the function it resolves to.
#[update(unsafe(with(crate::utils::unsafe_update_eq /* expr id is technically update */)))]
method_resolutions: FxHashMap<ExprId, (FunctionId, GenericArgs<'db>)>,
method_resolutions: FxHashMap<ExprId, (FunctionId, StoredGenericArgs)>,
/// For each field access expr, records the field it resolves to.
field_resolutions: FxHashMap<ExprId, Either<FieldId, TupleFieldId>>,
/// For each struct literal or pattern, records the variant it resolves to.
variant_resolutions: FxHashMap<ExprOrPatId, VariantId>,
/// For each associated item record what it resolves to
assoc_resolutions: FxHashMap<ExprOrPatId, (CandidateId, GenericArgs<'db>)>,
assoc_resolutions: FxHashMap<ExprOrPatId, (CandidateId, StoredGenericArgs)>,
/// Whenever a tuple field expression accesses a tuple field, we allocate a tuple id in
/// [`InferenceContext`] and store the tuple's substitution there. This map is the reverse of
/// that, which allows us to resolve a [`TupleFieldId`]'s type.
#[update(unsafe(with(crate::utils::unsafe_update_eq /* thinvec is technically update */)))]
tuple_field_access_types: ThinVec<Tys<'db>>,
tuple_field_access_types: ThinVec<StoredTys>,
#[update(unsafe(with(crate::utils::unsafe_update_eq /* expr id is technically update */)))]
pub(crate) type_of_expr: ArenaMap<ExprId, Ty<'db>>,
pub(crate) type_of_expr: ArenaMap<ExprId, StoredTy>,
/// For each pattern record the type it resolves to.
///
/// **Note**: When a pattern type is resolved it may still contain
/// unresolved or missing subpatterns or subpatterns of mismatched types.
#[update(unsafe(with(crate::utils::unsafe_update_eq /* pat id is technically update */)))]
pub(crate) type_of_pat: ArenaMap<PatId, Ty<'db>>,
#[update(unsafe(with(crate::utils::unsafe_update_eq /* binding id is technically update */)))]
pub(crate) type_of_binding: ArenaMap<BindingId, Ty<'db>>,
#[update(unsafe(with(crate::utils::unsafe_update_eq /* type ref id is technically update */)))]
pub(crate) type_of_type_placeholder: FxHashMap<TypeRefId, Ty<'db>>,
pub(crate) type_of_opaque: FxHashMap<InternedOpaqueTyId, Ty<'db>>,
pub(crate) type_of_pat: ArenaMap<PatId, StoredTy>,
pub(crate) type_of_binding: ArenaMap<BindingId, StoredTy>,
pub(crate) type_of_type_placeholder: FxHashMap<TypeRefId, StoredTy>,
pub(crate) type_of_opaque: FxHashMap<InternedOpaqueTyId, StoredTy>,
pub(crate) type_mismatches: Option<Box<FxHashMap<ExprOrPatId, TypeMismatch<'db>>>>,
pub(crate) type_mismatches: Option<Box<FxHashMap<ExprOrPatId, TypeMismatch>>>,
/// Whether there are any type-mismatching errors in the result.
// FIXME: This isn't as useful as initially thought due to us falling back placeholders to
// `TyKind::Error`.
// Which will then mark this field.
pub(crate) has_errors: bool,
/// During inference this field is empty and [`InferenceContext::diagnostics`] is filled instead.
#[update(unsafe(with(crate::utils::unsafe_update_eq /* thinvec is technically update */)))]
diagnostics: ThinVec<InferenceDiagnostic<'db>>,
diagnostics: ThinVec<InferenceDiagnostic>,
/// Interned `Error` type to return references to.
// FIXME: Remove this.
error_ty: Ty<'db>,
error_ty: StoredTy,
#[update(unsafe(with(crate::utils::unsafe_update_eq /* expr id is technically update */)))]
pub(crate) expr_adjustments: FxHashMap<ExprId, Box<[Adjustment<'db>]>>,
pub(crate) expr_adjustments: FxHashMap<ExprId, Box<[Adjustment]>>,
/// Stores the types which were implicitly dereferenced in pattern binding modes.
#[update(unsafe(with(crate::utils::unsafe_update_eq /* pat id is technically update */)))]
pub(crate) pat_adjustments: FxHashMap<PatId, Vec<Ty<'db>>>,
pub(crate) pat_adjustments: FxHashMap<PatId, Vec<StoredTy>>,
/// Stores the binding mode (`ref` in `let ref x = 2`) of bindings.
///
/// This one is tied to the `PatId` instead of `BindingId`, because in some rare cases, a binding in an
@ -538,7 +542,7 @@ pub struct InferenceResult<'db> {
/// the first `rest` has implicit `ref` binding mode, but the second `rest` binding mode is `move`.
pub(crate) binding_modes: ArenaMap<PatId, BindingMode>,
pub(crate) closure_info: FxHashMap<InternedClosureId, (Vec<CapturedItem<'db>>, FnTrait)>,
pub(crate) closure_info: FxHashMap<InternedClosureId, (Vec<CapturedItem>, FnTrait)>,
// FIXME: remove this field
pub mutated_bindings_in_closure: FxHashSet<BindingId>,
@ -546,15 +550,15 @@ pub struct InferenceResult<'db> {
}
#[salsa::tracked]
impl<'db> InferenceResult<'db> {
impl InferenceResult {
#[salsa::tracked(returns(ref), cycle_result = infer_cycle_result)]
pub fn for_body(db: &'db dyn HirDatabase, def: DefWithBodyId) -> InferenceResult<'db> {
pub fn for_body(db: &dyn HirDatabase, def: DefWithBodyId) -> InferenceResult {
infer_query(db, def)
}
}
impl<'db> InferenceResult<'db> {
fn new(error_ty: Ty<'db>) -> Self {
impl InferenceResult {
fn new(error_ty: Ty<'_>) -> Self {
Self {
method_resolutions: Default::default(),
field_resolutions: Default::default(),
@ -569,7 +573,7 @@ impl<'db> InferenceResult<'db> {
type_of_opaque: Default::default(),
type_mismatches: Default::default(),
has_errors: Default::default(),
error_ty,
error_ty: error_ty.store(),
pat_adjustments: Default::default(),
binding_modes: Default::default(),
expr_adjustments: Default::default(),
@ -579,8 +583,8 @@ impl<'db> InferenceResult<'db> {
}
}
pub fn method_resolution(&self, expr: ExprId) -> Option<(FunctionId, GenericArgs<'db>)> {
self.method_resolutions.get(&expr).copied()
pub fn method_resolution<'db>(&self, expr: ExprId) -> Option<(FunctionId, GenericArgs<'db>)> {
self.method_resolutions.get(&expr).map(|(func, args)| (*func, args.as_ref()))
}
pub fn field_resolution(&self, expr: ExprId) -> Option<Either<FieldId, TupleFieldId>> {
self.field_resolutions.get(&expr).copied()
@ -597,16 +601,19 @@ impl<'db> InferenceResult<'db> {
ExprOrPatId::PatId(id) => self.variant_resolution_for_pat(id),
}
}
pub fn assoc_resolutions_for_expr(
pub fn assoc_resolutions_for_expr<'db>(
&self,
id: ExprId,
) -> Option<(CandidateId, GenericArgs<'db>)> {
self.assoc_resolutions.get(&id.into()).copied()
self.assoc_resolutions.get(&id.into()).map(|(assoc, args)| (*assoc, args.as_ref()))
}
pub fn assoc_resolutions_for_pat(&self, id: PatId) -> Option<(CandidateId, GenericArgs<'db>)> {
self.assoc_resolutions.get(&id.into()).copied()
pub fn assoc_resolutions_for_pat<'db>(
&self,
id: PatId,
) -> Option<(CandidateId, GenericArgs<'db>)> {
self.assoc_resolutions.get(&id.into()).map(|(assoc, args)| (*assoc, args.as_ref()))
}
pub fn assoc_resolutions_for_expr_or_pat(
pub fn assoc_resolutions_for_expr_or_pat<'db>(
&self,
id: ExprOrPatId,
) -> Option<(CandidateId, GenericArgs<'db>)> {
@ -615,20 +622,20 @@ impl<'db> InferenceResult<'db> {
ExprOrPatId::PatId(id) => self.assoc_resolutions_for_pat(id),
}
}
pub fn type_mismatch_for_expr(&self, expr: ExprId) -> Option<&TypeMismatch<'db>> {
pub fn type_mismatch_for_expr(&self, expr: ExprId) -> Option<&TypeMismatch> {
self.type_mismatches.as_deref()?.get(&expr.into())
}
pub fn type_mismatch_for_pat(&self, pat: PatId) -> Option<&TypeMismatch<'db>> {
pub fn type_mismatch_for_pat(&self, pat: PatId) -> Option<&TypeMismatch> {
self.type_mismatches.as_deref()?.get(&pat.into())
}
pub fn type_mismatches(&self) -> impl Iterator<Item = (ExprOrPatId, &TypeMismatch<'db>)> {
pub fn type_mismatches(&self) -> impl Iterator<Item = (ExprOrPatId, &TypeMismatch)> {
self.type_mismatches
.as_deref()
.into_iter()
.flatten()
.map(|(expr_or_pat, mismatch)| (*expr_or_pat, mismatch))
}
pub fn expr_type_mismatches(&self) -> impl Iterator<Item = (ExprId, &TypeMismatch<'db>)> {
pub fn expr_type_mismatches(&self) -> impl Iterator<Item = (ExprId, &TypeMismatch)> {
self.type_mismatches.as_deref().into_iter().flatten().filter_map(
|(expr_or_pat, mismatch)| match *expr_or_pat {
ExprOrPatId::ExprId(expr) => Some((expr, mismatch)),
@ -636,22 +643,22 @@ impl<'db> InferenceResult<'db> {
},
)
}
pub fn placeholder_types(&self) -> impl Iterator<Item = (TypeRefId, &Ty<'db>)> {
self.type_of_type_placeholder.iter().map(|(&type_ref, ty)| (type_ref, ty))
pub fn placeholder_types<'db>(&self) -> impl Iterator<Item = (TypeRefId, Ty<'db>)> {
self.type_of_type_placeholder.iter().map(|(&type_ref, ty)| (type_ref, ty.as_ref()))
}
pub fn type_of_type_placeholder(&self, type_ref: TypeRefId) -> Option<Ty<'db>> {
self.type_of_type_placeholder.get(&type_ref).copied()
pub fn type_of_type_placeholder<'db>(&self, type_ref: TypeRefId) -> Option<Ty<'db>> {
self.type_of_type_placeholder.get(&type_ref).map(|ty| ty.as_ref())
}
pub fn closure_info(&self, closure: InternedClosureId) -> &(Vec<CapturedItem<'db>>, FnTrait) {
pub fn closure_info(&self, closure: InternedClosureId) -> &(Vec<CapturedItem>, FnTrait) {
self.closure_info.get(&closure).unwrap()
}
pub fn type_of_expr_or_pat(&self, id: ExprOrPatId) -> Option<Ty<'db>> {
pub fn type_of_expr_or_pat<'db>(&self, id: ExprOrPatId) -> Option<Ty<'db>> {
match id {
ExprOrPatId::ExprId(id) => self.type_of_expr.get(id).copied(),
ExprOrPatId::PatId(id) => self.type_of_pat.get(id).copied(),
ExprOrPatId::ExprId(id) => self.type_of_expr.get(id).map(|it| it.as_ref()),
ExprOrPatId::PatId(id) => self.type_of_pat.get(id).map(|it| it.as_ref()),
}
}
pub fn type_of_expr_with_adjust(&self, id: ExprId) -> Option<Ty<'db>> {
pub fn type_of_expr_with_adjust<'db>(&self, id: ExprId) -> Option<Ty<'db>> {
match self.expr_adjustments.get(&id).and_then(|adjustments| {
adjustments.iter().rfind(|adj| {
// https://github.com/rust-lang/rust/blob/67819923ac8ea353aaa775303f4c3aacbf41d010/compiler/rustc_mir_build/src/thir/cx/expr.rs#L140
@ -660,37 +667,37 @@ impl<'db> InferenceResult<'db> {
Adjustment {
kind: Adjust::NeverToAny,
target,
} if target.is_never()
} if target.as_ref().is_never()
)
})
}) {
Some(adjustment) => Some(adjustment.target),
None => self.type_of_expr.get(id).copied(),
Some(adjustment) => Some(adjustment.target.as_ref()),
None => self.type_of_expr.get(id).map(|it| it.as_ref()),
}
}
pub fn type_of_pat_with_adjust(&self, id: PatId) -> Option<Ty<'db>> {
pub fn type_of_pat_with_adjust<'db>(&self, id: PatId) -> Option<Ty<'db>> {
match self.pat_adjustments.get(&id).and_then(|adjustments| adjustments.last()) {
Some(adjusted) => Some(*adjusted),
None => self.type_of_pat.get(id).copied(),
Some(adjusted) => Some(adjusted.as_ref()),
None => self.type_of_pat.get(id).map(|it| it.as_ref()),
}
}
pub fn is_erroneous(&self) -> bool {
self.has_errors && self.type_of_expr.iter().count() == 0
}
pub fn diagnostics(&self) -> &[InferenceDiagnostic<'db>] {
pub fn diagnostics(&self) -> &[InferenceDiagnostic] {
&self.diagnostics
}
pub fn tuple_field_access_type(&self, id: TupleId) -> Tys<'db> {
self.tuple_field_access_types[id.0 as usize]
pub fn tuple_field_access_type<'db>(&self, id: TupleId) -> Tys<'db> {
self.tuple_field_access_types[id.0 as usize].as_ref()
}
pub fn pat_adjustment(&self, id: PatId) -> Option<&[Ty<'db>]> {
pub fn pat_adjustment(&self, id: PatId) -> Option<&[StoredTy]> {
self.pat_adjustments.get(&id).map(|it| &**it)
}
pub fn expr_adjustment(&self, id: ExprId) -> Option<&[Adjustment<'db>]> {
pub fn expr_adjustment(&self, id: ExprId) -> Option<&[Adjustment]> {
self.expr_adjustments.get(&id).map(|it| &**it)
}
@ -699,135 +706,47 @@ impl<'db> InferenceResult<'db> {
}
// This method is consumed by external tools to run rust-analyzer as a library. Don't remove, please.
pub fn expression_types(&self) -> impl Iterator<Item = (ExprId, Ty<'db>)> {
self.type_of_expr.iter().map(|(k, v)| (k, *v))
pub fn expression_types<'db>(&self) -> impl Iterator<Item = (ExprId, Ty<'db>)> {
self.type_of_expr.iter().map(|(k, v)| (k, v.as_ref()))
}
// This method is consumed by external tools to run rust-analyzer as a library. Don't remove, please.
pub fn pattern_types(&self) -> impl Iterator<Item = (PatId, Ty<'db>)> {
self.type_of_pat.iter().map(|(k, v)| (k, *v))
pub fn pattern_types<'db>(&self) -> impl Iterator<Item = (PatId, Ty<'db>)> {
self.type_of_pat.iter().map(|(k, v)| (k, v.as_ref()))
}
// This method is consumed by external tools to run rust-analyzer as a library. Don't remove, please.
pub fn binding_types(&self) -> impl Iterator<Item = (BindingId, Ty<'db>)> {
self.type_of_binding.iter().map(|(k, v)| (k, *v))
pub fn binding_types<'db>(&self) -> impl Iterator<Item = (BindingId, Ty<'db>)> {
self.type_of_binding.iter().map(|(k, v)| (k, v.as_ref()))
}
// This method is consumed by external tools to run rust-analyzer as a library. Don't remove, please.
pub fn return_position_impl_trait_types(
&self,
pub fn return_position_impl_trait_types<'db>(
&'db self,
db: &'db dyn HirDatabase,
) -> impl Iterator<Item = (ImplTraitIdx<'db>, Ty<'db>)> {
self.type_of_opaque.iter().filter_map(move |(&id, &ty)| {
) -> impl Iterator<Item = (ImplTraitIdx, Ty<'db>)> {
self.type_of_opaque.iter().filter_map(move |(&id, ty)| {
let ImplTraitId::ReturnTypeImplTrait(_, rpit_idx) = id.loc(db) else {
return None;
};
Some((rpit_idx, ty))
Some((rpit_idx, ty.as_ref()))
})
}
}
impl<'db> Index<ExprId> for InferenceResult<'db> {
type Output = Ty<'db>;
fn index(&self, expr: ExprId) -> &Ty<'db> {
self.type_of_expr.get(expr).unwrap_or(&self.error_ty)
pub fn expr_ty<'db>(&self, id: ExprId) -> Ty<'db> {
self.type_of_expr.get(id).map_or(self.error_ty.as_ref(), |it| it.as_ref())
}
}
impl<'db> Index<PatId> for InferenceResult<'db> {
type Output = Ty<'db>;
fn index(&self, pat: PatId) -> &Ty<'db> {
self.type_of_pat.get(pat).unwrap_or(&self.error_ty)
pub fn pat_ty<'db>(&self, id: PatId) -> Ty<'db> {
self.type_of_pat.get(id).map_or(self.error_ty.as_ref(), |it| it.as_ref())
}
}
impl<'db> Index<ExprOrPatId> for InferenceResult<'db> {
type Output = Ty<'db>;
fn index(&self, id: ExprOrPatId) -> &Ty<'db> {
match id {
ExprOrPatId::ExprId(id) => &self[id],
ExprOrPatId::PatId(id) => &self[id],
}
pub fn expr_or_pat_ty<'db>(&self, id: ExprOrPatId) -> Ty<'db> {
self.type_of_expr_or_pat(id).unwrap_or(self.error_ty.as_ref())
}
}
impl<'db> Index<BindingId> for InferenceResult<'db> {
type Output = Ty<'db>;
fn index(&self, b: BindingId) -> &Ty<'db> {
self.type_of_binding.get(b).unwrap_or(&self.error_ty)
}
}
/// Pre-interned copies of the primitive types plus a handful of other
/// frequently used types, regions, and generic-arg lists.
///
/// Each entry is interned once (via the `DbInterner`, in this type's `new`
/// constructor) so that hot inference paths can reuse the values instead of
/// re-interning them on every use.
// NOTE(review): this definition is on the removed side of the surrounding
// diff; only comments have been added here, the tokens are unchanged.
#[derive(Debug, Clone)]
struct InternedStandardTypes<'db> {
    /// The unit type `()`.
    unit: Ty<'db>,
    /// The never type `!`.
    never: Ty<'db>,
    char: Ty<'db>,
    bool: Ty<'db>,
    // Signed integer primitives.
    i8: Ty<'db>,
    i16: Ty<'db>,
    i32: Ty<'db>,
    i64: Ty<'db>,
    i128: Ty<'db>,
    isize: Ty<'db>,
    // Unsigned integer primitives.
    u8: Ty<'db>,
    u16: Ty<'db>,
    u32: Ty<'db>,
    u64: Ty<'db>,
    u128: Ty<'db>,
    usize: Ty<'db>,
    // Floating-point primitives.
    f16: Ty<'db>,
    f32: Ty<'db>,
    f64: Ty<'db>,
    f128: Ty<'db>,
    /// `&'static str`: a `str` behind a shared reference with the `'static` region
    /// (built with `Ty::new_ref` in the constructor).
    static_str_ref: Ty<'db>,
    /// The error type (`Ty::new_error`), used as a fallback when inference fails.
    error: Ty<'db>,
    /// The `'static` region.
    re_static: Region<'db>,
    /// The error region.
    re_error: Region<'db>,
    /// The erased region.
    re_erased: Region<'db>,
    /// An empty `GenericArgs` list.
    empty_args: GenericArgs<'db>,
}
impl<'db> InternedStandardTypes<'db> {
fn new(interner: DbInterner<'db>) -> Self {
let str = Ty::new(interner, rustc_type_ir::TyKind::Str);
let re_static = Region::new_static(interner);
Self {
unit: Ty::new_unit(interner),
never: Ty::new(interner, TyKind::Never),
char: Ty::new(interner, TyKind::Char),
bool: Ty::new(interner, TyKind::Bool),
i8: Ty::new_int(interner, rustc_type_ir::IntTy::I8),
i16: Ty::new_int(interner, rustc_type_ir::IntTy::I16),
i32: Ty::new_int(interner, rustc_type_ir::IntTy::I32),
i64: Ty::new_int(interner, rustc_type_ir::IntTy::I64),
i128: Ty::new_int(interner, rustc_type_ir::IntTy::I128),
isize: Ty::new_int(interner, rustc_type_ir::IntTy::Isize),
u8: Ty::new_uint(interner, rustc_type_ir::UintTy::U8),
u16: Ty::new_uint(interner, rustc_type_ir::UintTy::U16),
u32: Ty::new_uint(interner, rustc_type_ir::UintTy::U32),
u64: Ty::new_uint(interner, rustc_type_ir::UintTy::U64),
u128: Ty::new_uint(interner, rustc_type_ir::UintTy::U128),
usize: Ty::new_uint(interner, rustc_type_ir::UintTy::Usize),
f16: Ty::new_float(interner, rustc_type_ir::FloatTy::F16),
f32: Ty::new_float(interner, rustc_type_ir::FloatTy::F32),
f64: Ty::new_float(interner, rustc_type_ir::FloatTy::F64),
f128: Ty::new_float(interner, rustc_type_ir::FloatTy::F128),
static_str_ref: Ty::new_ref(interner, re_static, str, Mutability::Not),
error: Ty::new_error(interner, ErrorGuaranteed),
re_static,
re_error: Region::error(interner),
re_erased: Region::new_erased(interner),
empty_args: GenericArgs::new_from_iter(interner, []),
}
pub fn binding_ty<'db>(&self, id: BindingId) -> Ty<'db> {
self.type_of_binding.get(id).map_or(self.error_ty.as_ref(), |it| it.as_ref())
}
}
@ -848,7 +767,7 @@ pub(crate) struct InferenceContext<'body, 'db> {
pub(crate) lang_items: &'db LangItems,
/// The traits in scope, disregarding block modules. This is used for caching purposes.
traits_in_scope: FxHashSet<TraitId>,
pub(crate) result: InferenceResult<'db>,
pub(crate) result: InferenceResult,
tuple_field_accesses_rev:
IndexSet<Tys<'db>, std::hash::BuildHasherDefault<rustc_hash::FxHasher>>,
/// The return type of the function being inferred, the closure or async block if we're
@ -865,7 +784,7 @@ pub(crate) struct InferenceContext<'body, 'db> {
resume_yield_tys: Option<(Ty<'db>, Ty<'db>)>,
diverges: Diverges,
breakables: Vec<BreakableContext<'db>>,
types: InternedStandardTypes<'db>,
types: &'db crate::next_solver::DefaultAny<'db>,
/// Whether we are inside the pattern of a destructuring assignment.
inside_assignment: bool,
@ -873,7 +792,7 @@ pub(crate) struct InferenceContext<'body, 'db> {
deferred_cast_checks: Vec<CastCheck<'db>>,
// fields related to closure capture
current_captures: Vec<CapturedItemWithoutTy<'db>>,
current_captures: Vec<CapturedItemWithoutTy>,
/// A stack that has an entry for each projection in the current capture.
///
/// For example, in `a.b.c`, we capture the spans of `a`, `a.b`, and `a.b.c`.
@ -886,7 +805,7 @@ pub(crate) struct InferenceContext<'body, 'db> {
closure_dependencies: FxHashMap<InternedClosureId, Vec<InternedClosureId>>,
deferred_closures: FxHashMap<InternedClosureId, Vec<(Ty<'db>, Ty<'db>, Vec<Ty<'db>>, ExprId)>>,
diagnostics: Diagnostics<'db>,
diagnostics: Diagnostics,
}
#[derive(Clone, Debug)]
@ -942,10 +861,10 @@ impl<'body, 'db> InferenceContext<'body, 'db> {
) -> Self {
let trait_env = db.trait_environment_for_body(owner);
let table = unify::InferenceTable::new(db, trait_env, resolver.krate(), Some(owner));
let types = InternedStandardTypes::new(table.interner());
let types = crate::next_solver::default_types(db);
InferenceContext {
result: InferenceResult::new(types.error),
return_ty: types.error, // set in collect_* calls
result: InferenceResult::new(types.types.error),
return_ty: types.types.error, // set in collect_* calls
types,
target_features: OnceCell::new(),
unstable_features: MethodResolutionUnstableFeatures::from_def_map(
@ -1008,7 +927,7 @@ impl<'body, 'db> InferenceContext<'body, 'db> {
/// Clones `self` and calls `resolve_all()` on it.
// FIXME: Remove this.
pub(crate) fn fixme_resolve_all_clone(&self) -> InferenceResult<'db> {
pub(crate) fn fixme_resolve_all_clone(&self) -> InferenceResult {
let mut ctx = self.clone();
ctx.type_inference_fallback();
@ -1032,7 +951,7 @@ impl<'body, 'db> InferenceContext<'body, 'db> {
// `InferenceResult` in the middle of inference. See the fixme comment in `consteval::eval_to_const`. If you
// used this function for another workaround, mention it here. If you really need this function and believe that
// there is no problem in it being `pub(crate)`, remove this comment.
fn resolve_all(self) -> InferenceResult<'db> {
fn resolve_all(self) -> InferenceResult {
let InferenceContext {
mut table, mut result, tuple_field_accesses_rev, diagnostics, ..
} = self;
@ -1066,23 +985,23 @@ impl<'body, 'db> InferenceContext<'body, 'db> {
} = &mut result;
for ty in type_of_expr.values_mut() {
*ty = table.resolve_completely(*ty);
*has_errors = *has_errors || ty.references_non_lt_error();
*ty = table.resolve_completely(ty.as_ref()).store();
*has_errors = *has_errors || ty.as_ref().references_non_lt_error();
}
type_of_expr.shrink_to_fit();
for ty in type_of_pat.values_mut() {
*ty = table.resolve_completely(*ty);
*has_errors = *has_errors || ty.references_non_lt_error();
*ty = table.resolve_completely(ty.as_ref()).store();
*has_errors = *has_errors || ty.as_ref().references_non_lt_error();
}
type_of_pat.shrink_to_fit();
for ty in type_of_binding.values_mut() {
*ty = table.resolve_completely(*ty);
*has_errors = *has_errors || ty.references_non_lt_error();
*ty = table.resolve_completely(ty.as_ref()).store();
*has_errors = *has_errors || ty.as_ref().references_non_lt_error();
}
type_of_binding.shrink_to_fit();
for ty in type_of_type_placeholder.values_mut() {
*ty = table.resolve_completely(*ty);
*has_errors = *has_errors || ty.references_non_lt_error();
*ty = table.resolve_completely(ty.as_ref()).store();
*has_errors = *has_errors || ty.as_ref().references_non_lt_error();
}
type_of_type_placeholder.shrink_to_fit();
type_of_opaque.shrink_to_fit();
@ -1090,8 +1009,8 @@ impl<'body, 'db> InferenceContext<'body, 'db> {
if let Some(type_mismatches) = type_mismatches {
*has_errors = true;
for mismatch in type_mismatches.values_mut() {
mismatch.expected = table.resolve_completely(mismatch.expected);
mismatch.actual = table.resolve_completely(mismatch.actual);
mismatch.expected = table.resolve_completely(mismatch.expected.as_ref()).store();
mismatch.actual = table.resolve_completely(mismatch.actual.as_ref()).store();
}
type_mismatches.shrink_to_fit();
}
@ -1101,23 +1020,23 @@ impl<'body, 'db> InferenceContext<'body, 'db> {
ExpectedFunction { found: ty, .. }
| UnresolvedField { receiver: ty, .. }
| UnresolvedMethodCall { receiver: ty, .. } => {
*ty = table.resolve_completely(*ty);
*ty = table.resolve_completely(ty.as_ref()).store();
// FIXME: Remove this when we are on par with rustc in terms of inference
if ty.references_non_lt_error() {
if ty.as_ref().references_non_lt_error() {
return false;
}
if let UnresolvedMethodCall { field_with_same_name, .. } = diagnostic
&& let Some(ty) = field_with_same_name
{
*ty = table.resolve_completely(*ty);
if ty.references_non_lt_error() {
*ty = table.resolve_completely(ty.as_ref()).store();
if ty.as_ref().references_non_lt_error() {
*field_with_same_name = None;
}
}
}
TypedHole { expected: ty, .. } => {
*ty = table.resolve_completely(*ty);
*ty = table.resolve_completely(ty.as_ref()).store();
}
_ => (),
}
@ -1125,30 +1044,33 @@ impl<'body, 'db> InferenceContext<'body, 'db> {
});
diagnostics.shrink_to_fit();
for (_, subst) in method_resolutions.values_mut() {
*subst = table.resolve_completely(*subst);
*has_errors = *has_errors || subst.types().any(|ty| ty.references_non_lt_error());
*subst = table.resolve_completely(subst.as_ref()).store();
*has_errors =
*has_errors || subst.as_ref().types().any(|ty| ty.references_non_lt_error());
}
method_resolutions.shrink_to_fit();
for (_, subst) in assoc_resolutions.values_mut() {
*subst = table.resolve_completely(*subst);
*has_errors = *has_errors || subst.types().any(|ty| ty.references_non_lt_error());
*subst = table.resolve_completely(subst.as_ref()).store();
*has_errors =
*has_errors || subst.as_ref().types().any(|ty| ty.references_non_lt_error());
}
assoc_resolutions.shrink_to_fit();
for adjustment in expr_adjustments.values_mut().flatten() {
adjustment.target = table.resolve_completely(adjustment.target);
*has_errors = *has_errors || adjustment.target.references_non_lt_error();
adjustment.target = table.resolve_completely(adjustment.target.as_ref()).store();
*has_errors = *has_errors || adjustment.target.as_ref().references_non_lt_error();
}
expr_adjustments.shrink_to_fit();
for adjustment in pat_adjustments.values_mut().flatten() {
*adjustment = table.resolve_completely(*adjustment);
*has_errors = *has_errors || adjustment.references_non_lt_error();
*adjustment = table.resolve_completely(adjustment.as_ref()).store();
*has_errors = *has_errors || adjustment.as_ref().references_non_lt_error();
}
pat_adjustments.shrink_to_fit();
result.tuple_field_access_types = tuple_field_accesses_rev
.into_iter()
.map(|subst| table.resolve_completely(subst))
.map(|subst| table.resolve_completely(subst).store())
.inspect(|subst| {
*has_errors = *has_errors || subst.iter().any(|ty| ty.references_non_lt_error());
*has_errors =
*has_errors || subst.as_ref().iter().any(|ty| ty.references_non_lt_error());
})
.collect();
result.tuple_field_access_types.shrink_to_fit();
@ -1174,7 +1096,7 @@ impl<'body, 'db> InferenceContext<'body, 'db> {
data.type_ref,
&data.store,
InferenceTyDiagnosticSource::Signature,
LifetimeElisionKind::Elided(self.types.re_static),
LifetimeElisionKind::Elided(self.types.regions.statik),
);
self.return_ty = return_ty;
@ -1232,7 +1154,7 @@ impl<'body, 'db> InferenceContext<'body, 'db> {
);
self.process_user_written_ty(return_ty)
}
None => self.types.unit,
None => self.types.types.unit,
};
self.return_coercion = Some(CoerceMany::new(self.return_ty));
@ -1262,10 +1184,10 @@ impl<'body, 'db> InferenceContext<'body, 'db> {
}
fn write_expr_ty(&mut self, expr: ExprId, ty: Ty<'db>) {
self.result.type_of_expr.insert(expr, ty);
self.result.type_of_expr.insert(expr, ty.store());
}
pub(crate) fn write_expr_adj(&mut self, expr: ExprId, adjustments: Box<[Adjustment<'db>]>) {
pub(crate) fn write_expr_adj(&mut self, expr: ExprId, adjustments: Box<[Adjustment]>) {
if adjustments.is_empty() {
return;
}
@ -1278,7 +1200,7 @@ impl<'body, 'db> InferenceContext<'body, 'db> {
) => {
// NeverToAny coercion can target any type, so instead of adding a new
// adjustment on top we can change the target.
*target = *new_target;
*target = new_target.clone();
}
_ => {
*entry.get_mut() = adjustments;
@ -1291,7 +1213,7 @@ impl<'body, 'db> InferenceContext<'body, 'db> {
}
}
fn write_pat_adj(&mut self, pat: PatId, adjustments: Box<[Ty<'db>]>) {
fn write_pat_adj(&mut self, pat: PatId, adjustments: Box<[StoredTy]>) {
if adjustments.is_empty() {
return;
}
@ -1304,7 +1226,7 @@ impl<'body, 'db> InferenceContext<'body, 'db> {
func: FunctionId,
subst: GenericArgs<'db>,
) {
self.result.method_resolutions.insert(expr, (func, subst));
self.result.method_resolutions.insert(expr, (func, subst.store()));
}
fn write_variant_resolution(&mut self, id: ExprOrPatId, variant: VariantId) {
@ -1317,22 +1239,22 @@ impl<'body, 'db> InferenceContext<'body, 'db> {
item: CandidateId,
subs: GenericArgs<'db>,
) {
self.result.assoc_resolutions.insert(id, (item, subs));
self.result.assoc_resolutions.insert(id, (item, subs.store()));
}
fn write_pat_ty(&mut self, pat: PatId, ty: Ty<'db>) {
self.result.type_of_pat.insert(pat, ty);
self.result.type_of_pat.insert(pat, ty.store());
}
fn write_type_placeholder_ty(&mut self, type_ref: TypeRefId, ty: Ty<'db>) {
self.result.type_of_type_placeholder.insert(type_ref, ty);
self.result.type_of_type_placeholder.insert(type_ref, ty.store());
}
fn write_binding_ty(&mut self, id: BindingId, ty: Ty<'db>) {
self.result.type_of_binding.insert(id, ty);
self.result.type_of_binding.insert(id, ty.store());
}
pub(crate) fn push_diagnostic(&self, diagnostic: InferenceDiagnostic<'db>) {
pub(crate) fn push_diagnostic(&self, diagnostic: InferenceDiagnostic) {
self.diagnostics.push(diagnostic);
}
@ -1429,7 +1351,7 @@ impl<'body, 'db> InferenceContext<'body, 'db> {
}
fn err_ty(&self) -> Ty<'db> {
self.types.error
self.types.types.error
}
pub(crate) fn make_body_lifetime(&mut self, lifetime_ref: LifetimeRefId) -> Region<'db> {
@ -1486,7 +1408,13 @@ impl<'body, 'db> InferenceContext<'body, 'db> {
match ty.kind() {
TyKind::Adt(adt_def, substs) => match adt_def.def_id().0 {
AdtId::StructId(struct_id) => {
match self.db.field_types(struct_id.into()).values().next_back().copied() {
match self
.db
.field_types(struct_id.into())
.values()
.next_back()
.map(|it| it.get())
{
Some(field) => {
ty = field.instantiate(self.interner(), substs);
}
@ -1547,7 +1475,7 @@ impl<'body, 'db> InferenceContext<'body, 'db> {
self.result
.type_mismatches
.get_or_insert_default()
.insert(id, TypeMismatch { expected, actual });
.insert(id, TypeMismatch { expected: expected.store(), actual: actual.store() });
}
result
}
@ -1588,11 +1516,11 @@ impl<'body, 'db> InferenceContext<'body, 'db> {
if let Err(_err) = result {
// FIXME: Emit diagnostic.
}
result.unwrap_or(self.types.error)
result.unwrap_or(self.types.types.error)
}
fn expr_ty(&self, expr: ExprId) -> Ty<'db> {
self.result[expr]
self.result.expr_ty(expr)
}
fn expr_ty_after_adjustments(&self, e: ExprId) -> Ty<'db> {
@ -1600,7 +1528,7 @@ impl<'body, 'db> InferenceContext<'body, 'db> {
if let Some(it) = self.result.expr_adjustments.get(&e)
&& let Some(it) = it.last()
{
ty = Some(it.target);
ty = Some(it.target.as_ref());
}
ty.unwrap_or_else(|| self.expr_ty(e))
}
@ -1820,7 +1748,7 @@ impl<'body, 'db> InferenceContext<'body, 'db> {
result
} else {
// FIXME diagnostic
(ctx.types.error, None)
(ctx.types.types.error, None)
}
}
}

View file

@ -25,7 +25,7 @@ impl<'db> InferenceTable<'db> {
}
impl<'db, Ctx: AutoderefCtx<'db>> GeneralAutoderef<'db, Ctx> {
pub(crate) fn adjust_steps_as_infer_ok(&mut self) -> InferOk<'db, Vec<Adjustment<'db>>> {
pub(crate) fn adjust_steps_as_infer_ok(&mut self) -> InferOk<'db, Vec<Adjustment>> {
let steps = self.steps();
if steps.is_empty() {
return InferOk { obligations: PredicateObligations::new(), value: vec![] };
@ -42,7 +42,10 @@ impl<'db, Ctx: AutoderefCtx<'db>> GeneralAutoderef<'db, Ctx> {
}
})
.zip(targets)
.map(|(autoderef, target)| Adjustment { kind: Adjust::Deref(autoderef), target })
.map(|(autoderef, target)| Adjustment {
kind: Adjust::Deref(autoderef),
target: target.store(),
})
.collect();
InferOk { obligations: self.take_obligations(), value: steps }

View file

@ -4,7 +4,7 @@ use hir_def::{AdtId, hir::ExprId, signatures::TraitFlags};
use rustc_ast_ir::Mutability;
use rustc_type_ir::{
Flags, InferTy, TypeFlags, UintTy,
inherent::{AdtDef, BoundExistentialPredicates as _, IntoKind, SliceLike, Ty as _},
inherent::{AdtDef, BoundExistentialPredicates as _, IntoKind, Ty as _},
};
use stdx::never;
@ -83,8 +83,13 @@ impl CastError {
expr: ExprId,
expr_ty: Ty<'db>,
cast_ty: Ty<'db>,
) -> InferenceDiagnostic<'db> {
InferenceDiagnostic::InvalidCast { expr, error: self, expr_ty, cast_ty }
) -> InferenceDiagnostic {
InferenceDiagnostic::InvalidCast {
expr,
error: self,
expr_ty: expr_ty.store(),
cast_ty: cast_ty.store(),
}
}
}
@ -109,7 +114,7 @@ impl<'db> CastCheck<'db> {
pub(super) fn check(
&mut self,
ctx: &mut InferenceContext<'_, 'db>,
) -> Result<(), InferenceDiagnostic<'db>> {
) -> Result<(), InferenceDiagnostic> {
self.expr_ty = ctx.table.try_structurally_resolve_type(self.expr_ty);
self.cast_ty = ctx.table.try_structurally_resolve_type(self.cast_ty);
@ -137,7 +142,7 @@ impl<'db> CastCheck<'db> {
{
return Err(InferenceDiagnostic::CastToUnsized {
expr: self.expr,
cast_ty: self.cast_ty,
cast_ty: self.cast_ty.store(),
});
}
@ -393,8 +398,9 @@ fn pointer_kind<'db>(
let struct_data = id.fields(ctx.db);
if let Some((last_field, _)) = struct_data.fields().iter().last() {
let last_field_ty =
ctx.db.field_types(id.into())[last_field].instantiate(ctx.interner(), subst);
let last_field_ty = ctx.db.field_types(id.into())[last_field]
.get()
.instantiate(ctx.interner(), subst);
pointer_kind(last_field_ty, ctx)
} else {
Ok(Some(PointerKind::Thin))

View file

@ -13,7 +13,7 @@ use rustc_type_ir::{
ClosureArgs, ClosureArgsParts, CoroutineArgs, CoroutineArgsParts, CoroutineClosureArgs,
CoroutineClosureArgsParts, Interner, TypeSuperVisitable, TypeVisitable, TypeVisitableExt,
TypeVisitor,
inherent::{BoundExistentialPredicates, GenericArgs as _, IntoKind, SliceLike, Ty as _},
inherent::{BoundExistentialPredicates, GenericArgs as _, IntoKind, Ty as _},
};
use tracing::debug;
@ -22,9 +22,8 @@ use crate::{
db::{InternedClosure, InternedCoroutine},
infer::{BreakableKind, Diverges, coerce::CoerceMany},
next_solver::{
AliasTy, Binder, BoundRegionKind, BoundVarKind, BoundVarKinds, ClauseKind, DbInterner,
ErrorGuaranteed, FnSig, GenericArgs, PolyFnSig, PolyProjectionPredicate, Predicate,
PredicateKind, SolverDefId, Ty, TyKind,
AliasTy, Binder, ClauseKind, DbInterner, ErrorGuaranteed, FnSig, GenericArgs, PolyFnSig,
PolyProjectionPredicate, Predicate, PredicateKind, SolverDefId, Ty, TyKind,
abi::Safety,
infer::{
BoundRegionConversionTime, InferOk, InferResult,
@ -73,16 +72,17 @@ impl<'db> InferenceContext<'_, 'db> {
let parent_args = GenericArgs::identity_for_item(interner, self.generic_def.into());
// FIXME: Make this an infer var and infer it later.
let tupled_upvars_ty = self.types.unit;
let tupled_upvars_ty = self.types.types.unit;
let (id, ty, resume_yield_tys) = match closure_kind {
ClosureKind::Coroutine(_) => {
let yield_ty = self.table.next_ty_var();
let resume_ty = liberated_sig.inputs().get(0).unwrap_or(self.types.unit);
let resume_ty =
liberated_sig.inputs().first().copied().unwrap_or(self.types.types.unit);
// FIXME: Infer the upvars later.
let parts = CoroutineArgsParts {
parent_args,
kind_ty: self.types.unit,
parent_args: parent_args.as_slice(),
kind_ty: self.types.types.unit,
resume_ty,
yield_ty,
return_ty: body_ret_ty,
@ -119,7 +119,7 @@ impl<'db> InferenceContext<'_, 'db> {
};
// FIXME: Infer the kind later if needed.
let parts = ClosureArgsParts {
parent_args,
parent_args: parent_args.as_slice(),
closure_kind_ty: Ty::from_closure_kind(
interner,
expected_kind.unwrap_or(rustc_type_ir::ClosureKind::Fn),
@ -140,9 +140,9 @@ impl<'db> InferenceContext<'_, 'db> {
// async closures always return the type ascribed after the `->` (if present),
// and yield `()`.
let bound_return_ty = bound_sig.skip_binder().output();
let bound_yield_ty = self.types.unit;
let bound_yield_ty = self.types.types.unit;
// rustc uses a special lang item type for the resume ty. I don't believe this can cause us problems.
let resume_ty = self.types.unit;
let resume_ty = self.types.types.unit;
// FIXME: Infer the kind later if needed.
let closure_kind_ty = Ty::from_closure_kind(
@ -155,26 +155,26 @@ impl<'db> InferenceContext<'_, 'db> {
let coroutine_captures_by_ref_ty = Ty::new_fn_ptr(
interner,
Binder::bind_with_vars(
interner.mk_fn_sig([], self.types.unit, false, Safety::Safe, FnAbi::Rust),
BoundVarKinds::new_from_iter(
interner,
[BoundVarKind::Region(BoundRegionKind::ClosureEnv)],
interner.mk_fn_sig(
[],
self.types.types.unit,
false,
Safety::Safe,
FnAbi::Rust,
),
self.types.coroutine_captures_by_ref_bound_var_kinds,
),
);
let closure_args = CoroutineClosureArgs::new(
interner,
CoroutineClosureArgsParts {
parent_args,
parent_args: parent_args.as_slice(),
closure_kind_ty,
signature_parts_ty: Ty::new_fn_ptr(
interner,
bound_sig.map_bound(|sig| {
interner.mk_fn_sig(
[
resume_ty,
Ty::new_tup_from_iter(interner, sig.inputs().iter()),
],
[resume_ty, Ty::new_tup(interner, sig.inputs())],
Ty::new_tup(interner, &[bound_yield_ty, bound_return_ty]),
sig.c_variadic,
sig.safety,
@ -195,7 +195,7 @@ impl<'db> InferenceContext<'_, 'db> {
// Now go through the argument patterns
for (arg_pat, arg_ty) in args.iter().zip(bound_sig.skip_binder().inputs()) {
self.infer_top_pat(*arg_pat, arg_ty, None);
self.infer_top_pat(*arg_pat, *arg_ty, None);
}
// FIXME: lift these out into a struct
@ -668,7 +668,7 @@ impl<'db> InferenceContext<'_, 'db> {
assert!(!expected_sig.skip_binder().has_vars_bound_above(rustc_type_ir::INNERMOST));
let bound_sig = expected_sig.map_bound(|sig| {
self.interner().mk_fn_sig(
sig.inputs(),
sig.inputs().iter().copied(),
sig.output(),
sig.c_variadic,
Safety::Safe,
@ -744,9 +744,10 @@ impl<'db> InferenceContext<'_, 'db> {
// The liberated version of this signature should be a subtype
// of the liberated form of the expectation.
for (supplied_ty, expected_ty) in
iter::zip(supplied_sig.inputs(), expected_sigs.liberated_sig.inputs())
{
for (supplied_ty, expected_ty) in iter::zip(
supplied_sig.inputs().iter().copied(),
expected_sigs.liberated_sig.inputs().iter().copied(),
) {
// Check that E' = S'.
let cause = ObligationCause::new();
let InferOk { value: (), obligations } =
@ -765,7 +766,8 @@ impl<'db> InferenceContext<'_, 'db> {
let inputs = supplied_sig
.inputs()
.into_iter()
.iter()
.copied()
.map(|ty| table.infer_ctxt.resolve_vars_if_possible(ty));
expected_sigs.liberated_sig = table.interner().mk_fn_sig(

View file

@ -15,7 +15,7 @@ use hir_def::{
};
use rustc_ast_ir::Mutability;
use rustc_hash::{FxHashMap, FxHashSet};
use rustc_type_ir::inherent::{IntoKind, SliceLike, Ty as _};
use rustc_type_ir::inherent::{IntoKind, Ty as _};
use smallvec::{SmallVec, smallvec};
use stdx::{format_to, never};
use syntax::utils::is_raw_identifier;
@ -25,21 +25,21 @@ use crate::{
db::{HirDatabase, InternedClosure, InternedClosureId},
infer::InferenceContext,
mir::{BorrowKind, MirSpan, MutBorrowKind, ProjectionElem},
next_solver::{DbInterner, EarlyBinder, GenericArgs, Ty, TyKind},
next_solver::{DbInterner, GenericArgs, StoredEarlyBinder, StoredTy, Ty, TyKind},
traits::FnTrait,
};
// The below functions handle capture and closure kind (Fn, FnMut, ..)
#[derive(Debug, Clone, PartialEq, Eq, Hash, salsa::Update)]
pub(crate) struct HirPlace<'db> {
pub(crate) struct HirPlace {
pub(crate) local: BindingId,
pub(crate) projections: Vec<ProjectionElem<'db, Infallible>>,
pub(crate) projections: Vec<ProjectionElem<Infallible>>,
}
impl<'db> HirPlace<'db> {
fn ty(&self, ctx: &mut InferenceContext<'_, 'db>) -> Ty<'db> {
let mut ty = ctx.table.resolve_completely(ctx.result[self.local]);
impl HirPlace {
fn ty<'db>(&self, ctx: &mut InferenceContext<'_, 'db>) -> Ty<'db> {
let mut ty = ctx.table.resolve_completely(ctx.result.binding_ty(self.local));
for p in &self.projections {
ty = p.projected_ty(
&ctx.table.infer_ctxt,
@ -78,8 +78,8 @@ pub enum CaptureKind {
}
#[derive(Debug, Clone, PartialEq, Eq, salsa::Update)]
pub struct CapturedItem<'db> {
pub(crate) place: HirPlace<'db>,
pub struct CapturedItem {
pub(crate) place: HirPlace,
pub(crate) kind: CaptureKind,
/// The inner vec is the stacks; the outer vec is for each capture reference.
///
@ -88,11 +88,10 @@ pub struct CapturedItem<'db> {
/// copy all captures of the inner closure to the outer closure, and then we may
/// truncate them, and we want the correct span to be reported.
span_stacks: SmallVec<[SmallVec<[MirSpan; 3]>; 3]>,
#[update(unsafe(with(crate::utils::unsafe_update_eq)))]
pub(crate) ty: EarlyBinder<'db, Ty<'db>>,
pub(crate) ty: StoredEarlyBinder<StoredTy>,
}
impl<'db> CapturedItem<'db> {
impl CapturedItem {
pub fn local(&self) -> BindingId {
self.place.local
}
@ -102,9 +101,9 @@ impl<'db> CapturedItem<'db> {
self.place.projections.iter().any(|it| !matches!(it, ProjectionElem::Deref))
}
pub fn ty(&self, db: &'db dyn HirDatabase, subst: GenericArgs<'db>) -> Ty<'db> {
pub fn ty<'db>(&self, db: &'db dyn HirDatabase, subst: GenericArgs<'db>) -> Ty<'db> {
let interner = DbInterner::new_no_crate(db);
self.ty.instantiate(interner, subst.split_closure_args_untupled().parent_args)
self.ty.get().instantiate(interner, subst.split_closure_args_untupled().parent_args)
}
pub fn kind(&self) -> CaptureKind {
@ -273,15 +272,15 @@ impl<'db> CapturedItem<'db> {
}
#[derive(Debug, Clone, PartialEq, Eq)]
pub(crate) struct CapturedItemWithoutTy<'db> {
pub(crate) place: HirPlace<'db>,
pub(crate) struct CapturedItemWithoutTy {
pub(crate) place: HirPlace,
pub(crate) kind: CaptureKind,
/// The inner vec is the stacks; the outer vec is for each capture reference.
pub(crate) span_stacks: SmallVec<[SmallVec<[MirSpan; 3]>; 3]>,
}
impl<'db> CapturedItemWithoutTy<'db> {
fn with_ty(self, ctx: &mut InferenceContext<'_, 'db>) -> CapturedItem<'db> {
impl CapturedItemWithoutTy {
fn with_ty(self, ctx: &mut InferenceContext<'_, '_>) -> CapturedItem {
let ty = self.place.ty(ctx);
let ty = match &self.kind {
CaptureKind::ByValue => ty,
@ -290,20 +289,20 @@ impl<'db> CapturedItemWithoutTy<'db> {
BorrowKind::Mut { .. } => Mutability::Mut,
_ => Mutability::Not,
};
Ty::new_ref(ctx.interner(), ctx.types.re_error, ty, m)
Ty::new_ref(ctx.interner(), ctx.types.regions.error, ty, m)
}
};
CapturedItem {
place: self.place,
kind: self.kind,
span_stacks: self.span_stacks,
ty: EarlyBinder::bind(ty),
ty: StoredEarlyBinder::bind(ty.store()),
}
}
}
impl<'db> InferenceContext<'_, 'db> {
fn place_of_expr(&mut self, tgt_expr: ExprId) -> Option<HirPlace<'db>> {
fn place_of_expr(&mut self, tgt_expr: ExprId) -> Option<HirPlace> {
let r = self.place_of_expr_without_adjust(tgt_expr)?;
let adjustments =
self.result.expr_adjustments.get(&tgt_expr).map(|it| &**it).unwrap_or_default();
@ -311,7 +310,7 @@ impl<'db> InferenceContext<'_, 'db> {
}
/// Pushes the span into `current_capture_span_stack`, *without clearing it first*.
fn path_place(&mut self, path: &Path, id: ExprOrPatId) -> Option<HirPlace<'db>> {
fn path_place(&mut self, path: &Path, id: ExprOrPatId) -> Option<HirPlace> {
if path.type_anchor().is_some() {
return None;
}
@ -332,7 +331,7 @@ impl<'db> InferenceContext<'_, 'db> {
}
/// Changes `current_capture_span_stack` to contain the stack of spans for this expr.
fn place_of_expr_without_adjust(&mut self, tgt_expr: ExprId) -> Option<HirPlace<'db>> {
fn place_of_expr_without_adjust(&mut self, tgt_expr: ExprId) -> Option<HirPlace> {
self.current_capture_span_stack.clear();
match &self.body[tgt_expr] {
Expr::Path(p) => {
@ -367,7 +366,7 @@ impl<'db> InferenceContext<'_, 'db> {
None
}
fn push_capture(&mut self, place: HirPlace<'db>, kind: CaptureKind) {
fn push_capture(&mut self, place: HirPlace, kind: CaptureKind) {
self.current_captures.push(CapturedItemWithoutTy {
place,
kind,
@ -375,11 +374,7 @@ impl<'db> InferenceContext<'_, 'db> {
});
}
fn truncate_capture_spans(
&self,
capture: &mut CapturedItemWithoutTy<'db>,
mut truncate_to: usize,
) {
fn truncate_capture_spans(&self, capture: &mut CapturedItemWithoutTy, mut truncate_to: usize) {
// The first span is the identifier, and it must always remain.
truncate_to += 1;
for span_stack in &mut capture.span_stacks {
@ -404,14 +399,14 @@ impl<'db> InferenceContext<'_, 'db> {
}
}
fn ref_expr(&mut self, expr: ExprId, place: Option<HirPlace<'db>>) {
fn ref_expr(&mut self, expr: ExprId, place: Option<HirPlace>) {
if let Some(place) = place {
self.add_capture(place, CaptureKind::ByRef(BorrowKind::Shared));
}
self.walk_expr(expr);
}
fn add_capture(&mut self, place: HirPlace<'db>, kind: CaptureKind) {
fn add_capture(&mut self, place: HirPlace, kind: CaptureKind) {
if self.is_upvar(&place) {
self.push_capture(place, kind);
}
@ -427,7 +422,7 @@ impl<'db> InferenceContext<'_, 'db> {
}
}
fn mutate_expr(&mut self, expr: ExprId, place: Option<HirPlace<'db>>) {
fn mutate_expr(&mut self, expr: ExprId, place: Option<HirPlace>) {
if let Some(place) = place {
self.add_capture(
place,
@ -444,7 +439,7 @@ impl<'db> InferenceContext<'_, 'db> {
self.walk_expr(expr);
}
fn consume_place(&mut self, place: HirPlace<'db>) {
fn consume_place(&mut self, place: HirPlace) {
if self.is_upvar(&place) {
let ty = place.ty(self);
let kind = if self.is_ty_copy(ty) {
@ -456,7 +451,7 @@ impl<'db> InferenceContext<'_, 'db> {
}
}
fn walk_expr_with_adjust(&mut self, tgt_expr: ExprId, adjustment: &[Adjustment<'db>]) {
fn walk_expr_with_adjust(&mut self, tgt_expr: ExprId, adjustment: &[Adjustment]) {
if let Some((last, rest)) = adjustment.split_last() {
match &last.kind {
Adjust::NeverToAny | Adjust::Deref(None) | Adjust::Pointer(_) => {
@ -477,12 +472,7 @@ impl<'db> InferenceContext<'_, 'db> {
}
}
fn ref_capture_with_adjusts(
&mut self,
m: Mutability,
tgt_expr: ExprId,
rest: &[Adjustment<'db>],
) {
fn ref_capture_with_adjusts(&mut self, m: Mutability, tgt_expr: ExprId, rest: &[Adjustment]) {
let capture_kind = match m {
Mutability::Mut => CaptureKind::ByRef(BorrowKind::Mut { kind: MutBorrowKind::Default }),
Mutability::Not => CaptureKind::ByRef(BorrowKind::Shared),
@ -780,7 +770,7 @@ impl<'db> InferenceContext<'_, 'db> {
}
Pat::Bind { id, .. } => match self.result.binding_modes[p] {
crate::BindingMode::Move => {
if self.is_ty_copy(self.result.type_of_binding[*id]) {
if self.is_ty_copy(self.result.binding_ty(*id)) {
update_result(CaptureKind::ByRef(BorrowKind::Shared));
} else {
update_result(CaptureKind::ByValue);
@ -798,7 +788,7 @@ impl<'db> InferenceContext<'_, 'db> {
self.body.walk_pats_shallow(p, |p| self.walk_pat_inner(p, update_result, for_mut));
}
fn is_upvar(&self, place: &HirPlace<'db>) -> bool {
fn is_upvar(&self, place: &HirPlace) -> bool {
if let Some(c) = self.current_closure {
let InternedClosure(_, root) = self.db.lookup_intern_closure(c);
return self.body.is_binding_upvar(place.local, root);
@ -830,7 +820,7 @@ impl<'db> InferenceContext<'_, 'db> {
// FIXME: Borrow checker problems without this.
let mut current_captures = std::mem::take(&mut self.current_captures);
for capture in &mut current_captures {
let mut ty = self.table.resolve_completely(self.result[capture.place.local]);
let mut ty = self.table.resolve_completely(self.result.binding_ty(capture.place.local));
if ty.is_raw_ptr() || ty.is_union() {
capture.kind = CaptureKind::ByRef(BorrowKind::Shared);
self.truncate_capture_spans(capture, 0);
@ -875,7 +865,7 @@ impl<'db> InferenceContext<'_, 'db> {
fn minimize_captures(&mut self) {
self.current_captures.sort_unstable_by_key(|it| it.place.projections.len());
let mut hash_map = FxHashMap::<HirPlace<'db>, usize>::default();
let mut hash_map = FxHashMap::<HirPlace, usize>::default();
let result = mem::take(&mut self.current_captures);
for mut item in result {
let mut lookup_place = HirPlace { local: item.place.local, projections: vec![] };
@ -910,7 +900,7 @@ impl<'db> InferenceContext<'_, 'db> {
}
}
fn consume_with_pat(&mut self, mut place: HirPlace<'db>, tgt_pat: PatId) {
fn consume_with_pat(&mut self, mut place: HirPlace, tgt_pat: PatId) {
let adjustments_count =
self.result.pat_adjustments.get(&tgt_pat).map(|it| it.len()).unwrap_or_default();
place.projections.extend((0..adjustments_count).map(|_| ProjectionElem::Deref));
@ -921,7 +911,7 @@ impl<'db> InferenceContext<'_, 'db> {
Pat::Missing | Pat::Wild => (),
Pat::Tuple { args, ellipsis } => {
let (al, ar) = args.split_at(ellipsis.map_or(args.len(), |it| it as usize));
let field_count = match self.result[tgt_pat].kind() {
let field_count = match self.result.pat_ty(tgt_pat).kind() {
TyKind::Tuple(s) => s.len(),
_ => break 'reset_span_stack,
};
@ -1221,11 +1211,11 @@ impl<'db> InferenceContext<'_, 'db> {
}
/// Call this only when the last span in the stack isn't a split.
fn apply_adjusts_to_place<'db>(
fn apply_adjusts_to_place(
current_capture_span_stack: &mut Vec<MirSpan>,
mut r: HirPlace<'db>,
adjustments: &[Adjustment<'db>],
) -> Option<HirPlace<'db>> {
mut r: HirPlace,
adjustments: &[Adjustment],
) -> Option<HirPlace> {
let span = *current_capture_span_stack.last().expect("empty capture span stack");
for adj in adjustments {
match &adj.kind {

View file

@ -104,7 +104,7 @@ struct Coerce<D> {
cause: ObligationCause,
}
type CoerceResult<'db> = InferResult<'db, (Vec<Adjustment<'db>>, Ty<'db>)>;
type CoerceResult<'db> = InferResult<'db, (Vec<Adjustment>, Ty<'db>)>;
/// Coercing a mutable reference to an immutable works, while
/// coercing `&T` to `&mut T` should be forbidden.
@ -114,7 +114,7 @@ fn coerce_mutbls<'db>(from_mutbl: Mutability, to_mutbl: Mutability) -> RelateRes
/// This always returns `Ok(...)`.
fn success<'db>(
adj: Vec<Adjustment<'db>>,
adj: Vec<Adjustment>,
target: Ty<'db>,
obligations: PredicateObligations<'db>,
) -> CoerceResult<'db> {
@ -206,14 +206,17 @@ where
&mut self,
a: Ty<'db>,
b: Ty<'db>,
adjustments: impl IntoIterator<Item = Adjustment<'db>>,
adjustments: impl IntoIterator<Item = Adjustment>,
final_adjustment: Adjust,
) -> CoerceResult<'db> {
self.unify_raw(a, b).and_then(|InferOk { value: ty, obligations }| {
success(
adjustments
.into_iter()
.chain(std::iter::once(Adjustment { target: ty, kind: final_adjustment }))
.chain(std::iter::once(Adjustment {
target: ty.store(),
kind: final_adjustment,
}))
.collect(),
ty,
obligations,
@ -237,7 +240,7 @@ where
if self.coerce_never {
return success(
vec![Adjustment { kind: Adjust::NeverToAny, target: b }],
vec![Adjustment { kind: Adjust::NeverToAny, target: b.store() }],
b,
PredicateObligations::new(),
);
@ -532,7 +535,8 @@ where
// Now apply the autoref.
let mutbl = AutoBorrowMutability::new(mutbl_b, self.allow_two_phase);
adjustments.push(Adjustment { kind: Adjust::Borrow(AutoBorrow::Ref(mutbl)), target: ty });
adjustments
.push(Adjustment { kind: Adjust::Borrow(AutoBorrow::Ref(mutbl)), target: ty.store() });
debug!("coerce_borrowed_pointer: succeeded ty={:?} adjustments={:?}", ty, adjustments);
@ -635,10 +639,10 @@ where
let mutbl = AutoBorrowMutability::new(mutbl_b, AllowTwoPhase::No);
Some((
Adjustment { kind: Adjust::Deref(None), target: ty_a },
Adjustment { kind: Adjust::Deref(None), target: ty_a.store() },
Adjustment {
kind: Adjust::Borrow(AutoBorrow::Ref(mutbl)),
target: Ty::new_ref(self.interner(), r_borrow, ty_a, mutbl_b),
target: Ty::new_ref(self.interner(), r_borrow, ty_a, mutbl_b).store(),
},
))
}
@ -646,16 +650,16 @@ where
coerce_mutbls(mt_a, mt_b)?;
Some((
Adjustment { kind: Adjust::Deref(None), target: ty_a },
Adjustment { kind: Adjust::Deref(None), target: ty_a.store() },
Adjustment {
kind: Adjust::Borrow(AutoBorrow::RawPtr(mt_b)),
target: Ty::new_ptr(self.interner(), ty_a, mt_b),
target: Ty::new_ptr(self.interner(), ty_a, mt_b).store(),
},
))
}
_ => None,
};
let coerce_source = reborrow.as_ref().map_or(source, |(_, r)| r.target);
let coerce_source = reborrow.as_ref().map_or(source, |(_, r)| r.target.as_ref());
// Setup either a subtyping or a LUB relationship between
// the `CoerceUnsized` target type and the expected type.
@ -726,7 +730,7 @@ where
Ok(None) => {
if trait_pred.def_id().0 == unsize_did {
let self_ty = trait_pred.self_ty();
let unsize_ty = trait_pred.trait_ref.args.inner()[1].expect_ty();
let unsize_ty = trait_pred.trait_ref.args[1].expect_ty();
debug!("coerce_unsized: ambiguous unsize case for {:?}", trait_pred);
match (self_ty.kind(), unsize_ty.kind()) {
(TyKind::Infer(rustc_type_ir::TyVar(v)), TyKind::Dynamic(..))
@ -815,7 +819,7 @@ where
b,
adjustment.map(|kind| Adjustment {
kind,
target: Ty::new_fn_ptr(this.interner(), fn_ty_a),
target: Ty::new_fn_ptr(this.interner(), fn_ty_a).store(),
}),
Adjust::Pointer(PointerCast::UnsafeFnPointer),
)
@ -955,7 +959,7 @@ where
self.unify_and(
a_raw,
b,
[Adjustment { kind: Adjust::Deref(None), target: mt_a.ty }],
[Adjustment { kind: Adjust::Deref(None), target: mt_a.ty.store() }],
Adjust::Borrow(AutoBorrow::RawPtr(mutbl_b)),
)
} else if mt_a.mutbl != mutbl_b {
@ -1170,12 +1174,15 @@ impl<'db> InferenceContext<'_, 'db> {
for &expr in exprs {
self.write_expr_adj(
expr,
Box::new([Adjustment { kind: prev_adjustment.clone(), target: fn_ptr }]),
Box::new([Adjustment {
kind: prev_adjustment.clone(),
target: fn_ptr.store(),
}]),
);
}
self.write_expr_adj(
new,
Box::new([Adjustment { kind: next_adjustment, target: fn_ptr }]),
Box::new([Adjustment { kind: next_adjustment, target: fn_ptr.store() }]),
);
return Ok(fn_ptr);
}
@ -1390,7 +1397,7 @@ impl<'db, 'exprs> CoerceMany<'db, 'exprs> {
icx,
cause,
expr,
icx.types.unit,
icx.types.types.unit,
true,
label_unit_as_expected,
expr_is_read,
@ -1505,14 +1512,14 @@ impl<'db, 'exprs> CoerceMany<'db, 'exprs> {
// emit or provide suggestions on how to fix the initial error.
icx.set_tainted_by_errors();
self.final_ty = Some(icx.types.error);
self.final_ty = Some(icx.types.types.error);
icx.result.type_mismatches.get_or_insert_default().insert(
expression.into(),
if label_expression_as_expected {
TypeMismatch { expected: found, actual: expected }
TypeMismatch { expected: found.store(), actual: expected.store() }
} else {
TypeMismatch { expected, actual: found }
TypeMismatch { expected: expected.store(), actual: found.store() }
},
);
}
@ -1528,7 +1535,7 @@ impl<'db, 'exprs> CoerceMany<'db, 'exprs> {
// If we only had inputs that were of type `!` (or no
// inputs at all), then the final type is `!`.
assert_eq!(self.pushed, 0);
icx.types.never
icx.types.types.never
}
}
}
@ -1570,7 +1577,7 @@ fn coerce<'db>(
db: &'db dyn HirDatabase,
env: ParamEnvAndCrate<'db>,
tys: &Canonical<'db, (Ty<'db>, Ty<'db>)>,
) -> Result<(Vec<Adjustment<'db>>, Ty<'db>), TypeError<DbInterner<'db>>> {
) -> Result<(Vec<Adjustment>, Ty<'db>), TypeError<DbInterner<'db>>> {
let interner = DbInterner::new_with(db, env.krate);
let infcx = interner.infer_ctxt().build(TypingMode::PostAnalysis);
let ((ty1_with_vars, ty2_with_vars), vars) = infcx.instantiate_canonical(tys);
@ -1593,7 +1600,6 @@ fn coerce<'db>(
let mut ocx = ObligationCtxt::new(&infcx);
let (adjustments, ty) = ocx.register_infer_ok_obligations(infer_ok);
_ = ocx.try_evaluate_obligations();
let (adjustments, ty) = infcx.resolve_vars_if_possible((adjustments, ty));
// default any type vars that weren't unified back to their original bound vars
// (kind of hacky)
@ -1701,10 +1707,18 @@ fn coerce<'db>(
}
// FIXME: We don't fallback correctly since this is done on `InferenceContext` and we only have `InferCtxt`.
let (adjustments, ty) = (adjustments, ty).fold_with(&mut Resolver {
interner,
debruijn: DebruijnIndex::ZERO,
var_values: vars.var_values,
});
let mut resolver =
Resolver { interner, debruijn: DebruijnIndex::ZERO, var_values: vars.var_values };
let ty = infcx.resolve_vars_if_possible(ty).fold_with(&mut resolver);
let adjustments = adjustments
.into_iter()
.map(|adjustment| Adjustment {
kind: adjustment.kind,
target: infcx
.resolve_vars_if_possible(adjustment.target.as_ref())
.fold_with(&mut resolver)
.store(),
})
.collect();
Ok((adjustments, ty))
}

View file

@ -25,10 +25,10 @@ use crate::{
// to our resolver and so we cannot have mutable reference, but we really want to have
// ability to dispatch diagnostics during this work otherwise the code becomes a complete mess.
#[derive(Debug, Default, Clone)]
pub(super) struct Diagnostics<'db>(RefCell<ThinVec<InferenceDiagnostic<'db>>>);
pub(super) struct Diagnostics(RefCell<ThinVec<InferenceDiagnostic>>);
impl<'db> Diagnostics<'db> {
pub(super) fn push(&self, diagnostic: InferenceDiagnostic<'db>) {
impl Diagnostics {
pub(super) fn push(&self, diagnostic: InferenceDiagnostic) {
self.0.borrow_mut().push(diagnostic);
}
@ -42,19 +42,19 @@ impl<'db> Diagnostics<'db> {
);
}
pub(super) fn finish(self) -> ThinVec<InferenceDiagnostic<'db>> {
pub(super) fn finish(self) -> ThinVec<InferenceDiagnostic> {
self.0.into_inner()
}
}
pub(crate) struct PathDiagnosticCallbackData<'a, 'db> {
pub(crate) struct PathDiagnosticCallbackData<'a> {
node: ExprOrPatId,
diagnostics: &'a Diagnostics<'db>,
diagnostics: &'a Diagnostics,
}
pub(super) struct InferenceTyLoweringContext<'db, 'a> {
ctx: TyLoweringContext<'db, 'a>,
diagnostics: &'a Diagnostics<'db>,
diagnostics: &'a Diagnostics,
source: InferenceTyDiagnosticSource,
}
@ -64,7 +64,7 @@ impl<'db, 'a> InferenceTyLoweringContext<'db, 'a> {
db: &'db dyn HirDatabase,
resolver: &'a Resolver<'db>,
store: &'a ExpressionStore,
diagnostics: &'a Diagnostics<'db>,
diagnostics: &'a Diagnostics,
source: InferenceTyDiagnosticSource,
generic_def: GenericDefId,
lifetime_elision: LifetimeElisionKind<'db>,

View file

@ -17,7 +17,7 @@ use hir_expand::name::Name;
use rustc_ast_ir::Mutability;
use rustc_type_ir::{
CoroutineArgs, CoroutineArgsParts, InferTy, Interner,
inherent::{AdtDef, GenericArgs as _, IntoKind, SliceLike, Ty as _},
inherent::{AdtDef, GenericArgs as _, IntoKind, Ty as _},
};
use syntax::ast::RangeOp;
use tracing::debug;
@ -35,7 +35,7 @@ use crate::{
lower::{GenericPredicates, lower_mutability},
method_resolution::{self, CandidateId, MethodCallee, MethodError},
next_solver::{
ErrorGuaranteed, FnSig, GenericArgs, TraitRef, Ty, TyKind, TypeError,
ErrorGuaranteed, FnSig, GenericArg, GenericArgs, TraitRef, Ty, TyKind, TypeError,
infer::{
BoundRegionConversionTime, InferOk,
traits::{Obligation, ObligationCause},
@ -68,10 +68,10 @@ impl<'db> InferenceContext<'_, 'db> {
if let Some(expected_ty) = expected.only_has_type(&mut self.table) {
let could_unify = self.unify(ty, expected_ty);
if !could_unify {
self.result
.type_mismatches
.get_or_insert_default()
.insert(tgt_expr.into(), TypeMismatch { expected: expected_ty, actual: ty });
self.result.type_mismatches.get_or_insert_default().insert(
tgt_expr.into(),
TypeMismatch { expected: expected_ty.store(), actual: ty.store() },
);
}
}
ty
@ -98,10 +98,10 @@ impl<'db> InferenceContext<'_, 'db> {
match self.coerce(expr.into(), ty, target, AllowTwoPhase::No, is_read) {
Ok(res) => res,
Err(_) => {
self.result
.type_mismatches
.get_or_insert_default()
.insert(expr.into(), TypeMismatch { expected: target, actual: ty });
self.result.type_mismatches.get_or_insert_default().insert(
expr.into(),
TypeMismatch { expected: target.store(), actual: ty.store() },
);
target
}
}
@ -276,7 +276,7 @@ impl<'db> InferenceContext<'_, 'db> {
if ty.is_never() {
if let Some(adjustments) = self.result.expr_adjustments.get(&expr) {
return if let [Adjustment { kind: Adjust::NeverToAny, target }] = &**adjustments {
*target
target.as_ref()
} else {
self.err_ty()
};
@ -292,10 +292,10 @@ impl<'db> InferenceContext<'_, 'db> {
if let Some(expected_ty) = expected.only_has_type(&mut self.table) {
let could_unify = self.unify(ty, expected_ty);
if !could_unify {
self.result
.type_mismatches
.get_or_insert_default()
.insert(expr.into(), TypeMismatch { expected: expected_ty, actual: ty });
self.result.type_mismatches.get_or_insert_default().insert(
expr.into(),
TypeMismatch { expected: expected_ty.store(), actual: ty.store() },
);
}
}
ty
@ -319,7 +319,7 @@ impl<'db> InferenceContext<'_, 'db> {
let expected = &expected.adjust_for_branches(&mut self.table);
self.infer_expr_coerce_never(
condition,
&Expectation::HasType(self.types.bool),
&Expectation::HasType(self.types.types.bool),
ExprIsRead::Yes,
);
@ -375,7 +375,7 @@ impl<'db> InferenceContext<'_, 'db> {
input_ty,
Some(DeclContext { origin: DeclOrigin::LetExpr }),
);
self.types.bool
self.types.types.bool
}
Expr::Block { statements, tail, label, id: _ } => {
self.infer_block(tgt_expr, statements, *tail, *label, expected)
@ -400,7 +400,7 @@ impl<'db> InferenceContext<'_, 'db> {
self.with_breakable_ctx(BreakableKind::Loop, Some(ty), label, |this| {
this.infer_expr(
body,
&Expectation::HasType(this.types.unit),
&Expectation::HasType(this.types.types.unit),
ExprIsRead::Yes,
);
});
@ -410,7 +410,7 @@ impl<'db> InferenceContext<'_, 'db> {
self.diverges = Diverges::Maybe;
breaks
}
None => self.types.never,
None => self.types.types.never,
}
}
Expr::Closure { body, args, ret_type, arg_types, closure_kind, capture_by: _ } => self
@ -451,7 +451,7 @@ impl<'db> InferenceContext<'_, 'db> {
if arms.is_empty() {
self.diverges = Diverges::Always;
self.types.never
self.types.types.never
} else {
let matchee_diverges = mem::replace(&mut self.diverges, Diverges::Maybe);
let mut all_arms_diverge = Diverges::Always;
@ -463,7 +463,7 @@ impl<'db> InferenceContext<'_, 'db> {
let result_ty = match &expected {
// We don't coerce to `()` so that if the match expression is a
// statement it's branches can have any consistent type.
Expectation::HasType(ty) if *ty != self.types.unit => *ty,
Expectation::HasType(ty) if *ty != self.types.types.unit => *ty,
_ => self.table.next_ty_var(),
};
let mut coerce = CoerceMany::new(result_ty);
@ -473,7 +473,7 @@ impl<'db> InferenceContext<'_, 'db> {
self.diverges = Diverges::Maybe;
self.infer_expr_coerce_never(
guard_expr,
&Expectation::HasType(self.types.bool),
&Expectation::HasType(self.types.types.bool),
ExprIsRead::Yes,
);
}
@ -504,7 +504,7 @@ impl<'db> InferenceContext<'_, 'db> {
bad_value_break: false,
});
};
self.types.never
self.types.types.never
}
&Expr::Break { expr, label } => {
let val_ty = if let Some(expr) = expr {
@ -528,7 +528,7 @@ impl<'db> InferenceContext<'_, 'db> {
ExprIsRead::Yes,
)
} else {
self.types.unit
self.types.types.unit
};
match find_breakable(&mut self.breakables, label) {
@ -558,7 +558,7 @@ impl<'db> InferenceContext<'_, 'db> {
});
}
}
self.types.never
self.types.types.never
}
&Expr::Return { expr } => self.infer_expr_return(tgt_expr, expr),
&Expr::Become { expr } => self.infer_expr_become(expr),
@ -571,7 +571,7 @@ impl<'db> InferenceContext<'_, 'db> {
ExprIsRead::Yes,
);
} else {
let unit = self.types.unit;
let unit = self.types.types.unit;
let _ = self.coerce(
tgt_expr.into(),
unit,
@ -583,14 +583,14 @@ impl<'db> InferenceContext<'_, 'db> {
resume_ty
} else {
// FIXME: report error (yield expr in non-coroutine)
self.types.error
self.types.types.error
}
}
Expr::Yeet { expr } => {
if let &Some(expr) = expr {
self.infer_expr_no_expect(expr, ExprIsRead::Yes);
}
self.types.never
self.types.types.never
}
Expr::RecordLit { path, fields, spread, .. } => {
let (ty, def_id) = self.resolve_variant(tgt_expr.into(), path.as_deref(), false);
@ -599,7 +599,7 @@ impl<'db> InferenceContext<'_, 'db> {
self.unify(ty, t);
}
let substs = ty.as_adt().map(|(_, s)| s).unwrap_or(self.types.empty_args);
let substs = ty.as_adt().map(|(_, s)| s).unwrap_or(self.types.empty.generic_args);
if let Some(variant) = def_id {
self.write_variant_resolution(tgt_expr.into(), variant);
}
@ -637,7 +637,7 @@ impl<'db> InferenceContext<'_, 'db> {
}
};
let field_ty = field_def.map_or(self.err_ty(), |it| {
field_types[it].instantiate(self.interner(), &substs)
field_types[it].get().instantiate(self.interner(), &substs)
});
// Field type might have some unknown types
@ -768,7 +768,7 @@ impl<'db> InferenceContext<'_, 'db> {
// assignments into blocks.
self.table.new_maybe_never_var()
} else {
self.types.unit
self.types.types.unit
}
}
Expr::Range { lhs, rhs, range_type } => {
@ -780,12 +780,14 @@ impl<'db> InferenceContext<'_, 'db> {
Ty::new_adt(
self.interner(),
adt,
GenericArgs::new_from_iter(self.interner(), [ty.into()]),
GenericArgs::new_from_slice(&[GenericArg::from(ty)]),
)
};
match (range_type, lhs_ty, rhs_ty) {
(RangeOp::Exclusive, None, None) => match self.resolve_range_full() {
Some(adt) => Ty::new_adt(self.interner(), adt, self.types.empty_args),
Some(adt) => {
Ty::new_adt(self.interner(), adt, self.types.empty.generic_args)
}
None => self.err_ty(),
},
(RangeOp::Exclusive, None, Some(ty)) => match self.resolve_range_to() {
@ -834,7 +836,7 @@ impl<'db> InferenceContext<'_, 'db> {
trait_element_ty
}
// FIXME: Report an error.
None => self.types.error,
None => self.types.types.error,
}
}
Expr::Tuple { exprs, .. } => {
@ -859,10 +861,10 @@ impl<'db> InferenceContext<'_, 'db> {
}
Expr::Array(array) => self.infer_expr_array(array, expected),
Expr::Literal(lit) => match lit {
Literal::Bool(..) => self.types.bool,
Literal::String(..) => self.types.static_str_ref,
Literal::Bool(..) => self.types.types.bool,
Literal::String(..) => self.types.types.static_str_ref,
Literal::ByteString(bs) => {
let byte_type = self.types.u8;
let byte_type = self.types.types.u8;
let len = consteval::usize_const(
self.db,
@ -871,35 +873,46 @@ impl<'db> InferenceContext<'_, 'db> {
);
let array_type = Ty::new_array_with_const_len(self.interner(), byte_type, len);
Ty::new_ref(self.interner(), self.types.re_static, array_type, Mutability::Not)
Ty::new_ref(
self.interner(),
self.types.regions.statik,
array_type,
Mutability::Not,
)
}
Literal::CString(..) => Ty::new_ref(
self.interner(),
self.types.re_static,
self.types.regions.statik,
self.lang_items.CStr.map_or_else(
|| self.err_ty(),
|strukt| Ty::new_adt(self.interner(), strukt.into(), self.types.empty_args),
|strukt| {
Ty::new_adt(
self.interner(),
strukt.into(),
self.types.empty.generic_args,
)
},
),
Mutability::Not,
),
Literal::Char(..) => self.types.char,
Literal::Char(..) => self.types.types.char,
Literal::Int(_v, ty) => match ty {
Some(int_ty) => match int_ty {
hir_def::builtin_type::BuiltinInt::Isize => self.types.isize,
hir_def::builtin_type::BuiltinInt::I8 => self.types.i8,
hir_def::builtin_type::BuiltinInt::I16 => self.types.i16,
hir_def::builtin_type::BuiltinInt::I32 => self.types.i32,
hir_def::builtin_type::BuiltinInt::I64 => self.types.i64,
hir_def::builtin_type::BuiltinInt::I128 => self.types.i128,
hir_def::builtin_type::BuiltinInt::Isize => self.types.types.isize,
hir_def::builtin_type::BuiltinInt::I8 => self.types.types.i8,
hir_def::builtin_type::BuiltinInt::I16 => self.types.types.i16,
hir_def::builtin_type::BuiltinInt::I32 => self.types.types.i32,
hir_def::builtin_type::BuiltinInt::I64 => self.types.types.i64,
hir_def::builtin_type::BuiltinInt::I128 => self.types.types.i128,
},
None => {
let expected_ty = expected.to_option(&mut self.table);
tracing::debug!(?expected_ty);
let opt_ty = match expected_ty.as_ref().map(|it| it.kind()) {
Some(TyKind::Int(_) | TyKind::Uint(_)) => expected_ty,
Some(TyKind::Char) => Some(self.types.u8),
Some(TyKind::Char) => Some(self.types.types.u8),
Some(TyKind::RawPtr(..) | TyKind::FnDef(..) | TyKind::FnPtr(..)) => {
Some(self.types.usize)
Some(self.types.types.usize)
}
_ => None,
};
@ -908,20 +921,20 @@ impl<'db> InferenceContext<'_, 'db> {
},
Literal::Uint(_v, ty) => match ty {
Some(int_ty) => match int_ty {
hir_def::builtin_type::BuiltinUint::Usize => self.types.usize,
hir_def::builtin_type::BuiltinUint::U8 => self.types.u8,
hir_def::builtin_type::BuiltinUint::U16 => self.types.u16,
hir_def::builtin_type::BuiltinUint::U32 => self.types.u32,
hir_def::builtin_type::BuiltinUint::U64 => self.types.u64,
hir_def::builtin_type::BuiltinUint::U128 => self.types.u128,
hir_def::builtin_type::BuiltinUint::Usize => self.types.types.usize,
hir_def::builtin_type::BuiltinUint::U8 => self.types.types.u8,
hir_def::builtin_type::BuiltinUint::U16 => self.types.types.u16,
hir_def::builtin_type::BuiltinUint::U32 => self.types.types.u32,
hir_def::builtin_type::BuiltinUint::U64 => self.types.types.u64,
hir_def::builtin_type::BuiltinUint::U128 => self.types.types.u128,
},
None => {
let expected_ty = expected.to_option(&mut self.table);
let opt_ty = match expected_ty.as_ref().map(|it| it.kind()) {
Some(TyKind::Int(_) | TyKind::Uint(_)) => expected_ty,
Some(TyKind::Char) => Some(self.types.u8),
Some(TyKind::Char) => Some(self.types.types.u8),
Some(TyKind::RawPtr(..) | TyKind::FnDef(..) | TyKind::FnPtr(..)) => {
Some(self.types.usize)
Some(self.types.types.usize)
}
_ => None,
};
@ -930,10 +943,10 @@ impl<'db> InferenceContext<'_, 'db> {
},
Literal::Float(_v, ty) => match ty {
Some(float_ty) => match float_ty {
hir_def::builtin_type::BuiltinFloat::F16 => self.types.f16,
hir_def::builtin_type::BuiltinFloat::F32 => self.types.f32,
hir_def::builtin_type::BuiltinFloat::F64 => self.types.f64,
hir_def::builtin_type::BuiltinFloat::F128 => self.types.f128,
hir_def::builtin_type::BuiltinFloat::F16 => self.types.types.f16,
hir_def::builtin_type::BuiltinFloat::F32 => self.types.types.f32,
hir_def::builtin_type::BuiltinFloat::F64 => self.types.types.f64,
hir_def::builtin_type::BuiltinFloat::F128 => self.types.types.f128,
},
None => {
let opt_ty = expected
@ -947,10 +960,13 @@ impl<'db> InferenceContext<'_, 'db> {
// Underscore expression is an error, we render a specialized diagnostic
// to let the user know what type is expected though.
let expected = expected.to_option(&mut self.table).unwrap_or_else(|| self.err_ty());
self.push_diagnostic(InferenceDiagnostic::TypedHole { expr: tgt_expr, expected });
self.push_diagnostic(InferenceDiagnostic::TypedHole {
expr: tgt_expr,
expected: expected.store(),
});
expected
}
Expr::OffsetOf(_) => self.types.usize,
Expr::OffsetOf(_) => self.types.types.usize,
Expr::InlineAsm(asm) => {
let check_expr_asm_operand = |this: &mut Self, expr, is_input: bool| {
let ty = this.infer_expr_no_expect(expr, ExprIsRead::Yes);
@ -1011,7 +1027,7 @@ impl<'db> InferenceContext<'_, 'db> {
AsmOperand::Label(expr) => {
self.infer_expr(
expr,
&Expectation::HasType(self.types.unit),
&Expectation::HasType(self.types.types.unit),
ExprIsRead::No,
);
}
@ -1021,7 +1037,7 @@ impl<'db> InferenceContext<'_, 'db> {
// FIXME: `sym` should report for things that are not functions or statics.
AsmOperand::Sym(_) => (),
});
if diverge { self.types.never } else { self.types.unit }
if diverge { self.types.types.never } else { self.types.types.unit }
}
};
// use a new type variable if we got unknown here
@ -1143,7 +1159,7 @@ impl<'db> InferenceContext<'_, 'db> {
oprnd_t = ty;
} else {
// FIXME: Report an error.
oprnd_t = self.types.error;
oprnd_t = self.types.types.error;
}
}
UnaryOp::Not => {
@ -1183,10 +1199,10 @@ impl<'db> InferenceContext<'_, 'db> {
match this.coerce(tgt_expr.into(), ty, target, AllowTwoPhase::No, ExprIsRead::Yes) {
Ok(res) => res,
Err(_) => {
this.result
.type_mismatches
.get_or_insert_default()
.insert(tgt_expr.into(), TypeMismatch { expected: target, actual: ty });
this.result.type_mismatches.get_or_insert_default().insert(
tgt_expr.into(),
TypeMismatch { expected: target.store(), actual: ty.store() },
);
target
}
}
@ -1216,14 +1232,14 @@ impl<'db> InferenceContext<'_, 'db> {
CoroutineArgs::new(
self.interner(),
CoroutineArgsParts {
parent_args,
kind_ty: self.types.unit,
parent_args: parent_args.as_slice(),
kind_ty: self.types.types.unit,
// rustc uses a special lang item type for the resume ty. I don't believe this can cause us problems.
resume_ty: self.types.unit,
yield_ty: self.types.unit,
resume_ty: self.types.types.unit,
yield_ty: self.types.types.unit,
return_ty: inner_ty,
// FIXME: Infer upvars.
tupled_upvars_ty: self.types.unit,
tupled_upvars_ty: self.types.types.unit,
},
)
.args,
@ -1234,7 +1250,7 @@ impl<'db> InferenceContext<'_, 'db> {
&mut self,
fn_x: FnTrait,
derefed_callee: Ty<'db>,
adjustments: &mut Vec<Adjustment<'db>>,
adjustments: &mut Vec<Adjustment>,
callee_ty: Ty<'db>,
params: &[Ty<'db>],
tgt_expr: ExprId,
@ -1249,7 +1265,8 @@ impl<'db> InferenceContext<'_, 'db> {
.unwrap_or(true)
{
// prefer reborrow to move
adjustments.push(Adjustment { kind: Adjust::Deref(None), target: inner });
adjustments
.push(Adjustment { kind: Adjust::Deref(None), target: inner.store() });
adjustments.push(Adjustment::borrow(
self.interner(),
Mutability::Mut,
@ -1282,13 +1299,10 @@ impl<'db> InferenceContext<'_, 'db> {
};
let trait_data = trait_.trait_items(self.db);
if let Some(func) = trait_data.method_by_name(&fn_x.method_name()) {
let subst = GenericArgs::new_from_iter(
self.interner(),
[
callee_ty.into(),
Ty::new_tup_from_iter(self.interner(), params.iter().copied()).into(),
],
);
let subst = GenericArgs::new_from_slice(&[
callee_ty.into(),
Ty::new_tup(self.interner(), params).into(),
]);
self.write_method_resolution(tgt_expr, func, subst);
}
}
@ -1332,7 +1346,7 @@ impl<'db> InferenceContext<'_, 'db> {
&Expectation::has_type(elem_ty),
ExprIsRead::Yes,
);
let usize = self.types.usize;
let usize = self.types.types.usize;
let len = match self.body[repeat] {
Expr::Underscore => {
self.write_expr_ty(repeat, usize);
@ -1389,7 +1403,7 @@ impl<'db> InferenceContext<'_, 'db> {
}
}
}
self.types.never
self.types.types.never
}
fn infer_expr_become(&mut self, expr: ExprId) -> Ty<'db> {
@ -1410,7 +1424,7 @@ impl<'db> InferenceContext<'_, 'db> {
}
}
self.types.never
self.types.types.never
}
fn infer_expr_box(&mut self, inner_expr: ExprId, expected: &Expectation<'db>) -> Ty<'db> {
@ -1501,7 +1515,7 @@ impl<'db> InferenceContext<'_, 'db> {
mem::replace(&mut this.diverges, Diverges::Maybe);
this.infer_expr_coerce(
*expr,
&Expectation::HasType(this.types.never),
&Expectation::HasType(this.types.types.never),
ExprIsRead::Yes,
);
this.diverges = previous_diverges;
@ -1513,7 +1527,7 @@ impl<'db> InferenceContext<'_, 'db> {
} else {
this.infer_expr_coerce(
expr,
&Expectation::HasType(this.types.unit),
&Expectation::HasType(this.types.types.unit),
ExprIsRead::Yes,
);
}
@ -1540,7 +1554,7 @@ impl<'db> InferenceContext<'_, 'db> {
if this
.coerce(
expr.into(),
this.types.unit,
this.types.types.unit,
t,
AllowTwoPhase::No,
ExprIsRead::Yes,
@ -1549,12 +1563,15 @@ impl<'db> InferenceContext<'_, 'db> {
{
this.result.type_mismatches.get_or_insert_default().insert(
expr.into(),
TypeMismatch { expected: t, actual: this.types.unit },
TypeMismatch {
expected: t.store(),
actual: this.types.types.unit.store(),
},
);
}
t
} else {
this.types.unit
this.types.types.unit
}
}
});
@ -1567,7 +1584,7 @@ impl<'db> InferenceContext<'_, 'db> {
&mut self,
receiver_ty: Ty<'db>,
name: &Name,
) -> Option<(Ty<'db>, Either<FieldId, TupleFieldId>, Vec<Adjustment<'db>>, bool)> {
) -> Option<(Ty<'db>, Either<FieldId, TupleFieldId>, Vec<Adjustment>, bool)> {
let interner = self.interner();
let mut autoderef = self.table.autoderef_with_tracking(receiver_ty);
let mut private_field = None;
@ -1612,6 +1629,7 @@ impl<'db> InferenceContext<'_, 'db> {
return None;
}
let ty = self.db.field_types(field_id.parent)[field_id.local_id]
.get()
.instantiate(interner, parameters);
Some((Either::Left(field_id), ty))
});
@ -1629,6 +1647,7 @@ impl<'db> InferenceContext<'_, 'db> {
let adjustments =
self.table.register_infer_ok(autoderef.adjust_steps_as_infer_ok());
let ty = self.db.field_types(field_id.parent)[field_id.local_id]
.get()
.instantiate(self.interner(), subst);
let ty = self.process_remote_user_written_ty(ty);
@ -1679,7 +1698,7 @@ impl<'db> InferenceContext<'_, 'db> {
);
self.push_diagnostic(InferenceDiagnostic::UnresolvedField {
expr: tgt_expr,
receiver: receiver_ty,
receiver: receiver_ty.store(),
name: name.clone(),
method_with_same_name_exists: resolved.is_ok(),
});
@ -1755,7 +1774,7 @@ impl<'db> InferenceContext<'_, 'db> {
None => {
self.push_diagnostic(InferenceDiagnostic::ExpectedFunction {
call_expr: tgt_expr,
found: callee_ty,
found: callee_ty.store(),
});
(Vec::new(), Ty::new_error(interner, ErrorGuaranteed))
}
@ -1867,9 +1886,9 @@ impl<'db> InferenceContext<'_, 'db> {
self.push_diagnostic(InferenceDiagnostic::UnresolvedMethodCall {
expr: tgt_expr,
receiver: receiver_ty,
receiver: receiver_ty.store(),
name: method_name.clone(),
field_with_same_name: field_with_same_name_exists,
field_with_same_name: field_with_same_name_exists.map(|it| it.store()),
assoc_func_with_same_name: assoc_func_with_same_name.map(|it| it.def_id),
});
@ -1921,7 +1940,7 @@ impl<'db> InferenceContext<'_, 'db> {
let (formal_receiver_ty, param_tys) = if !sig.inputs_and_output.inputs().is_empty() {
(sig.inputs_and_output.as_slice()[0], &sig.inputs_and_output.inputs()[1..])
} else {
(self.types.error, &[] as _)
(self.types.types.error, &[] as _)
};
let ret_ty = sig.output();
self.table.unify(formal_receiver_ty, receiver_ty);
@ -2115,10 +2134,10 @@ impl<'db> InferenceContext<'_, 'db> {
&& args_count_matches
{
// Don't report type mismatches if there is a mismatch in args count.
self.result
.type_mismatches
.get_or_insert_default()
.insert((*arg).into(), TypeMismatch { expected, actual: found });
self.result.type_mismatches.get_or_insert_default().insert(
(*arg).into(),
TypeMismatch { expected: expected.store(), actual: found.store() },
);
}
}
}
@ -2145,15 +2164,13 @@ impl<'db> InferenceContext<'_, 'db> {
if let ItemContainerId::TraitId(trait_) = f.lookup(self.db).container {
// construct a TraitRef
let trait_params_len = generics(self.db, trait_.into()).len();
let substs = GenericArgs::new_from_iter(
self.interner(),
parameters.as_slice()[..trait_params_len].iter().copied(),
);
let substs =
GenericArgs::new_from_slice(&parameters.as_slice()[..trait_params_len]);
self.table.register_predicate(Obligation::new(
self.interner(),
ObligationCause::new(),
self.table.param_env,
TraitRef::new(self.interner(), trait_.into(), substs),
TraitRef::new_from_args(self.interner(), trait_.into(), substs),
));
}
}

View file

@ -151,8 +151,8 @@ impl<'db> InferenceContext<'_, 'db> {
// type, `?T` is not considered unsolved, but `?I` is. The
// same is true for float variables.)
let fallback = match ty.kind() {
TyKind::Infer(rustc_type_ir::IntVar(_)) => self.types.i32,
TyKind::Infer(rustc_type_ir::FloatVar(_)) => self.types.f64,
TyKind::Infer(rustc_type_ir::IntVar(_)) => self.types.types.i32,
TyKind::Infer(rustc_type_ir::FloatVar(_)) => self.types.types.f64,
_ => match diverging_fallback.get(&ty) {
Some(&fallback_ty) => fallback_ty,
None => return false,
@ -337,7 +337,7 @@ impl<'db> InferenceContext<'_, 'db> {
match behavior {
DivergingFallbackBehavior::ToUnit => {
debug!("fallback to () - legacy: {:?}", diverging_vid);
fallback_to(self.types.unit);
fallback_to(self.types.types.unit);
}
DivergingFallbackBehavior::ContextDependent => {
// FIXME: rustc does the following, but given this is only relevant when the unstable
@ -368,14 +368,14 @@ impl<'db> InferenceContext<'_, 'db> {
// // set, see the relationship finding module in
// // compiler/rustc_trait_selection/src/traits/relationships.rs.
// debug!("fallback to () - found trait and projection: {:?}", diverging_vid);
// fallback_to(self.types.unit);
// fallback_to(self.types.types.unit);
// }
if can_reach_non_diverging {
debug!("fallback to () - reached non-diverging: {:?}", diverging_vid);
fallback_to(self.types.unit);
fallback_to(self.types.types.unit);
} else {
debug!("fallback to ! - all diverging: {:?}", diverging_vid);
fallback_to(self.types.never);
fallback_to(self.types.types.never);
}
}
DivergingFallbackBehavior::ToNever => {
@ -383,7 +383,7 @@ impl<'db> InferenceContext<'_, 'db> {
"fallback to ! - `rustc_never_type_mode = \"fallback_to_never\")`: {:?}",
diverging_vid
);
fallback_to(self.types.never);
fallback_to(self.types.types.never);
}
}
}

View file

@ -26,8 +26,8 @@ impl<'db> InferenceContext<'_, 'db> {
Adjust::Deref(Some(d)) => {
if mutability == Mutability::Mut {
let source_ty = match adjustments.peek() {
Some(prev_adj) => prev_adj.target,
None => self.result.type_of_expr[tgt_expr],
Some(prev_adj) => prev_adj.target.as_ref(),
None => self.result.type_of_expr[tgt_expr].as_ref(),
};
if let Some(infer_ok) = Self::try_mutable_overloaded_place_op(
&self.table,

View file

@ -39,7 +39,7 @@ impl<'a, 'db> InferenceContext<'a, 'db> {
&& is_builtin_binop(lhs_ty, rhs_ty, category)
{
self.enforce_builtin_binop_types(lhs_ty, rhs_ty, category);
self.types.unit
self.types.types.unit
} else {
return_ty
};
@ -67,20 +67,20 @@ impl<'a, 'db> InferenceContext<'a, 'db> {
// && and || are a simple case.
self.infer_expr_coerce(
lhs_expr,
&Expectation::HasType(self.types.bool),
&Expectation::HasType(self.types.types.bool),
ExprIsRead::Yes,
);
let lhs_diverges = self.diverges;
self.infer_expr_coerce(
rhs_expr,
&Expectation::HasType(self.types.bool),
&Expectation::HasType(self.types.types.bool),
ExprIsRead::Yes,
);
// Depending on the LHS' value, the RHS can never execute.
self.diverges = lhs_diverges;
self.types.bool
self.types.types.bool
}
_ => {
// Otherwise, we always treat operators as if they are
@ -131,9 +131,9 @@ impl<'a, 'db> InferenceContext<'a, 'db> {
match category {
BinOpCategory::Shortcircuit => {
self.demand_suptype(self.types.bool, lhs_ty);
self.demand_suptype(self.types.bool, rhs_ty);
self.types.bool
self.demand_suptype(self.types.types.bool, lhs_ty);
self.demand_suptype(self.types.types.bool, rhs_ty);
self.types.types.bool
}
BinOpCategory::Shift => {
@ -150,7 +150,7 @@ impl<'a, 'db> InferenceContext<'a, 'db> {
BinOpCategory::Comparison => {
// both LHS and RHS and result will have the same type
self.demand_suptype(lhs_ty, rhs_ty);
self.types.bool
self.types.types.bool
}
}
}
@ -213,7 +213,7 @@ impl<'a, 'db> InferenceContext<'a, 'db> {
let mutbl = AutoBorrowMutability::new(mutbl, AllowTwoPhase::Yes);
let autoref = Adjustment {
kind: Adjust::Borrow(AutoBorrow::Ref(mutbl)),
target: method.sig.inputs_and_output.inputs()[0],
target: method.sig.inputs_and_output.inputs()[0].store(),
};
self.write_expr_adj(lhs_expr, Box::new([autoref]));
}
@ -227,7 +227,7 @@ impl<'a, 'db> InferenceContext<'a, 'db> {
let autoref = Adjustment {
kind: Adjust::Borrow(AutoBorrow::Ref(mutbl)),
target: method.sig.inputs_and_output.inputs()[1],
target: method.sig.inputs_and_output.inputs()[1].store(),
};
// HACK(eddyb) Bypass checks due to reborrows being in
// some cases applied on the RHS, on top of which we need
@ -251,7 +251,7 @@ impl<'a, 'db> InferenceContext<'a, 'db> {
}
Err(_errors) => {
// FIXME: Report diagnostic.
self.types.error
self.types.types.error
}
};
@ -271,7 +271,7 @@ impl<'a, 'db> InferenceContext<'a, 'db> {
}
Err(_errors) => {
// FIXME: Report diagnostic.
self.types.error
self.types.types.error
}
}
}

View file

@ -112,12 +112,12 @@ impl<'db> InferenceContext<'_, 'db> {
_ = self.demand_eqtype_fixme_no_diag(expected, hidden_type.ty);
}
self.result.type_of_opaque.insert(def_id, ty.ty);
self.result.type_of_opaque.insert(def_id, ty.ty.store());
continue;
}
self.result.type_of_opaque.insert(def_id, self.types.error);
self.result.type_of_opaque.insert(def_id, self.types.types.error.store());
}
}
@ -139,9 +139,10 @@ impl<'db> InferenceContext<'_, 'db> {
let at = self.table.infer_ctxt.at(&cause, self.table.param_env);
let hidden_type = match at.deeply_normalize(hidden_type) {
Ok(hidden_type) => hidden_type,
Err(_errors) => OpaqueHiddenType { ty: self.types.error },
Err(_errors) => OpaqueHiddenType { ty: self.types.types.error },
};
let hidden_type = fold_regions(self.interner(), hidden_type, |_, _| self.types.re_erased);
let hidden_type =
fold_regions(self.interner(), hidden_type, |_, _| self.types.regions.erased);
UsageKind::HasDefiningUse(hidden_type)
}
}

View file

@ -9,7 +9,7 @@ use hir_def::{
};
use hir_expand::name::Name;
use rustc_ast_ir::Mutability;
use rustc_type_ir::inherent::{GenericArg as _, GenericArgs as _, IntoKind, SliceLike, Ty as _};
use rustc_type_ir::inherent::{GenericArg as _, GenericArgs as _, IntoKind, Ty as _};
use stdx::TupleExt;
use crate::{
@ -82,7 +82,7 @@ impl<'db> InferenceContext<'_, 'db> {
{
// FIXME(DIAGNOSE): private tuple field
}
let f = field_types[local_id];
let f = field_types[local_id].get();
let expected_ty = match substs {
Some(substs) => f.instantiate(self.interner(), substs),
None => f.instantiate(self.interner(), &[]),
@ -146,7 +146,7 @@ impl<'db> InferenceContext<'_, 'db> {
variant: def,
});
}
let f = field_types[local_id];
let f = field_types[local_id].get();
let expected_ty = match substs {
Some(substs) => f.instantiate(self.interner(), substs),
None => f.instantiate(self.interner(), &[]),
@ -234,7 +234,7 @@ impl<'db> InferenceContext<'_, 'db> {
}
if let Some(uncovered) = elements.get(element_tys.len()..) {
for &elem in uncovered {
self.infer_pat(elem, self.types.error, default_bm, decl);
self.infer_pat(elem, self.types.types.error, default_bm, decl);
}
}
pat_ty
@ -270,7 +270,7 @@ impl<'db> InferenceContext<'_, 'db> {
} else if self.is_non_ref_pat(self.body, pat) {
let mut pat_adjustments = Vec::new();
while let TyKind::Ref(_lifetime, inner, mutability) = expected.kind() {
pat_adjustments.push(expected);
pat_adjustments.push(expected.store());
expected = self.table.try_structurally_resolve_type(inner);
default_bm = match default_bm {
BindingMode::Move => BindingMode::Ref(mutability),
@ -333,7 +333,10 @@ impl<'db> InferenceContext<'_, 'db> {
Err(_) => {
self.result.type_mismatches.get_or_insert_default().insert(
pat.into(),
TypeMismatch { expected, actual: ty_inserted_vars },
TypeMismatch {
expected: expected.store(),
actual: ty_inserted_vars.store(),
},
);
self.write_pat_ty(pat, ty);
// We return `expected` to prevent cascading errors. I guess an alternative is to
@ -372,7 +375,7 @@ impl<'db> InferenceContext<'_, 'db> {
Some((adt, subst)) if adt == box_adt => {
(subst.type_at(0), subst.as_slice().get(1).and_then(|a| a.as_type()))
}
_ => (self.types.error, None),
_ => (self.types.types.error, None),
};
let inner_ty = self.infer_pat(*inner, inner_ty, default_bm, decl);
@ -413,10 +416,10 @@ impl<'db> InferenceContext<'_, 'db> {
) {
Ok(ty) => ty,
Err(_) => {
self.result
.type_mismatches
.get_or_insert_default()
.insert(pat.into(), TypeMismatch { expected, actual: lhs_ty });
self.result.type_mismatches.get_or_insert_default().insert(
pat.into(),
TypeMismatch { expected: expected.store(), actual: lhs_ty.store() },
);
// `rhs_ty` is returned so no further type mismatches are
// reported because of this mismatch.
expected
@ -432,22 +435,22 @@ impl<'db> InferenceContext<'_, 'db> {
let ty = self.insert_type_vars_shallow(ty);
// FIXME: This never check is odd, but required with out we do inference right now
if !expected.is_never() && !self.unify(ty, expected) {
self.result
.type_mismatches
.get_or_insert_default()
.insert(pat.into(), TypeMismatch { expected, actual: ty });
self.result.type_mismatches.get_or_insert_default().insert(
pat.into(),
TypeMismatch { expected: expected.store(), actual: ty.store() },
);
}
self.write_pat_ty(pat, ty);
self.pat_ty_after_adjustment(pat)
}
fn pat_ty_after_adjustment(&self, pat: PatId) -> Ty<'db> {
*self
.result
self.result
.pat_adjustments
.get(&pat)
.and_then(|it| it.last())
.unwrap_or(&self.result.type_of_pat[pat])
.unwrap_or_else(|| &self.result.type_of_pat[pat])
.as_ref()
}
fn infer_ref_pat(
@ -571,10 +574,14 @@ impl<'db> InferenceContext<'_, 'db> {
{
let inner = self.table.try_structurally_resolve_type(inner);
if matches!(inner.kind(), TyKind::Slice(_)) {
let elem_ty = self.types.u8;
let elem_ty = self.types.types.u8;
let slice_ty = Ty::new_slice(self.interner(), elem_ty);
let ty =
Ty::new_ref(self.interner(), self.types.re_static, slice_ty, Mutability::Not);
let ty = Ty::new_ref(
self.interner(),
self.types.regions.statik,
slice_ty,
Mutability::Not,
);
self.write_expr_ty(expr, ty);
return ty;
}

View file

@ -64,7 +64,7 @@ impl<'db> InferenceContext<'_, 'db> {
}
ValueNs::LocalBinding(pat) => {
return match self.result.type_of_binding.get(pat) {
Some(ty) => Some(ValuePathResolution::NonGeneric(*ty)),
Some(ty) => Some(ValuePathResolution::NonGeneric(ty.as_ref())),
None => {
never!("uninferred pattern?");
None
@ -102,7 +102,7 @@ impl<'db> InferenceContext<'_, 'db> {
// This is something like `TypeAlias::<Args>::EnumVariant`. Do not call `substs_from_path()`,
// as it'll try to re-lower the previous segment assuming it refers to the enum, but it refers
// to the type alias and they may have different generics.
self.types.empty_args
self.types.empty.generic_args
} else {
self.with_body_ty_lowering(|ctx| {
let mut path_ctx = ctx.at_path(path, id);
@ -240,11 +240,8 @@ impl<'db> InferenceContext<'_, 'db> {
if let ItemContainerId::TraitId(trait_) = container {
let parent_len = generics(self.db, def).parent_generics().map_or(0, |g| g.len_self());
let parent_subst = GenericArgs::new_from_iter(
interner,
subst.as_slice()[..parent_len].iter().copied(),
);
let trait_ref = TraitRef::new(interner, trait_.into(), parent_subst);
let parent_subst = GenericArgs::new_from_slice(&subst.as_slice()[..parent_len]);
let trait_ref = TraitRef::new_from_args(interner, trait_.into(), parent_subst);
self.table.register_predicate(Obligation::new(
interner,
ObligationCause::new(),
@ -339,7 +336,7 @@ impl<'db> InferenceContext<'_, 'db> {
[ty.into()],
|_, id, _| self.table.next_var_for_param(id),
);
let trait_ref = TraitRef::new(self.interner(), trait_.into(), args);
let trait_ref = TraitRef::new_from_args(self.interner(), trait_.into(), args);
self.table.register_predicate(Obligation::new(
self.interner(),
ObligationCause::new(),

View file

@ -65,7 +65,7 @@ impl<'a, 'db> InferenceContext<'a, 'db> {
oprnd_expr,
Box::new([Adjustment {
kind: Adjust::Borrow(AutoBorrow::Ref(AutoBorrowMutability::Not)),
target: method.sig.inputs_and_output.inputs()[0],
target: method.sig.inputs_and_output.inputs()[0].store(),
}]),
);
} else {
@ -125,7 +125,7 @@ impl<'a, 'db> InferenceContext<'a, 'db> {
ctx.interner(),
ObligationCause::new(),
ctx.table.param_env,
ClauseKind::ConstArgHasType(ct, ctx.types.usize),
ClauseKind::ConstArgHasType(ct, ctx.types.types.usize),
));
self_ty = Ty::new_slice(ctx.interner(), element_ty);
} else {
@ -151,7 +151,8 @@ impl<'a, 'db> InferenceContext<'a, 'db> {
{
adjustments.push(Adjustment {
kind: Adjust::Borrow(AutoBorrow::Ref(AutoBorrowMutability::Not)),
target: Ty::new_imm_ref(autoderef.ctx().interner(), region, adjusted_ty),
target: Ty::new_imm_ref(autoderef.ctx().interner(), region, adjusted_ty)
.store(),
});
} else {
panic!("input to index is not a ref?");
@ -159,7 +160,7 @@ impl<'a, 'db> InferenceContext<'a, 'db> {
if unsize {
adjustments.push(Adjustment {
kind: Adjust::Pointer(PointerCast::Unsize),
target: method.sig.inputs_and_output.inputs()[0],
target: method.sig.inputs_and_output.inputs()[0].store(),
});
}
autoderef.ctx().write_expr_adj(base_expr, adjustments.into_boxed_slice());
@ -283,7 +284,7 @@ impl<'a, 'db> InferenceContext<'a, 'db> {
None => return,
};
debug!("convert_place_op_to_mutable: method={:?}", method);
self.result.method_resolutions.insert(expr, (method.def_id, method.args));
self.result.method_resolutions.insert(expr, (method.def_id, method.args.store()));
let TyKind::Ref(region, _, Mutability::Mut) =
method.sig.inputs_and_output.inputs()[0].kind()
@ -308,9 +309,9 @@ impl<'a, 'db> InferenceContext<'a, 'db> {
allow_two_phase_borrow: AllowTwoPhase::No,
};
adjustment.kind = Adjust::Borrow(AutoBorrow::Ref(mutbl));
adjustment.target = Ty::new_ref(interner, region, source, mutbl.into());
adjustment.target = Ty::new_ref(interner, region, source, mutbl.into()).store();
}
source = adjustment.target;
source = adjustment.target.as_ref();
}
// If we have an autoref followed by unsizing at the end, fix the unsize target.
@ -320,7 +321,7 @@ impl<'a, 'db> InferenceContext<'a, 'db> {
Adjustment { kind: Adjust::Pointer(PointerCast::Unsize), ref mut target },
] = adjustments[..]
{
*target = method.sig.inputs_and_output.inputs()[0];
*target = method.sig.inputs_and_output.inputs()[0].store();
}
}
}

View file

@ -9,7 +9,7 @@ use intern::sym;
use rustc_hash::FxHashSet;
use rustc_type_ir::{
TyVid, TypeFoldable, TypeVisitableExt, UpcastFrom,
inherent::{Const as _, GenericArg as _, IntoKind, SliceLike, Ty as _},
inherent::{Const as _, GenericArg as _, IntoKind, Ty as _},
solve::Certainty,
};
use smallvec::SmallVec;
@ -542,16 +542,14 @@ impl<'db> InferenceTable<'db> {
})
.take(num_args),
);
let args = [ty, arg_ty];
let trait_ref = TraitRef::new(self.interner(), fn_trait.into(), args);
let args = GenericArgs::new_from_slice(&[ty.into(), arg_ty.into()]);
let trait_ref = TraitRef::new_from_args(self.interner(), fn_trait.into(), args);
let proj_args = self
.infer_ctxt
.fill_rest_fresh_args(output_assoc_type.into(), args.into_iter().map(Into::into));
let proj_args = self.infer_ctxt.fill_rest_fresh_args(output_assoc_type.into(), args);
let projection = Ty::new_alias(
self.interner(),
rustc_type_ir::AliasTyKind::Projection,
AliasTy::new(self.interner(), output_assoc_type.into(), proj_args),
AliasTy::new_from_args(self.interner(), output_assoc_type.into(), proj_args),
);
let pred = Predicate::upcast_from(trait_ref, self.interner());
@ -560,7 +558,8 @@ impl<'db> InferenceTable<'db> {
let return_ty = self.normalize_alias_ty(projection);
for &fn_x in subtraits {
let fn_x_trait = fn_x.get_id(lang_items)?;
let trait_ref = TraitRef::new(self.interner(), fn_x_trait.into(), args);
let trait_ref =
TraitRef::new_from_args(self.interner(), fn_x_trait.into(), args);
let pred = Predicate::upcast_from(trait_ref, self.interner());
if !self.try_obligation(pred).no_solution() {
return Some((fn_x, arg_tys, return_ty));
@ -640,6 +639,7 @@ impl<'db> InferenceTable<'db> {
let struct_data = id.fields(self.db);
if let Some((last_field, _)) = struct_data.fields().iter().next_back() {
let last_field_ty = self.db.field_types(id.into())[last_field]
.get()
.instantiate(self.interner(), subst);
if structs.contains(&ty) {
// A struct recursively contains itself as a tail field somewhere.

View file

@ -154,7 +154,7 @@ impl<'a, 'db> UninhabitedFrom<'a, 'db> {
let field_vis = if is_enum { None } else { Some(self.db().field_visibilities(variant)) };
for (fid, _) in fields.iter() {
self.visit_field(field_vis.as_ref().map(|it| it[fid]), &field_tys[fid], subst)?;
self.visit_field(field_vis.as_ref().map(|it| it[fid]), &field_tys[fid].get(), subst)?;
}
CONTINUE_OPAQUELY_INHABITED
}

View file

@ -14,10 +14,7 @@ use rustc_abi::{
TargetDataLayout, WrappingRange,
};
use rustc_index::IndexVec;
use rustc_type_ir::{
FloatTy, IntTy, UintTy,
inherent::{IntoKind, SliceLike},
};
use rustc_type_ir::{FloatTy, IntTy, UintTy, inherent::IntoKind};
use triomphe::Arc;
use crate::{
@ -25,9 +22,10 @@ use crate::{
consteval::try_const_usize,
db::HirDatabase,
next_solver::{
DbInterner, GenericArgs, Ty, TyKind, TypingMode,
DbInterner, GenericArgs, StoredTy, Ty, TyKind, TypingMode,
infer::{DbInternerInferExt, traits::ObligationCause},
},
traits::StoredParamEnvAndCrate,
};
pub(crate) use self::adt::layout_of_adt_cycle_result;
@ -144,22 +142,22 @@ fn layout_of_simd_ty<'db>(
let Some(TyKind::Array(e_ty, e_len)) = fields
.next()
.filter(|_| fields.next().is_none())
.map(|f| (*f.1).instantiate(DbInterner::new_no_crate(db), args).kind())
.map(|f| (*f.1).get().instantiate(DbInterner::new_no_crate(db), args).kind())
else {
return Err(LayoutError::InvalidSimdType);
};
let e_len = try_const_usize(db, e_len).ok_or(LayoutError::HasErrorConst)? as u64;
let e_ly = db.layout_of_ty(e_ty, env)?;
let e_ly = db.layout_of_ty(e_ty.store(), env.store())?;
let cx = LayoutCx::new(dl);
Ok(Arc::new(cx.calc.simd_type(e_ly, e_len, repr_packed)?))
}
pub fn layout_of_ty_query<'db>(
db: &'db dyn HirDatabase,
ty: Ty<'db>,
trait_env: ParamEnvAndCrate<'db>,
pub fn layout_of_ty_query(
db: &dyn HirDatabase,
ty: StoredTy,
trait_env: StoredParamEnvAndCrate,
) -> Result<Arc<Layout>, LayoutError> {
let krate = trait_env.krate;
let interner = DbInterner::new_with(db, krate);
@ -170,19 +168,29 @@ pub fn layout_of_ty_query<'db>(
let cx = LayoutCx::new(dl);
let infer_ctxt = interner.infer_ctxt().build(TypingMode::PostAnalysis);
let cause = ObligationCause::dummy();
let ty = infer_ctxt.at(&cause, trait_env.param_env).deeply_normalize(ty).unwrap_or(ty);
let ty = infer_ctxt
.at(&cause, trait_env.param_env())
.deeply_normalize(ty.as_ref())
.unwrap_or(ty.as_ref());
let result = match ty.kind() {
TyKind::Adt(def, args) => {
match def.inner().id {
hir_def::AdtId::StructId(s) => {
let repr = AttrFlags::repr(db, s.into()).unwrap_or_default();
if repr.simd() {
return layout_of_simd_ty(db, s, repr.packed(), &args, trait_env, &target);
return layout_of_simd_ty(
db,
s,
repr.packed(),
&args,
trait_env.as_ref(),
&target,
);
}
}
_ => {}
}
return db.layout_of_adt(def.inner().id, args, trait_env);
return db.layout_of_adt(def.inner().id, args.store(), trait_env);
}
TyKind::Bool => Layout::scalar(
dl,
@ -246,21 +254,23 @@ pub fn layout_of_ty_query<'db>(
),
TyKind::Tuple(tys) => {
let kind =
if tys.len() == 0 { StructKind::AlwaysSized } else { StructKind::MaybeUnsized };
if tys.is_empty() { StructKind::AlwaysSized } else { StructKind::MaybeUnsized };
let fields =
tys.iter().map(|k| db.layout_of_ty(k, trait_env)).collect::<Result<Vec<_>, _>>()?;
let fields = tys
.iter()
.map(|k| db.layout_of_ty(k.store(), trait_env.clone()))
.collect::<Result<Vec<_>, _>>()?;
let fields = fields.iter().map(|it| &**it).collect::<Vec<_>>();
let fields = fields.iter().collect::<IndexVec<_, _>>();
cx.calc.univariant(&fields, &ReprOptions::default(), kind)?
}
TyKind::Array(element, count) => {
let count = try_const_usize(db, count).ok_or(LayoutError::HasErrorConst)? as u64;
let element = db.layout_of_ty(element, trait_env)?;
let element = db.layout_of_ty(element.store(), trait_env)?;
cx.calc.array_like::<_, _, ()>(&element, Some(count))?
}
TyKind::Slice(element) => {
let element = db.layout_of_ty(element, trait_env)?;
let element = db.layout_of_ty(element.store(), trait_env)?;
cx.calc.array_like::<_, _, ()>(&element, None)?
}
TyKind::Str => {
@ -325,9 +335,11 @@ pub fn layout_of_ty_query<'db>(
let fields = captures
.iter()
.map(|it| {
let ty =
it.ty.instantiate(interner, args.split_closure_args_untupled().parent_args);
db.layout_of_ty(ty, trait_env)
let ty = it
.ty
.get()
.instantiate(interner, args.split_closure_args_untupled().parent_args);
db.layout_of_ty(ty.store(), trait_env.clone())
})
.collect::<Result<Vec<_>, _>>()?;
let fields = fields.iter().map(|it| &**it).collect::<Vec<_>>();
@ -357,10 +369,11 @@ pub fn layout_of_ty_query<'db>(
Ok(Arc::new(result))
}
pub(crate) fn layout_of_ty_cycle_result<'db>(
pub(crate) fn layout_of_ty_cycle_result(
_: &dyn HirDatabase,
_: Ty<'db>,
_: ParamEnvAndCrate<'db>,
_: salsa::Id,
_: StoredTy,
_: StoredParamEnvAndCrate,
) -> Result<Arc<Layout>, LayoutError> {
Err(LayoutError::RecursiveTypeWithoutIndirection)
}
@ -376,7 +389,7 @@ fn struct_tail_erasing_lifetimes<'a>(db: &'a dyn HirDatabase, pointee: Ty<'a>) -
let mut it = data.fields().iter().rev();
match it.next() {
Some((f, _)) => {
let last_field_ty = field_ty(db, struct_id.into(), f, &args);
let last_field_ty = field_ty(db, struct_id.into(), f, args);
struct_tail_erasing_lifetimes(db, last_field_ty)
}
None => pointee,
@ -397,9 +410,9 @@ fn field_ty<'a>(
db: &'a dyn HirDatabase,
def: hir_def::VariantId,
fd: LocalFieldId,
args: &GenericArgs<'a>,
args: GenericArgs<'a>,
) -> Ty<'a> {
db.field_types(def)[fd].instantiate(DbInterner::new_no_crate(db), args)
db.field_types(def)[fd].get().instantiate(DbInterner::new_no_crate(db), args)
}
fn scalar_unit(dl: &TargetDataLayout, value: Primitive) -> Scalar {

View file

@ -13,17 +13,17 @@ use smallvec::SmallVec;
use triomphe::Arc;
use crate::{
ParamEnvAndCrate,
db::HirDatabase,
layout::{Layout, LayoutCx, LayoutError, field_ty},
next_solver::GenericArgs,
next_solver::StoredGenericArgs,
traits::StoredParamEnvAndCrate,
};
pub fn layout_of_adt_query<'db>(
db: &'db dyn HirDatabase,
pub fn layout_of_adt_query(
db: &dyn HirDatabase,
def: AdtId,
args: GenericArgs<'db>,
trait_env: ParamEnvAndCrate<'db>,
args: StoredGenericArgs,
trait_env: StoredParamEnvAndCrate,
) -> Result<Arc<Layout>, LayoutError> {
let krate = trait_env.krate;
let Ok(target) = db.target_data_layout(krate) else {
@ -34,7 +34,9 @@ pub fn layout_of_adt_query<'db>(
let handle_variant = |def: VariantId, var: &VariantFields| {
var.fields()
.iter()
.map(|(fd, _)| db.layout_of_ty(field_ty(db, def, fd, &args), trait_env))
.map(|(fd, _)| {
db.layout_of_ty(field_ty(db, def, fd, args.as_ref()).store(), trait_env.clone())
})
.collect::<Result<Vec<_>, _>>()
};
let (variants, repr, is_special_no_niche) = match def {
@ -95,11 +97,12 @@ pub fn layout_of_adt_query<'db>(
Ok(Arc::new(result))
}
pub(crate) fn layout_of_adt_cycle_result<'db>(
_: &'db dyn HirDatabase,
pub(crate) fn layout_of_adt_cycle_result(
_: &dyn HirDatabase,
_: salsa::Id,
_def: AdtId,
_args: GenericArgs<'db>,
_trait_env: ParamEnvAndCrate<'db>,
_args: StoredGenericArgs,
_trait_env: StoredParamEnvAndCrate,
) -> Result<Arc<Layout>, LayoutError> {
Err(LayoutError::RecursiveTypeWithoutIndirection)
}

View file

@ -98,7 +98,7 @@ fn eval_goal(
Either::Left(it) => it.krate(&db),
Either::Right(it) => it.krate(&db),
};
db.layout_of_ty(goal_ty, ParamEnvAndCrate { param_env, krate })
db.layout_of_ty(goal_ty.store(), ParamEnvAndCrate { param_env, krate }.store())
})
}
@ -140,10 +140,10 @@ fn eval_expr(
.unwrap()
.0;
let infer = InferenceResult::for_body(&db, function_id.into());
let goal_ty = infer.type_of_binding[b];
let goal_ty = infer.type_of_binding[b].clone();
let param_env = db.trait_environment(function_id.into());
let krate = function_id.krate(&db);
db.layout_of_ty(goal_ty, ParamEnvAndCrate { param_env, krate })
db.layout_of_ty(goal_ty, ParamEnvAndCrate { param_env, krate }.store())
})
}

View file

@ -61,11 +61,12 @@ use hir_def::{CallableDefId, TypeOrConstParamId, type_ref::Rawness};
use hir_expand::name::Name;
use indexmap::{IndexMap, map::Entry};
use intern::{Symbol, sym};
use macros::GenericTypeVisitable;
use mir::{MirEvalError, VTableMap};
use rustc_hash::{FxBuildHasher, FxHashMap, FxHashSet};
use rustc_type_ir::{
BoundVarIndexKind, TypeSuperVisitable, TypeVisitableExt, UpcastFrom,
inherent::{IntoKind, SliceLike, Ty as _},
inherent::{IntoKind, Ty as _},
};
use syntax::ast::{ConstArg, make};
use traits::FnTrait;
@ -76,8 +77,8 @@ use crate::{
infer::unify::InferenceTable,
next_solver::{
AliasTy, Binder, BoundConst, BoundRegion, BoundRegionKind, BoundTy, BoundTyKind, Canonical,
CanonicalVarKind, CanonicalVars, Const, ConstKind, DbInterner, FnSig, PolyFnSig, Predicate,
Region, RegionKind, TraitRef, Ty, TyKind, Tys, abi,
CanonicalVarKind, CanonicalVars, Const, ConstKind, DbInterner, FnSig, GenericArgs,
PolyFnSig, Predicate, Region, RegionKind, TraitRef, Ty, TyKind, Tys, abi,
},
};
@ -87,7 +88,7 @@ pub use infer::{
InferenceTyDiagnosticSource, OverloadedDeref, PointerCast,
cast::CastError,
closure::analysis::{CaptureKind, CapturedItem},
could_coerce, could_unify, could_unify_deeply,
could_coerce, could_unify, could_unify_deeply, infer_query_with_inspect,
};
pub use lower::{
GenericPredicates, ImplTraits, LifetimeElisionKind, TyDefId, TyLoweringContext, ValueTyDefId,
@ -104,7 +105,7 @@ pub use utils::{
/// A constant can have reference to other things. Memory map job is holding
/// the necessary bits of memory of the const eval session to keep the constant
/// meaningful.
#[derive(Debug, Default, Clone, PartialEq, Eq)]
#[derive(Debug, Default, Clone, PartialEq, Eq, GenericTypeVisitable)]
pub enum MemoryMap<'db> {
#[default]
Empty,
@ -112,7 +113,7 @@ pub enum MemoryMap<'db> {
Complex(Box<ComplexMemoryMap<'db>>),
}
#[derive(Debug, Default, Clone, PartialEq, Eq)]
#[derive(Debug, Default, Clone, PartialEq, Eq, GenericTypeVisitable)]
pub struct ComplexMemoryMap<'db> {
memory: IndexMap<usize, Box<[u8]>, FxBuildHasher>,
vtable: VTableMap<'db>,
@ -134,7 +135,7 @@ impl ComplexMemoryMap<'_> {
}
impl<'db> MemoryMap<'db> {
pub fn vtable_ty(&self, id: usize) -> Result<Ty<'db>, MirEvalError<'db>> {
pub fn vtable_ty(&self, id: usize) -> Result<Ty<'db>, MirEvalError> {
match self {
MemoryMap::Empty | MemoryMap::Simple(_) => Err(MirEvalError::InvalidVTableId(id)),
MemoryMap::Complex(cm) => cm.vtable.ty(id),
@ -150,8 +151,8 @@ impl<'db> MemoryMap<'db> {
/// allocator function as `f` and it will return a mapping of old addresses to new addresses.
fn transform_addresses(
&self,
mut f: impl FnMut(&[u8], usize) -> Result<usize, MirEvalError<'db>>,
) -> Result<FxHashMap<usize, usize>, MirEvalError<'db>> {
mut f: impl FnMut(&[u8], usize) -> Result<usize, MirEvalError>,
) -> Result<FxHashMap<usize, usize>, MirEvalError> {
let mut transform = |(addr, val): (&usize, &[u8])| {
let addr = *addr;
let align = if addr == 0 { 64 } else { (addr - (addr & (addr - 1))).min(64) };
@ -333,9 +334,9 @@ impl FnAbi {
}
#[derive(Copy, Clone, PartialEq, Eq, Debug, Hash)]
pub enum ImplTraitId<'db> {
ReturnTypeImplTrait(hir_def::FunctionId, next_solver::ImplTraitIdx<'db>),
TypeAliasImplTrait(hir_def::TypeAliasId, next_solver::ImplTraitIdx<'db>),
pub enum ImplTraitId {
ReturnTypeImplTrait(hir_def::FunctionId, next_solver::ImplTraitIdx),
TypeAliasImplTrait(hir_def::TypeAliasId, next_solver::ImplTraitIdx),
}
/// 'Canonicalizes' the `t` by replacing any errors with new variables. Also
@ -468,7 +469,7 @@ where
Canonical {
value,
max_universe: rustc_type_ir::UniverseIndex::ZERO,
variables: CanonicalVars::new_from_iter(interner, error_replacer.vars),
variables: CanonicalVars::new_from_slice(&error_replacer.vars),
}
}
@ -490,12 +491,12 @@ pub fn callable_sig_from_fn_trait<'db>(
// - Self: FnOnce<?args_ty>
// - <Self as FnOnce<?args_ty>>::Output == ?ret_ty
let args_ty = table.next_ty_var();
let args = [self_ty, args_ty];
let trait_ref = TraitRef::new(table.interner(), fn_once_trait.into(), args);
let args = GenericArgs::new_from_slice(&[self_ty.into(), args_ty.into()]);
let trait_ref = TraitRef::new_from_args(table.interner(), fn_once_trait.into(), args);
let projection = Ty::new_alias(
table.interner(),
rustc_type_ir::AliasTyKind::Projection,
AliasTy::new(table.interner(), output_assoc_type.into(), args),
AliasTy::new_from_args(table.interner(), output_assoc_type.into(), args),
);
let pred = Predicate::upcast_from(trait_ref, table.interner());
@ -504,7 +505,7 @@ pub fn callable_sig_from_fn_trait<'db>(
let return_ty = table.normalize_alias_ty(projection);
for fn_x in [FnTrait::Fn, FnTrait::FnMut, FnTrait::FnOnce] {
let fn_x_trait = fn_x.get_id(lang_items)?;
let trait_ref = TraitRef::new(table.interner(), fn_x_trait.into(), args);
let trait_ref = TraitRef::new_from_args(table.interner(), fn_x_trait.into(), args);
if !table
.try_obligation(Predicate::upcast_from(trait_ref, table.interner()))
.no_solution()

File diff suppressed because it is too large Load diff

View file

@ -20,7 +20,7 @@ use hir_def::{
use hir_expand::name::Name;
use rustc_type_ir::{
AliasTerm, AliasTy, AliasTyKind,
inherent::{GenericArgs as _, Region as _, SliceLike, Ty as _},
inherent::{GenericArgs as _, Region as _, Ty as _},
};
use smallvec::SmallVec;
use stdx::never;
@ -45,17 +45,15 @@ use super::{
const_param_ty_query, ty_query,
};
type CallbackData<'a, 'db> = Either<
PathDiagnosticCallbackData,
crate::infer::diagnostics::PathDiagnosticCallbackData<'a, 'db>,
>;
type CallbackData<'a> =
Either<PathDiagnosticCallbackData, crate::infer::diagnostics::PathDiagnosticCallbackData<'a>>;
// We cannot use `&mut dyn FnMut()` because of lifetime issues, and we don't want to use `Box<dyn FnMut()>`
// because of the allocation, so we create a lifetime-less callback, tailored for our needs.
pub(crate) struct PathDiagnosticCallback<'a, 'db> {
pub(crate) data: CallbackData<'a, 'db>,
pub(crate) data: CallbackData<'a>,
pub(crate) callback:
fn(&CallbackData<'_, 'db>, &mut TyLoweringContext<'db, '_>, PathLoweringDiagnostic),
fn(&CallbackData<'_>, &mut TyLoweringContext<'db, '_>, PathLoweringDiagnostic),
}
pub(crate) struct PathLoweringContext<'a, 'b, 'db> {
@ -508,7 +506,7 @@ impl<'a, 'b, 'db> PathLoweringContext<'a, 'b, 'db> {
Some(Ty::new_alias(
interner,
AliasTyKind::Projection,
AliasTy::new(interner, associated_ty.into(), substs),
AliasTy::new_from_args(interner, associated_ty.into(), substs),
))
};
named_associated_type_shorthand_candidates(
@ -555,7 +553,7 @@ impl<'a, 'b, 'db> PathLoweringContext<'a, 'b, 'db> {
ValueTyDefId::UnionId(it) => it.into(),
ValueTyDefId::ConstId(it) => it.into(),
ValueTyDefId::StaticId(_) => {
return GenericArgs::new_from_iter(interner, []);
return GenericArgs::empty(interner);
}
ValueTyDefId::EnumVariantId(var) => {
// the generic args for an enum variant may be either specified
@ -1285,7 +1283,7 @@ pub(crate) fn substs_from_args_and_bindings<'db>(
}
}
GenericArgs::new_from_iter(interner, substs)
GenericArgs::new_from_slice(&substs)
}
fn type_looks_like_const(

View file

@ -26,7 +26,7 @@ use rustc_hash::{FxHashMap, FxHashSet};
use rustc_type_ir::{
TypeVisitableExt,
fast_reject::{TreatParams, simplify_type},
inherent::{BoundExistentialPredicates, IntoKind, SliceLike},
inherent::{BoundExistentialPredicates, IntoKind},
};
use stdx::impl_from;
use triomphe::Arc;
@ -362,7 +362,7 @@ pub fn lookup_impl_const<'db>(
ItemContainerId::TraitId(id) => id,
_ => return (const_id, subs),
};
let trait_ref = TraitRef::new(interner, trait_id.into(), subs);
let trait_ref = TraitRef::new_from_args(interner, trait_id.into(), subs);
let const_signature = db.const_signature(const_id);
let name = match const_signature.name.as_ref() {
@ -392,10 +392,10 @@ pub fn is_dyn_method<'db>(
};
let trait_params = db.generic_params(trait_id.into()).len();
let fn_params = fn_subst.len() - trait_params;
let trait_ref = TraitRef::new(
let trait_ref = TraitRef::new_from_args(
interner,
trait_id.into(),
GenericArgs::new_from_iter(interner, fn_subst.iter().take(trait_params)),
GenericArgs::new_from_slice(&fn_subst[..trait_params]),
);
let self_ty = trait_ref.self_ty();
if let TyKind::Dynamic(d, _) = self_ty.kind() {
@ -427,10 +427,10 @@ pub(crate) fn lookup_impl_method_query<'db>(
return (func, fn_subst);
};
let trait_params = db.generic_params(trait_id.into()).len();
let trait_ref = TraitRef::new(
let trait_ref = TraitRef::new_from_args(
interner,
trait_id.into(),
GenericArgs::new_from_iter(interner, fn_subst.iter().take(trait_params)),
GenericArgs::new_from_slice(&fn_subst[..trait_params]),
);
let name = &db.function_signature(func).name;
@ -505,13 +505,19 @@ pub(crate) fn find_matching_impl<'db>(
}
#[salsa::tracked(returns(ref))]
fn crates_containing_incoherent_inherent_impls(db: &dyn HirDatabase) -> Box<[Crate]> {
fn crates_containing_incoherent_inherent_impls(db: &dyn HirDatabase, krate: Crate) -> Box<[Crate]> {
let _p = tracing::info_span!("crates_containing_incoherent_inherent_impls").entered();
// We assume that only sysroot crates contain `#[rustc_has_incoherent_inherent_impls]`
// impls, since this is an internal feature and only std uses it.
db.all_crates().iter().copied().filter(|krate| krate.data(db).origin.is_lang()).collect()
krate.transitive_deps(db).into_iter().filter(|krate| krate.data(db).origin.is_lang()).collect()
}
pub fn incoherent_inherent_impls(db: &dyn HirDatabase, self_ty: SimplifiedType) -> &[ImplId] {
pub fn with_incoherent_inherent_impls(
db: &dyn HirDatabase,
krate: Crate,
self_ty: &SimplifiedType,
mut callback: impl FnMut(&[ImplId]),
) {
let has_incoherent_impls = match self_ty.def() {
Some(def_id) => match def_id.try_into() {
Ok(def_id) => AttrFlags::query(db, def_id)
@ -520,26 +526,14 @@ pub fn incoherent_inherent_impls(db: &dyn HirDatabase, self_ty: SimplifiedType)
},
_ => true,
};
return if !has_incoherent_impls {
&[]
} else {
incoherent_inherent_impls_query(db, (), self_ty)
};
#[salsa::tracked(returns(ref))]
fn incoherent_inherent_impls_query(
db: &dyn HirDatabase,
_force_query_input_to_be_interned: (),
self_ty: SimplifiedType,
) -> Box<[ImplId]> {
let _p = tracing::info_span!("incoherent_inherent_impl_crates").entered();
let mut result = Vec::new();
for &krate in crates_containing_incoherent_inherent_impls(db) {
let impls = InherentImpls::for_crate(db, krate);
result.extend_from_slice(impls.for_self_ty(&self_ty));
}
result.into_boxed_slice()
if !has_incoherent_impls {
return;
}
let _p = tracing::info_span!("incoherent_inherent_impls").entered();
let crates = crates_containing_incoherent_inherent_impls(db, krate);
for &krate in crates {
let impls = InherentImpls::for_crate(db, krate);
callback(impls.for_self_ty(self_ty));
}
}

View file

@ -9,7 +9,7 @@ use hir_def::{
use rustc_type_ir::{
TypeFoldable,
elaborate::elaborate,
inherent::{BoundExistentialPredicates, IntoKind, SliceLike, Ty as _},
inherent::{BoundExistentialPredicates, IntoKind, Ty as _},
};
use tracing::debug;
@ -45,7 +45,7 @@ struct ConfirmContext<'a, 'b, 'db> {
pub(crate) struct ConfirmResult<'db> {
pub(crate) callee: MethodCallee<'db>,
pub(crate) illegal_sized_bound: bool,
pub(crate) adjustments: Box<[Adjustment<'db>]>,
pub(crate) adjustments: Box<[Adjustment]>,
}
impl<'a, 'db> InferenceContext<'a, 'db> {
@ -145,7 +145,7 @@ impl<'a, 'b, 'db> ConfirmContext<'a, 'b, 'db> {
// traits, no trait system method can be called before this point because they
// could alter our Self-type, except for normalizing the receiver from the
// signature (which is also done during probing).
let method_sig_rcvr = method_sig.inputs().as_slice()[0];
let method_sig_rcvr = method_sig.inputs()[0];
debug!(
"confirm: self_ty={:?} method_sig_rcvr={:?} method_sig={:?}",
self_ty, method_sig_rcvr, method_sig
@ -177,7 +177,7 @@ impl<'a, 'b, 'db> ConfirmContext<'a, 'b, 'db> {
&mut self,
unadjusted_self_ty: Ty<'db>,
pick: &probe::Pick<'db>,
) -> (Ty<'db>, Box<[Adjustment<'db>]>) {
) -> (Ty<'db>, Box<[Adjustment]>) {
// Commit the autoderefs by calling `autoderef` again, but this
// time writing the results into the various typeck results.
let mut autoderef = self.ctx.table.autoderef_with_tracking(unadjusted_self_ty);
@ -200,8 +200,10 @@ impl<'a, 'b, 'db> ConfirmContext<'a, 'b, 'db> {
// for two-phase borrows.
let mutbl = AutoBorrowMutability::new(mutbl, AllowTwoPhase::Yes);
adjustments
.push(Adjustment { kind: Adjust::Borrow(AutoBorrow::Ref(mutbl)), target });
adjustments.push(Adjustment {
kind: Adjust::Borrow(AutoBorrow::Ref(mutbl)),
target: target.store(),
});
if unsize {
let unsized_ty = if let TyKind::Array(elem_ty, _) = base_ty.kind() {
@ -213,8 +215,10 @@ impl<'a, 'b, 'db> ConfirmContext<'a, 'b, 'db> {
)
};
target = Ty::new_ref(self.interner(), region, unsized_ty, mutbl.into());
adjustments
.push(Adjustment { kind: Adjust::Pointer(PointerCast::Unsize), target });
adjustments.push(Adjustment {
kind: Adjust::Pointer(PointerCast::Unsize),
target: target.store(),
});
}
}
Some(probe::AutorefOrPtrAdjustment::ToConstPtr) => {
@ -228,7 +232,7 @@ impl<'a, 'b, 'db> ConfirmContext<'a, 'b, 'db> {
adjustments.push(Adjustment {
kind: Adjust::Pointer(PointerCast::MutToConstPointer),
target,
target: target.store(),
});
}
None => {}
@ -482,7 +486,7 @@ impl<'a, 'b, 'db> ConfirmContext<'a, 'b, 'db> {
if self.ctx.unstable_features.arbitrary_self_types {
self.ctx.result.type_mismatches.get_or_insert_default().insert(
self.expr.into(),
TypeMismatch { expected: method_self_ty, actual: self_ty },
TypeMismatch { expected: method_self_ty.store(), actual: self_ty.store() },
);
}
}

View file

@ -14,7 +14,7 @@ use rustc_type_ir::{
InferTy, TypeVisitableExt, Upcast, Variance,
elaborate::{self, supertrait_def_ids},
fast_reject::{DeepRejectCtxt, TreatParams, simplify_type},
inherent::{AdtDef as _, BoundExistentialPredicates as _, IntoKind, SliceLike, Ty as _},
inherent::{AdtDef as _, BoundExistentialPredicates as _, IntoKind, Ty as _},
};
use smallvec::{SmallVec, smallvec};
use tracing::{debug, instrument};
@ -27,7 +27,7 @@ use crate::{
lower::GenericPredicates,
method_resolution::{
CandidateId, CandidateSource, InherentImpls, MethodError, MethodResolutionContext,
incoherent_inherent_impls, simplified_type_module,
simplified_type_module, with_incoherent_inherent_impls,
},
next_solver::{
Binder, Canonical, ClauseKind, DbInterner, FnSig, GenericArg, GenericArgs, Goal, ParamEnv,
@ -965,9 +965,11 @@ impl<'a, 'db, Choice: ProbeChoice<'db>> ProbeContext<'a, 'db, Choice> {
else {
panic!("unexpected incoherent type: {:?}", self_ty)
};
for &impl_def_id in incoherent_inherent_impls(self.db(), simp) {
self.assemble_inherent_impl_probe(impl_def_id, receiver_steps);
}
with_incoherent_inherent_impls(self.db(), self.ctx.resolver.krate(), &simp, |impls| {
for &impl_def_id in impls {
self.assemble_inherent_impl_probe(impl_def_id, receiver_steps);
}
});
}
fn assemble_inherent_impl_candidates_for_type(
@ -1975,7 +1977,7 @@ impl<'a, 'db, Choice: ProbeChoice<'db>> ProbeContext<'a, 'db, Choice> {
&& self.mode == Mode::MethodCall
{
let sig = self.xform_method_sig(item, args);
(sig.inputs().as_slice()[0], Some(sig.output()))
(sig.inputs()[0], Some(sig.output()))
} else {
(impl_ty, None)
}

View file

@ -12,7 +12,7 @@ use hir_def::{
use la_arena::{Arena, ArenaMap, Idx, RawIdx};
use rustc_ast_ir::Mutability;
use rustc_hash::FxHashMap;
use rustc_type_ir::inherent::{GenericArgs as _, IntoKind, SliceLike, Ty as _};
use rustc_type_ir::inherent::{GenericArgs as _, IntoKind, Ty as _};
use smallvec::{SmallVec, smallvec};
use stdx::{impl_from, never};
@ -23,7 +23,8 @@ use crate::{
display::{DisplayTarget, HirDisplay},
infer::PointerCast,
next_solver::{
Const, DbInterner, ErrorGuaranteed, GenericArgs, ParamEnv, Ty, TyKind,
Const, DbInterner, ErrorGuaranteed, GenericArgs, ParamEnv, StoredConst, StoredGenericArgs,
StoredTy, Ty, TyKind,
infer::{InferCtxt, traits::ObligationCause},
obligation_ctxt::ObligationCtxt,
},
@ -49,16 +50,16 @@ pub(crate) use monomorphization::monomorphized_mir_body_cycle_result;
use super::consteval::try_const_usize;
pub type BasicBlockId<'db> = Idx<BasicBlock<'db>>;
pub type LocalId<'db> = Idx<Local<'db>>;
pub type BasicBlockId = Idx<BasicBlock>;
pub type LocalId = Idx<Local>;
fn return_slot<'db>() -> LocalId<'db> {
fn return_slot() -> LocalId {
LocalId::from_raw(RawIdx::from(0))
}
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct Local<'db> {
pub ty: Ty<'db>,
pub struct Local {
pub ty: StoredTy,
}
/// An operand in MIR represents a "value" in Rust, the definition of which is undecided and part of
@ -80,19 +81,19 @@ pub struct Local<'db> {
/// currently implements it, but it seems like this may be something to check against in the
/// validator.
#[derive(Debug, PartialEq, Eq, Clone)]
pub struct Operand<'db> {
kind: OperandKind<'db>,
pub struct Operand {
kind: OperandKind,
// FIXME : This should actually just be of type `MirSpan`.
span: Option<MirSpan>,
}
#[derive(Debug, PartialEq, Eq, Clone)]
pub enum OperandKind<'db> {
pub enum OperandKind {
/// Creates a value by loading the given place.
///
/// Before drop elaboration, the type of the place must be `Copy`. After drop elaboration there
/// is no such requirement.
Copy(Place<'db>),
Copy(Place),
/// Creates a value by performing loading the place, just like the `Copy` operand.
///
@ -101,21 +102,21 @@ pub enum OperandKind<'db> {
/// place without first re-initializing it.
///
/// [UCG#188]: https://github.com/rust-lang/unsafe-code-guidelines/issues/188
Move(Place<'db>),
Move(Place),
/// Constants are already semantically values, and remain unchanged.
Constant { konst: Const<'db>, ty: Ty<'db> },
Constant { konst: StoredConst, ty: StoredTy },
/// NON STANDARD: This kind of operand returns an immutable reference to that static memory. Rustc
/// handles it with the `Constant` variant somehow.
Static(StaticId),
}
impl<'db> Operand<'db> {
impl<'db> Operand {
fn from_concrete_const(data: Box<[u8]>, memory_map: MemoryMap<'db>, ty: Ty<'db>) -> Self {
let interner = DbInterner::conjure();
Operand {
kind: OperandKind::Constant {
konst: Const::new_valtree(interner, ty, data, memory_map),
ty,
konst: Const::new_valtree(interner, ty, data, memory_map).store(),
ty: ty.store(),
},
span: None,
}
@ -125,7 +126,7 @@ impl<'db> Operand<'db> {
Operand::from_concrete_const(data, MemoryMap::default(), ty)
}
fn const_zst(ty: Ty<'db>) -> Operand<'db> {
fn const_zst(ty: Ty<'db>) -> Operand {
Self::from_bytes(Box::default(), ty)
}
@ -133,28 +134,28 @@ impl<'db> Operand<'db> {
db: &'db dyn HirDatabase,
func_id: hir_def::FunctionId,
generic_args: GenericArgs<'db>,
) -> Operand<'db> {
) -> Operand {
let interner = DbInterner::new_no_crate(db);
let ty = Ty::new_fn_def(interner, CallableDefId::FunctionId(func_id).into(), generic_args);
Operand::from_bytes(Box::default(), ty)
}
}
#[derive(Debug, Clone, PartialEq, Eq, Hash, salsa::Update)]
pub enum ProjectionElem<'db, V: PartialEq> {
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub enum ProjectionElem<V: PartialEq> {
Deref,
Field(Either<FieldId, TupleFieldId>),
// FIXME: get rid of this, and use FieldId for tuples and closures
ClosureField(usize),
Index(#[update(unsafe(with(crate::utils::unsafe_update_eq)))] V),
Index(V),
ConstantIndex { offset: u64, from_end: bool },
Subslice { from: u64, to: u64 },
//Downcast(Option<Symbol>, VariantIdx),
OpaqueCast(Ty<'db>),
OpaqueCast(StoredTy),
}
impl<'db, V: PartialEq> ProjectionElem<'db, V> {
pub fn projected_ty(
impl<V: PartialEq> ProjectionElem<V> {
pub fn projected_ty<'db>(
&self,
infcx: &InferCtxt<'db>,
env: ParamEnv<'db>,
@ -194,7 +195,7 @@ impl<'db, V: PartialEq> ProjectionElem<'db, V> {
},
ProjectionElem::Field(Either::Left(f)) => match base.kind() {
TyKind::Adt(_, subst) => {
db.field_types(f.parent)[f.local_id].instantiate(interner, subst)
db.field_types(f.parent)[f.local_id].get().instantiate(interner, subst)
}
ty => {
never!("Only adt has field, found {:?}", ty);
@ -253,18 +254,18 @@ impl<'db, V: PartialEq> ProjectionElem<'db, V> {
}
}
type PlaceElem<'db> = ProjectionElem<'db, LocalId<'db>>;
type PlaceElem = ProjectionElem<LocalId>;
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub struct ProjectionId(u32);
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct ProjectionStore<'db> {
id_to_proj: FxHashMap<ProjectionId, Box<[PlaceElem<'db>]>>,
proj_to_id: FxHashMap<Box<[PlaceElem<'db>]>, ProjectionId>,
pub struct ProjectionStore {
id_to_proj: FxHashMap<ProjectionId, Box<[PlaceElem]>>,
proj_to_id: FxHashMap<Box<[PlaceElem]>, ProjectionId>,
}
impl Default for ProjectionStore<'_> {
impl Default for ProjectionStore {
fn default() -> Self {
let mut this = Self { id_to_proj: Default::default(), proj_to_id: Default::default() };
// Ensure that [] will get the id 0 which is used in `ProjectionId::Empty`
@ -273,17 +274,17 @@ impl Default for ProjectionStore<'_> {
}
}
impl<'db> ProjectionStore<'db> {
impl ProjectionStore {
pub fn shrink_to_fit(&mut self) {
self.id_to_proj.shrink_to_fit();
self.proj_to_id.shrink_to_fit();
}
pub fn intern_if_exist(&self, projection: &[PlaceElem<'db>]) -> Option<ProjectionId> {
pub fn intern_if_exist(&self, projection: &[PlaceElem]) -> Option<ProjectionId> {
self.proj_to_id.get(projection).copied()
}
pub fn intern(&mut self, projection: Box<[PlaceElem<'db>]>) -> ProjectionId {
pub fn intern(&mut self, projection: Box<[PlaceElem]>) -> ProjectionId {
let new_id = ProjectionId(self.proj_to_id.len() as u32);
match self.proj_to_id.entry(projection) {
Entry::Occupied(id) => *id.get(),
@ -304,15 +305,11 @@ impl ProjectionId {
self == ProjectionId::EMPTY
}
pub fn lookup<'a, 'db>(self, store: &'a ProjectionStore<'db>) -> &'a [PlaceElem<'db>] {
pub fn lookup(self, store: &ProjectionStore) -> &[PlaceElem] {
store.id_to_proj.get(&self).unwrap()
}
pub fn project<'db>(
self,
projection: PlaceElem<'db>,
store: &mut ProjectionStore<'db>,
) -> ProjectionId {
pub fn project(self, projection: PlaceElem, store: &mut ProjectionStore) -> ProjectionId {
let mut current = self.lookup(store).to_vec();
current.push(projection);
store.intern(current.into())
@ -320,13 +317,13 @@ impl ProjectionId {
}
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub struct Place<'db> {
pub local: LocalId<'db>,
pub struct Place {
pub local: LocalId,
pub projection: ProjectionId,
}
impl<'db> Place<'db> {
fn is_parent(&self, child: &Place<'db>, store: &ProjectionStore<'db>) -> bool {
impl Place {
fn is_parent(&self, child: &Place, store: &ProjectionStore) -> bool {
self.local == child.local
&& child.projection.lookup(store).starts_with(self.projection.lookup(store))
}
@ -334,39 +331,39 @@ impl<'db> Place<'db> {
/// The place itself is not included
fn iterate_over_parents<'a>(
&'a self,
store: &'a ProjectionStore<'db>,
) -> impl Iterator<Item = Place<'db>> + 'a {
store: &'a ProjectionStore,
) -> impl Iterator<Item = Place> + 'a {
let projection = self.projection.lookup(store);
(0..projection.len()).map(|x| &projection[0..x]).filter_map(move |x| {
Some(Place { local: self.local, projection: store.intern_if_exist(x)? })
})
}
fn project(&self, projection: PlaceElem<'db>, store: &mut ProjectionStore<'db>) -> Place<'db> {
fn project(&self, projection: PlaceElem, store: &mut ProjectionStore) -> Place {
Place { local: self.local, projection: self.projection.project(projection, store) }
}
}
impl<'db> From<LocalId<'db>> for Place<'db> {
fn from(local: LocalId<'db>) -> Self {
impl From<LocalId> for Place {
fn from(local: LocalId) -> Self {
Self { local, projection: ProjectionId::EMPTY }
}
}
#[derive(Debug, PartialEq, Eq, Clone)]
pub enum AggregateKind<'db> {
pub enum AggregateKind {
/// The type is of the element
Array(Ty<'db>),
Array(StoredTy),
/// The type is of the tuple
Tuple(Ty<'db>),
Adt(VariantId, GenericArgs<'db>),
Tuple(StoredTy),
Adt(VariantId, StoredGenericArgs),
Union(UnionId, FieldId),
Closure(Ty<'db>),
Closure(StoredTy),
//Coroutine(LocalDefId, SubstsRef, Movability),
}
#[derive(Debug, Clone, Hash, PartialEq, Eq)]
pub struct SwitchTargets<'db> {
pub struct SwitchTargets {
/// Possible values. The locations to branch to in each case
/// are found in the corresponding indices from the `targets` vector.
values: SmallVec<[u128; 1]>,
@ -383,17 +380,17 @@ pub struct SwitchTargets<'db> {
//
// However weve decided to keep this as-is until we figure a case
// where some other approach seems to be strictly better than other.
targets: SmallVec<[BasicBlockId<'db>; 2]>,
targets: SmallVec<[BasicBlockId; 2]>,
}
impl<'db> SwitchTargets<'db> {
impl SwitchTargets {
/// Creates switch targets from an iterator of values and target blocks.
///
/// The iterator may be empty, in which case the `SwitchInt` instruction is equivalent to
/// `goto otherwise;`.
pub fn new(
targets: impl Iterator<Item = (u128, BasicBlockId<'db>)>,
otherwise: BasicBlockId<'db>,
targets: impl Iterator<Item = (u128, BasicBlockId)>,
otherwise: BasicBlockId,
) -> Self {
let (values, mut targets): (SmallVec<_>, SmallVec<_>) = targets.unzip();
targets.push(otherwise);
@ -402,12 +399,12 @@ impl<'db> SwitchTargets<'db> {
/// Builds a switch targets definition that jumps to `then` if the tested value equals `value`,
/// and to `else_` if not.
pub fn static_if(value: u128, then: BasicBlockId<'db>, else_: BasicBlockId<'db>) -> Self {
pub fn static_if(value: u128, then: BasicBlockId, else_: BasicBlockId) -> Self {
Self { values: smallvec![value], targets: smallvec![then, else_] }
}
/// Returns the fallback target that is jumped to when none of the values match the operand.
pub fn otherwise(&self) -> BasicBlockId<'db> {
pub fn otherwise(&self) -> BasicBlockId {
*self.targets.last().unwrap()
}
@ -417,33 +414,33 @@ impl<'db> SwitchTargets<'db> {
/// including the `otherwise` fallback target.
///
/// Note that this may yield 0 elements. Only the `otherwise` branch is mandatory.
pub fn iter(&self) -> impl Iterator<Item = (u128, BasicBlockId<'db>)> + '_ {
pub fn iter(&self) -> impl Iterator<Item = (u128, BasicBlockId)> + '_ {
iter::zip(&self.values, &self.targets).map(|(x, y)| (*x, *y))
}
/// Returns a slice with all possible jump targets (including the fallback target).
pub fn all_targets(&self) -> &[BasicBlockId<'db>] {
pub fn all_targets(&self) -> &[BasicBlockId] {
&self.targets
}
/// Finds the `BasicBlock` to which this `SwitchInt` will branch given the
/// specific value. This cannot fail, as it'll return the `otherwise`
/// branch if there's not a specific match for the value.
pub fn target_for_value(&self, value: u128) -> BasicBlockId<'db> {
pub fn target_for_value(&self, value: u128) -> BasicBlockId {
self.iter().find_map(|(v, t)| (v == value).then_some(t)).unwrap_or_else(|| self.otherwise())
}
}
#[derive(Debug, PartialEq, Eq, Clone)]
pub struct Terminator<'db> {
pub struct Terminator {
pub span: MirSpan,
pub kind: TerminatorKind<'db>,
pub kind: TerminatorKind,
}
#[derive(Debug, PartialEq, Eq, Clone)]
pub enum TerminatorKind<'db> {
pub enum TerminatorKind {
/// Block has one successor; we continue execution there.
Goto { target: BasicBlockId<'db> },
Goto { target: BasicBlockId },
/// Switches based on the computed value.
///
@ -455,9 +452,9 @@ pub enum TerminatorKind<'db> {
/// Target values may not appear more than once.
SwitchInt {
/// The discriminant value being tested.
discr: Operand<'db>,
discr: Operand,
targets: SwitchTargets<'db>,
targets: SwitchTargets,
},
/// Indicates that the landing pad is finished and that the process should continue unwinding.
@ -508,7 +505,7 @@ pub enum TerminatorKind<'db> {
/// > The drop glue is executed if, among all statements executed within this `Body`, an assignment to
/// > the place or one of its "parents" occurred more recently than a move out of it. This does not
/// > consider indirect assignments.
Drop { place: Place<'db>, target: BasicBlockId<'db>, unwind: Option<BasicBlockId<'db>> },
Drop { place: Place, target: BasicBlockId, unwind: Option<BasicBlockId> },
/// Drops the place and assigns a new value to it.
///
@ -541,10 +538,10 @@ pub enum TerminatorKind<'db> {
///
/// Disallowed after drop elaboration.
DropAndReplace {
place: Place<'db>,
value: Operand<'db>,
target: BasicBlockId<'db>,
unwind: Option<BasicBlockId<'db>>,
place: Place,
value: Operand,
target: BasicBlockId,
unwind: Option<BasicBlockId>,
},
/// Roughly speaking, evaluates the `func` operand and the arguments, and starts execution of
@ -559,18 +556,18 @@ pub enum TerminatorKind<'db> {
/// [#71117]: https://github.com/rust-lang/rust/issues/71117
Call {
/// The function thats being called.
func: Operand<'db>,
func: Operand,
/// Arguments the function is called with.
/// These are owned by the callee, which is free to modify them.
/// This allows the memory occupied by "by-value" arguments to be
/// reused across function calls without duplicating the contents.
args: Box<[Operand<'db>]>,
args: Box<[Operand]>,
/// Where the returned value will be written
destination: Place<'db>,
destination: Place,
/// Where to go after this call returns. If none, the call necessarily diverges.
target: Option<BasicBlockId<'db>>,
target: Option<BasicBlockId>,
/// Cleanups to be done if the call unwinds.
cleanup: Option<BasicBlockId<'db>>,
cleanup: Option<BasicBlockId>,
/// `true` if this is from a call in HIR rather than from an overloaded
/// operator. True for overloaded function call.
from_hir_call: bool,
@ -586,11 +583,11 @@ pub enum TerminatorKind<'db> {
/// necessarily executed even in the case of a panic, for example in `-C panic=abort`. If the
/// assertion does not fail, execution continues at the specified basic block.
Assert {
cond: Operand<'db>,
cond: Operand,
expected: bool,
//msg: AssertMessage,
target: BasicBlockId<'db>,
cleanup: Option<BasicBlockId<'db>>,
target: BasicBlockId,
cleanup: Option<BasicBlockId>,
},
/// Marks a suspend point.
@ -607,13 +604,13 @@ pub enum TerminatorKind<'db> {
/// **Needs clarification**: What about the evaluation order of the `resume_arg` and `value`?
Yield {
/// The value to return.
value: Operand<'db>,
value: Operand,
/// Where to resume to.
resume: BasicBlockId<'db>,
resume: BasicBlockId,
/// The place to store the resume argument in.
resume_arg: Place<'db>,
resume_arg: Place,
/// Cleanup to be done if the coroutine is dropped at this suspend point.
drop: Option<BasicBlockId<'db>>,
drop: Option<BasicBlockId>,
},
/// Indicates the end of dropping a coroutine.
@ -636,10 +633,10 @@ pub enum TerminatorKind<'db> {
/// Disallowed after drop elaboration.
FalseEdge {
/// The target normal control flow will take.
real_target: BasicBlockId<'db>,
real_target: BasicBlockId,
/// A block control flow could conceptually jump to, but won't in
/// practice.
imaginary_target: BasicBlockId<'db>,
imaginary_target: BasicBlockId,
},
/// A terminator for blocks that only take one path in reality, but where we reserve the right
@ -651,14 +648,14 @@ pub enum TerminatorKind<'db> {
/// Disallowed after drop elaboration.
FalseUnwind {
/// The target normal control flow will take.
real_target: BasicBlockId<'db>,
real_target: BasicBlockId,
/// The imaginary cleanup block link. This particular path will never be taken
/// in practice, but in order to avoid fragility we want to always
/// consider it in borrowck. We don't want to accept programs which
/// pass borrowck only when `panic=abort` or some assertions are disabled
/// due to release vs. debug mode builds. This needs to be an `Option` because
/// of the `remove_noop_landing_pads` and `abort_unwinding_calls` passes.
unwind: Option<BasicBlockId<'db>>,
unwind: Option<BasicBlockId>,
},
}
@ -845,8 +842,8 @@ impl From<hir_def::hir::CmpOp> for BinOp {
}
}
impl<'db> From<Operand<'db>> for Rvalue<'db> {
fn from(x: Operand<'db>) -> Self {
impl From<Operand> for Rvalue {
fn from(x: Operand) -> Self {
Self::Use(x)
}
}
@ -875,14 +872,14 @@ pub enum CastKind {
}
#[derive(Debug, PartialEq, Eq, Clone)]
pub enum Rvalue<'db> {
pub enum Rvalue {
/// Yields the operand unchanged
Use(Operand<'db>),
Use(Operand),
/// Creates an array where each element is the value of the operand.
///
/// Corresponds to source code like `[x; 32]`.
Repeat(Operand<'db>, Const<'db>),
Repeat(Operand, StoredConst),
/// Creates a reference of the indicated kind to the place.
///
@ -891,7 +888,7 @@ pub enum Rvalue<'db> {
/// exactly what the behavior of this operation should be.
///
/// `Shallow` borrows are disallowed after drop lowering.
Ref(BorrowKind, Place<'db>),
Ref(BorrowKind, Place),
/// Creates a pointer/reference to the given thread local.
///
@ -922,7 +919,7 @@ pub enum Rvalue<'db> {
/// If the type of the place is an array, this is the array length. For slices (`[T]`, not
/// `&[T]`) this accesses the place's metadata to determine the length. This rvalue is
/// ill-formed for places of other types.
Len(Place<'db>),
Len(Place),
/// Performs essentially all of the casts that can be performed via `as`.
///
@ -930,7 +927,7 @@ pub enum Rvalue<'db> {
///
/// **FIXME**: Document exactly which `CastKind`s allow which types of casts. Figure out why
/// `ArrayToPointer` and `MutToConstPointer` are special.
Cast(CastKind, Operand<'db>, Ty<'db>),
Cast(CastKind, Operand, StoredTy),
// FIXME link to `pointer::offset` when it hits stable.
/// * `Offset` has the same semantics as `pointer::offset`, except that the second
@ -962,7 +959,7 @@ pub enum Rvalue<'db> {
/// when the value of right-hand side is negative.
///
/// Other combinations of types and operators are unsupported.
CheckedBinaryOp(BinOp, Operand<'db>, Operand<'db>),
CheckedBinaryOp(BinOp, Operand, Operand),
/// Computes a value as described by the operation.
//NullaryOp(NullOp, Ty),
@ -973,7 +970,7 @@ pub enum Rvalue<'db> {
/// Also does two's-complement arithmetic. Negation requires a signed integer or a float;
/// bitwise not requires a signed integer, unsigned integer, or bool. Both operation kinds
/// return a value with the same type as their operand.
UnaryOp(UnOp, Operand<'db>),
UnaryOp(UnOp, Operand),
/// Computes the discriminant of the place, returning it as an integer of type
/// `discriminant_ty`. Returns zero for types without discriminant.
@ -983,7 +980,7 @@ pub enum Rvalue<'db> {
/// variant index; use `discriminant_for_variant` to convert.
///
/// [#91095]: https://github.com/rust-lang/rust/issues/91095
Discriminant(Place<'db>),
Discriminant(Place),
/// Creates an aggregate value, like a tuple or struct.
///
@ -993,17 +990,17 @@ pub enum Rvalue<'db> {
///
/// Disallowed after deaggregation for all aggregate kinds except `Array` and `Coroutine`. After
/// coroutine lowering, `Coroutine` aggregate kinds are disallowed too.
Aggregate(AggregateKind<'db>, Box<[Operand<'db>]>),
Aggregate(AggregateKind, Box<[Operand]>),
/// Transmutes a `*mut u8` into shallow-initialized `Box<T>`.
///
/// This is different from a normal transmute because dataflow analysis will treat the box as
/// initialized but its content as uninitialized. Like other pointer casts, this in general
/// affects alias analysis.
ShallowInitBox(Operand<'db>, Ty<'db>),
ShallowInitBox(Operand, StoredTy),
/// NON STANDARD: allocates memory with the type's layout, and shallow init the box with the resulting pointer.
ShallowInitBoxWithAlloc(Ty<'db>),
ShallowInitBoxWithAlloc(StoredTy),
/// A CopyForDeref is equivalent to a read from a place at the
/// codegen level, but is treated specially by drop elaboration. When such a read happens, it
@ -1013,41 +1010,41 @@ pub enum Rvalue<'db> {
/// read never happened and just projects further. This allows simplifying various MIR
/// optimizations and codegen backends that previously had to handle deref operations anywhere
/// in a place.
CopyForDeref(Place<'db>),
CopyForDeref(Place),
}
#[derive(Debug, PartialEq, Eq, Clone)]
pub enum StatementKind<'db> {
Assign(Place<'db>, Rvalue<'db>),
FakeRead(Place<'db>),
pub enum StatementKind {
Assign(Place, Rvalue),
FakeRead(Place),
//SetDiscriminant {
// place: Box<Place>,
// variant_index: VariantIdx,
//},
Deinit(Place<'db>),
StorageLive(LocalId<'db>),
StorageDead(LocalId<'db>),
Deinit(Place),
StorageLive(LocalId),
StorageDead(LocalId),
//Retag(RetagKind, Box<Place>),
//AscribeUserType(Place, UserTypeProjection, Variance),
//Intrinsic(Box<NonDivergingIntrinsic>),
Nop,
}
impl<'db> StatementKind<'db> {
fn with_span(self, span: MirSpan) -> Statement<'db> {
impl StatementKind {
fn with_span(self, span: MirSpan) -> Statement {
Statement { kind: self, span }
}
}
#[derive(Debug, PartialEq, Eq, Clone)]
pub struct Statement<'db> {
pub kind: StatementKind<'db>,
pub struct Statement {
pub kind: StatementKind,
pub span: MirSpan,
}
#[derive(Debug, Default, Clone, PartialEq, Eq)]
pub struct BasicBlock<'db> {
pub struct BasicBlock {
/// List of statements in this block.
pub statements: Vec<Statement<'db>>,
pub statements: Vec<Statement>,
/// Terminator for this block.
///
@ -1057,7 +1054,7 @@ pub struct BasicBlock<'db> {
/// exception is that certain passes, such as `simplify_cfg`, swap
/// out the terminator temporarily with `None` while they continue
/// to recurse over the set of basic blocks.
pub terminator: Option<Terminator<'db>>,
pub terminator: Option<Terminator>,
/// If true, this block lies on an unwind path. This is used
/// during codegen where distinct kinds of basic blocks may be
@ -1067,29 +1064,29 @@ pub struct BasicBlock<'db> {
}
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct MirBody<'db> {
pub projection_store: ProjectionStore<'db>,
pub basic_blocks: Arena<BasicBlock<'db>>,
pub locals: Arena<Local<'db>>,
pub start_block: BasicBlockId<'db>,
pub struct MirBody {
pub projection_store: ProjectionStore,
pub basic_blocks: Arena<BasicBlock>,
pub locals: Arena<Local>,
pub start_block: BasicBlockId,
pub owner: DefWithBodyId,
pub binding_locals: ArenaMap<BindingId, LocalId<'db>>,
pub param_locals: Vec<LocalId<'db>>,
pub binding_locals: ArenaMap<BindingId, LocalId>,
pub param_locals: Vec<LocalId>,
/// This field stores the closures directly owned by this body. It is used
/// in traversing every mir body.
pub closures: Vec<InternedClosureId>,
}
impl<'db> MirBody<'db> {
pub fn local_to_binding_map(&self) -> ArenaMap<LocalId<'db>, BindingId> {
impl MirBody {
pub fn local_to_binding_map(&self) -> ArenaMap<LocalId, BindingId> {
self.binding_locals.iter().map(|(it, y)| (*y, it)).collect()
}
fn walk_places(&mut self, mut f: impl FnMut(&mut Place<'db>, &mut ProjectionStore<'db>)) {
fn for_operand<'db>(
op: &mut Operand<'db>,
f: &mut impl FnMut(&mut Place<'db>, &mut ProjectionStore<'db>),
store: &mut ProjectionStore<'db>,
fn walk_places(&mut self, mut f: impl FnMut(&mut Place, &mut ProjectionStore)) {
fn for_operand(
op: &mut Operand,
f: &mut impl FnMut(&mut Place, &mut ProjectionStore),
store: &mut ProjectionStore,
) {
match &mut op.kind {
OperandKind::Copy(p) | OperandKind::Move(p) => {

View file

@ -17,7 +17,7 @@ use crate::{
display::DisplayTarget,
mir::OperandKind,
next_solver::{
DbInterner, GenericArgs, ParamEnv, Ty, TypingMode,
DbInterner, GenericArgs, ParamEnv, StoredTy, Ty, TypingMode,
infer::{DbInternerInferExt, InferCtxt},
},
};
@ -36,44 +36,44 @@ pub enum MutabilityReason {
}
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct MovedOutOfRef<'db> {
pub ty: Ty<'db>,
pub struct MovedOutOfRef {
pub ty: StoredTy,
pub span: MirSpan,
}
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct PartiallyMoved<'db> {
pub ty: Ty<'db>,
pub struct PartiallyMoved {
pub ty: StoredTy,
pub span: MirSpan,
pub local: LocalId<'db>,
pub local: LocalId,
}
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct BorrowRegion<'db> {
pub local: LocalId<'db>,
pub struct BorrowRegion {
pub local: LocalId,
pub kind: BorrowKind,
pub places: Vec<MirSpan>,
}
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct BorrowckResult<'db> {
pub mir_body: Arc<MirBody<'db>>,
pub mutability_of_locals: ArenaMap<LocalId<'db>, MutabilityReason>,
pub moved_out_of_ref: Vec<MovedOutOfRef<'db>>,
pub partially_moved: Vec<PartiallyMoved<'db>>,
pub borrow_regions: Vec<BorrowRegion<'db>>,
pub struct BorrowckResult {
pub mir_body: Arc<MirBody>,
pub mutability_of_locals: ArenaMap<LocalId, MutabilityReason>,
pub moved_out_of_ref: Vec<MovedOutOfRef>,
pub partially_moved: Vec<PartiallyMoved>,
pub borrow_regions: Vec<BorrowRegion>,
}
fn all_mir_bodies<'db>(
db: &'db dyn HirDatabase,
fn all_mir_bodies(
db: &dyn HirDatabase,
def: DefWithBodyId,
mut cb: impl FnMut(Arc<MirBody<'db>>),
) -> Result<(), MirLowerError<'db>> {
fn for_closure<'db>(
db: &'db dyn HirDatabase,
mut cb: impl FnMut(Arc<MirBody>),
) -> Result<(), MirLowerError> {
fn for_closure(
db: &dyn HirDatabase,
c: InternedClosureId,
cb: &mut impl FnMut(Arc<MirBody<'db>>),
) -> Result<(), MirLowerError<'db>> {
cb: &mut impl FnMut(Arc<MirBody>),
) -> Result<(), MirLowerError> {
match db.mir_body_for_closure(c) {
Ok(body) => {
cb(body.clone());
@ -91,10 +91,10 @@ fn all_mir_bodies<'db>(
}
}
pub fn borrowck_query<'db>(
db: &'db dyn HirDatabase,
pub fn borrowck_query(
db: &dyn HirDatabase,
def: DefWithBodyId,
) -> Result<Arc<[BorrowckResult<'db>]>, MirLowerError<'db>> {
) -> Result<Arc<[BorrowckResult]>, MirLowerError> {
let _p = tracing::info_span!("borrowck_query").entered();
let module = def.module(db);
let interner = DbInterner::new_with(db, module.krate(db));
@ -125,20 +125,20 @@ fn make_fetch_closure_field<'db>(
let (captures, _) = infer.closure_info(c);
let parent_subst = subst.split_closure_args_untupled().parent_args;
let interner = DbInterner::new_no_crate(db);
captures.get(f).expect("broken closure field").ty.instantiate(interner, parent_subst)
captures.get(f).expect("broken closure field").ty.get().instantiate(interner, parent_subst)
}
}
fn moved_out_of_ref<'db>(
infcx: &InferCtxt<'db>,
env: ParamEnv<'db>,
body: &MirBody<'db>,
) -> Vec<MovedOutOfRef<'db>> {
body: &MirBody,
) -> Vec<MovedOutOfRef> {
let db = infcx.interner.db;
let mut result = vec![];
let mut for_operand = |op: &Operand<'db>, span: MirSpan| match op.kind {
let mut for_operand = |op: &Operand, span: MirSpan| match op.kind {
OperandKind::Copy(p) | OperandKind::Move(p) => {
let mut ty: Ty<'db> = body.locals[p.local].ty;
let mut ty: Ty<'db> = body.locals[p.local].ty.as_ref();
let mut is_dereference_of_ref = false;
for proj in p.projection.lookup(&body.projection_store) {
if *proj == ProjectionElem::Deref && ty.as_reference().is_some() {
@ -156,7 +156,7 @@ fn moved_out_of_ref<'db>(
&& !infcx.type_is_copy_modulo_regions(env, ty)
&& !ty.references_non_lt_error()
{
result.push(MovedOutOfRef { span: op.span.unwrap_or(span), ty });
result.push(MovedOutOfRef { span: op.span.unwrap_or(span), ty: ty.store() });
}
}
OperandKind::Constant { .. } | OperandKind::Static(_) => (),
@ -233,13 +233,13 @@ fn moved_out_of_ref<'db>(
fn partially_moved<'db>(
infcx: &InferCtxt<'db>,
env: ParamEnv<'db>,
body: &MirBody<'db>,
) -> Vec<PartiallyMoved<'db>> {
body: &MirBody,
) -> Vec<PartiallyMoved> {
let db = infcx.interner.db;
let mut result = vec![];
let mut for_operand = |op: &Operand<'db>, span: MirSpan| match op.kind {
let mut for_operand = |op: &Operand, span: MirSpan| match op.kind {
OperandKind::Copy(p) | OperandKind::Move(p) => {
let mut ty: Ty<'db> = body.locals[p.local].ty;
let mut ty: Ty<'db> = body.locals[p.local].ty.as_ref();
for proj in p.projection.lookup(&body.projection_store) {
ty = proj.projected_ty(
infcx,
@ -250,7 +250,7 @@ fn partially_moved<'db>(
);
}
if !infcx.type_is_copy_modulo_regions(env, ty) && !ty.references_non_lt_error() {
result.push(PartiallyMoved { span, ty, local: p.local });
result.push(PartiallyMoved { span, ty: ty.store(), local: p.local });
}
}
OperandKind::Constant { .. } | OperandKind::Static(_) => (),
@ -324,7 +324,7 @@ fn partially_moved<'db>(
result
}
fn borrow_regions<'db>(db: &'db dyn HirDatabase, body: &MirBody<'db>) -> Vec<BorrowRegion<'db>> {
fn borrow_regions(db: &dyn HirDatabase, body: &MirBody) -> Vec<BorrowRegion> {
let mut borrows = FxHashMap::default();
for (_, block) in body.basic_blocks.iter() {
db.unwind_if_revision_cancelled();
@ -332,7 +332,7 @@ fn borrow_regions<'db>(db: &'db dyn HirDatabase, body: &MirBody<'db>) -> Vec<Bor
if let StatementKind::Assign(_, Rvalue::Ref(kind, p)) = &statement.kind {
borrows
.entry(p.local)
.and_modify(|it: &mut BorrowRegion<'db>| {
.and_modify(|it: &mut BorrowRegion| {
it.places.push(statement.span);
})
.or_insert_with(|| BorrowRegion {
@ -377,12 +377,12 @@ enum ProjectionCase {
fn place_case<'db>(
infcx: &InferCtxt<'db>,
env: ParamEnv<'db>,
body: &MirBody<'db>,
lvalue: &Place<'db>,
body: &MirBody,
lvalue: &Place,
) -> ProjectionCase {
let db = infcx.interner.db;
let mut is_part_of = false;
let mut ty = body.locals[lvalue.local].ty;
let mut ty = body.locals[lvalue.local].ty.as_ref();
for proj in lvalue.projection.lookup(&body.projection_store).iter() {
match proj {
ProjectionElem::Deref if ty.as_adt().is_none() => return ProjectionCase::Indirect, // It's indirect in case of reference and raw
@ -410,18 +410,18 @@ fn place_case<'db>(
/// Returns a map from basic blocks to the set of locals that might be ever initialized before
/// the start of the block. Only `StorageDead` can remove something from this map, and we ignore
/// `Uninit` and `drop` and similar after initialization.
fn ever_initialized_map<'db>(
db: &'db dyn HirDatabase,
body: &MirBody<'db>,
) -> ArenaMap<BasicBlockId<'db>, ArenaMap<LocalId<'db>, bool>> {
let mut result: ArenaMap<BasicBlockId<'db>, ArenaMap<LocalId<'db>, bool>> =
fn ever_initialized_map(
db: &dyn HirDatabase,
body: &MirBody,
) -> ArenaMap<BasicBlockId, ArenaMap<LocalId, bool>> {
let mut result: ArenaMap<BasicBlockId, ArenaMap<LocalId, bool>> =
body.basic_blocks.iter().map(|it| (it.0, ArenaMap::default())).collect();
fn dfs<'db>(
db: &'db dyn HirDatabase,
body: &MirBody<'db>,
l: LocalId<'db>,
stack: &mut Vec<BasicBlockId<'db>>,
result: &mut ArenaMap<BasicBlockId<'db>, ArenaMap<LocalId<'db>, bool>>,
fn dfs(
db: &dyn HirDatabase,
body: &MirBody,
l: LocalId,
stack: &mut Vec<BasicBlockId>,
result: &mut ArenaMap<BasicBlockId, ArenaMap<LocalId, bool>>,
) {
while let Some(b) = stack.pop() {
let mut is_ever_initialized = result[b][l]; // It must be filled, as we use it as mark for dfs
@ -509,11 +509,7 @@ fn ever_initialized_map<'db>(
result
}
fn push_mut_span<'db>(
local: LocalId<'db>,
span: MirSpan,
result: &mut ArenaMap<LocalId<'db>, MutabilityReason>,
) {
fn push_mut_span(local: LocalId, span: MirSpan, result: &mut ArenaMap<LocalId, MutabilityReason>) {
match &mut result[local] {
MutabilityReason::Mut { spans } => spans.push(span),
it @ (MutabilityReason::Not | MutabilityReason::Unused) => {
@ -522,16 +518,13 @@ fn push_mut_span<'db>(
};
}
fn record_usage<'db>(local: LocalId<'db>, result: &mut ArenaMap<LocalId<'db>, MutabilityReason>) {
fn record_usage(local: LocalId, result: &mut ArenaMap<LocalId, MutabilityReason>) {
if let it @ MutabilityReason::Unused = &mut result[local] {
*it = MutabilityReason::Not;
};
}
fn record_usage_for_operand<'db>(
arg: &Operand<'db>,
result: &mut ArenaMap<LocalId<'db>, MutabilityReason>,
) {
fn record_usage_for_operand(arg: &Operand, result: &mut ArenaMap<LocalId, MutabilityReason>) {
if let OperandKind::Copy(p) | OperandKind::Move(p) = arg.kind {
record_usage(p.local, result);
}
@ -540,10 +533,10 @@ fn record_usage_for_operand<'db>(
fn mutability_of_locals<'db>(
infcx: &InferCtxt<'db>,
env: ParamEnv<'db>,
body: &MirBody<'db>,
) -> ArenaMap<LocalId<'db>, MutabilityReason> {
body: &MirBody,
) -> ArenaMap<LocalId, MutabilityReason> {
let db = infcx.interner.db;
let mut result: ArenaMap<LocalId<'db>, MutabilityReason> =
let mut result: ArenaMap<LocalId, MutabilityReason> =
body.locals.iter().map(|it| (it.0, MutabilityReason::Unused)).collect();
let ever_init_maps = ever_initialized_map(db, body);

View file

@ -17,6 +17,7 @@ use hir_def::{
use hir_expand::{InFile, mod_path::path, name::Name};
use intern::sym;
use la_arena::ArenaMap;
use macros::GenericTypeVisitable;
use rustc_abi::TargetDataLayout;
use rustc_apfloat::{
Float,
@ -42,8 +43,8 @@ use crate::{
layout::{Layout, LayoutError, RustcEnumVariantIdx},
method_resolution::{is_dyn_method, lookup_impl_const},
next_solver::{
Const, ConstBytes, ConstKind, DbInterner, ErrorGuaranteed, GenericArgs, Region, Ty, TyKind,
TypingMode, UnevaluatedConst, ValueConst,
Const, ConstBytes, ConstKind, DbInterner, ErrorGuaranteed, GenericArgs, Region,
StoredConst, StoredTy, Ty, TyKind, TypingMode, UnevaluatedConst, ValueConst,
infer::{DbInternerInferExt, InferCtxt, traits::ObligationCause},
obligation_ctxt::ObligationCtxt,
},
@ -83,7 +84,7 @@ macro_rules! not_supported {
};
}
#[derive(Debug, Default, Clone, PartialEq, Eq)]
#[derive(Debug, Default, Clone, PartialEq, Eq, GenericTypeVisitable)]
pub struct VTableMap<'db> {
ty_to_id: FxHashMap<Ty<'db>, usize>,
id_to_ty: Vec<Ty<'db>>,
@ -150,16 +151,16 @@ impl TlsData {
}
}
struct StackFrame<'db> {
locals: Locals<'db>,
destination: Option<BasicBlockId<'db>>,
struct StackFrame {
locals: Locals,
destination: Option<BasicBlockId>,
prev_stack_ptr: usize,
span: (MirSpan, DefWithBodyId),
}
#[derive(Clone)]
enum MirOrDynIndex<'db> {
Mir(Arc<MirBody<'db>>),
enum MirOrDynIndex {
Mir(Arc<MirBody>),
Dyn(usize),
}
@ -169,7 +170,7 @@ pub struct Evaluator<'db> {
target_data_layout: Arc<TargetDataLayout>,
stack: Vec<u8>,
heap: Vec<u8>,
code_stack: Vec<StackFrame<'db>>,
code_stack: Vec<StackFrame>,
/// Stores the global location of the statics. We const evaluate every static first time we need it
/// and see it's missing, then we add it to this to reuse.
static_locations: FxHashMap<StaticId, Address>,
@ -182,13 +183,13 @@ pub struct Evaluator<'db> {
stdout: Vec<u8>,
stderr: Vec<u8>,
layout_cache: RefCell<FxHashMap<Ty<'db>, Arc<Layout>>>,
projected_ty_cache: RefCell<FxHashMap<(Ty<'db>, PlaceElem<'db>), Ty<'db>>>,
projected_ty_cache: RefCell<FxHashMap<(Ty<'db>, PlaceElem), Ty<'db>>>,
not_special_fn_cache: RefCell<FxHashSet<FunctionId>>,
mir_or_dyn_index_cache: RefCell<FxHashMap<(FunctionId, GenericArgs<'db>), MirOrDynIndex<'db>>>,
/// Constantly dropping and creating `Locals<'db>` is very costly. We store
mir_or_dyn_index_cache: RefCell<FxHashMap<(FunctionId, GenericArgs<'db>), MirOrDynIndex>>,
/// Constantly dropping and creating `Locals` is very costly. We store
/// old locals that we normally want to drop here, to reuse their allocations
/// later.
unused_locals_store: RefCell<FxHashMap<DefWithBodyId, Vec<Locals<'db>>>>,
unused_locals_store: RefCell<FxHashMap<DefWithBodyId, Vec<Locals>>>,
cached_ptr_size: usize,
cached_fn_trait_func: Option<FunctionId>,
cached_fn_mut_trait_func: Option<FunctionId>,
@ -261,7 +262,7 @@ impl<'db> IntervalAndTy<'db> {
addr: Address,
ty: Ty<'db>,
evaluator: &Evaluator<'db>,
locals: &Locals<'db>,
locals: &Locals,
) -> Result<'db, IntervalAndTy<'db>> {
let size = evaluator.size_of_sized(ty, locals, "type of interval")?;
Ok(IntervalAndTy { interval: Interval { addr, size }, ty })
@ -340,22 +341,22 @@ impl Address {
}
#[derive(Clone, PartialEq, Eq)]
pub enum MirEvalError<'db> {
ConstEvalError(String, Box<ConstEvalError<'db>>),
LayoutError(LayoutError, Ty<'db>),
pub enum MirEvalError {
ConstEvalError(String, Box<ConstEvalError>),
LayoutError(LayoutError, StoredTy),
TargetDataLayoutNotAvailable(TargetLoadError),
/// Means that code had undefined behavior. We don't try to actively detect UB, but if it was detected
/// then use this type of error.
UndefinedBehavior(String),
Panic(String),
// FIXME: This should be folded into ConstEvalError?
MirLowerError(FunctionId, MirLowerError<'db>),
MirLowerErrorForClosure(InternedClosureId, MirLowerError<'db>),
TypeIsUnsized(Ty<'db>, &'static str),
MirLowerError(FunctionId, MirLowerError),
MirLowerErrorForClosure(InternedClosureId, MirLowerError),
TypeIsUnsized(StoredTy, &'static str),
NotSupported(String),
InvalidConst(Const<'db>),
InvalidConst(StoredConst),
InFunction(
Box<MirEvalError<'db>>,
Box<MirEvalError>,
Vec<(Either<FunctionId, InternedClosureId>, MirSpan, DefWithBodyId)>,
),
ExecutionLimitExceeded,
@ -363,12 +364,12 @@ pub enum MirEvalError<'db> {
/// FIXME: Fold this into InternalError
InvalidVTableId(usize),
/// ?
CoerceUnsizedError(Ty<'db>),
CoerceUnsizedError(StoredTy),
/// These should not occur, usually indicates a bug in mir lowering.
InternalError(Box<str>),
}
impl MirEvalError<'_> {
impl MirEvalError {
pub fn pretty_print(
&self,
f: &mut String,
@ -432,7 +433,9 @@ impl MirEvalError<'_> {
write!(
f,
"Layout for type `{}` is not available due {err:?}",
ty.display(db, display_target).with_closure_style(ClosureStyle::ClosureWithId)
ty.as_ref()
.display(db, display_target)
.with_closure_style(ClosureStyle::ClosureWithId)
)?;
}
MirEvalError::MirLowerError(func, err) => {
@ -495,7 +498,7 @@ impl MirEvalError<'_> {
}
}
impl std::fmt::Debug for MirEvalError<'_> {
impl std::fmt::Debug for MirEvalError {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
match self {
Self::ConstEvalError(arg0, arg1) => {
@ -534,15 +537,15 @@ impl std::fmt::Debug for MirEvalError<'_> {
}
}
type Result<'db, T> = std::result::Result<T, MirEvalError<'db>>;
type Result<'db, T> = std::result::Result<T, MirEvalError>;
#[derive(Debug, Default)]
struct DropFlags<'db> {
need_drop: FxHashSet<Place<'db>>,
struct DropFlags {
need_drop: FxHashSet<Place>,
}
impl<'db> DropFlags<'db> {
fn add_place(&mut self, p: Place<'db>, store: &ProjectionStore<'db>) {
impl DropFlags {
fn add_place(&mut self, p: Place, store: &ProjectionStore) {
if p.iterate_over_parents(store).any(|it| self.need_drop.contains(&it)) {
return;
}
@ -550,7 +553,7 @@ impl<'db> DropFlags<'db> {
self.need_drop.insert(p);
}
fn remove_place(&mut self, p: &Place<'db>, store: &ProjectionStore<'db>) -> bool {
fn remove_place(&mut self, p: &Place, store: &ProjectionStore) -> bool {
// FIXME: replace parents with parts
if let Some(parent) = p.iterate_over_parents(store).find(|it| self.need_drop.contains(it)) {
self.need_drop.remove(&parent);
@ -565,10 +568,10 @@ impl<'db> DropFlags<'db> {
}
#[derive(Debug)]
struct Locals<'db> {
ptr: ArenaMap<LocalId<'db>, Interval>,
body: Arc<MirBody<'db>>,
drop_flags: DropFlags<'db>,
struct Locals {
ptr: ArenaMap<LocalId, Interval>,
body: Arc<MirBody>,
drop_flags: DropFlags,
}
pub struct MirOutput {
@ -587,7 +590,7 @@ impl MirOutput {
pub fn interpret_mir<'db>(
db: &'db dyn HirDatabase,
body: Arc<MirBody<'db>>,
body: Arc<MirBody>,
// FIXME: This is workaround. Ideally, const generics should have a separate body (issue #7434), but now
// they share their body with their parent, so in MIR lowering we have locals of the parent body, which
// might have placeholders. With this argument, we (wrongly) assume that every placeholder type has
@ -596,7 +599,7 @@ pub fn interpret_mir<'db>(
assert_placeholder_ty_is_unused: bool,
trait_env: Option<ParamEnvAndCrate<'db>>,
) -> Result<'db, (Result<'db, Const<'db>>, MirOutput)> {
let ty = body.locals[return_slot()].ty;
let ty = body.locals[return_slot()].ty.as_ref();
let mut evaluator = Evaluator::new(db, body.owner, assert_placeholder_ty_is_unused, trait_env)?;
let it: Result<'db, Const<'db>> = (|| {
if evaluator.ptr_size() != size_of::<usize>() {
@ -694,11 +697,11 @@ impl<'db> Evaluator<'db> {
self.infcx.interner.lang_items()
}
fn place_addr(&self, p: &Place<'db>, locals: &Locals<'db>) -> Result<'db, Address> {
fn place_addr(&self, p: &Place, locals: &Locals) -> Result<'db, Address> {
Ok(self.place_addr_and_ty_and_metadata(p, locals)?.0)
}
fn place_interval(&self, p: &Place<'db>, locals: &Locals<'db>) -> Result<'db, Interval> {
fn place_interval(&self, p: &Place, locals: &Locals) -> Result<'db, Interval> {
let place_addr_and_ty = self.place_addr_and_ty_and_metadata(p, locals)?;
Ok(Interval {
addr: place_addr_and_ty.0,
@ -714,7 +717,7 @@ impl<'db> Evaluator<'db> {
self.cached_ptr_size
}
fn projected_ty(&self, ty: Ty<'db>, proj: PlaceElem<'db>) -> Ty<'db> {
fn projected_ty(&self, ty: Ty<'db>, proj: PlaceElem) -> Ty<'db> {
let pair = (ty, proj);
if let Some(r) = self.projected_ty_cache.borrow().get(&pair) {
return *r;
@ -733,6 +736,7 @@ impl<'db> Evaluator<'db> {
.get(f)
.expect("broken closure field")
.ty
.get()
.instantiate(self.interner(), parent_subst)
},
self.crate_id,
@ -743,11 +747,11 @@ impl<'db> Evaluator<'db> {
fn place_addr_and_ty_and_metadata<'a>(
&'a self,
p: &Place<'db>,
locals: &'a Locals<'db>,
p: &Place,
locals: &'a Locals,
) -> Result<'db, (Address, Ty<'db>, Option<IntervalOrOwned>)> {
let mut addr = locals.ptr[p.local].addr;
let mut ty: Ty<'db> = locals.body.locals[p.local].ty;
let mut ty: Ty<'db> = locals.body.locals[p.local].ty.as_ref();
let mut metadata: Option<IntervalOrOwned> = None; // locals are always sized
for proj in p.projection.lookup(&locals.body.projection_store) {
let prev_ty = ty;
@ -868,8 +872,8 @@ impl<'db> Evaluator<'db> {
}
let r = self
.db
.layout_of_ty(ty, self.param_env)
.map_err(|e| MirEvalError::LayoutError(e, ty))?;
.layout_of_ty(ty.store(), self.param_env.store())
.map_err(|e| MirEvalError::LayoutError(e, ty.store()))?;
self.layout_cache.borrow_mut().insert(ty, r.clone());
Ok(r)
}
@ -878,17 +882,17 @@ impl<'db> Evaluator<'db> {
self.layout(Ty::new_adt(self.interner(), adt, subst))
}
fn place_ty<'a>(&'a self, p: &Place<'db>, locals: &'a Locals<'db>) -> Result<'db, Ty<'db>> {
fn place_ty<'a>(&'a self, p: &Place, locals: &'a Locals) -> Result<'db, Ty<'db>> {
Ok(self.place_addr_and_ty_and_metadata(p, locals)?.1)
}
fn operand_ty(&self, o: &Operand<'db>, locals: &Locals<'db>) -> Result<'db, Ty<'db>> {
fn operand_ty(&self, o: &Operand, locals: &Locals) -> Result<'db, Ty<'db>> {
Ok(match &o.kind {
OperandKind::Copy(p) | OperandKind::Move(p) => self.place_ty(p, locals)?,
OperandKind::Constant { konst: _, ty } => *ty,
OperandKind::Constant { konst: _, ty } => ty.as_ref(),
&OperandKind::Static(s) => {
let ty =
InferenceResult::for_body(self.db, s.into())[self.db.body(s.into()).body_expr];
let ty = InferenceResult::for_body(self.db, s.into())
.expr_ty(self.db.body(s.into()).body_expr);
Ty::new_ref(
self.interner(),
Region::new_static(self.interner()),
@ -901,8 +905,8 @@ impl<'db> Evaluator<'db> {
fn operand_ty_and_eval(
&mut self,
o: &Operand<'db>,
locals: &mut Locals<'db>,
o: &Operand,
locals: &mut Locals,
) -> Result<'db, IntervalAndTy<'db>> {
Ok(IntervalAndTy {
interval: self.eval_operand(o, locals)?,
@ -912,7 +916,7 @@ impl<'db> Evaluator<'db> {
fn interpret_mir(
&mut self,
body: Arc<MirBody<'db>>,
body: Arc<MirBody>,
args: impl Iterator<Item = IntervalOrOwned>,
) -> Result<'db, Interval> {
if let Some(it) = self.stack_depth_limit.checked_sub(1) {
@ -1076,8 +1080,8 @@ impl<'db> Evaluator<'db> {
fn fill_locals_for_body(
&mut self,
body: &MirBody<'db>,
locals: &mut Locals<'db>,
body: &MirBody,
locals: &mut Locals,
args: impl Iterator<Item = IntervalOrOwned>,
) -> Result<'db, ()> {
let mut remain_args = body.param_locals.len();
@ -1100,9 +1104,9 @@ impl<'db> Evaluator<'db> {
fn create_locals_for_body(
&mut self,
body: &Arc<MirBody<'db>>,
body: &Arc<MirBody>,
destination: Option<Interval>,
) -> Result<'db, (Locals<'db>, usize)> {
) -> Result<'db, (Locals, usize)> {
let mut locals =
match self.unused_locals_store.borrow_mut().entry(body.owner).or_default().pop() {
None => Locals {
@ -1126,7 +1130,7 @@ impl<'db> Evaluator<'db> {
continue;
}
let (size, align) = self.size_align_of_sized(
it.ty,
it.ty.as_ref(),
&locals,
"no unsized local in extending stack",
)?;
@ -1149,11 +1153,7 @@ impl<'db> Evaluator<'db> {
Ok((locals, prev_stack_pointer))
}
fn eval_rvalue(
&mut self,
r: &Rvalue<'db>,
locals: &mut Locals<'db>,
) -> Result<'db, IntervalOrOwned> {
fn eval_rvalue(&mut self, r: &Rvalue, locals: &mut Locals) -> Result<'db, IntervalOrOwned> {
use IntervalOrOwned::*;
Ok(match r {
Rvalue::Use(it) => Borrowed(self.eval_operand(it, locals)?),
@ -1445,7 +1445,7 @@ impl<'db> Evaluator<'db> {
Owned(result.to_le_bytes().to_vec())
}
Rvalue::Repeat(it, len) => {
let len = match try_const_usize(self.db, *len) {
let len = match try_const_usize(self.db, len.as_ref()) {
Some(it) => it as usize,
None => not_supported!("non evaluatable array len in repeat Rvalue"),
};
@ -1455,7 +1455,7 @@ impl<'db> Evaluator<'db> {
}
Rvalue::ShallowInitBox(_, _) => not_supported!("shallow init box"),
Rvalue::ShallowInitBoxWithAlloc(ty) => {
let Some((size, align)) = self.size_align_of(*ty, locals)? else {
let Some((size, align)) = self.size_align_of(ty.as_ref(), locals)? else {
not_supported!("unsized box initialization");
};
let addr = self.heap_allocate(size, align)?;
@ -1477,7 +1477,7 @@ impl<'db> Evaluator<'db> {
Owned(r)
}
AggregateKind::Tuple(ty) => {
let layout = self.layout(*ty)?;
let layout = self.layout(ty.as_ref())?;
Owned(self.construct_with_layout(
layout.size.bytes_usize(),
&layout,
@ -1486,10 +1486,8 @@ impl<'db> Evaluator<'db> {
)?)
}
AggregateKind::Union(it, f) => {
let layout = self.layout_adt(
(*it).into(),
GenericArgs::new_from_iter(self.interner(), []),
)?;
let layout =
self.layout_adt((*it).into(), GenericArgs::empty(self.interner()))?;
let offset = layout
.fields
.offset(u32::from(f.local_id.into_raw()) as usize)
@ -1501,7 +1499,7 @@ impl<'db> Evaluator<'db> {
}
AggregateKind::Adt(it, subst) => {
let (size, variant_layout, tag) =
self.layout_of_variant(*it, *subst, locals)?;
self.layout_of_variant(*it, subst.as_ref(), locals)?;
Owned(self.construct_with_layout(
size,
&variant_layout,
@ -1510,7 +1508,7 @@ impl<'db> Evaluator<'db> {
)?)
}
AggregateKind::Closure(ty) => {
let layout = self.layout(*ty)?;
let layout = self.layout(ty.as_ref())?;
Owned(self.construct_with_layout(
layout.size.bytes_usize(),
&layout,
@ -1537,7 +1535,7 @@ impl<'db> Evaluator<'db> {
PointerCast::Unsize => {
let current_ty = self.operand_ty(operand, locals)?;
let addr = self.eval_operand(operand, locals)?;
self.coerce_unsized(addr, current_ty, *target_ty)?
self.coerce_unsized(addr, current_ty, target_ty.as_ref())?
}
PointerCast::MutToConstPointer | PointerCast::UnsafeFnPointer => {
// This is no-op
@ -1556,8 +1554,11 @@ impl<'db> Evaluator<'db> {
let current_ty = self.operand_ty(operand, locals)?;
let is_signed = matches!(current_ty.kind(), TyKind::Int(_));
let current = pad16(self.eval_operand(operand, locals)?.get(self)?, is_signed);
let dest_size =
self.size_of_sized(*target_ty, locals, "destination of int to int cast")?;
let dest_size = self.size_of_sized(
target_ty.as_ref(),
locals,
"destination of int to int cast",
)?;
Owned(current[0..dest_size].to_vec())
}
CastKind::FloatToInt => {
@ -1579,9 +1580,12 @@ impl<'db> Evaluator<'db> {
not_supported!("unstable floating point type f16 and f128");
}
};
let is_signed = matches!(target_ty.kind(), TyKind::Int(_));
let dest_size =
self.size_of_sized(*target_ty, locals, "destination of float to int cast")?;
let is_signed = matches!(target_ty.as_ref().kind(), TyKind::Int(_));
let dest_size = self.size_of_sized(
target_ty.as_ref(),
locals,
"destination of float to int cast",
)?;
let dest_bits = dest_size * 8;
let (max, min) = if dest_bits == 128 {
(i128::MAX, i128::MIN)
@ -1614,7 +1618,7 @@ impl<'db> Evaluator<'db> {
not_supported!("unstable floating point type f16 and f128");
}
};
let TyKind::Float(target_ty) = target_ty.kind() else {
let TyKind::Float(target_ty) = target_ty.as_ref().kind() else {
not_supported!("invalid float to float cast");
};
match target_ty {
@ -1630,7 +1634,7 @@ impl<'db> Evaluator<'db> {
let is_signed = matches!(current_ty.kind(), TyKind::Int(_));
let value = pad16(self.eval_operand(operand, locals)?.get(self)?, is_signed);
let value = i128::from_le_bytes(value);
let TyKind::Float(target_ty) = target_ty.kind() else {
let TyKind::Float(target_ty) = target_ty.as_ref().kind() else {
not_supported!("invalid int to float cast");
};
match target_ty {
@ -1709,12 +1713,12 @@ impl<'db> Evaluator<'db> {
{
let field_types = self.db.field_types(struct_id.into());
if let Some(ty) =
field_types.iter().last().map(|it| it.1.instantiate(self.interner(), subst))
field_types.iter().last().map(|it| it.1.get().instantiate(self.interner(), subst))
{
return self.coerce_unsized_look_through_fields(ty, goal);
}
}
Err(MirEvalError::CoerceUnsizedError(ty))
Err(MirEvalError::CoerceUnsizedError(ty.store()))
}
fn coerce_unsized(
@ -1787,8 +1791,10 @@ impl<'db> Evaluator<'db> {
not_supported!("unsizing struct without field");
};
let target_last_field = self.db.field_types(id.into())[last_field]
.get()
.instantiate(self.interner(), target_subst);
let current_last_field = self.db.field_types(id.into())[last_field]
.get()
.instantiate(self.interner(), current_subst);
return self.unsizing_ptr_from_addr(
target_last_field,
@ -1806,7 +1812,7 @@ impl<'db> Evaluator<'db> {
&mut self,
it: VariantId,
subst: GenericArgs<'db>,
locals: &Locals<'db>,
locals: &Locals,
) -> Result<'db, (usize, Arc<Layout>, Option<(usize, usize, i128)>)> {
let adt = it.adt_id(self.db);
if let DefWithBodyId::VariantId(f) = locals.body.owner
@ -1900,11 +1906,7 @@ impl<'db> Evaluator<'db> {
Ok(result)
}
fn eval_operand(
&mut self,
it: &Operand<'db>,
locals: &mut Locals<'db>,
) -> Result<'db, Interval> {
fn eval_operand(&mut self, it: &Operand, locals: &mut Locals) -> Result<'db, Interval> {
Ok(match &it.kind {
OperandKind::Copy(p) | OperandKind::Move(p) => {
locals.drop_flags.remove_place(p, &locals.body.projection_store);
@ -1914,14 +1916,16 @@ impl<'db> Evaluator<'db> {
let addr = self.eval_static(*st, locals)?;
Interval::new(addr, self.ptr_size())
}
OperandKind::Constant { konst, .. } => self.allocate_const_in_heap(locals, *konst)?,
OperandKind::Constant { konst, .. } => {
self.allocate_const_in_heap(locals, konst.as_ref())?
}
})
}
#[allow(clippy::double_parens)]
fn allocate_const_in_heap(
&mut self,
locals: &Locals<'db>,
locals: &Locals,
konst: Const<'db>,
) -> Result<'db, Interval> {
let result_owner;
@ -1971,7 +1975,7 @@ impl<'db> Evaluator<'db> {
} else if size < 16 && v.len() == 16 {
Cow::Borrowed(&v[0..size])
} else {
return Err(MirEvalError::InvalidConst(konst));
return Err(MirEvalError::InvalidConst(konst.store()));
}
} else {
Cow::Borrowed(v)
@ -1993,7 +1997,7 @@ impl<'db> Evaluator<'db> {
Ok(Interval::new(addr, size))
}
fn eval_place(&mut self, p: &Place<'db>, locals: &Locals<'db>) -> Result<'db, Interval> {
fn eval_place(&mut self, p: &Place, locals: &Locals) -> Result<'db, Interval> {
let addr = self.place_addr(p, locals)?;
Ok(Interval::new(
addr,
@ -2093,11 +2097,7 @@ impl<'db> Evaluator<'db> {
Ok(())
}
fn size_align_of(
&self,
ty: Ty<'db>,
locals: &Locals<'db>,
) -> Result<'db, Option<(usize, usize)>> {
fn size_align_of(&self, ty: Ty<'db>, locals: &Locals) -> Result<'db, Option<(usize, usize)>> {
if let Some(layout) = self.layout_cache.borrow().get(&ty) {
return Ok(layout
.is_sized()
@ -2126,12 +2126,12 @@ impl<'db> Evaluator<'db> {
fn size_of_sized(
&self,
ty: Ty<'db>,
locals: &Locals<'db>,
locals: &Locals,
what: &'static str,
) -> Result<'db, usize> {
match self.size_align_of(ty, locals)? {
Some(it) => Ok(it.0),
None => Err(MirEvalError::TypeIsUnsized(ty, what)),
None => Err(MirEvalError::TypeIsUnsized(ty.store(), what)),
}
}
@ -2140,12 +2140,12 @@ impl<'db> Evaluator<'db> {
fn size_align_of_sized(
&self,
ty: Ty<'db>,
locals: &Locals<'db>,
locals: &Locals,
what: &'static str,
) -> Result<'db, (usize, usize)> {
match self.size_align_of(ty, locals)? {
Some(it) => Ok(it),
None => Err(MirEvalError::TypeIsUnsized(ty, what)),
None => Err(MirEvalError::TypeIsUnsized(ty.store(), what)),
}
}
@ -2181,13 +2181,13 @@ impl<'db> Evaluator<'db> {
&self,
bytes: &[u8],
ty: Ty<'db>,
locals: &Locals<'db>,
locals: &Locals,
) -> Result<'db, ComplexMemoryMap<'db>> {
fn rec<'db>(
this: &Evaluator<'db>,
bytes: &[u8],
ty: Ty<'db>,
locals: &Locals<'db>,
locals: &Locals,
mm: &mut ComplexMemoryMap<'db>,
stack_depth_limit: usize,
) -> Result<'db, ()> {
@ -2288,7 +2288,7 @@ impl<'db> Evaluator<'db> {
.fields
.offset(u32::from(f.into_raw()) as usize)
.bytes_usize();
let ty = field_types[f].instantiate(this.interner(), subst);
let ty = field_types[f].get().instantiate(this.interner(), subst);
let size = this.layout(ty)?.size.bytes_usize();
rec(
this,
@ -2314,7 +2314,7 @@ impl<'db> Evaluator<'db> {
for (f, _) in data.fields().iter() {
let offset =
l.fields.offset(u32::from(f.into_raw()) as usize).bytes_usize();
let ty = field_types[f].instantiate(this.interner(), subst);
let ty = field_types[f].get().instantiate(this.interner(), subst);
let size = this.layout(ty)?.size.bytes_usize();
rec(
this,
@ -2356,7 +2356,7 @@ impl<'db> Evaluator<'db> {
ty_of_bytes: impl Fn(&[u8]) -> Result<'db, Ty<'db>> + Copy,
addr: Address,
ty: Ty<'db>,
locals: &Locals<'db>,
locals: &Locals,
) -> Result<'db, ()> {
// FIXME: support indirect references
let layout = self.layout(ty)?;
@ -2389,7 +2389,7 @@ impl<'db> Evaluator<'db> {
AdtId::StructId(s) => {
for (i, (_, ty)) in self.db.field_types(s.into()).iter().enumerate() {
let offset = layout.fields.offset(i).bytes_usize();
let ty = ty.instantiate(self.interner(), args);
let ty = ty.get().instantiate(self.interner(), args);
self.patch_addresses(
patch_map,
ty_of_bytes,
@ -2410,7 +2410,7 @@ impl<'db> Evaluator<'db> {
) {
for (i, (_, ty)) in self.db.field_types(ev.into()).iter().enumerate() {
let offset = layout.fields.offset(i).bytes_usize();
let ty = ty.instantiate(self.interner(), args);
let ty = ty.get().instantiate(self.interner(), args);
self.patch_addresses(
patch_map,
ty_of_bytes,
@ -2477,10 +2477,10 @@ impl<'db> Evaluator<'db> {
bytes: Interval,
destination: Interval,
args: &[IntervalAndTy<'db>],
locals: &Locals<'db>,
target_bb: Option<BasicBlockId<'db>>,
locals: &Locals,
target_bb: Option<BasicBlockId>,
span: MirSpan,
) -> Result<'db, Option<StackFrame<'db>>> {
) -> Result<'db, Option<StackFrame>> {
let id = from_bytes!(usize, bytes.get(self)?);
let next_ty = self.vtable_map.ty(id)?;
use rustc_type_ir::TyKind;
@ -2508,19 +2508,23 @@ impl<'db> Evaluator<'db> {
generic_args: GenericArgs<'db>,
destination: Interval,
args: &[IntervalAndTy<'db>],
locals: &Locals<'db>,
locals: &Locals,
span: MirSpan,
) -> Result<'db, Option<StackFrame<'db>>> {
) -> Result<'db, Option<StackFrame>> {
let mir_body = self
.db
.monomorphized_mir_body_for_closure(closure, generic_args, self.param_env)
.monomorphized_mir_body_for_closure(
closure,
generic_args.store(),
self.param_env.store(),
)
.map_err(|it| MirEvalError::MirLowerErrorForClosure(closure, it))?;
let closure_data = if mir_body.locals[mir_body.param_locals[0]].ty.as_reference().is_some()
{
closure_data.addr.to_bytes().to_vec()
} else {
closure_data.get(self)?.to_owned()
};
let closure_data =
if mir_body.locals[mir_body.param_locals[0]].ty.as_ref().as_reference().is_some() {
closure_data.addr.to_bytes().to_vec()
} else {
closure_data.get(self)?.to_owned()
};
let arg_bytes = iter::once(Ok(closure_data))
.chain(args.iter().map(|it| Ok(it.get(self)?.to_owned())))
.collect::<Result<'db, Vec<_>>>()?;
@ -2542,10 +2546,10 @@ impl<'db> Evaluator<'db> {
generic_args: GenericArgs<'db>,
destination: Interval,
args: &[IntervalAndTy<'db>],
locals: &Locals<'db>,
target_bb: Option<BasicBlockId<'db>>,
locals: &Locals,
target_bb: Option<BasicBlockId>,
span: MirSpan,
) -> Result<'db, Option<StackFrame<'db>>> {
) -> Result<'db, Option<StackFrame>> {
match def {
CallableDefId::FunctionId(def) => {
if self.detect_fn_trait(def).is_some() {
@ -2600,9 +2604,9 @@ impl<'db> Evaluator<'db> {
&self,
def: FunctionId,
generic_args: GenericArgs<'db>,
locals: &Locals<'db>,
locals: &Locals,
span: MirSpan,
) -> Result<'db, MirOrDynIndex<'db>> {
) -> Result<'db, MirOrDynIndex> {
let pair = (def, generic_args);
if let Some(r) = self.mir_or_dyn_index_cache.borrow().get(&pair) {
return Ok(r.clone());
@ -2621,7 +2625,7 @@ impl<'db> Evaluator<'db> {
let mir_body = self
.db
.monomorphized_mir_body(imp.into(), generic_args, self.param_env)
.monomorphized_mir_body(imp.into(), generic_args.store(), self.param_env.store())
.map_err(|e| {
MirEvalError::InFunction(
Box::new(MirEvalError::MirLowerError(imp, e)),
@ -2639,11 +2643,11 @@ impl<'db> Evaluator<'db> {
mut def: FunctionId,
args: &[IntervalAndTy<'db>],
generic_args: GenericArgs<'db>,
locals: &Locals<'db>,
locals: &Locals,
destination: Interval,
target_bb: Option<BasicBlockId<'db>>,
target_bb: Option<BasicBlockId>,
span: MirSpan,
) -> Result<'db, Option<StackFrame<'db>>> {
) -> Result<'db, Option<StackFrame>> {
if self.detect_and_exec_special_function(
def,
args,
@ -2705,14 +2709,14 @@ impl<'db> Evaluator<'db> {
fn exec_looked_up_function(
&mut self,
mir_body: Arc<MirBody<'db>>,
locals: &Locals<'db>,
mir_body: Arc<MirBody>,
locals: &Locals,
def: FunctionId,
arg_bytes: impl Iterator<Item = IntervalOrOwned>,
span: MirSpan,
destination: Interval,
target_bb: Option<BasicBlockId<'db>>,
) -> Result<'db, Option<StackFrame<'db>>> {
target_bb: Option<BasicBlockId>,
) -> Result<'db, Option<StackFrame>> {
Ok(if let Some(target_bb) = target_bb {
let (mut locals, prev_stack_ptr) =
self.create_locals_for_body(&mir_body, Some(destination))?;
@ -2736,11 +2740,11 @@ impl<'db> Evaluator<'db> {
def: FunctionId,
args: &[IntervalAndTy<'db>],
generic_args: GenericArgs<'db>,
locals: &Locals<'db>,
locals: &Locals,
destination: Interval,
target_bb: Option<BasicBlockId<'db>>,
target_bb: Option<BasicBlockId>,
span: MirSpan,
) -> Result<'db, Option<StackFrame<'db>>> {
) -> Result<'db, Option<StackFrame>> {
let func = args
.first()
.ok_or_else(|| MirEvalError::InternalError("fn trait with no arg".into()))?;
@ -2767,7 +2771,7 @@ impl<'db> Evaluator<'db> {
TyKind::Closure(closure, subst) => self.exec_closure(
closure.0,
func_data,
subst.split_closure_args_untupled().parent_args,
GenericArgs::new_from_slice(subst.split_closure_args_untupled().parent_args),
destination,
&args[1..],
locals,
@ -2805,7 +2809,7 @@ impl<'db> Evaluator<'db> {
}
}
fn eval_static(&mut self, st: StaticId, locals: &Locals<'db>) -> Result<'db, Address> {
fn eval_static(&mut self, st: StaticId, locals: &Locals) -> Result<'db, Address> {
if let Some(o) = self.static_locations.get(&st) {
return Ok(*o);
};
@ -2816,8 +2820,8 @@ impl<'db> Evaluator<'db> {
})?;
self.allocate_const_in_heap(locals, konst)?
} else {
let ty =
InferenceResult::for_body(self.db, st.into())[self.db.body(st.into()).body_expr];
let ty = InferenceResult::for_body(self.db, st.into())
.expr_ty(self.db.body(st.into()).body_expr);
let Some((size, align)) = self.size_align_of(ty, locals)? else {
not_supported!("unsized extern static");
};
@ -2852,12 +2856,7 @@ impl<'db> Evaluator<'db> {
}
}
fn drop_place(
&mut self,
place: &Place<'db>,
locals: &mut Locals<'db>,
span: MirSpan,
) -> Result<'db, ()> {
fn drop_place(&mut self, place: &Place, locals: &mut Locals, span: MirSpan) -> Result<'db, ()> {
let (addr, ty, metadata) = self.place_addr_and_ty_and_metadata(place, locals)?;
if !locals.drop_flags.remove_place(place, &locals.body.projection_store) {
return Ok(());
@ -2872,7 +2871,7 @@ impl<'db> Evaluator<'db> {
fn run_drop_glue_deep(
&mut self,
ty: Ty<'db>,
locals: &Locals<'db>,
locals: &Locals,
addr: Address,
_metadata: &[u8],
span: MirSpan,
@ -2886,7 +2885,7 @@ impl<'db> Evaluator<'db> {
return Ok(());
};
let generic_args = GenericArgs::new_from_iter(self.interner(), [ty.into()]);
let generic_args = GenericArgs::new_from_slice(&[ty.into()]);
if let Ok(MirOrDynIndex::Mir(body)) =
self.get_mir_or_dyn_index(drop_fn, generic_args, locals, span)
{
@ -2920,7 +2919,9 @@ impl<'db> Evaluator<'db> {
.offset(u32::from(field.into_raw()) as usize)
.bytes_usize();
let addr = addr.offset(offset);
let ty = field_types[field].instantiate(self.interner(), subst);
let ty = field_types[field]
.get()
.instantiate(self.interner(), subst);
self.run_drop_glue_deep(ty, locals, addr, &[], span)?;
}
}
@ -3011,7 +3012,7 @@ pub fn render_const_using_debug_impl<'db>(
let debug_fmt_fn_ptr = evaluator.vtable_map.id(Ty::new_fn_def(
evaluator.interner(),
CallableDefId::FunctionId(debug_fmt_fn).into(),
GenericArgs::new_from_iter(evaluator.interner(), [ty.into()]),
GenericArgs::new_from_slice(&[ty.into()]),
));
evaluator.write_memory(a2.offset(evaluator.ptr_size()), &debug_fmt_fn_ptr.to_le_bytes())?;
// a3 = ::core::fmt::Arguments::new_v1(a1, a2)

View file

@ -52,7 +52,7 @@ impl<'db> Evaluator<'db> {
def: FunctionId,
args: &[IntervalAndTy<'db>],
generic_args: GenericArgs<'db>,
locals: &Locals<'db>,
locals: &Locals,
destination: Interval,
span: MirSpan,
) -> Result<'db, bool> {
@ -149,7 +149,7 @@ impl<'db> Evaluator<'db> {
def: FunctionId,
args: &[IntervalAndTy<'db>],
self_ty: Ty<'db>,
locals: &Locals<'db>,
locals: &Locals,
destination: Interval,
span: MirSpan,
) -> Result<'db, ()> {
@ -195,7 +195,7 @@ impl<'db> Evaluator<'db> {
self.exec_fn_with_args(
def,
args,
GenericArgs::new_from_iter(self.interner(), [self_ty.into()]),
GenericArgs::new_from_slice(&[self_ty.into()]),
locals,
destination,
None,
@ -212,7 +212,7 @@ impl<'db> Evaluator<'db> {
layout: Arc<Layout>,
addr: Address,
def: FunctionId,
locals: &Locals<'db>,
locals: &Locals,
destination: Interval,
span: MirSpan,
) -> Result<'db, ()> {
@ -318,7 +318,7 @@ impl<'db> Evaluator<'db> {
it: EvalLangItem,
generic_args: GenericArgs<'db>,
args: &[IntervalAndTy<'db>],
locals: &Locals<'db>,
locals: &Locals,
span: MirSpan,
) -> Result<'db, Vec<u8>> {
use EvalLangItem::*;
@ -390,7 +390,7 @@ impl<'db> Evaluator<'db> {
id: i64,
args: &[IntervalAndTy<'db>],
destination: Interval,
_locals: &Locals<'db>,
_locals: &Locals,
_span: MirSpan,
) -> Result<'db, ()> {
match id {
@ -421,7 +421,7 @@ impl<'db> Evaluator<'db> {
args: &[IntervalAndTy<'db>],
_generic_args: GenericArgs<'db>,
destination: Interval,
locals: &Locals<'db>,
locals: &Locals,
span: MirSpan,
) -> Result<'db, ()> {
match as_str {
@ -587,7 +587,7 @@ impl<'db> Evaluator<'db> {
args: &[IntervalAndTy<'db>],
generic_args: GenericArgs<'db>,
destination: Interval,
locals: &Locals<'db>,
locals: &Locals,
span: MirSpan,
needs_override: bool,
) -> Result<'db, bool> {
@ -1235,7 +1235,7 @@ impl<'db> Evaluator<'db> {
def,
&args,
// FIXME: wrong for manual impls of `FnOnce`
GenericArgs::new_from_iter(self.interner(), []),
GenericArgs::empty(self.interner()),
locals,
destination,
None,
@ -1369,7 +1369,7 @@ impl<'db> Evaluator<'db> {
&mut self,
ty: Ty<'db>,
metadata: Interval,
locals: &Locals<'db>,
locals: &Locals,
) -> Result<'db, (usize, usize)> {
Ok(match ty.kind() {
TyKind::Str => (from_bytes!(usize, metadata.get(self)?), 1),
@ -1391,8 +1391,13 @@ impl<'db> Evaluator<'db> {
_ => not_supported!("unsized enum or union"),
};
let field_types = self.db.field_types(id.into());
let last_field_ty =
field_types.iter().next_back().unwrap().1.instantiate(self.interner(), subst);
let last_field_ty = field_types
.iter()
.next_back()
.unwrap()
.1
.get()
.instantiate(self.interner(), subst);
let sized_part_size =
layout.fields.offset(field_types.iter().count() - 1).bytes_usize();
let sized_part_align = layout.align.bytes() as usize;
@ -1423,7 +1428,7 @@ impl<'db> Evaluator<'db> {
args: &[IntervalAndTy<'db>],
generic_args: GenericArgs<'db>,
destination: Interval,
locals: &Locals<'db>,
locals: &Locals,
_span: MirSpan,
) -> Result<'db, ()> {
// We are a single threaded runtime with no UB checking and no optimization, so

View file

@ -35,6 +35,7 @@ impl<'db> Evaluator<'db> {
not_supported!("simd type with no field");
};
let field_ty = self.db.field_types(id.into())[first_field]
.get()
.instantiate(self.interner(), subst);
return Ok((fields.len(), field_ty));
}
@ -67,7 +68,7 @@ impl<'db> Evaluator<'db> {
args: &[IntervalAndTy<'db>],
_generic_args: GenericArgs<'db>,
destination: Interval,
_locals: &Locals<'db>,
_locals: &Locals,
_span: MirSpan,
) -> Result<'db, ()> {
match name {

View file

@ -15,7 +15,7 @@ use crate::{
use super::{MirEvalError, interpret_mir};
fn eval_main(db: &TestDB, file_id: EditionedFileId) -> Result<(String, String), MirEvalError<'_>> {
fn eval_main(db: &TestDB, file_id: EditionedFileId) -> Result<(String, String), MirEvalError> {
crate::attach_db(db, || {
let interner = DbInterner::new_no_crate(db);
let module_id = db.module_for_file(file_id.file_id(db));
@ -39,11 +39,12 @@ fn eval_main(db: &TestDB, file_id: EditionedFileId) -> Result<(String, String),
let body = db
.monomorphized_mir_body(
func_id.into(),
GenericArgs::new_from_iter(interner, []),
GenericArgs::empty(interner).store(),
crate::ParamEnvAndCrate {
param_env: db.trait_environment(func_id.into()),
krate: func_id.krate(db),
},
}
.store(),
)
.map_err(|e| MirEvalError::MirLowerError(func_id, e))?;
@ -122,7 +123,7 @@ fn check_panic(#[rust_analyzer::rust_fixture] ra_fixture: &str, expected_panic:
fn check_error_with(
#[rust_analyzer::rust_fixture] ra_fixture: &str,
expect_err: impl FnOnce(MirEvalError<'_>) -> bool,
expect_err: impl FnOnce(MirEvalError) -> bool,
) {
let (db, file_ids) = TestDB::with_many_files(ra_fixture);
crate::attach_db(&db, || {

View file

@ -19,7 +19,7 @@ use hir_expand::name::Name;
use la_arena::ArenaMap;
use rustc_apfloat::Float;
use rustc_hash::FxHashMap;
use rustc_type_ir::inherent::{Const as _, IntoKind, SliceLike, Ty as _};
use rustc_type_ir::inherent::{Const as _, IntoKind, Ty as _};
use span::{Edition, FileId};
use syntax::TextRange;
use triomphe::Arc;
@ -42,7 +42,8 @@ use crate::{
TupleFieldId, Ty, UnOp, VariantId, return_slot,
},
next_solver::{
Const, DbInterner, ParamConst, ParamEnv, Region, TyKind, TypingMode, UnevaluatedConst,
Const, DbInterner, ParamConst, ParamEnv, Region, StoredGenericArgs, StoredTy, TyKind,
TypingMode, UnevaluatedConst,
infer::{DbInternerInferExt, InferCtxt},
},
traits::FnTrait,
@ -56,39 +57,40 @@ mod pattern_matching;
mod tests;
#[derive(Debug, Clone)]
struct LoopBlocks<'db> {
begin: BasicBlockId<'db>,
struct LoopBlocks {
begin: BasicBlockId,
/// `None` for loops that are not terminating
end: Option<BasicBlockId<'db>>,
place: Place<'db>,
end: Option<BasicBlockId>,
place: Place,
drop_scope_index: usize,
}
#[derive(Debug, Clone, Default)]
struct DropScope<'db> {
struct DropScope {
/// locals, in order of definition (so we should run drop glues in reverse order)
locals: Vec<LocalId<'db>>,
locals: Vec<LocalId>,
}
struct MirLowerCtx<'a, 'db> {
result: MirBody<'db>,
result: MirBody,
owner: DefWithBodyId,
current_loop_blocks: Option<LoopBlocks<'db>>,
labeled_loop_blocks: FxHashMap<LabelId, LoopBlocks<'db>>,
discr_temp: Option<Place<'db>>,
current_loop_blocks: Option<LoopBlocks>,
labeled_loop_blocks: FxHashMap<LabelId, LoopBlocks>,
discr_temp: Option<Place>,
db: &'db dyn HirDatabase,
body: &'a Body,
infer: &'a InferenceResult<'db>,
infer: &'a InferenceResult,
types: &'db crate::next_solver::DefaultAny<'db>,
resolver: Resolver<'db>,
drop_scopes: Vec<DropScope<'db>>,
drop_scopes: Vec<DropScope>,
env: ParamEnv<'db>,
infcx: InferCtxt<'db>,
}
// FIXME: Make this smaller, its stored in database queries
#[derive(Debug, Clone, PartialEq, Eq)]
pub enum MirLowerError<'db> {
ConstEvalError(Box<str>, Box<ConstEvalError<'db>>),
pub enum MirLowerError {
ConstEvalError(Box<str>, Box<ConstEvalError>),
LayoutError(LayoutError),
IncompleteExpr,
IncompletePattern,
@ -98,9 +100,9 @@ pub enum MirLowerError<'db> {
RecordLiteralWithoutPath,
UnresolvedMethod(String),
UnresolvedField,
UnsizedTemporary(Ty<'db>),
UnsizedTemporary(StoredTy),
MissingFunctionDefinition(DefWithBodyId, ExprId),
TypeMismatch(TypeMismatch<'db>),
TypeMismatch(TypeMismatch),
HasErrors,
/// This should never happen. Type mismatch should catch everything.
TypeError(&'static str),
@ -113,11 +115,11 @@ pub enum MirLowerError<'db> {
LangItemNotFound,
MutatingRvalue,
UnresolvedLabel,
UnresolvedUpvar(Place<'db>),
UnresolvedUpvar(Place),
InaccessibleLocal,
// monomorphization errors:
GenericArgNotProvided(GenericParamId, GenericArgs<'db>),
GenericArgNotProvided(GenericParamId, StoredGenericArgs),
}
/// A token to ensuring that each drop scope is popped at most once, thanks to the compiler that checks moves.
@ -126,9 +128,9 @@ impl DropScopeToken {
fn pop_and_drop<'db>(
self,
ctx: &mut MirLowerCtx<'_, 'db>,
current: BasicBlockId<'db>,
current: BasicBlockId,
span: MirSpan,
) -> BasicBlockId<'db> {
) -> BasicBlockId {
std::mem::forget(self);
ctx.pop_drop_scope_internal(current, span)
}
@ -158,7 +160,7 @@ impl Drop for DropScopeToken {
// }
// }
impl MirLowerError<'_> {
impl MirLowerError {
pub fn pretty_print(
&self,
f: &mut String,
@ -190,8 +192,8 @@ impl MirLowerError<'_> {
MirLowerError::TypeMismatch(e) => writeln!(
f,
"Type mismatch: Expected {}, found {}",
e.expected.display(db, display_target),
e.actual.display(db, display_target),
e.expected.as_ref().display(db, display_target),
e.actual.as_ref().display(db, display_target),
)?,
MirLowerError::GenericArgNotProvided(id, subst) => {
let param_name = match *id {
@ -211,7 +213,7 @@ impl MirLowerError<'_> {
param_name.unwrap_or(Name::missing()).display(db, display_target.edition)
)?;
writeln!(f, "Provided args: [")?;
for g in subst.iter() {
for g in subst.as_ref() {
write!(f, " {},", g.display(db, display_target))?;
}
writeln!(f, "]")?;
@ -254,13 +256,13 @@ macro_rules! implementation_error {
}};
}
impl From<LayoutError> for MirLowerError<'_> {
impl From<LayoutError> for MirLowerError {
fn from(value: LayoutError) -> Self {
MirLowerError::LayoutError(value)
}
}
impl MirLowerError<'_> {
impl MirLowerError {
fn unresolved_path(
db: &dyn HirDatabase,
p: &Path,
@ -273,14 +275,14 @@ impl MirLowerError<'_> {
}
}
type Result<'db, T> = std::result::Result<T, MirLowerError<'db>>;
type Result<'db, T> = std::result::Result<T, MirLowerError>;
impl<'a, 'db> MirLowerCtx<'a, 'db> {
fn new(
db: &'db dyn HirDatabase,
owner: DefWithBodyId,
body: &'a Body,
infer: &'a InferenceResult<'db>,
infer: &'a InferenceResult,
) -> Self {
let mut basic_blocks = Arena::new();
let start_block = basic_blocks.alloc(BasicBlock {
@ -289,7 +291,7 @@ impl<'a, 'db> MirLowerCtx<'a, 'db> {
is_cleanup: false,
});
let locals = Arena::new();
let binding_locals: ArenaMap<BindingId, LocalId<'db>> = ArenaMap::new();
let binding_locals: ArenaMap<BindingId, LocalId> = ArenaMap::new();
let mir = MirBody {
projection_store: ProjectionStore::default(),
basic_blocks,
@ -311,6 +313,7 @@ impl<'a, 'db> MirLowerCtx<'a, 'db> {
db,
infer,
body,
types: crate::next_solver::default_types(db),
owner,
resolver,
current_loop_blocks: None,
@ -332,16 +335,11 @@ impl<'a, 'db> MirLowerCtx<'a, 'db> {
self.infcx.interner.lang_items()
}
fn temp(
&mut self,
ty: Ty<'db>,
current: BasicBlockId<'db>,
span: MirSpan,
) -> Result<'db, LocalId<'db>> {
fn temp(&mut self, ty: Ty<'db>, current: BasicBlockId, span: MirSpan) -> Result<'db, LocalId> {
if matches!(ty.kind(), TyKind::Slice(_) | TyKind::Dynamic(..)) {
return Err(MirLowerError::UnsizedTemporary(ty));
return Err(MirLowerError::UnsizedTemporary(ty.store()));
}
let l = self.result.locals.alloc(Local { ty });
let l = self.result.locals.alloc(Local { ty: ty.store() });
self.push_storage_live_for_local(l, current, span)?;
Ok(l)
}
@ -349,8 +347,8 @@ impl<'a, 'db> MirLowerCtx<'a, 'db> {
fn lower_expr_to_some_operand(
&mut self,
expr_id: ExprId,
current: BasicBlockId<'db>,
) -> Result<'db, Option<(Operand<'db>, BasicBlockId<'db>)>> {
current: BasicBlockId,
) -> Result<'db, Option<(Operand, BasicBlockId)>> {
if !self.has_adjustments(expr_id)
&& let Expr::Literal(l) = &self.body[expr_id]
{
@ -366,18 +364,14 @@ impl<'a, 'db> MirLowerCtx<'a, 'db> {
fn lower_expr_to_place_with_adjust(
&mut self,
expr_id: ExprId,
place: Place<'db>,
current: BasicBlockId<'db>,
adjustments: &[Adjustment<'db>],
) -> Result<'db, Option<BasicBlockId<'db>>> {
place: Place,
current: BasicBlockId,
adjustments: &[Adjustment],
) -> Result<'db, Option<BasicBlockId>> {
match adjustments.split_last() {
Some((last, rest)) => match &last.kind {
Adjust::NeverToAny => {
let temp = self.temp(
Ty::new(self.interner(), TyKind::Never),
current,
MirSpan::Unknown,
)?;
let temp = self.temp(self.types.types.never, current, MirSpan::Unknown)?;
self.lower_expr_to_place_with_adjust(expr_id, temp.into(), current, rest)
}
Adjust::Deref(_) => {
@ -416,7 +410,7 @@ impl<'a, 'db> MirLowerCtx<'a, 'db> {
Rvalue::Cast(
CastKind::PointerCoercion(*cast),
Operand { kind: OperandKind::Copy(p), span: None },
last.target,
last.target.clone(),
),
expr_id.into(),
);
@ -430,11 +424,11 @@ impl<'a, 'db> MirLowerCtx<'a, 'db> {
fn lower_expr_to_place_with_borrow_adjust(
&mut self,
expr_id: ExprId,
place: Place<'db>,
current: BasicBlockId<'db>,
rest: &[Adjustment<'db>],
place: Place,
current: BasicBlockId,
rest: &[Adjustment],
m: Mutability,
) -> Result<'db, Option<BasicBlockId<'db>>> {
) -> Result<'db, Option<BasicBlockId>> {
let Some((p, current)) =
self.lower_expr_as_place_with_adjust(current, expr_id, true, rest)?
else {
@ -448,9 +442,9 @@ impl<'a, 'db> MirLowerCtx<'a, 'db> {
fn lower_expr_to_place(
&mut self,
expr_id: ExprId,
place: Place<'db>,
prev_block: BasicBlockId<'db>,
) -> Result<'db, Option<BasicBlockId<'db>>> {
place: Place,
prev_block: BasicBlockId,
) -> Result<'db, Option<BasicBlockId>> {
if let Some(adjustments) = self.infer.expr_adjustments.get(&expr_id) {
return self.lower_expr_to_place_with_adjust(expr_id, place, prev_block, adjustments);
}
@ -460,9 +454,9 @@ impl<'a, 'db> MirLowerCtx<'a, 'db> {
fn lower_expr_to_place_without_adjust(
&mut self,
expr_id: ExprId,
place: Place<'db>,
mut current: BasicBlockId<'db>,
) -> Result<'db, Option<BasicBlockId<'db>>> {
place: Place,
mut current: BasicBlockId,
) -> Result<'db, Option<BasicBlockId>> {
match &self.body[expr_id] {
Expr::OffsetOf(_) => {
not_supported!("builtin#offset_of")
@ -537,7 +531,7 @@ impl<'a, 'db> MirLowerCtx<'a, 'db> {
const_id.into(),
current,
place,
GenericArgs::new_from_iter(self.interner(), []),
GenericArgs::empty(self.interner()),
expr_id.into(),
)?;
Ok(Some(current))
@ -545,7 +539,7 @@ impl<'a, 'db> MirLowerCtx<'a, 'db> {
ValueNs::EnumVariantId(variant_id) => {
let variant_fields = variant_id.fields(self.db);
if variant_fields.shape == FieldsShape::Unit {
let ty = self.infer.type_of_expr[expr_id];
let ty = self.infer.expr_ty(expr_id);
current = self.lower_enum_variant(
variant_id,
current,
@ -575,8 +569,9 @@ impl<'a, 'db> MirLowerCtx<'a, 'db> {
konst: Const::new_param(
self.interner(),
ParamConst { id: p, index },
),
ty: self.db.const_param_ty_ns(p),
)
.store(),
ty: self.db.const_param_ty_ns(p).store(),
},
span: None,
}),
@ -907,7 +902,7 @@ impl<'a, 'db> MirLowerCtx<'a, 'db> {
operands[u32::from(field_id.into_raw()) as usize] = Some(op);
}
let rvalue = Rvalue::Aggregate(
AggregateKind::Adt(variant_id, subst),
AggregateKind::Adt(variant_id, subst.store()),
match spread_place {
Some(sp) => operands
.into_iter()
@ -978,15 +973,15 @@ impl<'a, 'db> MirLowerCtx<'a, 'db> {
let rvalue = if self.infer.coercion_casts.contains(expr) {
Rvalue::Use(it)
} else {
let source_ty = self.infer[*expr];
let target_ty = self.infer[expr_id];
let source_ty = self.infer.expr_ty(*expr);
let target_ty = self.infer.expr_ty(expr_id);
let cast_kind = if source_ty.as_reference().is_some() {
CastKind::PointerCoercion(PointerCast::ArrayToPointer)
} else {
cast_kind(self.db, source_ty, target_ty)?
};
Rvalue::Cast(cast_kind, it, target_ty)
Rvalue::Cast(cast_kind, it, target_ty.store())
};
self.push_assignment(current, place, rvalue, expr_id.into());
Ok(Some(current))
@ -1004,7 +999,7 @@ impl<'a, 'db> MirLowerCtx<'a, 'db> {
self.push_assignment(
current,
place,
Rvalue::ShallowInitBoxWithAlloc(ty),
Rvalue::ShallowInitBoxWithAlloc(ty.store()),
expr_id.into(),
);
let Some((operand, current)) = self.lower_expr_to_some_operand(*expr, current)?
@ -1222,7 +1217,7 @@ impl<'a, 'db> MirLowerCtx<'a, 'db> {
current,
place,
Rvalue::Aggregate(
AggregateKind::Adt(st.into(), subst),
AggregateKind::Adt(st.into(), subst.store()),
st.fields(self.db)
.fields()
.iter()
@ -1284,11 +1279,10 @@ impl<'a, 'db> MirLowerCtx<'a, 'db> {
};
match &capture.kind {
CaptureKind::ByRef(bk) => {
let tmp_ty = capture.ty.instantiate_identity();
let tmp_ty = capture.ty.get().instantiate_identity();
// FIXME: Handle more than one span.
let capture_spans = capture.spans();
let tmp: Place<'db> =
self.temp(tmp_ty, current, capture_spans[0])?.into();
let tmp: Place = self.temp(tmp_ty, current, capture_spans[0])?.into();
self.push_assignment(
current,
tmp,
@ -1305,7 +1299,7 @@ impl<'a, 'db> MirLowerCtx<'a, 'db> {
self.push_assignment(
current,
place,
Rvalue::Aggregate(AggregateKind::Closure(ty), operands.into()),
Rvalue::Aggregate(AggregateKind::Closure(ty.store()), operands.into()),
expr_id.into(),
);
Ok(Some(current))
@ -1325,7 +1319,7 @@ impl<'a, 'db> MirLowerCtx<'a, 'db> {
return Ok(None);
};
let r = Rvalue::Aggregate(
AggregateKind::Tuple(self.expr_ty_without_adjust(expr_id)),
AggregateKind::Tuple(self.expr_ty_without_adjust(expr_id).store()),
values,
);
self.push_assignment(current, place, r, expr_id.into());
@ -1355,7 +1349,7 @@ impl<'a, 'db> MirLowerCtx<'a, 'db> {
else {
return Ok(None);
};
let r = Rvalue::Aggregate(AggregateKind::Array(elem_ty), values);
let r = Rvalue::Aggregate(AggregateKind::Array(elem_ty.store()), values);
self.push_assignment(current, place, r, expr_id.into());
Ok(Some(current))
}
@ -1373,7 +1367,7 @@ impl<'a, 'db> MirLowerCtx<'a, 'db> {
));
}
};
let r = Rvalue::Repeat(init, len);
let r = Rvalue::Repeat(init, len.store());
self.push_assignment(current, place, r, expr_id.into());
Ok(Some(current))
}
@ -1388,11 +1382,7 @@ impl<'a, 'db> MirLowerCtx<'a, 'db> {
}
}
fn push_field_projection(
&mut self,
place: &mut Place<'db>,
expr_id: ExprId,
) -> Result<'db, ()> {
fn push_field_projection(&mut self, place: &mut Place, expr_id: ExprId) -> Result<'db, ()> {
if let Expr::Field { expr, name } = &self.body[expr_id] {
if let TyKind::Tuple(..) = self.expr_ty_after_adjustments(*expr).kind() {
let index =
@ -1421,7 +1411,7 @@ impl<'a, 'db> MirLowerCtx<'a, 'db> {
&mut self,
ty: Ty<'db>,
loc: &ExprId,
) -> Result<'db, Operand<'db>> {
) -> Result<'db, Operand> {
match &self.body[*loc] {
Expr::Literal(l) => self.lower_literal_to_operand(ty, l),
Expr::Path(c) => {
@ -1443,7 +1433,7 @@ impl<'a, 'db> MirLowerCtx<'a, 'db> {
ResolveValueResult::ValueNs(v, _) => {
if let ValueNs::ConstId(c) = v {
self.lower_const_to_operand(
GenericArgs::new_from_iter(self.interner(), []),
GenericArgs::empty(self.interner()),
c.into(),
)
} else {
@ -1461,10 +1451,13 @@ impl<'a, 'db> MirLowerCtx<'a, 'db> {
}
}
fn lower_literal_to_operand(&mut self, ty: Ty<'db>, l: &Literal) -> Result<'db, Operand<'db>> {
fn lower_literal_to_operand(&mut self, ty: Ty<'db>, l: &Literal) -> Result<'db, Operand> {
let size = || {
self.db
.layout_of_ty(ty, ParamEnvAndCrate { param_env: self.env, krate: self.krate() })
.layout_of_ty(
ty.store(),
ParamEnvAndCrate { param_env: self.env, krate: self.krate() }.store(),
)
.map(|it| it.size.bytes_usize())
};
const USIZE_SIZE: usize = size_of::<usize>();
@ -1512,15 +1505,15 @@ impl<'a, 'db> MirLowerCtx<'a, 'db> {
Ok(Operand::from_concrete_const(bytes, MemoryMap::default(), ty))
}
fn new_basic_block(&mut self) -> BasicBlockId<'db> {
fn new_basic_block(&mut self) -> BasicBlockId {
self.result.basic_blocks.alloc(BasicBlock::default())
}
fn lower_const(
&mut self,
const_id: GeneralConstId,
prev_block: BasicBlockId<'db>,
place: Place<'db>,
prev_block: BasicBlockId,
place: Place,
subst: GenericArgs<'db>,
span: MirSpan,
) -> Result<'db, ()> {
@ -1533,8 +1526,8 @@ impl<'a, 'db> MirLowerCtx<'a, 'db> {
&mut self,
subst: GenericArgs<'db>,
const_id: GeneralConstId,
) -> Result<'db, Operand<'db>> {
let konst = if subst.len() != 0 {
) -> Result<'db, Operand> {
let konst = if !subst.is_empty() {
// We can't evaluate constant with substitution now, as generics are not monomorphized in lowering.
Const::new_unevaluated(
self.interner(),
@ -1564,13 +1557,16 @@ impl<'a, 'db> MirLowerCtx<'a, 'db> {
})
.unwrap()
.instantiate(self.interner(), subst);
Ok(Operand { kind: OperandKind::Constant { konst, ty }, span: None })
Ok(Operand {
kind: OperandKind::Constant { konst: konst.store(), ty: ty.store() },
span: None,
})
}
fn write_bytes_to_place(
&mut self,
prev_block: BasicBlockId<'db>,
place: Place<'db>,
prev_block: BasicBlockId,
place: Place,
cv: Box<[u8]>,
ty: Ty<'db>,
span: MirSpan,
@ -1582,12 +1578,12 @@ impl<'a, 'db> MirLowerCtx<'a, 'db> {
fn lower_enum_variant(
&mut self,
variant_id: EnumVariantId,
prev_block: BasicBlockId<'db>,
place: Place<'db>,
prev_block: BasicBlockId,
place: Place,
ty: Ty<'db>,
fields: Box<[Operand<'db>]>,
fields: Box<[Operand]>,
span: MirSpan,
) -> Result<'db, BasicBlockId<'db>> {
) -> Result<'db, BasicBlockId> {
let subst = match ty.kind() {
TyKind::Adt(_, subst) => subst,
_ => implementation_error!("Non ADT enum"),
@ -1595,7 +1591,7 @@ impl<'a, 'db> MirLowerCtx<'a, 'db> {
self.push_assignment(
prev_block,
place,
Rvalue::Aggregate(AggregateKind::Adt(variant_id.into(), subst), fields),
Rvalue::Aggregate(AggregateKind::Adt(variant_id.into(), subst.store()), fields),
span,
);
Ok(prev_block)
@ -1603,13 +1599,13 @@ impl<'a, 'db> MirLowerCtx<'a, 'db> {
fn lower_call_and_args(
&mut self,
func: Operand<'db>,
func: Operand,
args: impl Iterator<Item = ExprId>,
place: Place<'db>,
mut current: BasicBlockId<'db>,
place: Place,
mut current: BasicBlockId,
is_uninhabited: bool,
span: MirSpan,
) -> Result<'db, Option<BasicBlockId<'db>>> {
) -> Result<'db, Option<BasicBlockId>> {
let Some(args) = args
.map(|arg| {
if let Some((temp, c)) = self.lower_expr_to_some_operand(arg, current)? {
@ -1628,13 +1624,13 @@ impl<'a, 'db> MirLowerCtx<'a, 'db> {
fn lower_call(
&mut self,
func: Operand<'db>,
args: Box<[Operand<'db>]>,
place: Place<'db>,
current: BasicBlockId<'db>,
func: Operand,
args: Box<[Operand]>,
place: Place,
current: BasicBlockId,
is_uninhabited: bool,
span: MirSpan,
) -> Result<'db, Option<BasicBlockId<'db>>> {
) -> Result<'db, Option<BasicBlockId>> {
let b = if is_uninhabited { None } else { Some(self.new_basic_block()) };
self.set_terminator(
current,
@ -1651,25 +1647,20 @@ impl<'a, 'db> MirLowerCtx<'a, 'db> {
Ok(b)
}
fn is_unterminated(&mut self, source: BasicBlockId<'db>) -> bool {
fn is_unterminated(&mut self, source: BasicBlockId) -> bool {
self.result.basic_blocks[source].terminator.is_none()
}
fn set_terminator(
&mut self,
source: BasicBlockId<'db>,
terminator: TerminatorKind<'db>,
span: MirSpan,
) {
fn set_terminator(&mut self, source: BasicBlockId, terminator: TerminatorKind, span: MirSpan) {
self.result.basic_blocks[source].terminator = Some(Terminator { span, kind: terminator });
}
fn set_goto(&mut self, source: BasicBlockId<'db>, target: BasicBlockId<'db>, span: MirSpan) {
fn set_goto(&mut self, source: BasicBlockId, target: BasicBlockId, span: MirSpan) {
self.set_terminator(source, TerminatorKind::Goto { target }, span);
}
fn expr_ty_without_adjust(&self, e: ExprId) -> Ty<'db> {
self.infer[e]
self.infer.expr_ty(e)
}
fn expr_ty_after_adjustments(&self, e: ExprId) -> Ty<'db> {
@ -1677,36 +1668,36 @@ impl<'a, 'db> MirLowerCtx<'a, 'db> {
if let Some(it) = self.infer.expr_adjustments.get(&e)
&& let Some(it) = it.last()
{
ty = Some(it.target);
ty = Some(it.target.as_ref());
}
ty.unwrap_or_else(|| self.expr_ty_without_adjust(e))
}
fn push_statement(&mut self, block: BasicBlockId<'db>, statement: Statement<'db>) {
fn push_statement(&mut self, block: BasicBlockId, statement: Statement) {
self.result.basic_blocks[block].statements.push(statement);
}
fn push_fake_read(&mut self, block: BasicBlockId<'db>, p: Place<'db>, span: MirSpan) {
fn push_fake_read(&mut self, block: BasicBlockId, p: Place, span: MirSpan) {
self.push_statement(block, StatementKind::FakeRead(p).with_span(span));
}
fn push_assignment(
&mut self,
block: BasicBlockId<'db>,
place: Place<'db>,
rvalue: Rvalue<'db>,
block: BasicBlockId,
place: Place,
rvalue: Rvalue,
span: MirSpan,
) {
self.push_statement(block, StatementKind::Assign(place, rvalue).with_span(span));
}
fn discr_temp_place(&mut self, current: BasicBlockId<'db>) -> Place<'db> {
fn discr_temp_place(&mut self, current: BasicBlockId) -> Place {
match &self.discr_temp {
Some(it) => *it,
None => {
// FIXME: rustc's ty is dependent on the adt type, maybe we need to do that as well
let discr_ty = Ty::new_int(self.interner(), rustc_type_ir::IntTy::I128);
let tmp: Place<'db> = self
let tmp: Place = self
.temp(discr_ty, current, MirSpan::Unknown)
.expect("discr_ty is never unsized")
.into();
@ -1718,12 +1709,12 @@ impl<'a, 'db> MirLowerCtx<'a, 'db> {
fn lower_loop(
&mut self,
prev_block: BasicBlockId<'db>,
place: Place<'db>,
prev_block: BasicBlockId,
place: Place,
label: Option<LabelId>,
span: MirSpan,
f: impl FnOnce(&mut MirLowerCtx<'_, 'db>, BasicBlockId<'db>) -> Result<'db, ()>,
) -> Result<'db, Option<BasicBlockId<'db>>> {
f: impl FnOnce(&mut MirLowerCtx<'_, 'db>, BasicBlockId) -> Result<'db, ()>,
) -> Result<'db, Option<BasicBlockId>> {
let begin = self.new_basic_block();
let prev = self.current_loop_blocks.replace(LoopBlocks {
begin,
@ -1758,10 +1749,10 @@ impl<'a, 'db> MirLowerCtx<'a, 'db> {
fn merge_blocks(
&mut self,
b1: Option<BasicBlockId<'db>>,
b2: Option<BasicBlockId<'db>>,
b1: Option<BasicBlockId>,
b2: Option<BasicBlockId>,
span: MirSpan,
) -> Option<BasicBlockId<'db>> {
) -> Option<BasicBlockId> {
match (b1, b2) {
(None, None) => None,
(None, Some(b)) | (Some(b), None) => Some(b),
@ -1774,7 +1765,7 @@ impl<'a, 'db> MirLowerCtx<'a, 'db> {
}
}
fn current_loop_end(&mut self) -> Result<'db, BasicBlockId<'db>> {
fn current_loop_end(&mut self) -> Result<'db, BasicBlockId> {
let r = match self
.current_loop_blocks
.as_mut()
@ -1801,7 +1792,7 @@ impl<'a, 'db> MirLowerCtx<'a, 'db> {
fn is_uninhabited(&self, expr_id: ExprId) -> bool {
is_ty_uninhabited_from(
&self.infcx,
self.infer[expr_id],
self.infer.expr_ty(expr_id),
self.owner.module(self.db),
self.env,
)
@ -1809,15 +1800,15 @@ impl<'a, 'db> MirLowerCtx<'a, 'db> {
/// This function push `StorageLive` statement for the binding, and applies changes to add `StorageDead` and
/// `Drop` in the appropriated places.
fn push_storage_live(&mut self, b: BindingId, current: BasicBlockId<'db>) -> Result<'db, ()> {
fn push_storage_live(&mut self, b: BindingId, current: BasicBlockId) -> Result<'db, ()> {
let l = self.binding_local(b)?;
self.push_storage_live_for_local(l, current, MirSpan::BindingId(b))
}
fn push_storage_live_for_local(
&mut self,
l: LocalId<'db>,
current: BasicBlockId<'db>,
l: LocalId,
current: BasicBlockId,
span: MirSpan,
) -> Result<'db, ()> {
self.drop_scopes.last_mut().unwrap().locals.push(l);
@ -1828,11 +1819,11 @@ impl<'a, 'db> MirLowerCtx<'a, 'db> {
fn lower_block_to_place(
&mut self,
statements: &[hir_def::hir::Statement],
mut current: BasicBlockId<'db>,
mut current: BasicBlockId,
tail: Option<ExprId>,
place: Place<'db>,
place: Place,
span: MirSpan,
) -> Result<'db, Option<Idx<BasicBlock<'db>>>> {
) -> Result<'db, Option<Idx<BasicBlock>>> {
let scope = self.push_drop_scope();
for statement in statements.iter() {
match statement {
@ -1908,11 +1899,11 @@ impl<'a, 'db> MirLowerCtx<'a, 'db> {
params: impl Iterator<Item = (PatId, Ty<'db>)> + Clone,
self_binding: Option<(BindingId, Ty<'db>)>,
pick_binding: impl Fn(BindingId) -> bool,
) -> Result<'db, BasicBlockId<'db>> {
) -> Result<'db, BasicBlockId> {
let base_param_count = self.result.param_locals.len();
let self_binding = match self_binding {
Some((self_binding, ty)) => {
let local_id = self.result.locals.alloc(Local { ty });
let local_id = self.result.locals.alloc(Local { ty: ty.store() });
self.drop_scopes.last_mut().unwrap().locals.push(local_id);
self.result.binding_locals.insert(self_binding, local_id);
self.result.param_locals.push(local_id);
@ -1921,7 +1912,7 @@ impl<'a, 'db> MirLowerCtx<'a, 'db> {
None => None,
};
self.result.param_locals.extend(params.clone().map(|(it, ty)| {
let local_id = self.result.locals.alloc(Local { ty });
let local_id = self.result.locals.alloc(Local { ty: ty.store() });
self.drop_scopes.last_mut().unwrap().locals.push(local_id);
if let Pat::Bind { id, subpat: None } = self.body[it]
&& matches!(
@ -1939,9 +1930,10 @@ impl<'a, 'db> MirLowerCtx<'a, 'db> {
continue;
}
if !self.result.binding_locals.contains_idx(id) {
self.result
.binding_locals
.insert(id, self.result.locals.alloc(Local { ty: self.infer[id] }));
self.result.binding_locals.insert(
id,
self.result.locals.alloc(Local { ty: self.infer.binding_ty(id).store() }),
);
}
}
let mut current = self.result.start_block;
@ -1976,7 +1968,7 @@ impl<'a, 'db> MirLowerCtx<'a, 'db> {
Ok(current)
}
fn binding_local(&self, b: BindingId) -> Result<'db, LocalId<'db>> {
fn binding_local(&self, b: BindingId) -> Result<'db, LocalId> {
match self.result.binding_locals.get(b) {
Some(it) => Ok(*it),
None => {
@ -2025,9 +2017,9 @@ impl<'a, 'db> MirLowerCtx<'a, 'db> {
fn drop_until_scope(
&mut self,
scope_index: usize,
mut current: BasicBlockId<'db>,
mut current: BasicBlockId,
span: MirSpan,
) -> BasicBlockId<'db> {
) -> BasicBlockId {
for scope in self.drop_scopes[scope_index..].to_vec().iter().rev() {
self.emit_drop_and_storage_dead_for_scope(scope, &mut current, span);
}
@ -2047,9 +2039,9 @@ impl<'a, 'db> MirLowerCtx<'a, 'db> {
/// Don't call directly
fn pop_drop_scope_internal(
&mut self,
mut current: BasicBlockId<'db>,
mut current: BasicBlockId,
span: MirSpan,
) -> BasicBlockId<'db> {
) -> BasicBlockId {
let scope = self.drop_scopes.pop().unwrap();
self.emit_drop_and_storage_dead_for_scope(&scope, &mut current, span);
current
@ -2057,9 +2049,9 @@ impl<'a, 'db> MirLowerCtx<'a, 'db> {
fn pop_drop_scope_assert_finished(
&mut self,
mut current: BasicBlockId<'db>,
mut current: BasicBlockId,
span: MirSpan,
) -> Result<'db, BasicBlockId<'db>> {
) -> Result<'db, BasicBlockId> {
current = self.pop_drop_scope_internal(current, span);
if !self.drop_scopes.is_empty() {
implementation_error!("Mismatched count between drop scope push and pops");
@ -2069,12 +2061,13 @@ impl<'a, 'db> MirLowerCtx<'a, 'db> {
fn emit_drop_and_storage_dead_for_scope(
&mut self,
scope: &DropScope<'db>,
current: &mut Idx<BasicBlock<'db>>,
scope: &DropScope,
current: &mut Idx<BasicBlock>,
span: MirSpan,
) {
for &l in scope.locals.iter().rev() {
if !self.infcx.type_is_copy_modulo_regions(self.env, self.result.locals[l].ty) {
if !self.infcx.type_is_copy_modulo_regions(self.env, self.result.locals[l].ty.as_ref())
{
let prev = std::mem::replace(current, self.new_basic_block());
self.set_terminator(
prev,
@ -2112,36 +2105,37 @@ fn cast_kind<'db>(
pub fn mir_body_for_closure_query<'db>(
db: &'db dyn HirDatabase,
closure: InternedClosureId,
) -> Result<'db, Arc<MirBody<'db>>> {
) -> Result<'db, Arc<MirBody>> {
let InternedClosure(owner, expr) = db.lookup_intern_closure(closure);
let body = db.body(owner);
let infer = InferenceResult::for_body(db, owner);
let Expr::Closure { args, body: root, .. } = &body[expr] else {
implementation_error!("closure expression is not closure");
};
let crate::next_solver::TyKind::Closure(_, substs) = infer[expr].kind() else {
let crate::next_solver::TyKind::Closure(_, substs) = infer.expr_ty(expr).kind() else {
implementation_error!("closure expression is not closure");
};
let (captures, kind) = infer.closure_info(closure);
let mut ctx = MirLowerCtx::new(db, owner, &body, infer);
// 0 is return local
ctx.result.locals.alloc(Local { ty: infer[*root] });
ctx.result.locals.alloc(Local { ty: infer.expr_ty(*root).store() });
let closure_local = ctx.result.locals.alloc(Local {
ty: match kind {
FnTrait::FnOnce | FnTrait::AsyncFnOnce => infer[expr],
FnTrait::FnOnce | FnTrait::AsyncFnOnce => infer.expr_ty(expr),
FnTrait::FnMut | FnTrait::AsyncFnMut => Ty::new_ref(
ctx.interner(),
Region::error(ctx.interner()),
infer[expr],
infer.expr_ty(expr),
Mutability::Mut,
),
FnTrait::Fn | FnTrait::AsyncFn => Ty::new_ref(
ctx.interner(),
Region::error(ctx.interner()),
infer[expr],
infer.expr_ty(expr),
Mutability::Not,
),
},
}
.store(),
});
ctx.result.param_locals.push(closure_local);
let Some(sig) =
@ -2151,7 +2145,7 @@ pub fn mir_body_for_closure_query<'db>(
};
let resolver_guard = ctx.resolver.update_to_inner_scope(db, owner, expr);
let current = ctx.lower_params_and_bindings(
args.iter().zip(sig.skip_binder().inputs().iter()).map(|(it, y)| (*it, y)),
args.iter().zip(sig.skip_binder().inputs().iter()).map(|(it, y)| (*it, *y)),
None,
|_| true,
)?;
@ -2160,8 +2154,7 @@ pub fn mir_body_for_closure_query<'db>(
let current = ctx.pop_drop_scope_assert_finished(current, root.into())?;
ctx.set_terminator(current, TerminatorKind::Return, (*root).into());
}
let mut upvar_map: FxHashMap<LocalId<'db>, Vec<(&CapturedItem<'_>, usize)>> =
FxHashMap::default();
let mut upvar_map: FxHashMap<LocalId, Vec<(&CapturedItem, usize)>> = FxHashMap::default();
for (i, capture) in captures.iter().enumerate() {
let local = ctx.binding_local(capture.place.local)?;
upvar_map.entry(local).or_default().push((capture, i));
@ -2226,7 +2219,7 @@ pub fn mir_body_for_closure_query<'db>(
pub fn mir_body_query<'db>(
db: &'db dyn HirDatabase,
def: DefWithBodyId,
) -> Result<'db, Arc<MirBody<'db>>> {
) -> Result<'db, Arc<MirBody>> {
let krate = def.krate(db);
let edition = krate.data(db).edition;
let detail = match def {
@ -2261,8 +2254,9 @@ pub fn mir_body_query<'db>(
pub(crate) fn mir_body_cycle_result<'db>(
_db: &'db dyn HirDatabase,
_: salsa::Id,
_def: DefWithBodyId,
) -> Result<'db, Arc<MirBody<'db>>> {
) -> Result<'db, Arc<MirBody>> {
Err(MirLowerError::Loop)
}
@ -2270,17 +2264,17 @@ pub fn lower_to_mir<'db>(
db: &'db dyn HirDatabase,
owner: DefWithBodyId,
body: &Body,
infer: &InferenceResult<'db>,
infer: &InferenceResult,
// FIXME: root_expr should always be the body.body_expr, but since `X` in `[(); X]` doesn't have its own specific body yet, we
// need to take this input explicitly.
root_expr: ExprId,
) -> Result<'db, MirBody<'db>> {
) -> Result<'db, MirBody> {
if infer.type_mismatches().next().is_some() || infer.is_erroneous() {
return Err(MirLowerError::HasErrors);
}
let mut ctx = MirLowerCtx::new(db, owner, body, infer);
// 0 is return local
ctx.result.locals.alloc(Local { ty: ctx.expr_ty_after_adjustments(root_expr) });
ctx.result.locals.alloc(Local { ty: ctx.expr_ty_after_adjustments(root_expr).store() });
let binding_picker = |b: BindingId| {
let owner = ctx.body.binding_owner(b);
if root_expr == body.body_expr { owner.is_none() } else { owner == Some(root_expr) }
@ -2293,7 +2287,7 @@ pub fn lower_to_mir<'db>(
if let DefWithBodyId::FunctionId(fid) = owner {
let callable_sig =
db.callable_item_signature(fid.into()).instantiate_identity().skip_binder();
let mut params = callable_sig.inputs().iter();
let mut params = callable_sig.inputs().iter().copied();
let self_param = body.self_param.and_then(|id| Some((id, params.next()?)));
break 'b ctx.lower_params_and_bindings(
body.params.iter().zip(params).map(|(it, y)| (*it, y)),

View file

@ -20,8 +20,8 @@ impl<'db> MirLowerCtx<'_, 'db> {
fn lower_expr_to_some_place_without_adjust(
&mut self,
expr_id: ExprId,
prev_block: BasicBlockId<'db>,
) -> Result<'db, Option<(Place<'db>, BasicBlockId<'db>)>> {
prev_block: BasicBlockId,
) -> Result<'db, Option<(Place, BasicBlockId)>> {
let ty = self.expr_ty_without_adjust(expr_id);
let place = self.temp(ty, prev_block, expr_id.into())?;
let Some(current) =
@ -35,12 +35,12 @@ impl<'db> MirLowerCtx<'_, 'db> {
fn lower_expr_to_some_place_with_adjust(
&mut self,
expr_id: ExprId,
prev_block: BasicBlockId<'db>,
adjustments: &[Adjustment<'db>],
) -> Result<'db, Option<(Place<'db>, BasicBlockId<'db>)>> {
prev_block: BasicBlockId,
adjustments: &[Adjustment],
) -> Result<'db, Option<(Place, BasicBlockId)>> {
let ty = adjustments
.last()
.map(|it| it.target)
.map(|it| it.target.as_ref())
.unwrap_or_else(|| self.expr_ty_without_adjust(expr_id));
let place = self.temp(ty, prev_block, expr_id.into())?;
let Some(current) =
@ -53,11 +53,11 @@ impl<'db> MirLowerCtx<'_, 'db> {
pub(super) fn lower_expr_as_place_with_adjust(
&mut self,
current: BasicBlockId<'db>,
current: BasicBlockId,
expr_id: ExprId,
upgrade_rvalue: bool,
adjustments: &[Adjustment<'db>],
) -> Result<'db, Option<(Place<'db>, BasicBlockId<'db>)>> {
adjustments: &[Adjustment],
) -> Result<'db, Option<(Place, BasicBlockId)>> {
let try_rvalue = |this: &mut MirLowerCtx<'_, 'db>| {
if !upgrade_rvalue {
return Err(MirLowerError::MutatingRvalue);
@ -93,9 +93,9 @@ impl<'db> MirLowerCtx<'_, 'db> {
current,
r,
rest.last()
.map(|it| it.target)
.map(|it| it.target.as_ref())
.unwrap_or_else(|| self.expr_ty_without_adjust(expr_id)),
last.target,
last.target.as_ref(),
expr_id.into(),
match od.0 {
Some(Mutability::Mut) => true,
@ -115,10 +115,10 @@ impl<'db> MirLowerCtx<'_, 'db> {
pub(super) fn lower_expr_as_place(
&mut self,
current: BasicBlockId<'db>,
current: BasicBlockId,
expr_id: ExprId,
upgrade_rvalue: bool,
) -> Result<'db, Option<(Place<'db>, BasicBlockId<'db>)>> {
) -> Result<'db, Option<(Place, BasicBlockId)>> {
match self.infer.expr_adjustments.get(&expr_id) {
Some(a) => self.lower_expr_as_place_with_adjust(current, expr_id, upgrade_rvalue, a),
None => self.lower_expr_as_place_without_adjust(current, expr_id, upgrade_rvalue),
@ -127,10 +127,10 @@ impl<'db> MirLowerCtx<'_, 'db> {
pub(super) fn lower_expr_as_place_without_adjust(
&mut self,
current: BasicBlockId<'db>,
current: BasicBlockId,
expr_id: ExprId,
upgrade_rvalue: bool,
) -> Result<'db, Option<(Place<'db>, BasicBlockId<'db>)>> {
) -> Result<'db, Option<(Place, BasicBlockId)>> {
let try_rvalue = |this: &mut MirLowerCtx<'_, 'db>| {
if !upgrade_rvalue {
return Err(MirLowerError::MutatingRvalue);
@ -159,7 +159,7 @@ impl<'db> MirLowerCtx<'_, 'db> {
ty,
Mutability::Not,
);
let temp: Place<'db> = self.temp(ref_ty, current, expr_id.into())?.into();
let temp: Place = self.temp(ref_ty, current, expr_id.into())?.into();
self.push_assignment(
current,
temp,
@ -279,21 +279,21 @@ impl<'db> MirLowerCtx<'_, 'db> {
fn lower_overloaded_index(
&mut self,
current: BasicBlockId<'db>,
place: Place<'db>,
current: BasicBlockId,
place: Place,
base_ty: Ty<'db>,
result_ty: Ty<'db>,
index_operand: Operand<'db>,
index_operand: Operand,
span: MirSpan,
index_fn: (FunctionId, GenericArgs<'db>),
) -> Result<'db, Option<(Place<'db>, BasicBlockId<'db>)>> {
) -> Result<'db, Option<(Place, BasicBlockId)>> {
let mutability = match base_ty.as_reference() {
Some((_, _, mutability)) => mutability,
None => Mutability::Not,
};
let result_ref =
Ty::new_ref(self.interner(), Region::error(self.interner()), result_ty, mutability);
let mut result: Place<'db> = self.temp(result_ref, current, span)?.into();
let mut result: Place = self.temp(result_ref, current, span)?.into();
let index_fn_op = Operand::const_zst(Ty::new_fn_def(
self.interner(),
CallableDefId::FunctionId(index_fn.0).into(),
@ -316,13 +316,13 @@ impl<'db> MirLowerCtx<'_, 'db> {
fn lower_overloaded_deref(
&mut self,
current: BasicBlockId<'db>,
place: Place<'db>,
current: BasicBlockId,
place: Place,
source_ty: Ty<'db>,
target_ty: Ty<'db>,
span: MirSpan,
mutability: bool,
) -> Result<'db, Option<(Place<'db>, BasicBlockId<'db>)>> {
) -> Result<'db, Option<(Place, BasicBlockId)>> {
let lang_items = self.lang_items();
let (mutability, trait_lang_item, trait_method_name, borrow_kind) = if !mutability {
(
@ -342,7 +342,7 @@ impl<'db> MirLowerCtx<'_, 'db> {
let error_region = Region::error(self.interner());
let ty_ref = Ty::new_ref(self.interner(), error_region, source_ty, mutability);
let target_ty_ref = Ty::new_ref(self.interner(), error_region, target_ty, mutability);
let ref_place: Place<'db> = self.temp(ty_ref, current, span)?.into();
let ref_place: Place = self.temp(ty_ref, current, span)?.into();
self.push_assignment(current, ref_place, Rvalue::Ref(borrow_kind, place), span);
let deref_trait = trait_lang_item.ok_or(MirLowerError::LangItemNotFound)?;
let deref_fn = deref_trait
@ -352,9 +352,9 @@ impl<'db> MirLowerCtx<'_, 'db> {
let deref_fn_op = Operand::const_zst(Ty::new_fn_def(
self.interner(),
CallableDefId::FunctionId(deref_fn).into(),
GenericArgs::new_from_iter(self.interner(), [source_ty.into()]),
GenericArgs::new_from_slice(&[source_ty.into()]),
));
let mut result: Place<'db> = self.temp(target_ty_ref, current, span)?.into();
let mut result: Place = self.temp(target_ty_ref, current, span)?.into();
let Some(current) = self.lower_call(
deref_fn_op,
Box::new([Operand { kind: OperandKind::Copy(ref_place), span: None }]),

View file

@ -1,7 +1,7 @@
//! MIR lowering for patterns
use hir_def::{hir::ExprId, signatures::VariantFields};
use rustc_type_ir::inherent::{IntoKind, SliceLike, Ty as _};
use rustc_type_ir::inherent::{IntoKind, Ty as _};
use crate::{
BindingMode,
@ -63,11 +63,11 @@ impl<'db> MirLowerCtx<'_, 'db> {
/// so it should be an empty block.
pub(super) fn pattern_match(
&mut self,
current: BasicBlockId<'db>,
current_else: Option<BasicBlockId<'db>>,
cond_place: Place<'db>,
current: BasicBlockId,
current_else: Option<BasicBlockId>,
cond_place: Place,
pattern: PatId,
) -> Result<'db, (BasicBlockId<'db>, Option<BasicBlockId<'db>>)> {
) -> Result<'db, (BasicBlockId, Option<BasicBlockId>)> {
let (current, current_else) = self.pattern_match_inner(
current,
current_else,
@ -87,10 +87,10 @@ impl<'db> MirLowerCtx<'_, 'db> {
pub(super) fn pattern_match_assignment(
&mut self,
current: BasicBlockId<'db>,
value: Place<'db>,
current: BasicBlockId,
value: Place,
pattern: PatId,
) -> Result<'db, BasicBlockId<'db>> {
) -> Result<'db, BasicBlockId> {
let (current, _) =
self.pattern_match_inner(current, None, value, pattern, MatchingMode::Assign)?;
Ok(current)
@ -99,9 +99,9 @@ impl<'db> MirLowerCtx<'_, 'db> {
pub(super) fn match_self_param(
&mut self,
id: BindingId,
current: BasicBlockId<'db>,
local: LocalId<'db>,
) -> Result<'db, (BasicBlockId<'db>, Option<BasicBlockId<'db>>)> {
current: BasicBlockId,
local: LocalId,
) -> Result<'db, (BasicBlockId, Option<BasicBlockId>)> {
self.pattern_match_binding(
id,
BindingMode::Move,
@ -114,12 +114,12 @@ impl<'db> MirLowerCtx<'_, 'db> {
fn pattern_match_inner(
&mut self,
mut current: BasicBlockId<'db>,
mut current_else: Option<BasicBlockId<'db>>,
mut cond_place: Place<'db>,
mut current: BasicBlockId,
mut current_else: Option<BasicBlockId>,
mut cond_place: Place,
pattern: PatId,
mode: MatchingMode,
) -> Result<'db, (BasicBlockId<'db>, Option<BasicBlockId<'db>>)> {
) -> Result<'db, (BasicBlockId, Option<BasicBlockId>)> {
let cnt = self.infer.pat_adjustments.get(&pattern).map(|x| x.len()).unwrap_or_default();
cond_place.projection = self.result.projection_store.intern(
cond_place
@ -135,7 +135,7 @@ impl<'db> MirLowerCtx<'_, 'db> {
Pat::Missing => return Err(MirLowerError::IncompletePattern),
Pat::Wild => (current, current_else),
Pat::Tuple { args, ellipsis } => {
let subst = match self.infer[pattern].kind() {
let subst = match self.infer.pat_ty(pattern).kind() {
TyKind::Tuple(s) => s,
_ => {
return Err(MirLowerError::TypeError(
@ -209,10 +209,11 @@ impl<'db> MirLowerCtx<'_, 'db> {
}
Pat::Range { start, end, range_type: _ } => {
let mut add_check = |l: &ExprId, binop| -> Result<'db, ()> {
let lv = self.lower_literal_or_const_to_operand(self.infer[pattern], l)?;
let lv =
self.lower_literal_or_const_to_operand(self.infer.pat_ty(pattern), l)?;
let else_target = *current_else.get_or_insert_with(|| self.new_basic_block());
let next = self.new_basic_block();
let discr: Place<'db> =
let discr: Place =
self.temp(Ty::new_bool(self.interner()), current, pattern.into())?.into();
self.push_assignment(
current,
@ -249,9 +250,9 @@ impl<'db> MirLowerCtx<'_, 'db> {
Pat::Slice { prefix, slice, suffix } => {
if mode == MatchingMode::Check {
// emit runtime length check for slice
if let TyKind::Slice(_) = self.infer[pattern].kind() {
if let TyKind::Slice(_) = self.infer.pat_ty(pattern).kind() {
let pattern_len = prefix.len() + suffix.len();
let place_len: Place<'db> = self
let place_len: Place = self
.temp(Ty::new_usize(self.interner()), current, pattern.into())?
.into();
self.push_assignment(
@ -285,7 +286,7 @@ impl<'db> MirLowerCtx<'_, 'db> {
MemoryMap::default(),
Ty::new_usize(self.interner()),
);
let discr: Place<'db> = self
let discr: Place = self
.temp(Ty::new_bool(self.interner()), current, pattern.into())?
.into();
self.push_assignment(
@ -398,15 +399,15 @@ impl<'db> MirLowerCtx<'_, 'db> {
break 'b (c, x.1);
}
if let ResolveValueResult::ValueNs(ValueNs::ConstId(c), _) = pr {
break 'b (c, GenericArgs::new_from_iter(self.interner(), []));
break 'b (c, GenericArgs::empty(self.interner()));
}
not_supported!("path in pattern position that is not const or variant")
};
let tmp: Place<'db> =
self.temp(self.infer[pattern], current, pattern.into())?.into();
let tmp: Place =
self.temp(self.infer.pat_ty(pattern), current, pattern.into())?.into();
let span = pattern.into();
self.lower_const(c.into(), current, tmp, subst, span)?;
let tmp2: Place<'db> =
let tmp2: Place =
self.temp(Ty::new_bool(self.interner()), current, pattern.into())?.into();
self.push_assignment(
current,
@ -434,7 +435,7 @@ impl<'db> MirLowerCtx<'_, 'db> {
Pat::Lit(l) => match &self.body[*l] {
Expr::Literal(l) => {
if mode == MatchingMode::Check {
let c = self.lower_literal_to_operand(self.infer[pattern], l)?;
let c = self.lower_literal_to_operand(self.infer.pat_ty(pattern), l)?;
self.pattern_match_const(current_else, current, c, cond_place, pattern)?
} else {
(current, current_else)
@ -506,11 +507,11 @@ impl<'db> MirLowerCtx<'_, 'db> {
&mut self,
id: BindingId,
mode: BindingMode,
cond_place: Place<'db>,
cond_place: Place,
span: MirSpan,
current: BasicBlockId<'db>,
current_else: Option<BasicBlockId<'db>>,
) -> Result<'db, (BasicBlockId<'db>, Option<BasicBlockId<'db>>)> {
current: BasicBlockId,
current_else: Option<BasicBlockId>,
) -> Result<'db, (BasicBlockId, Option<BasicBlockId>)> {
let target_place = self.binding_local(id)?;
self.push_storage_live(id, current)?;
self.push_match_assignment(current, target_place, mode, cond_place, span);
@ -519,10 +520,10 @@ impl<'db> MirLowerCtx<'_, 'db> {
fn push_match_assignment(
&mut self,
current: BasicBlockId<'db>,
target_place: LocalId<'db>,
current: BasicBlockId,
target_place: LocalId,
mode: BindingMode,
cond_place: Place<'db>,
cond_place: Place,
span: MirSpan,
) {
self.push_assignment(
@ -545,15 +546,15 @@ impl<'db> MirLowerCtx<'_, 'db> {
fn pattern_match_const(
&mut self,
current_else: Option<BasicBlockId<'db>>,
current: BasicBlockId<'db>,
c: Operand<'db>,
cond_place: Place<'db>,
current_else: Option<BasicBlockId>,
current: BasicBlockId,
c: Operand,
cond_place: Place,
pattern: Idx<Pat>,
) -> Result<'db, (BasicBlockId<'db>, Option<BasicBlockId<'db>>)> {
) -> Result<'db, (BasicBlockId, Option<BasicBlockId>)> {
let then_target = self.new_basic_block();
let else_target = current_else.unwrap_or_else(|| self.new_basic_block());
let discr: Place<'db> =
let discr: Place =
self.temp(Ty::new_bool(self.interner()), current, pattern.into())?.into();
self.push_assignment(
current,
@ -579,14 +580,14 @@ impl<'db> MirLowerCtx<'_, 'db> {
fn pattern_matching_variant(
&mut self,
cond_place: Place<'db>,
cond_place: Place,
variant: VariantId,
mut current: BasicBlockId<'db>,
mut current: BasicBlockId,
span: MirSpan,
mut current_else: Option<BasicBlockId<'db>>,
mut current_else: Option<BasicBlockId>,
shape: AdtPatternShape<'_>,
mode: MatchingMode,
) -> Result<'db, (BasicBlockId<'db>, Option<BasicBlockId<'db>>)> {
) -> Result<'db, (BasicBlockId, Option<BasicBlockId>)> {
Ok(match variant {
VariantId::EnumVariantId(v) => {
if mode == MatchingMode::Check {
@ -635,11 +636,11 @@ impl<'db> MirLowerCtx<'_, 'db> {
shape: AdtPatternShape<'_>,
variant_data: &VariantFields,
v: VariantId,
current: BasicBlockId<'db>,
current_else: Option<BasicBlockId<'db>>,
cond_place: &Place<'db>,
current: BasicBlockId,
current_else: Option<BasicBlockId>,
cond_place: &Place,
mode: MatchingMode,
) -> Result<'db, (BasicBlockId<'db>, Option<BasicBlockId<'db>>)> {
) -> Result<'db, (BasicBlockId, Option<BasicBlockId>)> {
Ok(match shape {
AdtPatternShape::Record { args } => {
let it = args
@ -678,12 +679,12 @@ impl<'db> MirLowerCtx<'_, 'db> {
fn pattern_match_adt(
&mut self,
mut current: BasicBlockId<'db>,
mut current_else: Option<BasicBlockId<'db>>,
args: impl Iterator<Item = (PlaceElem<'db>, PatId)>,
cond_place: &Place<'db>,
mut current: BasicBlockId,
mut current_else: Option<BasicBlockId>,
args: impl Iterator<Item = (PlaceElem, PatId)>,
cond_place: &Place,
mode: MatchingMode,
) -> Result<'db, (BasicBlockId<'db>, Option<BasicBlockId<'db>>)> {
) -> Result<'db, (BasicBlockId, Option<BasicBlockId>)> {
for (proj, arg) in args {
let cond_place = cond_place.project(proj, &mut self.result.projection_store);
(current, current_else) =
@ -694,14 +695,14 @@ impl<'db> MirLowerCtx<'_, 'db> {
fn pattern_match_tuple_like(
&mut self,
current: BasicBlockId<'db>,
current_else: Option<BasicBlockId<'db>>,
current: BasicBlockId,
current_else: Option<BasicBlockId>,
args: &[PatId],
ellipsis: Option<u32>,
fields: impl DoubleEndedIterator<Item = PlaceElem<'db>> + Clone,
cond_place: &Place<'db>,
fields: impl DoubleEndedIterator<Item = PlaceElem> + Clone,
cond_place: &Place,
mode: MatchingMode,
) -> Result<'db, (BasicBlockId<'db>, Option<BasicBlockId<'db>>)> {
) -> Result<'db, (BasicBlockId, Option<BasicBlockId>)> {
let (al, ar) = args.split_at(ellipsis.map_or(args.len(), |it| it as usize));
let it = al
.iter()

View file

@ -8,7 +8,7 @@
//! So the monomorphization should be called even if the substitution is empty.
use hir_def::DefWithBodyId;
use rustc_type_ir::inherent::{IntoKind, SliceLike};
use rustc_type_ir::inherent::IntoKind;
use rustc_type_ir::{
FallibleTypeFolder, TypeFlags, TypeFoldable, TypeSuperFoldable, TypeVisitableExt,
};
@ -16,7 +16,8 @@ use triomphe::Arc;
use crate::{
ParamEnvAndCrate,
next_solver::{Const, ConstKind, Region, RegionKind},
next_solver::{Const, ConstKind, Region, RegionKind, StoredConst, StoredGenericArgs, StoredTy},
traits::StoredParamEnvAndCrate,
};
use crate::{
db::{HirDatabase, InternedClosureId},
@ -37,7 +38,7 @@ struct Filler<'db> {
}
impl<'db> FallibleTypeFolder<DbInterner<'db>> for Filler<'db> {
type Error = MirLowerError<'db>;
type Error = MirLowerError;
fn cx(&self) -> DbInterner<'db> {
self.infcx.interner
@ -69,7 +70,7 @@ impl<'db> FallibleTypeFolder<DbInterner<'db>> for Filler<'db> {
.get(param.index as usize)
.and_then(|arg| arg.ty())
.ok_or_else(|| {
MirLowerError::GenericArgNotProvided(param.id.into(), self.subst)
MirLowerError::GenericArgNotProvided(param.id.into(), self.subst.store())
})?),
_ => ty.try_super_fold_with(self),
}
@ -79,22 +80,18 @@ impl<'db> FallibleTypeFolder<DbInterner<'db>> for Filler<'db> {
let ConstKind::Param(param) = ct.kind() else {
return ct.try_super_fold_with(self);
};
self.subst
.as_slice()
.get(param.index as usize)
.and_then(|arg| arg.konst())
.ok_or_else(|| MirLowerError::GenericArgNotProvided(param.id.into(), self.subst))
self.subst.as_slice().get(param.index as usize).and_then(|arg| arg.konst()).ok_or_else(
|| MirLowerError::GenericArgNotProvided(param.id.into(), self.subst.store()),
)
}
fn try_fold_region(&mut self, region: Region<'db>) -> Result<Region<'db>, Self::Error> {
let RegionKind::ReEarlyParam(param) = region.kind() else {
return Ok(region);
};
self.subst
.as_slice()
.get(param.index as usize)
.and_then(|arg| arg.region())
.ok_or_else(|| MirLowerError::GenericArgNotProvided(param.id.into(), self.subst))
self.subst.as_slice().get(param.index as usize).and_then(|arg| arg.region()).ok_or_else(
|| MirLowerError::GenericArgNotProvided(param.id.into(), self.subst.store()),
)
}
}
@ -105,33 +102,50 @@ impl<'db> Filler<'db> {
Self { infcx, trait_env: env, subst }
}
fn fill<T: TypeFoldable<DbInterner<'db>> + Copy>(
&mut self,
t: &mut T,
) -> Result<(), MirLowerError<'db>> {
fn fill_ty(&mut self, t: &mut StoredTy) -> Result<(), MirLowerError> {
        // Can't deep normalize as that'll try to normalize consts and fail.
*t = t.try_fold_with(self)?;
if references_non_lt_error(t) {
*t = t.as_ref().try_fold_with(self)?.store();
if references_non_lt_error(&t.as_ref()) {
Err(MirLowerError::NotSupported("monomorphization resulted in errors".to_owned()))
} else {
Ok(())
}
}
fn fill_operand(&mut self, op: &mut Operand<'db>) -> Result<(), MirLowerError<'db>> {
fn fill_const(&mut self, t: &mut StoredConst) -> Result<(), MirLowerError> {
        // Can't deep normalize as that'll try to normalize consts and fail.
*t = t.as_ref().try_fold_with(self)?.store();
if references_non_lt_error(&t.as_ref()) {
Err(MirLowerError::NotSupported("monomorphization resulted in errors".to_owned()))
} else {
Ok(())
}
}
fn fill_args(&mut self, t: &mut StoredGenericArgs) -> Result<(), MirLowerError> {
        // Can't deep normalize as that'll try to normalize consts and fail.
*t = t.as_ref().try_fold_with(self)?.store();
if references_non_lt_error(&t.as_ref()) {
Err(MirLowerError::NotSupported("monomorphization resulted in errors".to_owned()))
} else {
Ok(())
}
}
fn fill_operand(&mut self, op: &mut Operand) -> Result<(), MirLowerError> {
match &mut op.kind {
OperandKind::Constant { konst, ty } => {
self.fill(konst)?;
self.fill(ty)?;
self.fill_const(konst)?;
self.fill_ty(ty)?;
}
OperandKind::Copy(_) | OperandKind::Move(_) | OperandKind::Static(_) => (),
}
Ok(())
}
fn fill_body(&mut self, body: &mut MirBody<'db>) -> Result<(), MirLowerError<'db>> {
fn fill_body(&mut self, body: &mut MirBody) -> Result<(), MirLowerError> {
for (_, l) in body.locals.iter_mut() {
self.fill(&mut l.ty)?;
self.fill_ty(&mut l.ty)?;
}
for (_, bb) in body.basic_blocks.iter_mut() {
for statement in &mut bb.statements {
@ -144,20 +158,20 @@ impl<'db> Filler<'db> {
match ak {
super::AggregateKind::Array(ty)
| super::AggregateKind::Tuple(ty)
| super::AggregateKind::Closure(ty) => self.fill(ty)?,
super::AggregateKind::Adt(_, subst) => self.fill(subst)?,
| super::AggregateKind::Closure(ty) => self.fill_ty(ty)?,
super::AggregateKind::Adt(_, subst) => self.fill_args(subst)?,
super::AggregateKind::Union(_, _) => (),
}
}
Rvalue::ShallowInitBox(_, ty) | Rvalue::ShallowInitBoxWithAlloc(ty) => {
self.fill(ty)?;
self.fill_ty(ty)?;
}
Rvalue::Use(op) => {
self.fill_operand(op)?;
}
Rvalue::Repeat(op, len) => {
self.fill_operand(op)?;
self.fill(len)?;
self.fill_const(len)?;
}
Rvalue::Ref(_, _)
| Rvalue::Len(_)
@ -208,35 +222,36 @@ impl<'db> Filler<'db> {
}
}
pub fn monomorphized_mir_body_query<'db>(
db: &'db dyn HirDatabase,
pub fn monomorphized_mir_body_query(
db: &dyn HirDatabase,
owner: DefWithBodyId,
subst: GenericArgs<'db>,
trait_env: ParamEnvAndCrate<'db>,
) -> Result<Arc<MirBody<'db>>, MirLowerError<'db>> {
let mut filler = Filler::new(db, trait_env, subst);
subst: StoredGenericArgs,
trait_env: StoredParamEnvAndCrate,
) -> Result<Arc<MirBody>, MirLowerError> {
let mut filler = Filler::new(db, trait_env.as_ref(), subst.as_ref());
let body = db.mir_body(owner)?;
let mut body = (*body).clone();
filler.fill_body(&mut body)?;
Ok(Arc::new(body))
}
pub(crate) fn monomorphized_mir_body_cycle_result<'db>(
_db: &'db dyn HirDatabase,
pub(crate) fn monomorphized_mir_body_cycle_result(
_db: &dyn HirDatabase,
_: salsa::Id,
_: DefWithBodyId,
_: GenericArgs<'db>,
_: ParamEnvAndCrate<'db>,
) -> Result<Arc<MirBody<'db>>, MirLowerError<'db>> {
_: StoredGenericArgs,
_: StoredParamEnvAndCrate,
) -> Result<Arc<MirBody>, MirLowerError> {
Err(MirLowerError::Loop)
}
pub fn monomorphized_mir_body_for_closure_query<'db>(
db: &'db dyn HirDatabase,
pub fn monomorphized_mir_body_for_closure_query(
db: &dyn HirDatabase,
closure: InternedClosureId,
subst: GenericArgs<'db>,
trait_env: ParamEnvAndCrate<'db>,
) -> Result<Arc<MirBody<'db>>, MirLowerError<'db>> {
let mut filler = Filler::new(db, trait_env, subst);
subst: StoredGenericArgs,
trait_env: StoredParamEnvAndCrate,
) -> Result<Arc<MirBody>, MirLowerError> {
let mut filler = Filler::new(db, trait_env.as_ref(), subst.as_ref());
let body = db.mir_body_for_closure(closure)?;
let mut body = (*body).clone();
filler.fill_body(&mut body)?;

View file

@ -36,8 +36,8 @@ macro_rules! wln {
};
}
impl<'db> MirBody<'db> {
pub fn pretty_print(&self, db: &'db dyn HirDatabase, display_target: DisplayTarget) -> String {
impl MirBody {
pub fn pretty_print(&self, db: &dyn HirDatabase, display_target: DisplayTarget) -> String {
let hir_body = db.body(self.owner);
let mut ctx = MirPrettyCtx::new(self, &hir_body, db, display_target);
ctx.for_body(|this| match ctx.body.owner {
@ -80,7 +80,7 @@ impl<'db> MirBody<'db> {
// String with lines is rendered poorly in `dbg` macros, which I use very much, so this
// function exists to solve that.
pub fn dbg(&self, db: &'db dyn HirDatabase, display_target: DisplayTarget) -> impl Debug {
pub fn dbg(&self, db: &dyn HirDatabase, display_target: DisplayTarget) -> impl Debug {
struct StringDbg(String);
impl Debug for StringDbg {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
@ -92,12 +92,12 @@ impl<'db> MirBody<'db> {
}
struct MirPrettyCtx<'a, 'db> {
body: &'a MirBody<'db>,
body: &'a MirBody,
hir_body: &'a Body,
db: &'db dyn HirDatabase,
result: String,
indent: String,
local_to_binding: ArenaMap<LocalId<'db>, BindingId>,
local_to_binding: ArenaMap<LocalId, BindingId>,
display_target: DisplayTarget,
}
@ -113,12 +113,12 @@ impl Write for MirPrettyCtx<'_, '_> {
}
}
enum LocalName<'db> {
Unknown(LocalId<'db>),
Binding(Name, LocalId<'db>),
enum LocalName {
Unknown(LocalId),
Binding(Name, LocalId),
}
impl<'db> HirDisplay<'db> for LocalName<'db> {
impl<'db> HirDisplay<'db> for LocalName {
fn hir_fmt(
&self,
f: &mut crate::display::HirFormatter<'_, 'db>,
@ -179,7 +179,7 @@ impl<'a, 'db> MirPrettyCtx<'a, 'db> {
}
fn new(
body: &'a MirBody<'db>,
body: &'a MirBody,
hir_body: &'a Body,
db: &'db dyn HirDatabase,
display_target: DisplayTarget,
@ -211,19 +211,19 @@ impl<'a, 'db> MirPrettyCtx<'a, 'db> {
self,
"let {}: {};",
self.local_name(id).display_test(self.db, self.display_target),
self.hir_display(&local.ty)
self.hir_display(&local.ty.as_ref())
);
}
}
fn local_name(&self, local: LocalId<'db>) -> LocalName<'db> {
fn local_name(&self, local: LocalId) -> LocalName {
match self.local_to_binding.get(local) {
Some(b) => LocalName::Binding(self.hir_body[*b].name.clone(), local),
None => LocalName::Unknown(local),
}
}
fn basic_block_id(&self, basic_block_id: BasicBlockId<'db>) -> String {
fn basic_block_id(&self, basic_block_id: BasicBlockId) -> String {
format!("'bb{}", u32::from(basic_block_id.into_raw()))
}
@ -311,12 +311,8 @@ impl<'a, 'db> MirPrettyCtx<'a, 'db> {
}
}
fn place(&mut self, p: &Place<'db>) {
fn f<'db>(
this: &mut MirPrettyCtx<'_, 'db>,
local: LocalId<'db>,
projections: &[PlaceElem<'db>],
) {
fn place(&mut self, p: &Place) {
fn f<'db>(this: &mut MirPrettyCtx<'_, 'db>, local: LocalId, projections: &[PlaceElem]) {
let Some((last, head)) = projections.split_last() else {
// no projection
w!(this, "{}", this.local_name(local).display_test(this.db, this.display_target));
@ -376,19 +372,21 @@ impl<'a, 'db> MirPrettyCtx<'a, 'db> {
f(self, p.local, p.projection.lookup(&self.body.projection_store));
}
fn operand(&mut self, r: &Operand<'db>) {
fn operand(&mut self, r: &Operand) {
match &r.kind {
OperandKind::Copy(p) | OperandKind::Move(p) => {
                // MIR at the time of writing doesn't have a difference between move and copy, so we show them
// equally. Feel free to change it.
self.place(p);
}
OperandKind::Constant { konst, .. } => w!(self, "Const({})", self.hir_display(konst)),
OperandKind::Constant { konst, .. } => {
w!(self, "Const({})", self.hir_display(&konst.as_ref()))
}
OperandKind::Static(s) => w!(self, "Static({:?})", s),
}
}
fn rvalue(&mut self, r: &Rvalue<'db>) {
fn rvalue(&mut self, r: &Rvalue) {
match r {
Rvalue::Use(op) => self.operand(op),
Rvalue::Ref(r, p) => {
@ -415,7 +413,7 @@ impl<'a, 'db> MirPrettyCtx<'a, 'db> {
Rvalue::Repeat(op, len) => {
w!(self, "[");
self.operand(op);
w!(self, "; {}]", len.display_test(self.db, self.display_target));
w!(self, "; {}]", len.as_ref().display_test(self.db, self.display_target));
}
Rvalue::Aggregate(AggregateKind::Adt(_, _), it) => {
w!(self, "Adt(");
@ -440,7 +438,7 @@ impl<'a, 'db> MirPrettyCtx<'a, 'db> {
Rvalue::Cast(ck, op, ty) => {
w!(self, "Cast({ck:?}, ");
self.operand(op);
w!(self, ", {})", self.hir_display(ty));
w!(self, ", {})", self.hir_display(&ty.as_ref()));
}
Rvalue::CheckedBinaryOp(b, o1, o2) => {
self.operand(o1);
@ -478,7 +476,7 @@ impl<'a, 'db> MirPrettyCtx<'a, 'db> {
}
}
fn operand_list(&mut self, it: &[Operand<'db>]) {
fn operand_list(&mut self, it: &[Operand]) {
let mut it = it.iter();
if let Some(first) = it.next() {
self.operand(first);

View file

@ -1,9 +1,15 @@
//! Things relevant to the next trait solver.
// Note: in interned types defined in this module, we generally treat the lifetime as advisory
// and transmute it as needed. This is because no real memory unsafety can be caused from an
// incorrect lifetime here.
pub mod abi;
mod binder;
mod consts;
mod def_id;
pub mod fold;
pub mod format_proof_tree;
pub mod fulfill;
mod generic_arg;
pub mod generics;
@ -21,6 +27,9 @@ mod structural_normalize;
mod ty;
pub mod util;
use std::{mem::ManuallyDrop, sync::OnceLock};
pub use binder::*;
pub use consts::*;
pub use def_id::*;
pub use generic_arg::*;
@ -31,6 +40,7 @@ pub use region::*;
pub use solver::*;
pub use ty::*;
use crate::db::HirDatabase;
pub use crate::lower::ImplTraitIdx;
pub use rustc_ast_ir::Mutability;
@ -47,3 +57,225 @@ pub type TypingMode<'db> = rustc_type_ir::TypingMode<DbInterner<'db>>;
pub type TypeError<'db> = rustc_type_ir::error::TypeError<DbInterner<'db>>;
pub type QueryResult<'db> = rustc_type_ir::solve::QueryResult<DbInterner<'db>>;
pub type FxIndexMap<K, V> = rustc_type_ir::data_structures::IndexMap<K, V>;
pub struct DefaultTypes<'db> {
pub usize: Ty<'db>,
pub u8: Ty<'db>,
pub u16: Ty<'db>,
pub u32: Ty<'db>,
pub u64: Ty<'db>,
pub u128: Ty<'db>,
pub isize: Ty<'db>,
pub i8: Ty<'db>,
pub i16: Ty<'db>,
pub i32: Ty<'db>,
pub i64: Ty<'db>,
pub i128: Ty<'db>,
pub f16: Ty<'db>,
pub f32: Ty<'db>,
pub f64: Ty<'db>,
pub f128: Ty<'db>,
pub unit: Ty<'db>,
pub bool: Ty<'db>,
pub char: Ty<'db>,
pub str: Ty<'db>,
pub never: Ty<'db>,
pub error: Ty<'db>,
/// `&'static str`
pub static_str_ref: Ty<'db>,
/// `*mut ()`
pub mut_unit_ptr: Ty<'db>,
}
pub struct DefaultConsts<'db> {
pub error: Const<'db>,
}
pub struct DefaultRegions<'db> {
pub error: Region<'db>,
pub statik: Region<'db>,
pub erased: Region<'db>,
}
pub struct DefaultEmpty<'db> {
pub tys: Tys<'db>,
pub generic_args: GenericArgs<'db>,
pub bound_var_kinds: BoundVarKinds<'db>,
pub canonical_vars: CanonicalVars<'db>,
pub variances: VariancesOf<'db>,
pub pat_list: PatList<'db>,
pub predefined_opaques: PredefinedOpaques<'db>,
pub def_ids: SolverDefIds<'db>,
pub bound_existential_predicates: BoundExistentialPredicates<'db>,
pub clauses: Clauses<'db>,
pub region_assumptions: RegionAssumptions<'db>,
}
pub struct DefaultAny<'db> {
pub types: DefaultTypes<'db>,
pub consts: DefaultConsts<'db>,
pub regions: DefaultRegions<'db>,
pub empty: DefaultEmpty<'db>,
/// `[Invariant]`
pub one_invariant: VariancesOf<'db>,
/// `[Covariant]`
pub one_covariant: VariancesOf<'db>,
/// `for<'env>`
pub coroutine_captures_by_ref_bound_var_kinds: BoundVarKinds<'db>,
}
impl std::fmt::Debug for DefaultAny<'_> {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
f.debug_struct("DefaultAny").finish_non_exhaustive()
}
}
#[inline]
pub fn default_types<'a, 'db>(db: &'db dyn HirDatabase) -> &'a DefaultAny<'db> {
static TYPES: OnceLock<DefaultAny<'static>> = OnceLock::new();
let interner = DbInterner::new_no_crate(db);
TYPES.get_or_init(|| {
let create_ty = |kind| {
let ty = Ty::new(interner, kind);
// We need to increase the refcount (forever), so that the types won't be freed.
let ty = ManuallyDrop::new(ty.store());
ty.as_ref()
};
let create_const = |kind| {
let ty = Const::new(interner, kind);
// We need to increase the refcount (forever), so that the types won't be freed.
let ty = ManuallyDrop::new(ty.store());
ty.as_ref()
};
let create_region = |kind| {
let ty = Region::new(interner, kind);
// We need to increase the refcount (forever), so that the types won't be freed.
let ty = ManuallyDrop::new(ty.store());
ty.as_ref()
};
let create_generic_args = |slice| {
let ty = GenericArgs::new_from_slice(slice);
// We need to increase the refcount (forever), so that the types won't be freed.
let ty = ManuallyDrop::new(ty.store());
ty.as_ref()
};
let create_bound_var_kinds = |slice| {
let ty = BoundVarKinds::new_from_slice(slice);
// We need to increase the refcount (forever), so that the types won't be freed.
let ty = ManuallyDrop::new(ty.store());
ty.as_ref()
};
let create_canonical_vars = |slice| {
let ty = CanonicalVars::new_from_slice(slice);
// We need to increase the refcount (forever), so that the types won't be freed.
let ty = ManuallyDrop::new(ty.store());
ty.as_ref()
};
let create_variances_of = |slice| {
let ty = VariancesOf::new_from_slice(slice);
// We need to increase the refcount (forever), so that the types won't be freed.
let ty = ManuallyDrop::new(ty.store());
ty.as_ref()
};
let create_pat_list = |slice| {
let ty = PatList::new_from_slice(slice);
// We need to increase the refcount (forever), so that the types won't be freed.
let ty = ManuallyDrop::new(ty.store());
ty.as_ref()
};
let create_predefined_opaques = |slice| {
let ty = PredefinedOpaques::new_from_slice(slice);
// We need to increase the refcount (forever), so that the types won't be freed.
let ty = ManuallyDrop::new(ty.store());
ty.as_ref()
};
let create_solver_def_ids = |slice| {
let ty = SolverDefIds::new_from_slice(slice);
// We need to increase the refcount (forever), so that the types won't be freed.
let ty = ManuallyDrop::new(ty.store());
ty.as_ref()
};
let create_bound_existential_predicates = |slice| {
let ty = BoundExistentialPredicates::new_from_slice(slice);
// We need to increase the refcount (forever), so that the types won't be freed.
let ty = ManuallyDrop::new(ty.store());
ty.as_ref()
};
let create_clauses = |slice| {
let ty = Clauses::new_from_slice(slice);
// We need to increase the refcount (forever), so that the types won't be freed.
let ty = ManuallyDrop::new(ty.store());
ty.as_ref()
};
let create_region_assumptions = |slice| {
let ty = RegionAssumptions::new_from_slice(slice);
// We need to increase the refcount (forever), so that the types won't be freed.
let ty = ManuallyDrop::new(ty.store());
ty.as_ref()
};
let create_tys = |slice| {
let ty = Tys::new_from_slice(slice);
// We need to increase the refcount (forever), so that the types won't be freed.
let ty = ManuallyDrop::new(ty.store());
ty.as_ref()
};
let str = create_ty(TyKind::Str);
let statik = create_region(RegionKind::ReStatic);
let empty_tys = create_tys(&[]);
let unit = create_ty(TyKind::Tuple(empty_tys));
DefaultAny {
types: DefaultTypes {
usize: create_ty(TyKind::Uint(rustc_ast_ir::UintTy::Usize)),
u8: create_ty(TyKind::Uint(rustc_ast_ir::UintTy::U8)),
u16: create_ty(TyKind::Uint(rustc_ast_ir::UintTy::U16)),
u32: create_ty(TyKind::Uint(rustc_ast_ir::UintTy::U32)),
u64: create_ty(TyKind::Uint(rustc_ast_ir::UintTy::U64)),
u128: create_ty(TyKind::Uint(rustc_ast_ir::UintTy::U128)),
isize: create_ty(TyKind::Int(rustc_ast_ir::IntTy::Isize)),
i8: create_ty(TyKind::Int(rustc_ast_ir::IntTy::I8)),
i16: create_ty(TyKind::Int(rustc_ast_ir::IntTy::I16)),
i32: create_ty(TyKind::Int(rustc_ast_ir::IntTy::I32)),
i64: create_ty(TyKind::Int(rustc_ast_ir::IntTy::I64)),
i128: create_ty(TyKind::Int(rustc_ast_ir::IntTy::I128)),
f16: create_ty(TyKind::Float(rustc_ast_ir::FloatTy::F16)),
f32: create_ty(TyKind::Float(rustc_ast_ir::FloatTy::F32)),
f64: create_ty(TyKind::Float(rustc_ast_ir::FloatTy::F64)),
f128: create_ty(TyKind::Float(rustc_ast_ir::FloatTy::F128)),
unit,
bool: create_ty(TyKind::Bool),
char: create_ty(TyKind::Char),
str,
never: create_ty(TyKind::Never),
error: create_ty(TyKind::Error(ErrorGuaranteed)),
static_str_ref: create_ty(TyKind::Ref(statik, str, rustc_ast_ir::Mutability::Not)),
mut_unit_ptr: create_ty(TyKind::RawPtr(unit, rustc_ast_ir::Mutability::Mut)),
},
consts: DefaultConsts { error: create_const(ConstKind::Error(ErrorGuaranteed)) },
regions: DefaultRegions {
error: create_region(RegionKind::ReError(ErrorGuaranteed)),
statik,
erased: create_region(RegionKind::ReErased),
},
empty: DefaultEmpty {
tys: empty_tys,
generic_args: create_generic_args(&[]),
bound_var_kinds: create_bound_var_kinds(&[]),
canonical_vars: create_canonical_vars(&[]),
variances: create_variances_of(&[]),
pat_list: create_pat_list(&[]),
predefined_opaques: create_predefined_opaques(&[]),
def_ids: create_solver_def_ids(&[]),
bound_existential_predicates: create_bound_existential_predicates(&[]),
clauses: create_clauses(&[]),
region_assumptions: create_region_assumptions(&[]),
},
one_invariant: create_variances_of(&[rustc_type_ir::Variance::Invariant]),
one_covariant: create_variances_of(&[rustc_type_ir::Variance::Covariant]),
coroutine_captures_by_ref_bound_var_kinds: create_bound_var_kinds(&[
BoundVarKind::Region(BoundRegionKind::ClosureEnv),
]),
}
})
}

View file

@ -0,0 +1,83 @@
use crate::{
FnAbi,
next_solver::{
Binder, Clauses, EarlyBinder, FnSig, PolyFnSig, StoredBoundVarKinds, StoredClauses,
StoredTy, StoredTys, Ty, abi::Safety,
},
};
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub struct StoredEarlyBinder<T>(T);
impl<T> StoredEarlyBinder<T> {
#[inline]
pub fn bind(value: T) -> Self {
Self(value)
}
#[inline]
pub fn skip_binder(self) -> T {
self.0
}
#[inline]
pub fn as_ref(&self) -> StoredEarlyBinder<&T> {
StoredEarlyBinder(&self.0)
}
#[inline]
pub fn get_with<'db, 'a, R>(&'a self, f: impl FnOnce(&'a T) -> R) -> EarlyBinder<'db, R> {
EarlyBinder::bind(f(&self.0))
}
}
impl StoredEarlyBinder<StoredTy> {
#[inline]
pub fn get<'db>(&self) -> EarlyBinder<'db, Ty<'db>> {
self.get_with(|it| it.as_ref())
}
}
impl StoredEarlyBinder<StoredClauses> {
#[inline]
pub fn get<'db>(&self) -> EarlyBinder<'db, Clauses<'db>> {
self.get_with(|it| it.as_ref())
}
}
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub struct StoredPolyFnSig {
bound_vars: StoredBoundVarKinds,
inputs_and_output: StoredTys,
c_variadic: bool,
safety: Safety,
abi: FnAbi,
}
impl StoredPolyFnSig {
#[inline]
pub fn new(sig: PolyFnSig<'_>) -> Self {
let bound_vars = sig.bound_vars().store();
let sig = sig.skip_binder();
Self {
bound_vars,
inputs_and_output: sig.inputs_and_output.store(),
c_variadic: sig.c_variadic,
safety: sig.safety,
abi: sig.abi,
}
}
#[inline]
pub fn get(&self) -> PolyFnSig<'_> {
Binder::bind_with_vars(
FnSig {
inputs_and_output: self.inputs_and_output.as_ref(),
c_variadic: self.c_variadic,
safety: self.safety,
abi: self.abi,
},
self.bound_vars.as_ref(),
)
}
}

View file

@ -3,19 +3,20 @@
use std::hash::Hash;
use hir_def::ConstParamId;
use macros::{TypeFoldable, TypeVisitable};
use intern::{Interned, InternedRef, impl_internable};
use macros::{GenericTypeVisitable, TypeFoldable, TypeVisitable};
use rustc_ast_ir::visit::VisitorResult;
use rustc_type_ir::{
BoundVar, BoundVarIndexKind, ConstVid, DebruijnIndex, FlagComputation, Flags, InferConst,
TypeFoldable, TypeSuperFoldable, TypeSuperVisitable, TypeVisitable, TypeVisitableExt,
WithCachedTypeInfo,
BoundVar, BoundVarIndexKind, ConstVid, DebruijnIndex, FlagComputation, Flags,
GenericTypeVisitable, InferConst, TypeFoldable, TypeSuperFoldable, TypeSuperVisitable,
TypeVisitable, TypeVisitableExt, WithCachedTypeInfo,
inherent::{IntoKind, ParamEnv as _, PlaceholderLike, SliceLike},
relate::Relate,
};
use crate::{
MemoryMap,
next_solver::{ClauseKind, ParamEnv, interner::InternedWrapperNoDebug},
next_solver::{ClauseKind, ParamEnv, impl_stored_interned},
};
use super::{BoundVarKind, DbInterner, ErrorGuaranteed, GenericArgs, Placeholder, Ty};
@ -23,34 +24,47 @@ use super::{BoundVarKind, DbInterner, ErrorGuaranteed, GenericArgs, Placeholder,
pub type ConstKind<'db> = rustc_type_ir::ConstKind<DbInterner<'db>>;
pub type UnevaluatedConst<'db> = rustc_type_ir::UnevaluatedConst<DbInterner<'db>>;
#[salsa::interned(constructor = new_)]
#[derive(Clone, Copy, PartialEq, Eq, Hash)]
pub struct Const<'db> {
#[returns(ref)]
kind_: InternedWrapperNoDebug<WithCachedTypeInfo<ConstKind<'db>>>,
pub(super) interned: InternedRef<'db, ConstInterned>,
}
#[derive(PartialEq, Eq, Hash, GenericTypeVisitable)]
#[repr(align(4))] // Required for `GenericArg` bit-tagging.
pub(super) struct ConstInterned(pub(super) WithCachedTypeInfo<ConstKind<'static>>);
impl_internable!(gc; ConstInterned);
impl_stored_interned!(ConstInterned, Const, StoredConst);
const _: () = {
const fn is_copy<T: Copy>() {}
is_copy::<Const<'static>>();
};
impl<'db> Const<'db> {
pub fn new(interner: DbInterner<'db>, kind: ConstKind<'db>) -> Self {
pub fn new(_interner: DbInterner<'db>, kind: ConstKind<'db>) -> Self {
let kind = unsafe { std::mem::transmute::<ConstKind<'db>, ConstKind<'static>>(kind) };
let flags = FlagComputation::for_const_kind(&kind);
let cached = WithCachedTypeInfo {
internee: kind,
flags: flags.flags,
outer_exclusive_binder: flags.outer_exclusive_binder,
};
Const::new_(interner.db(), InternedWrapperNoDebug(cached))
Self { interned: Interned::new_gc(ConstInterned(cached)) }
}
pub fn inner(&self) -> &WithCachedTypeInfo<ConstKind<'db>> {
crate::with_attached_db(|db| {
let inner = &self.kind_(db).0;
// SAFETY: The caller already has access to a `Const<'db>`, so borrowchecking will
// make sure that our returned value is valid for the lifetime `'db`.
unsafe { std::mem::transmute(inner) }
})
let inner = &self.interned.0;
unsafe {
std::mem::transmute::<
&WithCachedTypeInfo<ConstKind<'static>>,
&WithCachedTypeInfo<ConstKind<'db>>,
>(inner)
}
}
pub fn error(interner: DbInterner<'db>) -> Self {
Const::new(interner, ConstKind::Error(ErrorGuaranteed))
interner.default_types().consts.error
}
pub fn new_param(interner: DbInterner<'db>, param: ParamConst) -> Self {
@ -106,12 +120,6 @@ impl<'db> std::fmt::Debug for Const<'db> {
}
}
impl<'db> std::fmt::Debug for InternedWrapperNoDebug<WithCachedTypeInfo<ConstKind<'db>>> {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
self.0.internee.fmt(f)
}
}
pub type PlaceholderConst = Placeholder<BoundConst>;
#[derive(Copy, Clone, Hash, Eq, PartialEq)]
@ -164,7 +172,9 @@ impl ParamConst {
/// A type-level constant value.
///
/// Represents a typed, fully evaluated constant.
#[derive(Debug, Copy, Clone, Eq, PartialEq, Hash, TypeFoldable, TypeVisitable)]
#[derive(
Debug, Copy, Clone, Eq, PartialEq, Hash, TypeFoldable, TypeVisitable, GenericTypeVisitable,
)]
pub struct ValueConst<'db> {
pub ty: Ty<'db>,
// FIXME: Should we ignore this for TypeVisitable, TypeFoldable?
@ -190,7 +200,7 @@ impl<'db> rustc_type_ir::inherent::ValueConst<DbInterner<'db>> for ValueConst<'d
}
}
#[derive(Debug, Clone, PartialEq, Eq)]
#[derive(Debug, Clone, PartialEq, Eq, GenericTypeVisitable)]
pub struct ConstBytes<'db> {
pub memory: Box<[u8]>,
pub memory_map: MemoryMap<'db>,
@ -202,31 +212,52 @@ impl Hash for ConstBytes<'_> {
}
}
#[salsa::interned(constructor = new_, debug)]
#[derive(Clone, Copy, PartialEq, Eq, Hash)]
pub struct Valtree<'db> {
#[returns(ref)]
bytes_: ConstBytes<'db>,
interned: InternedRef<'db, ValtreeInterned>,
}
/// Generic visiting for `Valtree`: gives the visitor a chance to short-circuit
/// on the interned pointer before descending into the stored bytes.
impl<'db, V: super::WorldExposer> GenericTypeVisitable<V> for Valtree<'db> {
    fn generic_visit_with(&self, visitor: &mut V) {
        // Only recurse into the payload when `on_interned` returns `Continue`
        // (i.e. the visitor has not decided to skip this interned value).
        if visitor.on_interned(self.interned).is_continue() {
            self.inner().generic_visit_with(visitor);
        }
    }
}
/// Interned payload of a [`Valtree`]. The bytes are stored with a `'static`
/// lifetime internally; `Valtree::inner` transmutes them back to `'db`.
#[derive(Debug, PartialEq, Eq, Hash, GenericTypeVisitable)]
pub(super) struct ValtreeInterned(ConstBytes<'static>);

impl_internable!(gc; ValtreeInterned);

// Compile-time assertion that `Valtree` stays `Copy` (it should remain a thin
// interned reference, never an owning handle).
const _: () = {
    const fn is_copy<T: Copy>() {}
    is_copy::<Valtree<'static>>();
};
impl<'db> Valtree<'db> {
#[inline]
pub fn new(bytes: ConstBytes<'db>) -> Self {
crate::with_attached_db(|db| unsafe {
// SAFETY: ¯\_(ツ)_/¯
std::mem::transmute(Valtree::new_(db, bytes))
})
let bytes = unsafe { std::mem::transmute::<ConstBytes<'db>, ConstBytes<'static>>(bytes) };
Self { interned: Interned::new_gc(ValtreeInterned(bytes)) }
}
#[inline]
pub fn inner(&self) -> &ConstBytes<'db> {
crate::with_attached_db(|db| {
let inner = self.bytes_(db);
// SAFETY: The caller already has access to a `Valtree<'db>`, so borrowchecking will
// make sure that our returned value is valid for the lifetime `'db`.
unsafe { std::mem::transmute(inner) }
})
let inner = &self.interned.0;
unsafe { std::mem::transmute::<&ConstBytes<'static>, &ConstBytes<'db>>(inner) }
}
}
#[derive(Copy, Clone, Debug, Hash, PartialEq, Eq, TypeVisitable, TypeFoldable)]
/// Debug formatting simply forwards to the interned payload.
impl std::fmt::Debug for Valtree<'_> {
    fn fmt(&self, formatter: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        std::fmt::Debug::fmt(&self.interned, formatter)
    }
}
#[derive(
Copy, Clone, Debug, Hash, PartialEq, Eq, TypeVisitable, TypeFoldable, GenericTypeVisitable,
)]
pub struct ExprConst;
impl rustc_type_ir::inherent::ParamLike for ParamConst {
@ -243,6 +274,14 @@ impl<'db> IntoKind for Const<'db> {
}
}
/// Generic visiting for `Const`: lets the visitor short-circuit on the interned
/// pointer before walking the underlying `ConstKind`.
impl<'db, V: super::WorldExposer> GenericTypeVisitable<V> for Const<'db> {
    fn generic_visit_with(&self, visitor: &mut V) {
        // Recurse into the kind only when `on_interned` returns `Continue`.
        if visitor.on_interned(self.interned).is_continue() {
            self.kind().generic_visit_with(visitor);
        }
    }
}
impl<'db> TypeVisitable<DbInterner<'db>> for Const<'db> {
fn visit_with<V: rustc_type_ir::TypeVisitor<DbInterner<'db>>>(
&self,
@ -382,8 +421,8 @@ impl<'db> rustc_type_ir::inherent::Const<DbInterner<'db>> for Const<'db> {
Const::new(interner, ConstKind::Expr(expr))
}
fn new_error(interner: DbInterner<'db>, guar: ErrorGuaranteed) -> Self {
Const::new(interner, ConstKind::Error(guar))
fn new_error(interner: DbInterner<'db>, _guar: ErrorGuaranteed) -> Self {
Const::error(interner)
}
}

View file

@ -0,0 +1,93 @@
use rustc_type_ir::{solve::GoalSource, solve::inspect::GoalEvaluation};
use serde_derive::{Deserialize, Serialize};
use crate::next_solver::infer::InferCtxt;
use crate::next_solver::inspect::{InspectCandidate, InspectGoal};
use crate::next_solver::{DbInterner, Span};
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct ProofTreeData {
pub goal: String,
pub result: String,
pub depth: usize,
pub candidates: Vec<CandidateData>,
}
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct CandidateData {
pub kind: String,
pub result: String,
pub impl_header: Option<String>,
pub nested_goals: Vec<ProofTreeData>,
}
pub fn dump_proof_tree_structured<'db>(
proof_tree: GoalEvaluation<DbInterner<'db>>,
_span: Span,
infcx: &InferCtxt<'db>,
) -> ProofTreeData {
let goal_eval = InspectGoal::new(infcx, 0, proof_tree, None, GoalSource::Misc);
let mut serializer = ProofTreeSerializer::new(infcx);
serializer.serialize_goal(&goal_eval)
}
struct ProofTreeSerializer<'a, 'db> {
infcx: &'a InferCtxt<'db>,
}
impl<'a, 'db> ProofTreeSerializer<'a, 'db> {
fn new(infcx: &'a InferCtxt<'db>) -> Self {
Self { infcx }
}
fn serialize_goal(&mut self, goal: &InspectGoal<'_, 'db>) -> ProofTreeData {
let candidates = goal.candidates();
let candidates_data: Vec<CandidateData> =
candidates.iter().map(|c| self.serialize_candidate(c)).collect();
ProofTreeData {
goal: format!("{:?}", goal.goal()),
result: format!("{:?}", goal.result()),
depth: goal.depth(),
candidates: candidates_data,
}
}
fn serialize_candidate(&mut self, candidate: &InspectCandidate<'_, 'db>) -> CandidateData {
let kind = candidate.kind();
let impl_header = self.get_impl_header(candidate);
let mut nested = Vec::new();
self.infcx.probe(|_| {
for nested_goal in candidate.instantiate_nested_goals() {
nested.push(self.serialize_goal(&nested_goal));
}
});
CandidateData {
kind: format!("{:?}", kind),
result: format!("{:?}", candidate.result()),
impl_header,
nested_goals: nested,
}
}
fn get_impl_header(&self, candidate: &InspectCandidate<'_, 'db>) -> Option<String> {
use rustc_type_ir::solve::inspect::ProbeKind;
match candidate.kind() {
ProbeKind::TraitCandidate { source, .. } => {
use rustc_type_ir::solve::CandidateSource;
match source {
CandidateSource::Impl(impl_def_id) => {
use hir_def::{Lookup, src::HasSource};
let db = self.infcx.interner.db;
let impl_src = impl_def_id.0.lookup(db).source(db);
Some(impl_src.value.to_string())
}
_ => None,
}
}
_ => None,
}
}
}

View file

@ -187,6 +187,9 @@ impl<'db> FulfillmentCtxt<'db> {
}
let result = delegate.evaluate_root_goal(goal, Span::dummy(), stalled_on);
infcx.inspect_evaluated_obligation(&obligation, &result, || {
Some(delegate.evaluate_root_goal_for_proof_tree(goal, Span::dummy()).1)
});
let GoalEvaluation { goal: _, certainty, has_changed, stalled_on } = match result {
Ok(result) => result,
Err(NoSolution) => {
@ -249,7 +252,7 @@ impl<'db> FulfillmentCtxt<'db> {
| TypingMode::PostBorrowckAnalysis { defined_opaque_types: _ }
| TypingMode::PostAnalysis => return Default::default(),
};
let stalled_coroutines = stalled_coroutines.inner();
let stalled_coroutines = stalled_coroutines.as_slice();
if stalled_coroutines.is_empty() {
return Default::default();

View file

@ -9,7 +9,7 @@ use rustc_next_trait_solver::solve::{GoalEvaluation, SolverDelegateEvalExt};
use rustc_type_ir::{
AliasRelationDirection, AliasTermKind, HostEffectPredicate, Interner, PredicatePolarity,
error::ExpectedFound,
inherent::{IntoKind, SliceLike, Span as _},
inherent::{IntoKind, Span as _},
lang_items::SolverTraitLangItem,
solve::{Certainty, GoalSource, MaybeCause, NoSolution},
};

View file

@ -1,41 +1,226 @@
//! Things related to generic args in the next-trait-solver.
//! Things related to generic args in the next-trait-solver (`GenericArg`, `GenericArgs`, `Term`).
//!
//! Implementations of `GenericArg` and `Term` are pointer-tagged instead of an enum (rustc does
//! the same). This is done to save memory (which also helps speed) - one `GenericArg` is a machine
//! word instead of two, while matching on it is basically as cheap. The implementation for both
//! `GenericArg` and `Term` is shared in [`GenericArgImpl`]. This both simplifies the implementation,
//! as well as enables a noop conversion from `Term` to `GenericArg`.
use std::{hint::unreachable_unchecked, marker::PhantomData, ptr::NonNull};
use hir_def::{GenericDefId, GenericParamId};
use macros::{TypeFoldable, TypeVisitable};
use intern::InternedRef;
use rustc_type_ir::{
ClosureArgs, CollectAndApply, ConstVid, CoroutineArgs, CoroutineClosureArgs, FnSigTys,
GenericArgKind, Interner, TermKind, TyKind, TyVid, Variance,
ClosureArgs, ConstVid, CoroutineArgs, CoroutineClosureArgs, FallibleTypeFolder, FnSigTys,
GenericTypeVisitable, Interner, TyKind, TyVid, TypeFoldable, TypeFolder, TypeVisitable,
TypeVisitor, Variance,
inherent::{GenericArg as _, GenericsOf, IntoKind, SliceLike, Term as _, Ty as _},
relate::{Relate, VarianceDiagInfo},
walk::TypeWalker,
};
use smallvec::SmallVec;
use crate::next_solver::{PolyFnSig, interned_vec_db};
use crate::next_solver::{
ConstInterned, PolyFnSig, RegionInterned, TyInterned, impl_foldable_for_interned_slice,
interned_slice,
};
use super::{
Const, DbInterner, EarlyParamRegion, ErrorGuaranteed, ParamConst, Region, SolverDefId, Ty, Tys,
generics::Generics,
};
#[derive(Copy, Clone, PartialEq, Eq, Hash, TypeVisitable, TypeFoldable, salsa::Supertype)]
pub enum GenericArg<'db> {
Ty(Ty<'db>),
Lifetime(Region<'db>),
Const(Const<'db>),
pub type GenericArgKind<'db> = rustc_type_ir::GenericArgKind<DbInterner<'db>>;
pub type TermKind<'db> = rustc_type_ir::TermKind<DbInterner<'db>>;
/// Shared pointer-tagged representation backing both `GenericArg` and `Term`.
/// A single machine word holds the interned pointer plus a 2-bit kind tag.
#[derive(Clone, Copy, PartialEq, Eq, Hash)]
struct GenericArgImpl<'db> {
    /// # Invariant
    ///
    /// Contains an [`InternedRef`] of a [`Ty`], [`Const`] or [`Region`], bit-tagged as per the consts below.
    ptr: NonNull<()>,
    // Ties the erased pointer to the `'db` lifetime of the three possible pointees.
    _marker: PhantomData<(Ty<'db>, Const<'db>, Region<'db>)>,
}

// SAFETY: We essentially own the `Ty`, `Const` or `Region`, and they are `Send + Sync`.
unsafe impl Send for GenericArgImpl<'_> {}
unsafe impl Sync for GenericArgImpl<'_> {}
impl<'db> GenericArgImpl<'db> {
    /// Low 2 bits of the pointer encode which kind of value is stored.
    const KIND_MASK: usize = 0b11;
    /// Remaining bits hold the (suitably aligned) interned pointer itself.
    const PTR_MASK: usize = !Self::KIND_MASK;
    const TY_TAG: usize = 0b00;
    const CONST_TAG: usize = 0b01;
    const REGION_TAG: usize = 0b10;

    /// Wraps a `Ty`'s interned pointer, tagging it as a type.
    #[inline]
    fn new_ty(ty: Ty<'db>) -> Self {
        Self {
            // SAFETY: We create it from an `InternedRef`, and it's never null.
            ptr: unsafe {
                NonNull::new_unchecked(
                    ty.interned
                        .as_raw()
                        .cast::<()>()
                        .cast_mut()
                        .map_addr(|addr| addr | Self::TY_TAG),
                )
            },
            _marker: PhantomData,
        }
    }

    /// Wraps a `Const`'s interned pointer, tagging it as a const.
    #[inline]
    fn new_const(ct: Const<'db>) -> Self {
        Self {
            // SAFETY: We create it from an `InternedRef`, and it's never null.
            ptr: unsafe {
                NonNull::new_unchecked(
                    ct.interned
                        .as_raw()
                        .cast::<()>()
                        .cast_mut()
                        .map_addr(|addr| addr | Self::CONST_TAG),
                )
            },
            _marker: PhantomData,
        }
    }

    /// Wraps a `Region`'s interned pointer, tagging it as a lifetime.
    #[inline]
    fn new_region(region: Region<'db>) -> Self {
        Self {
            // SAFETY: We create it from an `InternedRef`, and it's never null.
            ptr: unsafe {
                NonNull::new_unchecked(
                    region
                        .interned
                        .as_raw()
                        .cast::<()>()
                        .cast_mut()
                        .map_addr(|addr| addr | Self::REGION_TAG),
                )
            },
            _marker: PhantomData,
        }
    }

    /// Strips the tag bits and reconstructs the stored value as a
    /// `GenericArgKind` (type, lifetime, or const).
    #[inline]
    fn kind(self) -> GenericArgKind<'db> {
        let ptr = self.ptr.as_ptr().map_addr(|addr| addr & Self::PTR_MASK);
        // SAFETY: We can only be created from a `Ty`, a `Const` or a `Region`, and the tag will match.
        unsafe {
            match self.ptr.addr().get() & Self::KIND_MASK {
                Self::TY_TAG => GenericArgKind::Type(Ty {
                    interned: InternedRef::from_raw(ptr.cast::<TyInterned>()),
                }),
                Self::CONST_TAG => GenericArgKind::Const(Const {
                    interned: InternedRef::from_raw(ptr.cast::<ConstInterned>()),
                }),
                Self::REGION_TAG => GenericArgKind::Lifetime(Region {
                    interned: InternedRef::from_raw(ptr.cast::<RegionInterned>()),
                }),
                _ => unreachable_unchecked(),
            }
        }
    }

    /// Like [`Self::kind`], but for `Term`s, which can never hold a region.
    #[inline]
    fn term_kind(self) -> TermKind<'db> {
        let ptr = self.ptr.as_ptr().map_addr(|addr| addr & Self::PTR_MASK);
        // SAFETY: We can only be created from a `Ty`, a `Const` or a `Region`, and the tag will match.
        // It is the caller's responsibility (encapsulated within this module) to only call this with
        // `Term`, which cannot be constructed from a `Region`.
        unsafe {
            match self.ptr.addr().get() & Self::KIND_MASK {
                Self::TY_TAG => {
                    TermKind::Ty(Ty { interned: InternedRef::from_raw(ptr.cast::<TyInterned>()) })
                }
                Self::CONST_TAG => TermKind::Const(Const {
                    interned: InternedRef::from_raw(ptr.cast::<ConstInterned>()),
                }),
                _ => unreachable_unchecked(),
            }
        }
    }
}
/// An owning (refcounted) handle to a generic arg, usable outside the `'db`
/// lifetime. `Clone`/`Drop` below maintain the pointee's reference count.
#[derive(PartialEq, Eq, Hash)]
pub struct StoredGenericArg {
    // Tagged pointer with the lifetime erased to `'static`.
    ptr: GenericArgImpl<'static>,
}
impl Clone for StoredGenericArg {
    #[inline]
    fn clone(&self) -> Self {
        // Bump the pointee's refcount: `to_owned()` produces an owning handle
        // (incrementing the count), which we deliberately leak with
        // `mem::forget` so the count stays one higher for the new copy.
        match self.ptr.kind() {
            GenericArgKind::Lifetime(it) => std::mem::forget(it.interned.to_owned()),
            GenericArgKind::Type(it) => std::mem::forget(it.interned.to_owned()),
            GenericArgKind::Const(it) => std::mem::forget(it.interned.to_owned()),
        }
        Self { ptr: self.ptr }
    }
}
impl Drop for StoredGenericArg {
    #[inline]
    fn drop(&mut self) {
        // Release the refcount taken in `StoredGenericArg::new`/`clone`.
        // SAFETY note: each `StoredGenericArg` holds exactly one count on its
        // pointee, so decrementing once here balances it.
        unsafe {
            match self.ptr.kind() {
                GenericArgKind::Lifetime(it) => it.interned.decrement_refcount(),
                GenericArgKind::Type(it) => it.interned.decrement_refcount(),
                GenericArgKind::Const(it) => it.interned.decrement_refcount(),
            }
        }
    }
}
impl StoredGenericArg {
    /// Creates an owning handle from a borrowed `GenericArg`, taking one
    /// reference count on the pointee.
    #[inline]
    fn new(value: GenericArg<'_>) -> Self {
        let result = Self { ptr: GenericArgImpl { ptr: value.ptr.ptr, _marker: PhantomData } };
        // Increase refcount.
        std::mem::forget(result.clone());
        result
    }

    /// Re-borrows the stored arg. The caller-chosen `'db` is sound as long as
    /// this handle (which keeps the pointee alive) outlives the borrow's use.
    #[inline]
    pub fn as_ref<'db>(&self) -> GenericArg<'db> {
        GenericArg { ptr: self.ptr }
    }
}
/// Debug output matches the borrowed `GenericArg` form.
impl std::fmt::Debug for StoredGenericArg {
    fn fmt(&self, formatter: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        std::fmt::Debug::fmt(&self.as_ref(), formatter)
    }
}
/// A borrowed generic argument (type, lifetime, or const), stored as a single
/// pointer-tagged word. Use [`GenericArg::kind`] to match on the contents.
#[derive(Copy, Clone, PartialEq, Eq, Hash)]
pub struct GenericArg<'db> {
    ptr: GenericArgImpl<'db>,
}
impl<'db> std::fmt::Debug for GenericArg<'db> {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
match self {
Self::Ty(t) => std::fmt::Debug::fmt(t, f),
Self::Lifetime(r) => std::fmt::Debug::fmt(r, f),
Self::Const(c) => std::fmt::Debug::fmt(c, f),
match self.kind() {
GenericArgKind::Type(t) => std::fmt::Debug::fmt(&t, f),
GenericArgKind::Lifetime(r) => std::fmt::Debug::fmt(&r, f),
GenericArgKind::Const(c) => std::fmt::Debug::fmt(&c, f),
}
}
}
impl<'db> GenericArg<'db> {
/// Converts this borrowed arg into an owning, refcounted handle.
#[inline]
pub fn store(self) -> StoredGenericArg {
    StoredGenericArg::new(self)
}

/// Decodes the tagged pointer into a type / lifetime / const kind.
#[inline]
pub fn kind(self) -> GenericArgKind<'db> {
    self.ptr.kind()
}
pub fn ty(self) -> Option<Ty<'db>> {
match self.kind() {
GenericArgKind::Type(ty) => Some(ty),
@ -66,8 +251,8 @@ impl<'db> GenericArg<'db> {
#[inline]
pub(crate) fn expect_region(self) -> Region<'db> {
match self {
GenericArg::Lifetime(region) => region,
match self.kind() {
GenericArgKind::Lifetime(region) => region,
_ => panic!("expected a region, got {self:?}"),
}
}
@ -87,30 +272,32 @@ impl<'db> GenericArg<'db> {
}
impl<'db> From<Term<'db>> for GenericArg<'db> {
#[inline]
fn from(value: Term<'db>) -> Self {
match value {
Term::Ty(ty) => GenericArg::Ty(ty),
Term::Const(c) => GenericArg::Const(c),
}
GenericArg { ptr: value.ptr }
}
}
#[derive(Copy, Clone, PartialEq, Eq, Hash, TypeVisitable, TypeFoldable)]
pub enum Term<'db> {
Ty(Ty<'db>),
Const(Const<'db>),
#[derive(Copy, Clone, PartialEq, Eq, Hash)]
pub struct Term<'db> {
ptr: GenericArgImpl<'db>,
}
impl<'db> std::fmt::Debug for Term<'db> {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
match self {
Self::Ty(t) => std::fmt::Debug::fmt(t, f),
Self::Const(c) => std::fmt::Debug::fmt(c, f),
match self.kind() {
TermKind::Ty(t) => std::fmt::Debug::fmt(&t, f),
TermKind::Const(c) => std::fmt::Debug::fmt(&c, f),
}
}
}
impl<'db> Term<'db> {
#[inline]
pub fn kind(self) -> TermKind<'db> {
self.ptr.term_kind()
}
pub fn expect_type(&self) -> Ty<'db> {
self.as_type().expect("expected a type, but found a const")
}
@ -124,31 +311,108 @@ impl<'db> Term<'db> {
}
impl<'db> From<Ty<'db>> for GenericArg<'db> {
#[inline]
fn from(value: Ty<'db>) -> Self {
Self::Ty(value)
GenericArg { ptr: GenericArgImpl::new_ty(value) }
}
}
impl<'db> From<Region<'db>> for GenericArg<'db> {
#[inline]
fn from(value: Region<'db>) -> Self {
Self::Lifetime(value)
GenericArg { ptr: GenericArgImpl::new_region(value) }
}
}
impl<'db> From<Const<'db>> for GenericArg<'db> {
#[inline]
fn from(value: Const<'db>) -> Self {
Self::Const(value)
GenericArg { ptr: GenericArgImpl::new_const(value) }
}
}
impl<'db> IntoKind for GenericArg<'db> {
type Kind = GenericArgKind<DbInterner<'db>>;
type Kind = GenericArgKind<'db>;
#[inline]
fn kind(self) -> Self::Kind {
match self {
GenericArg::Ty(ty) => GenericArgKind::Type(ty),
GenericArg::Lifetime(region) => GenericArgKind::Lifetime(region),
GenericArg::Const(c) => GenericArgKind::Const(c),
self.ptr.kind()
}
}
/// Generic visiting delegates to the decoded kind (`Ty`/`Region`/`Const`).
impl<'db, V> GenericTypeVisitable<V> for GenericArg<'db>
where
    GenericArgKind<'db>: GenericTypeVisitable<V>,
{
    fn generic_visit_with(&self, visitor: &mut V) {
        self.kind().generic_visit_with(visitor);
    }
}
/// Generic visiting delegates to the decoded term kind (`Ty`/`Const`).
impl<'db, V> GenericTypeVisitable<V> for Term<'db>
where
    TermKind<'db>: GenericTypeVisitable<V>,
{
    fn generic_visit_with(&self, visitor: &mut V) {
        self.kind().generic_visit_with(visitor);
    }
}
/// Visiting a `GenericArg` dispatches on its decoded kind.
impl<'db> TypeVisitable<DbInterner<'db>> for GenericArg<'db> {
    fn visit_with<V: TypeVisitor<DbInterner<'db>>>(&self, visitor: &mut V) -> V::Result {
        match self.kind() {
            GenericArgKind::Type(ty) => ty.visit_with(visitor),
            GenericArgKind::Const(ct) => ct.visit_with(visitor),
            GenericArgKind::Lifetime(region) => region.visit_with(visitor),
        }
    }
}
/// Visiting a `Term` dispatches on its decoded kind.
impl<'db> TypeVisitable<DbInterner<'db>> for Term<'db> {
    fn visit_with<V: TypeVisitor<DbInterner<'db>>>(&self, visitor: &mut V) -> V::Result {
        match self.kind() {
            TermKind::Const(ct) => ct.visit_with(visitor),
            TermKind::Ty(ty) => ty.visit_with(visitor),
        }
    }
}
/// Folding a `GenericArg` folds the decoded kind and re-tags the result.
impl<'db> TypeFoldable<DbInterner<'db>> for GenericArg<'db> {
    fn try_fold_with<F: FallibleTypeFolder<DbInterner<'db>>>(
        self,
        folder: &mut F,
    ) -> Result<Self, F::Error> {
        match self.kind() {
            GenericArgKind::Type(ty) => Ok(ty.try_fold_with(folder)?.into()),
            GenericArgKind::Const(ct) => Ok(ct.try_fold_with(folder)?.into()),
            GenericArgKind::Lifetime(region) => Ok(region.try_fold_with(folder)?.into()),
        }
    }

    fn fold_with<F: TypeFolder<DbInterner<'db>>>(self, folder: &mut F) -> Self {
        match self.kind() {
            GenericArgKind::Type(ty) => ty.fold_with(folder).into(),
            GenericArgKind::Const(ct) => ct.fold_with(folder).into(),
            GenericArgKind::Lifetime(region) => region.fold_with(folder).into(),
        }
    }
}
/// Folding a `Term` folds the decoded kind and re-tags the result.
impl<'db> TypeFoldable<DbInterner<'db>> for Term<'db> {
    fn try_fold_with<F: FallibleTypeFolder<DbInterner<'db>>>(
        self,
        folder: &mut F,
    ) -> Result<Self, F::Error> {
        match self.kind() {
            TermKind::Const(ct) => Ok(ct.try_fold_with(folder)?.into()),
            TermKind::Ty(ty) => Ok(ty.try_fold_with(folder)?.into()),
        }
    }

    fn fold_with<F: TypeFolder<DbInterner<'db>>>(self, folder: &mut F) -> Self {
        match self.kind() {
            TermKind::Const(ct) => ct.fold_with(folder).into(),
            TermKind::Ty(ty) => ty.fold_with(folder).into(),
        }
    }
}
@ -182,7 +446,15 @@ impl<'db> Relate<DbInterner<'db>> for GenericArg<'db> {
}
}
interned_vec_db!(GenericArgs, GenericArg);
interned_slice!(
GenericArgsStorage,
GenericArgs,
StoredGenericArgs,
generic_args,
GenericArg<'db>,
GenericArg<'static>,
);
impl_foldable_for_interned_slice!(GenericArgs);
impl<'db> rustc_type_ir::inherent::GenericArg<DbInterner<'db>> for GenericArg<'db> {}
@ -306,11 +578,10 @@ impl<'db> GenericArgs<'db> {
/// A "sensible" `.split_closure_args()`, where the arguments are not in a tuple.
pub fn split_closure_args_untupled(self) -> rustc_type_ir::ClosureArgsParts<DbInterner<'db>> {
// FIXME: should use `ClosureSubst` when possible
match self.inner().as_slice() {
match self.as_slice() {
[parent_args @ .., closure_kind_ty, sig_ty, tupled_upvars_ty] => {
let interner = DbInterner::conjure();
rustc_type_ir::ClosureArgsParts {
parent_args: GenericArgs::new_from_iter(interner, parent_args.iter().cloned()),
parent_args,
closure_sig_as_fn_ptr_ty: sig_ty.expect_ty(),
closure_kind_ty: closure_kind_ty.expect_ty(),
tupled_upvars_ty: tupled_upvars_ty.expect_ty(),
@ -341,8 +612,8 @@ impl<'db> rustc_type_ir::relate::Relate<DbInterner<'db>> for GenericArgs<'db> {
a: Self,
b: Self,
) -> rustc_type_ir::relate::RelateResult<DbInterner<'db>, Self> {
let interner = relation.cx();
CollectAndApply::collect_and_apply(
GenericArgs::new_from_iter(
relation.cx(),
std::iter::zip(a.iter(), b.iter()).map(|(a, b)| {
relation.relate_with_variance(
Variance::Invariant,
@ -351,7 +622,6 @@ impl<'db> rustc_type_ir::relate::Relate<DbInterner<'db>> for GenericArgs<'db> {
b,
)
}),
|g| GenericArgs::new_from_iter(interner, g.iter().cloned()),
)
}
}
@ -397,29 +667,26 @@ impl<'db> rustc_type_ir::inherent::GenericArgs<DbInterner<'db>> for GenericArgs<
})
}
fn type_at(self, i: usize) -> <DbInterner<'db> as rustc_type_ir::Interner>::Ty {
self.inner()
.get(i)
self.get(i)
.and_then(|g| g.as_type())
.unwrap_or_else(|| Ty::new_error(DbInterner::conjure(), ErrorGuaranteed))
}
fn region_at(self, i: usize) -> <DbInterner<'db> as rustc_type_ir::Interner>::Region {
self.inner()
.get(i)
self.get(i)
.and_then(|g| g.as_region())
.unwrap_or_else(|| Region::error(DbInterner::conjure()))
}
fn const_at(self, i: usize) -> <DbInterner<'db> as rustc_type_ir::Interner>::Const {
self.inner()
.get(i)
self.get(i)
.and_then(|g| g.as_const())
.unwrap_or_else(|| Const::error(DbInterner::conjure()))
}
fn split_closure_args(self) -> rustc_type_ir::ClosureArgsParts<DbInterner<'db>> {
// FIXME: should use `ClosureSubst` when possible
match self.inner().as_slice() {
match self.as_slice() {
[parent_args @ .., closure_kind_ty, sig_ty, tupled_upvars_ty] => {
let interner = DbInterner::conjure();
// This is stupid, but the next solver expects the first input to actually be a tuple
@ -428,13 +695,10 @@ impl<'db> rustc_type_ir::inherent::GenericArgs<DbInterner<'db>> for GenericArgs<
interner,
TyKind::FnPtr(
sig_tys.map_bound(|s| {
let inputs = Ty::new_tup_from_iter(interner, s.inputs().iter());
let inputs = Ty::new_tup(interner, s.inputs());
let output = s.output();
FnSigTys {
inputs_and_output: Tys::new_from_iter(
interner,
[inputs, output],
),
inputs_and_output: Tys::new_from_slice(&[inputs, output]),
}
}),
header,
@ -443,7 +707,7 @@ impl<'db> rustc_type_ir::inherent::GenericArgs<DbInterner<'db>> for GenericArgs<
_ => unreachable!("sig_ty should be last"),
};
rustc_type_ir::ClosureArgsParts {
parent_args: GenericArgs::new_from_iter(interner, parent_args.iter().cloned()),
parent_args,
closure_sig_as_fn_ptr_ty: sig_ty,
closure_kind_ty: closure_kind_ty.expect_ty(),
tupled_upvars_ty: tupled_upvars_ty.expect_ty(),
@ -458,7 +722,7 @@ impl<'db> rustc_type_ir::inherent::GenericArgs<DbInterner<'db>> for GenericArgs<
fn split_coroutine_closure_args(
self,
) -> rustc_type_ir::CoroutineClosureArgsParts<DbInterner<'db>> {
match self.inner().as_slice() {
match self.as_slice() {
[
parent_args @ ..,
closure_kind_ty,
@ -466,10 +730,7 @@ impl<'db> rustc_type_ir::inherent::GenericArgs<DbInterner<'db>> for GenericArgs<
tupled_upvars_ty,
coroutine_captures_by_ref_ty,
] => rustc_type_ir::CoroutineClosureArgsParts {
parent_args: GenericArgs::new_from_iter(
DbInterner::conjure(),
parent_args.iter().cloned(),
),
parent_args,
closure_kind_ty: closure_kind_ty.expect_ty(),
signature_parts_ty: signature_parts_ty.expect_ty(),
tupled_upvars_ty: tupled_upvars_ty.expect_ty(),
@ -480,11 +741,10 @@ impl<'db> rustc_type_ir::inherent::GenericArgs<DbInterner<'db>> for GenericArgs<
}
fn split_coroutine_args(self) -> rustc_type_ir::CoroutineArgsParts<DbInterner<'db>> {
let interner = DbInterner::conjure();
match self.inner().as_slice() {
match self.as_slice() {
[parent_args @ .., kind_ty, resume_ty, yield_ty, return_ty, tupled_upvars_ty] => {
rustc_type_ir::CoroutineArgsParts {
parent_args: GenericArgs::new_from_iter(interner, parent_args.iter().cloned()),
parent_args,
kind_ty: kind_ty.expect_ty(),
resume_ty: resume_ty.expect_ty(),
yield_ty: yield_ty.expect_ty(),
@ -518,25 +778,25 @@ pub fn error_for_param_kind<'db>(id: GenericParamId, interner: DbInterner<'db>)
}
impl<'db> IntoKind for Term<'db> {
type Kind = TermKind<DbInterner<'db>>;
type Kind = TermKind<'db>;
#[inline]
fn kind(self) -> Self::Kind {
match self {
Term::Ty(ty) => TermKind::Ty(ty),
Term::Const(c) => TermKind::Const(c),
}
self.ptr.term_kind()
}
}
impl<'db> From<Ty<'db>> for Term<'db> {
#[inline]
fn from(value: Ty<'db>) -> Self {
Self::Ty(value)
Term { ptr: GenericArgImpl::new_ty(value) }
}
}
impl<'db> From<Const<'db>> for Term<'db> {
#[inline]
fn from(value: Const<'db>) -> Self {
Self::Const(value)
Term { ptr: GenericArgImpl::new_const(value) }
}
}
@ -583,7 +843,7 @@ impl From<ConstVid> for TermVid {
impl<'db> DbInterner<'db> {
pub(super) fn mk_args(self, args: &[GenericArg<'db>]) -> GenericArgs<'db> {
GenericArgs::new_from_iter(self, args.iter().cloned())
GenericArgs::new_from_slice(args)
}
pub(super) fn mk_args_from_iter<I, T>(self, iter: I) -> T::Output

View file

@ -28,7 +28,7 @@
use rustc_type_ir::{
FnSig, GenericArgKind, TypeFoldable, TypingMode, Variance,
error::ExpectedFound,
inherent::{IntoKind, Span as _},
inherent::Span as _,
relate::{Relate, TypeRelation, solver_relating::RelateExt},
};
@ -68,6 +68,7 @@ impl<'db> InferCtxt<'db> {
inner: self.inner.clone(),
tainted_by_errors: self.tainted_by_errors.clone(),
universe: self.universe.clone(),
obligation_inspector: self.obligation_inspector.clone(),
}
}
@ -84,6 +85,7 @@ impl<'db> InferCtxt<'db> {
inner: self.inner.clone(),
tainted_by_errors: self.tainted_by_errors.clone(),
universe: self.universe.clone(),
obligation_inspector: self.obligation_inspector.clone(),
}
}
}

View file

@ -8,7 +8,7 @@
use rustc_hash::FxHashMap;
use rustc_index::Idx;
use rustc_type_ir::InferTy::{self, FloatVar, IntVar, TyVar};
use rustc_type_ir::inherent::{Const as _, IntoKind as _, Region as _, SliceLike, Ty as _};
use rustc_type_ir::inherent::{Const as _, IntoKind as _, Region as _, Ty as _};
use rustc_type_ir::{
BoundVar, BoundVarIndexKind, DebruijnIndex, Flags, InferConst, RegionKind, TyVid, TypeFlags,
TypeFoldable, TypeFolder, TypeSuperFoldable, TypeVisitableExt, UniverseIndex,
@ -498,7 +498,7 @@ impl<'cx, 'db> Canonicalizer<'cx, 'db> {
{
let base = Canonical {
max_universe: UniverseIndex::ROOT,
variables: CanonicalVars::new_from_iter(tcx, []),
variables: CanonicalVars::empty(tcx),
value: (),
};
Canonicalizer::canonicalize_with_base(
@ -562,7 +562,7 @@ impl<'cx, 'db> Canonicalizer<'cx, 'db> {
debug_assert!(!out_value.has_infer() && !out_value.has_placeholders());
let canonical_variables =
CanonicalVars::new_from_iter(tcx, canonicalizer.universe_canonicalized_variables());
CanonicalVars::new_from_slice(&canonicalizer.universe_canonicalized_variables());
let max_universe = canonical_variables
.iter()

View file

@ -23,7 +23,7 @@ use rustc_index::{Idx as _, IndexVec};
use rustc_type_ir::{
BoundVar, BoundVarIndexKind, GenericArgKind, TypeFlags, TypeFoldable, TypeFolder,
TypeSuperFoldable, TypeVisitableExt, UniverseIndex,
inherent::{GenericArg as _, IntoKind, SliceLike},
inherent::{GenericArg as _, IntoKind},
};
use tracing::{debug, instrument};

View file

@ -10,8 +10,9 @@ use ena::unify as ut;
use hir_def::GenericParamId;
use opaque_types::{OpaqueHiddenType, OpaqueTypeStorage};
use region_constraints::{RegionConstraintCollector, RegionConstraintStorage};
use rustc_next_trait_solver::solve::SolverDelegateEvalExt;
use rustc_next_trait_solver::solve::{GoalEvaluation, SolverDelegateEvalExt};
use rustc_pattern_analysis::Captures;
use rustc_type_ir::solve::{NoSolution, inspect};
use rustc_type_ir::{
ClosureKind, ConstVid, FloatVarValue, FloatVid, GenericArgKind, InferConst, InferTy,
IntVarValue, IntVid, OutlivesPredicate, RegionVid, TermKind, TyVid, TypeFoldable, TypeFolder,
@ -27,6 +28,7 @@ use traits::{ObligationCause, PredicateObligations};
use type_variable::TypeVariableOrigin;
use unify_key::{ConstVariableOrigin, ConstVariableValue, ConstVidKey};
pub use crate::next_solver::infer::traits::ObligationInspector;
use crate::next_solver::{
ArgOutlivesPredicate, BoundConst, BoundRegion, BoundTy, BoundVarKind, Goal, Predicate,
SolverContext,
@ -250,6 +252,8 @@ pub struct InferCtxt<'db> {
/// when we enter into a higher-ranked (`for<..>`) type or trait
/// bound.
universe: Cell<UniverseIndex>,
obligation_inspector: Cell<Option<ObligationInspector<'db>>>,
}
/// See the `error_reporting` module for more details.
@ -375,6 +379,7 @@ impl<'db> InferCtxtBuilder<'db> {
inner: RefCell::new(InferCtxtInner::new()),
tainted_by_errors: Cell::new(None),
universe: Cell::new(UniverseIndex::ROOT),
obligation_inspector: Cell::new(None),
}
}
}
@ -1223,6 +1228,30 @@ impl<'db> InferCtxt<'db> {
fn sub_unify_ty_vids_raw(&self, a: rustc_type_ir::TyVid, b: rustc_type_ir::TyVid) {
self.inner.borrow_mut().type_variables().sub_unify(a, b);
}
/// Attach a callback to be invoked on each root obligation evaluated in the new trait solver.
///
/// Only one inspector may be registered per `InferCtxt`; registering a second
/// one is a bug (checked in debug builds).
pub fn attach_obligation_inspector(&self, inspector: ObligationInspector<'db>) {
    debug_assert!(
        self.obligation_inspector.get().is_none(),
        "shouldn't override a set obligation inspector"
    );
    self.obligation_inspector.set(Some(inspector));
}
/// Reports an evaluated root obligation to the attached inspector, if any.
///
/// `get_proof_tree` is only invoked when an inspector is actually registered,
/// so proof-tree construction stays free in the common case.
pub fn inspect_evaluated_obligation(
    &self,
    obligation: &PredicateObligation<'db>,
    result: &Result<GoalEvaluation<DbInterner<'db>>, NoSolution>,
    get_proof_tree: impl FnOnce() -> Option<inspect::GoalEvaluation<DbInterner<'db>>>,
) {
    let Some(inspector) = self.obligation_inspector.get() else {
        return;
    };
    // Reduce the full evaluation to the coarse certainty outcome.
    let outcome = match result {
        Ok(evaluation) => Ok(evaluation.certainty),
        Err(NoSolution) => Err(NoSolution),
    };
    inspector(self, obligation, outcome, get_proof_tree());
}
}
/// Helper for [InferCtxt::ty_or_const_infer_var_changed] (see comment on that), currently

View file

@ -3,7 +3,7 @@ use rustc_type_ir::{OutlivesPredicate, TypeVisitableExt};
use tracing::{debug, instrument};
use crate::next_solver::{
ArgOutlivesPredicate, GenericArg, Region, RegionOutlivesPredicate, Ty,
ArgOutlivesPredicate, GenericArgKind, Region, RegionOutlivesPredicate, Ty,
infer::{InferCtxt, TypeOutlivesConstraint, snapshot::undo_log::UndoLog},
};
@ -12,14 +12,14 @@ impl<'db> InferCtxt<'db> {
&self,
OutlivesPredicate(arg, r2): ArgOutlivesPredicate<'db>,
) {
match arg {
GenericArg::Lifetime(r1) => {
match arg.kind() {
GenericArgKind::Lifetime(r1) => {
self.register_region_outlives_constraint(OutlivesPredicate(r1, r2));
}
GenericArg::Ty(ty1) => {
GenericArgKind::Type(ty1) => {
self.register_type_outlives_constraint(ty1, r2);
}
GenericArg::Const(_) => unreachable!(),
GenericArgKind::Const(_) => unreachable!(),
}
}

View file

@ -16,14 +16,14 @@ use tracing::{debug, instrument, warn};
use super::{
PredicateEmittingRelation, Relate, RelateResult, StructurallyRelateAliases, TypeRelation,
};
use crate::next_solver::infer::type_variable::TypeVariableValue;
use crate::next_solver::infer::unify_key::ConstVariableValue;
use crate::next_solver::infer::{InferCtxt, relate};
use crate::next_solver::util::MaxUniverse;
use crate::next_solver::{
AliasTy, Binder, ClauseKind, Const, ConstKind, DbInterner, GenericArgs, PredicateKind, Region,
SolverDefId, Term, TermVid, Ty, TyKind, TypingMode, UnevaluatedConst,
AliasTy, Binder, ClauseKind, Const, ConstKind, DbInterner, PredicateKind, Region, SolverDefId,
Term, TermVid, Ty, TyKind, TypingMode, UnevaluatedConst,
};
use crate::next_solver::{GenericArgs, infer::type_variable::TypeVariableValue};
impl<'db> InferCtxt<'db> {
/// The idea is that we should ensure that the type variable `target_vid`
@ -384,29 +384,26 @@ impl<'db> TypeRelation<DbInterner<'db>> for Generalizer<'_, 'db> {
self.infcx.interner
}
fn relate_item_args(
fn relate_ty_args(
&mut self,
item_def_id: SolverDefId,
a_arg: GenericArgs<'db>,
b_arg: GenericArgs<'db>,
) -> RelateResult<'db, GenericArgs<'db>> {
if self.ambient_variance == Variance::Invariant {
a_ty: Ty<'db>,
_: Ty<'db>,
def_id: SolverDefId,
a_args: GenericArgs<'db>,
b_args: GenericArgs<'db>,
mk: impl FnOnce(GenericArgs<'db>) -> Ty<'db>,
) -> RelateResult<'db, Ty<'db>> {
let args = if self.ambient_variance == Variance::Invariant {
// Avoid fetching the variance if we are in an invariant
// context; no need, and it can induce dependency cycles
// (e.g., #41849).
relate::relate_args_invariantly(self, a_arg, b_arg)
relate::relate_args_invariantly(self, a_args, b_args)
} else {
let tcx = self.cx();
let opt_variances = tcx.variances_of(item_def_id);
relate::relate_args_with_variances(
self,
item_def_id,
opt_variances,
a_arg,
b_arg,
false,
)
}
let interner = self.cx();
let variances = interner.variances_of(def_id);
relate::relate_args_with_variances(self, variances, a_args, b_args)
}?;
if args == a_args { Ok(a_ty) } else { Ok(mk(args)) }
}
#[instrument(level = "debug", skip(self, variance, b), ret)]

View file

@ -18,17 +18,19 @@
//! [lattices]: https://en.wikipedia.org/wiki/Lattice_(order)
use rustc_type_ir::{
AliasRelationDirection, TypeVisitableExt, Upcast, Variance,
AliasRelationDirection, Interner, TypeVisitableExt, Upcast, Variance,
inherent::{IntoKind, Span as _},
relate::{
Relate, StructurallyRelateAliases, TypeRelation, VarianceDiagInfo,
combine::{PredicateEmittingRelation, super_combine_consts, super_combine_tys},
combine::{
PredicateEmittingRelation, combine_ty_args, super_combine_consts, super_combine_tys,
},
},
};
use crate::next_solver::{
AliasTy, Binder, Const, DbInterner, Goal, ParamEnv, Predicate, PredicateKind, Region, Span, Ty,
TyKind,
AliasTy, Binder, Const, DbInterner, GenericArgs, Goal, ParamEnv, Predicate, PredicateKind,
Region, SolverDefId, Span, Ty, TyKind,
infer::{
InferCtxt, TypeTrace,
relate::RelateResult,
@ -82,6 +84,19 @@ impl<'db> TypeRelation<DbInterner<'db>> for LatticeOp<'_, 'db> {
self.infcx.interner
}
/// Relates the generic args of two nominal types by looking up the declared
/// variances of `def_id` and combining the args accordingly; `mk` rebuilds
/// the resulting type from the combined args.
fn relate_ty_args(
    &mut self,
    a_ty: Ty<'db>,
    b_ty: Ty<'db>,
    def_id: SolverDefId,
    a_args: GenericArgs<'db>,
    b_args: GenericArgs<'db>,
    mk: impl FnOnce(GenericArgs<'db>) -> Ty<'db>,
) -> RelateResult<'db, Ty<'db>> {
    let variances = self.cx().variances_of(def_id);
    combine_ty_args(self.infcx, self, a_ty, b_ty, variances, a_args, b_args, mk)
}
fn relate_with_variance<T: Relate<DbInterner<'db>>>(
&mut self,
variance: Variance,

View file

@ -9,8 +9,11 @@ use std::{
use hir_def::TraitId;
use macros::{TypeFoldable, TypeVisitable};
use rustc_type_ir::Upcast;
use rustc_type_ir::elaborate::Elaboratable;
use rustc_type_ir::{
Upcast,
solve::{Certainty, NoSolution, inspect},
};
use tracing::debug;
use crate::next_solver::{
@ -79,6 +82,15 @@ pub struct Obligation<'db, T> {
pub recursion_depth: usize,
}
/// A callback that can be provided to `inspect_typeck`. Invoked on evaluation
/// of root obligations.
pub type ObligationInspector<'db> = fn(
&InferCtxt<'db>,
&PredicateObligation<'db>,
Result<Certainty, NoSolution>,
Option<inspect::GoalEvaluation<DbInterner<'db>>>,
);
/// For [`Obligation`], a sub-obligation is combined with the current obligation's
/// param-env and cause code.
impl<'db> Elaboratable<DbInterner<'db>> for PredicateObligation<'db> {

View file

@ -74,7 +74,7 @@ impl<'a, 'db> std::fmt::Debug for InspectCandidate<'a, 'db> {
/// treat `NormalizesTo` goals as if they apply the expected
/// type at the end of each candidate.
#[derive(Debug, Copy, Clone)]
struct NormalizesToTermHack<'db> {
pub(crate) struct NormalizesToTermHack<'db> {
term: Term<'db>,
unconstrained_term: Term<'db>,
}
@ -311,10 +311,7 @@ impl<'a, 'db> InspectCandidate<'a, 'db> {
/// Visit all nested goals of this candidate, rolling back
/// all inference constraints.
#[expect(dead_code, reason = "used in rustc")]
pub(crate) fn visit_nested_in_probe<V: ProofTreeVisitor<'db>>(
&self,
visitor: &mut V,
) -> V::Result {
fn visit_nested_in_probe<V: ProofTreeVisitor<'db>>(&self, visitor: &mut V) -> V::Result {
self.goal.infcx.probe(|_| self.visit_nested_no_probe(visitor))
}
}
@ -430,7 +427,7 @@ impl<'a, 'db> InspectGoal<'a, 'db> {
candidates.pop().filter(|_| candidates.is_empty())
}
fn new(
pub(crate) fn new(
infcx: &'a InferCtxt<'db>,
depth: usize,
root: inspect::GoalEvaluation<DbInterner<'db>>,

View file

@ -1,7 +1,9 @@
//! Things related to the Interner in the next-trait-solver.
use std::fmt;
use std::{fmt, ops::ControlFlow};
use intern::{Interned, InternedRef, InternedSliceRef, impl_internable};
use macros::GenericTypeVisitable;
use rustc_ast_ir::{FloatTy, IntTy, UintTy};
pub use tls_cache::clear_tls_solver_cache;
pub use tls_db::{attach_db, attach_db_allow_change, with_attached_db};
@ -19,13 +21,13 @@ use rustc_abi::{ReprFlags, ReprOptions};
use rustc_hash::FxHashSet;
use rustc_index::bit_set::DenseBitSet;
use rustc_type_ir::{
AliasTermKind, AliasTyKind, BoundVar, CollectAndApply, CoroutineWitnessTypes, DebruijnIndex,
EarlyBinder, FlagComputation, Flags, GenericArgKind, ImplPolarity, InferTy, Interner, TraitRef,
TypeFlags, TypeVisitableExt, UniverseIndex, Upcast, Variance,
AliasTermKind, AliasTyKind, BoundVar, CoroutineWitnessTypes, DebruijnIndex, EarlyBinder,
FlagComputation, Flags, GenericArgKind, GenericTypeVisitable, ImplPolarity, InferTy, Interner,
TraitRef, TypeFlags, TypeVisitableExt, UniverseIndex, Upcast, Variance,
elaborate::elaborate,
error::TypeError,
fast_reject,
inherent::{self, GenericsOf, IntoKind, SliceLike as _, Span as _, Ty as _},
inherent::{self, Const as _, GenericsOf, IntoKind, SliceLike as _, Span as _, Ty as _},
lang_items::{SolverAdtLangItem, SolverLangItem, SolverTraitLangItem},
solve::SizedTraitKind,
};
@ -39,7 +41,7 @@ use crate::{
AdtIdWrapper, BoundConst, CallableIdWrapper, CanonicalVarKind, ClosureIdWrapper,
CoroutineIdWrapper, Ctor, FnSig, FxIndexMap, GeneralConstIdWrapper, ImplIdWrapper,
OpaqueTypeKey, RegionAssumptions, SimplifiedType, SolverContext, SolverDefIds,
TraitIdWrapper, TypeAliasIdWrapper, util::explicit_item_bounds,
TraitIdWrapper, TypeAliasIdWrapper, UnevaluatedConst, util::explicit_item_bounds,
},
};
@ -57,155 +59,18 @@ use super::{
util::sizedness_constraint_for_ty,
};
#[derive(PartialEq, Eq, Hash, PartialOrd, Ord, Clone)]
pub struct InternedWrapperNoDebug<T>(pub(crate) T);
macro_rules! interned_slice {
($storage:ident, $name:ident, $stored_name:ident, $default_types_field:ident, $ty_db:ty, $ty_static:ty $(,)?) => {
const _: () = {
#[allow(unused_lifetimes)]
fn _ensure_correct_types<'db: 'static>(v: $ty_db) -> $ty_static { v }
};
#[macro_export]
#[doc(hidden)]
macro_rules! _interned_vec_nolifetime_salsa {
($name:ident, $ty:ty) => {
interned_vec_nolifetime_salsa!($name, $ty, nofold);
::intern::impl_slice_internable!(gc; $storage, (), $ty_static);
impl<'db> rustc_type_ir::TypeFoldable<DbInterner<'db>> for $name<'db> {
fn try_fold_with<F: rustc_type_ir::FallibleTypeFolder<DbInterner<'db>>>(
self,
folder: &mut F,
) -> Result<Self, F::Error> {
use rustc_type_ir::inherent::SliceLike as _;
let inner: smallvec::SmallVec<[_; 2]> =
self.iter().map(|v| v.try_fold_with(folder)).collect::<Result<_, _>>()?;
Ok($name::new_(folder.cx().db(), inner))
}
fn fold_with<F: rustc_type_ir::TypeFolder<DbInterner<'db>>>(
self,
folder: &mut F,
) -> Self {
use rustc_type_ir::inherent::SliceLike as _;
let inner: smallvec::SmallVec<[_; 2]> =
self.iter().map(|v| v.fold_with(folder)).collect();
$name::new_(folder.cx().db(), inner)
}
}
impl<'db> rustc_type_ir::TypeVisitable<DbInterner<'db>> for $name<'db> {
fn visit_with<V: rustc_type_ir::TypeVisitor<DbInterner<'db>>>(
&self,
visitor: &mut V,
) -> V::Result {
use rustc_ast_ir::visit::VisitorResult;
use rustc_type_ir::inherent::SliceLike as _;
rustc_ast_ir::walk_visitable_list!(visitor, self.as_slice().iter());
V::Result::output()
}
}
};
($name:ident, $ty:ty, nofold) => {
#[salsa::interned(constructor = new_)]
pub struct $name {
#[returns(ref)]
inner_: smallvec::SmallVec<[$ty; 2]>,
}
impl<'db> $name<'db> {
pub fn new_from_iter(
interner: DbInterner<'db>,
data: impl IntoIterator<Item = $ty>,
) -> Self {
$name::new_(interner.db(), data.into_iter().collect::<smallvec::SmallVec<[_; 2]>>())
}
pub fn inner(&self) -> &smallvec::SmallVec<[$ty; 2]> {
// SAFETY: ¯\_(ツ)_/¯
$crate::with_attached_db(|db| {
let inner = self.inner_(db);
unsafe { std::mem::transmute(inner) }
})
}
}
impl<'db> std::fmt::Debug for $name<'db> {
fn fmt(&self, fmt: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
self.as_slice().fmt(fmt)
}
}
impl<'db> rustc_type_ir::inherent::SliceLike for $name<'db> {
type Item = $ty;
type IntoIter = <smallvec::SmallVec<[$ty; 2]> as IntoIterator>::IntoIter;
fn iter(self) -> Self::IntoIter {
self.inner().clone().into_iter()
}
fn as_slice(&self) -> &[Self::Item] {
self.inner().as_slice()
}
}
impl<'db> IntoIterator for $name<'db> {
type Item = $ty;
type IntoIter = <Self as rustc_type_ir::inherent::SliceLike>::IntoIter;
fn into_iter(self) -> Self::IntoIter {
rustc_type_ir::inherent::SliceLike::iter(self)
}
}
impl<'db> Default for $name<'db> {
fn default() -> Self {
$name::new_from_iter(DbInterner::conjure(), [])
}
}
};
}
pub use crate::_interned_vec_nolifetime_salsa as interned_vec_nolifetime_salsa;
#[macro_export]
#[doc(hidden)]
macro_rules! _interned_vec_db {
($name:ident, $ty:ident) => {
interned_vec_db!($name, $ty, nofold);
impl<'db> rustc_type_ir::TypeFoldable<DbInterner<'db>> for $name<'db> {
fn try_fold_with<F: rustc_type_ir::FallibleTypeFolder<DbInterner<'db>>>(
self,
folder: &mut F,
) -> Result<Self, F::Error> {
use rustc_type_ir::inherent::SliceLike as _;
let inner: smallvec::SmallVec<[_; 2]> =
self.iter().map(|v| v.try_fold_with(folder)).collect::<Result<_, _>>()?;
Ok($name::new_(folder.cx().db(), inner))
}
fn fold_with<F: rustc_type_ir::TypeFolder<DbInterner<'db>>>(
self,
folder: &mut F,
) -> Self {
use rustc_type_ir::inherent::SliceLike as _;
let inner: smallvec::SmallVec<[_; 2]> =
self.iter().map(|v| v.fold_with(folder)).collect();
$name::new_(folder.cx().db(), inner)
}
}
impl<'db> rustc_type_ir::TypeVisitable<DbInterner<'db>> for $name<'db> {
fn visit_with<V: rustc_type_ir::TypeVisitor<DbInterner<'db>>>(
&self,
visitor: &mut V,
) -> V::Result {
use rustc_ast_ir::visit::VisitorResult;
use rustc_type_ir::inherent::SliceLike as _;
rustc_ast_ir::walk_visitable_list!(visitor, self.as_slice().iter());
V::Result::output()
}
}
};
($name:ident, $ty:ident, nofold) => {
#[salsa::interned(constructor = new_)]
#[derive(Clone, Copy, PartialEq, Eq, Hash)]
pub struct $name<'db> {
#[returns(ref)]
inner_: smallvec::SmallVec<[$ty<'db>; 2]>,
interned: ::intern::InternedSliceRef<'db, $storage>,
}
impl<'db> std::fmt::Debug for $name<'db> {
@ -215,58 +80,235 @@ macro_rules! _interned_vec_db {
}
impl<'db> $name<'db> {
#[inline]
pub fn empty(interner: DbInterner<'db>) -> Self {
$name::new_(interner.db(), smallvec::SmallVec::new())
interner.default_types().empty.$default_types_field
}
pub fn new_from_iter(
interner: DbInterner<'db>,
data: impl IntoIterator<Item = $ty<'db>>,
) -> Self {
$name::new_(interner.db(), data.into_iter().collect::<smallvec::SmallVec<[_; 2]>>())
#[inline]
pub fn new_from_slice(slice: &[$ty_db]) -> Self {
let slice = unsafe { ::std::mem::transmute::<&[$ty_db], &[$ty_static]>(slice) };
Self { interned: ::intern::InternedSlice::from_header_and_slice((), slice) }
}
pub fn inner(&self) -> &smallvec::SmallVec<[$ty<'db>; 2]> {
// SAFETY: ¯\_(ツ)_/¯
$crate::with_attached_db(|db| {
let inner = self.inner_(db);
unsafe { std::mem::transmute(inner) }
#[inline]
pub fn new_from_iter<I, T>(_interner: DbInterner<'db>, args: I) -> T::Output
where
I: IntoIterator<Item = T>,
T: ::rustc_type_ir::CollectAndApply<$ty_db, Self>,
{
::rustc_type_ir::CollectAndApply::collect_and_apply(args.into_iter(), |g| {
Self::new_from_slice(g)
})
}
}
impl<'db> rustc_type_ir::inherent::SliceLike for $name<'db> {
type Item = $ty<'db>;
type IntoIter = <smallvec::SmallVec<[$ty<'db>; 2]> as IntoIterator>::IntoIter;
fn iter(self) -> Self::IntoIter {
self.inner().clone().into_iter()
#[inline]
pub fn as_slice(self) -> &'db [$ty_db] {
let slice = &self.interned.get().slice;
unsafe { ::std::mem::transmute::<&[$ty_static], &[$ty_db]>(slice) }
}
fn as_slice(&self) -> &[Self::Item] {
self.inner().as_slice()
#[inline]
pub fn iter(self) -> ::std::iter::Copied<::std::slice::Iter<'db, $ty_db>> {
self.as_slice().iter().copied()
}
#[inline]
pub fn len(self) -> usize {
self.as_slice().len()
}
#[inline]
pub fn is_empty(self) -> bool {
self.as_slice().is_empty()
}
}
impl<'db> IntoIterator for $name<'db> {
type Item = $ty<'db>;
type IntoIter = <Self as rustc_type_ir::inherent::SliceLike>::IntoIter;
type IntoIter = ::std::iter::Copied<::std::slice::Iter<'db, $ty_db>>;
type Item = $ty_db;
#[inline]
fn into_iter(self) -> Self::IntoIter { self.iter() }
}
fn into_iter(self) -> Self::IntoIter {
rustc_type_ir::inherent::SliceLike::iter(self)
impl<'db> ::std::ops::Deref for $name<'db> {
type Target = [$ty_db];
#[inline]
fn deref(&self) -> &Self::Target {
(*self).as_slice()
}
}
impl<'db> rustc_type_ir::inherent::SliceLike for $name<'db> {
type Item = $ty_db;
type IntoIter = ::std::iter::Copied<::std::slice::Iter<'db, $ty_db>>;
#[inline]
fn iter(self) -> Self::IntoIter {
self.iter()
}
#[inline]
fn as_slice(&self) -> &[Self::Item] {
(*self).as_slice()
}
}
impl<'db> Default for $name<'db> {
#[inline]
fn default() -> Self {
$name::new_from_iter(DbInterner::conjure(), [])
$name::empty(DbInterner::conjure())
}
}
impl<'db, V: $crate::next_solver::interner::WorldExposer>
rustc_type_ir::GenericTypeVisitable<V> for $name<'db>
{
#[inline]
fn generic_visit_with(&self, visitor: &mut V) {
if visitor.on_interned_slice(self.interned).is_continue() {
self.as_slice().iter().for_each(|it| it.generic_visit_with(visitor));
}
}
}
$crate::next_solver::interner::impl_stored_interned_slice!($storage, $name, $stored_name);
};
}
pub(crate) use interned_slice;
/// Generates `$stored_name`: a lifetime-erased, owning wrapper around the
/// interned slice type `$name<'db>`, suitable for storing inside salsa
/// queries (which cannot hold the borrowed `'db` form).
macro_rules! impl_stored_interned_slice {
    ( $storage:ident, $name:ident, $stored_name:ident $(,)? ) => {
        #[derive(Clone, PartialEq, Eq, Hash)]
        pub struct $stored_name {
            // Owned handle; keeps the interned slice alive independently of `'db`.
            interned: ::intern::InternedSlice<$storage>,
        }
        impl $stored_name {
            #[inline]
            fn new(it: $name<'_>) -> Self {
                Self { interned: it.interned.to_owned() }
            }
            /// Re-borrows the stored slice as the `'db`-lifetime form.
            #[inline]
            pub fn as_ref<'a, 'db>(&'a self) -> $name<'db> {
                let it = $name { interned: self.interned.as_ref() };
                // NOTE(review): extends the borrow from `'a` to the caller-chosen
                // `'db`; presumably sound because the interned data is kept alive
                // by the owning handle — confirm against `intern`'s invariants.
                unsafe { std::mem::transmute::<$name<'a>, $name<'db>>(it) }
            }
        }
        // SAFETY: It is safe to store this type in queries (but not `$name`).
        unsafe impl salsa::Update for $stored_name {
            unsafe fn maybe_update(old_pointer: *mut Self, new_value: Self) -> bool {
                // SAFETY: Comparing by (pointer) equality is safe.
                unsafe { crate::utils::unsafe_update_eq(old_pointer, new_value) }
            }
        }
        impl std::fmt::Debug for $stored_name {
            fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
                // Debug via the borrowed form so both types print identically.
                self.as_ref().fmt(f)
            }
        }
        impl $name<'_> {
            /// Converts the borrowed interned slice into its owned, storable form.
            #[inline]
            pub fn store(self) -> $stored_name {
                $stored_name::new(self)
            }
        }
    };
}
pub(crate) use impl_stored_interned_slice;
pub use crate::_interned_vec_db as interned_vec_db;
/// Implements `TypeVisitable` and `TypeFoldable` for an interned slice type
/// by visiting/folding each element and re-interning the folded results.
macro_rules! impl_foldable_for_interned_slice {
    ($name:ident) => {
        impl<'db> ::rustc_type_ir::TypeVisitable<DbInterner<'db>> for $name<'db> {
            fn visit_with<V: rustc_type_ir::TypeVisitor<DbInterner<'db>>>(
                &self,
                visitor: &mut V,
            ) -> V::Result {
                use rustc_ast_ir::visit::VisitorResult;
                // Visit every element; short-circuits per the visitor's Result.
                rustc_ast_ir::walk_visitable_list!(visitor, (*self).iter());
                V::Result::output()
            }
        }
        impl<'db> rustc_type_ir::TypeFoldable<DbInterner<'db>> for $name<'db> {
            fn try_fold_with<F: rustc_type_ir::FallibleTypeFolder<DbInterner<'db>>>(
                self,
                folder: &mut F,
            ) -> Result<Self, F::Error> {
                // `new_from_iter` goes through `CollectAndApply`, so an iterator
                // of `Result`s yields `Result<Self, _>` and short-circuits on Err.
                Self::new_from_iter(folder.cx(), self.iter().map(|it| it.try_fold_with(folder)))
            }
            fn fold_with<F: rustc_type_ir::TypeFolder<DbInterner<'db>>>(
                self,
                folder: &mut F,
            ) -> Self {
                Self::new_from_iter(folder.cx(), self.iter().map(|it| it.fold_with(folder)))
            }
        }
    };
}
pub(crate) use impl_foldable_for_interned_slice;
/// Generates `$stored_name`: a lifetime-erased, owning wrapper around a
/// single interned value `$name<'db>`, so it can be stored in salsa queries.
/// Mirrors `impl_stored_interned_slice` but for `Interned` rather than
/// `InternedSlice`.
macro_rules! impl_stored_interned {
    ( $storage:ident, $name:ident, $stored_name:ident $(,)? ) => {
        #[derive(Clone, PartialEq, Eq, Hash)]
        pub struct $stored_name {
            // Owned handle; keeps the interned value alive independently of `'db`.
            interned: ::intern::Interned<$storage>,
        }
        impl $stored_name {
            #[inline]
            fn new(it: $name<'_>) -> Self {
                Self { interned: it.interned.to_owned() }
            }
            /// Re-borrows the stored value as the `'db`-lifetime form.
            #[inline]
            pub fn as_ref<'a, 'db>(&'a self) -> $name<'db> {
                let it = $name { interned: self.interned.as_ref() };
                // NOTE(review): lifetime-extending transmute, same pattern as the
                // slice variant — relies on the owning handle keeping data alive.
                unsafe { std::mem::transmute::<$name<'a>, $name<'db>>(it) }
            }
        }
        // SAFETY: comparing stored interned handles by (pointer) equality is safe,
        // matching the slice variant above.
        unsafe impl salsa::Update for $stored_name {
            unsafe fn maybe_update(old_pointer: *mut Self, new_value: Self) -> bool {
                unsafe { crate::utils::unsafe_update_eq(old_pointer, new_value) }
            }
        }
        impl std::fmt::Debug for $stored_name {
            fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
                self.as_ref().fmt(f)
            }
        }
        impl $name<'_> {
            /// Converts the borrowed interned value into its owned, storable form.
            #[inline]
            pub fn store(self) -> $stored_name {
                $stored_name::new(self)
            }
        }
    };
}
pub(crate) use impl_stored_interned;
/// This is a visitor trait that treats any interned thing specifically. Visitables are expected to call
/// the trait's methods when encountering an interned. This is used to implement marking in GC.
pub trait WorldExposer {
    /// Called for each interned value encountered during a generic visit.
    /// Returning `ControlFlow::Continue` tells the visitable to descend into
    /// the value's children (see e.g. `Pattern::generic_visit_with`).
    fn on_interned<T: intern::Internable>(
        &mut self,
        interned: InternedRef<'_, T>,
    ) -> ControlFlow<()>;
    /// Slice counterpart of `on_interned`, with the same continue-to-descend
    /// contract.
    fn on_interned_slice<T: intern::SliceInternable>(
        &mut self,
        interned: InternedSliceRef<'_, T>,
    ) -> ControlFlow<()>;
}
#[derive(Debug, Copy, Clone)]
pub struct DbInterner<'db> {
@ -321,6 +363,11 @@ impl<'db> DbInterner<'db> {
where you should've called `DbInterner::new_with()`",
)
}
/// Returns the per-database cache of pre-interned "default" values (empty
/// interned slices etc.), used by the `empty()` constructors of the
/// `interned_slice!`-generated types.
#[inline]
pub fn default_types<'a>(&self) -> &'a crate::next_solver::DefaultAny<'db> {
    crate::next_solver::default_types(self.db)
}
}
// This is intentionally left as `()`
@ -333,7 +380,14 @@ impl<'db> inherent::Span<DbInterner<'db>> for Span {
}
}
interned_vec_nolifetime_salsa!(BoundVarKinds, BoundVarKind, nofold);
interned_slice!(
BoundVarKindsStorage,
BoundVarKinds,
StoredBoundVarKinds,
bound_var_kinds,
BoundVarKind,
BoundVarKind,
);
#[derive(Debug, Copy, Clone, Eq, PartialEq, Hash)]
pub enum BoundVarKind {
@ -365,7 +419,14 @@ impl BoundVarKind {
}
}
interned_vec_db!(CanonicalVars, CanonicalVarKind, nofold);
interned_slice!(
CanonicalVarsStorage,
CanonicalVars,
StoredCanonicalVars,
canonical_vars,
CanonicalVarKind<'db>,
CanonicalVarKind<'static>
);
pub struct DepNodeIndex;
@ -391,7 +452,7 @@ impl<T: std::fmt::Debug> std::fmt::Debug for Placeholder<T> {
#[derive(Debug, Clone, Copy, Eq, PartialEq, Hash)]
pub struct AllocId;
interned_vec_nolifetime_salsa!(VariancesOf, Variance, nofold);
interned_slice!(VariancesOfStorage, VariancesOf, StoredVariancesOf, variances, Variance, Variance);
#[derive(Debug, Clone, Eq, PartialEq, Hash)]
pub struct VariantIdx(usize);
@ -658,7 +719,7 @@ impl<'db> inherent::AdtDef<DbInterner<'db>> for AdtDef {
let id: VariantId = struct_id.into();
let field_types = interner.db().field_types(id);
field_types.iter().last().map(|f| *f.1)
field_types.iter().last().map(|f| f.1.get())
}
fn all_field_tys(
@ -668,7 +729,7 @@ impl<'db> inherent::AdtDef<DbInterner<'db>> for AdtDef {
let db = interner.db();
// FIXME: this is disabled just to match the behavior with chalk right now
let _field_tys = |id: VariantId| {
db.field_types(id).iter().map(|(_, ty)| ty.skip_binder()).collect::<Vec<_>>()
db.field_types(id).iter().map(|(_, ty)| ty.get().skip_binder()).collect::<Vec<_>>()
};
let field_tys = |_id: VariantId| vec![];
let tys: Vec<_> = match self.inner().id {
@ -762,30 +823,36 @@ impl std::ops::Deref for UnsizingParams {
pub type PatternKind<'db> = rustc_type_ir::PatternKind<DbInterner<'db>>;
#[salsa::interned(constructor = new_, debug)]
#[derive(Clone, Copy, PartialEq, Eq, Hash)]
pub struct Pattern<'db> {
#[returns(ref)]
kind_: InternedWrapperNoDebug<PatternKind<'db>>,
interned: InternedRef<'db, PatternInterned>,
}
impl<'db> std::fmt::Debug for InternedWrapperNoDebug<PatternKind<'db>> {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
self.0.fmt(f)
}
}
#[derive(PartialEq, Eq, Hash, GenericTypeVisitable)]
struct PatternInterned(PatternKind<'static>);
impl_internable!(gc; PatternInterned);
const _: () = {
const fn is_copy<T: Copy>() {}
is_copy::<Pattern<'static>>();
};
impl<'db> Pattern<'db> {
pub fn new(interner: DbInterner<'db>, kind: PatternKind<'db>) -> Self {
Pattern::new_(interner.db(), InternedWrapperNoDebug(kind))
pub fn new(_interner: DbInterner<'db>, kind: PatternKind<'db>) -> Self {
let kind = unsafe { std::mem::transmute::<PatternKind<'db>, PatternKind<'static>>(kind) };
Self { interned: Interned::new_gc(PatternInterned(kind)) }
}
pub fn inner(&self) -> &PatternKind<'db> {
crate::with_attached_db(|db| {
let inner = &self.kind_(db).0;
// SAFETY: The caller already has access to a `Ty<'db>`, so borrowchecking will
// make sure that our returned value is valid for the lifetime `'db`.
unsafe { std::mem::transmute(inner) }
})
let inner = &self.interned.0;
unsafe { std::mem::transmute::<&PatternKind<'static>, &PatternKind<'db>>(inner) }
}
}
impl<'db> std::fmt::Debug for Pattern<'db> {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
self.kind().fmt(f)
}
}
@ -831,6 +898,36 @@ impl<'db> rustc_type_ir::inherent::IntoKind for Pattern<'db> {
}
}
impl<'db> rustc_type_ir::TypeVisitable<DbInterner<'db>> for Pattern<'db> {
fn visit_with<V: rustc_type_ir::TypeVisitor<DbInterner<'db>>>(
&self,
visitor: &mut V,
) -> V::Result {
self.kind().visit_with(visitor)
}
}
impl<'db, V: WorldExposer> rustc_type_ir::GenericTypeVisitable<V> for Pattern<'db> {
fn generic_visit_with(&self, visitor: &mut V) {
if visitor.on_interned(self.interned).is_continue() {
self.kind().generic_visit_with(visitor);
}
}
}
impl<'db> rustc_type_ir::TypeFoldable<DbInterner<'db>> for Pattern<'db> {
fn try_fold_with<F: rustc_type_ir::FallibleTypeFolder<DbInterner<'db>>>(
self,
folder: &mut F,
) -> Result<Self, F::Error> {
Ok(Pattern::new(folder.cx(), self.kind().try_fold_with(folder)?))
}
fn fold_with<F: rustc_type_ir::TypeFolder<DbInterner<'db>>>(self, folder: &mut F) -> Self {
Pattern::new(folder.cx(), self.kind().fold_with(folder))
}
}
impl<'db> rustc_type_ir::relate::Relate<DbInterner<'db>> for Pattern<'db> {
fn relate<R: rustc_type_ir::relate::TypeRelation<DbInterner<'db>>>(
relation: &mut R,
@ -851,9 +948,9 @@ impl<'db> rustc_type_ir::relate::Relate<DbInterner<'db>> for Pattern<'db> {
if a.len() != b.len() {
return Err(TypeError::Mismatch);
}
let pats = CollectAndApply::collect_and_apply(
let pats = PatList::new_from_iter(
relation.cx(),
std::iter::zip(a.iter(), b.iter()).map(|(a, b)| relation.relate(a, b)),
|g| PatList::new_from_iter(tcx, g.iter().cloned()),
)?;
Ok(Pattern::new(tcx, PatternKind::Or(pats)))
}
@ -865,7 +962,8 @@ impl<'db> rustc_type_ir::relate::Relate<DbInterner<'db>> for Pattern<'db> {
}
}
interned_vec_db!(PatList, Pattern);
interned_slice!(PatListStorage, PatList, StoredPatList, pat_list, Pattern<'db>, Pattern<'static>);
impl_foldable_for_interned_slice!(PatList);
macro_rules! as_lang_item {
(
@ -927,7 +1025,7 @@ impl<'db> Interner for DbInterner<'db> {
type Span = Span;
type GenericArgs = GenericArgs<'db>;
type GenericArgsSlice = GenericArgs<'db>;
type GenericArgsSlice = &'db [GenericArg<'db>];
type GenericArg = GenericArg<'db>;
type Term = Term<'db>;
@ -941,7 +1039,7 @@ impl<'db> Interner for DbInterner<'db> {
self,
data: &[(OpaqueTypeKey<'db>, Self::Ty)],
) -> Self::PredefinedOpaques {
PredefinedOpaques::new_from_iter(self, data.iter().cloned())
PredefinedOpaques::new_from_slice(data)
}
type CanonicalVarKinds = CanonicalVars<'db>;
@ -950,7 +1048,7 @@ impl<'db> Interner for DbInterner<'db> {
self,
kinds: &[rustc_type_ir::CanonicalVarKind<Self>],
) -> Self::CanonicalVarKinds {
CanonicalVars::new_from_iter(self, kinds.iter().cloned())
CanonicalVars::new_from_slice(kinds)
}
type ExternalConstraints = ExternalConstraints<'db>;
@ -968,7 +1066,7 @@ impl<'db> Interner for DbInterner<'db> {
type Ty = Ty<'db>;
type Tys = Tys<'db>;
type FnInputTys = Tys<'db>;
type FnInputTys = &'db [Ty<'db>];
type ParamTy = ParamTy;
type BoundTy = BoundTy;
type PlaceholderTy = PlaceholderTy;
@ -1012,7 +1110,7 @@ impl<'db> Interner for DbInterner<'db> {
type Features = Features;
fn mk_args(self, args: &[Self::GenericArg]) -> Self::GenericArgs {
GenericArgs::new_from_iter(self, args.iter().cloned())
GenericArgs::new_from_slice(args)
}
fn mk_args_from_iter<I, T>(self, args: I) -> T::Output
@ -1020,9 +1118,7 @@ impl<'db> Interner for DbInterner<'db> {
I: Iterator<Item = T>,
T: rustc_type_ir::CollectAndApply<Self::GenericArg, Self::GenericArgs>,
{
CollectAndApply::collect_and_apply(args, |g| {
GenericArgs::new_from_iter(self, g.iter().cloned())
})
GenericArgs::new_from_iter(self, args)
}
type UnsizingParams = UnsizingParams;
@ -1096,7 +1192,7 @@ impl<'db> Interner for DbInterner<'db> {
| SolverDefId::ImplId(_)
| SolverDefId::InternedClosureId(_)
| SolverDefId::InternedCoroutineId(_) => {
return VariancesOf::new_from_iter(self, []);
return VariancesOf::empty(self);
}
};
self.db.variances_of(generic_def)
@ -1174,12 +1270,9 @@ impl<'db> Interner for DbInterner<'db> {
) -> (rustc_type_ir::TraitRef<Self>, Self::GenericArgsSlice) {
let trait_def_id = self.parent(def_id);
let trait_generics = self.generics_of(trait_def_id);
let trait_args = GenericArgs::new_from_iter(
self,
args.as_slice()[0..trait_generics.own_params.len()].iter().cloned(),
);
let alias_args =
GenericArgs::new_from_iter(self, args.iter().skip(trait_generics.own_params.len()));
let trait_args =
GenericArgs::new_from_slice(&args.as_slice()[0..trait_generics.own_params.len()]);
let alias_args = &args.as_slice()[trait_generics.own_params.len()..];
(TraitRef::new_from_args(self, trait_def_id.try_into().unwrap(), trait_args), alias_args)
}
@ -1202,7 +1295,7 @@ impl<'db> Interner for DbInterner<'db> {
I: Iterator<Item = T>,
T: rustc_type_ir::CollectAndApply<Self::Ty, Self::Tys>,
{
CollectAndApply::collect_and_apply(args, |g| Tys::new_from_iter(self, g.iter().cloned()))
Tys::new_from_iter(self, args)
}
fn parent(self, def_id: Self::DefId) -> Self::DefId {
@ -1338,7 +1431,7 @@ impl<'db> Interner for DbInterner<'db> {
let own_bounds: FxHashSet<_> =
self.item_self_bounds(def_id).skip_binder().into_iter().collect();
if all_bounds.len() == own_bounds.len() {
EarlyBinder::bind(Clauses::new_from_iter(self, []))
EarlyBinder::bind(Clauses::empty(self))
} else {
EarlyBinder::bind(Clauses::new_from_iter(
self,
@ -1512,6 +1605,7 @@ impl<'db> Interner for DbInterner<'db> {
SolverTraitLangItem::BikeshedGuaranteedNoDrop => {
unimplemented!()
}
SolverTraitLangItem::TrivialClone => lang_items.TrivialClone,
};
lang_item.expect("Lang item required but not found.").into()
}
@ -1565,6 +1659,7 @@ impl<'db> Interner for DbInterner<'db> {
AsyncFn,
AsyncFnMut,
AsyncFnOnce,
TrivialClone,
)
}
@ -1651,6 +1746,7 @@ impl<'db> Interner for DbInterner<'db> {
AsyncFn,
AsyncFnMut,
AsyncFnOnce,
TrivialClone,
)
}
@ -1949,7 +2045,7 @@ impl<'db> Interner for DbInterner<'db> {
let field_types = self.db().field_types(variant.id());
let mut unsizing_params = DenseBitSet::new_empty(num_params);
let ty = field_types[tail_field.0];
let ty = field_types[tail_field.0].get();
for arg in ty.instantiate_identity().walk() {
if let Some(i) = maybe_unsizing_param_idx(arg) {
unsizing_params.insert(i);
@ -1959,7 +2055,7 @@ impl<'db> Interner for DbInterner<'db> {
// Ensure none of the other fields mention the parameters used
// in unsizing.
for field in prefix_fields {
for arg in field_types[field.0].instantiate_identity().walk() {
for arg in field_types[field.0].get().instantiate_identity().walk() {
if let Some(i) = maybe_unsizing_param_idx(arg) {
unsizing_params.remove(i);
}
@ -2007,9 +2103,7 @@ impl<'db> Interner for DbInterner<'db> {
let mut map = Default::default();
let delegate = Anonymize { interner: self, map: &mut map };
let inner = self.replace_escaping_bound_vars_uncached(value.skip_binder(), delegate);
let bound_vars = CollectAndApply::collect_and_apply(map.into_values(), |xs| {
BoundVarKinds::new_from_iter(self, xs.iter().cloned())
});
let bound_vars = BoundVarKinds::new_from_iter(self, map.into_values());
Binder::bind_with_vars(inner, bound_vars)
}
@ -2019,7 +2113,7 @@ impl<'db> Interner for DbInterner<'db> {
};
let mut result = Vec::new();
crate::opaques::opaque_types_defined_by(self.db, def_id, &mut result);
SolverDefIds::new_from_iter(self, result)
SolverDefIds::new_from_slice(&result)
}
fn opaque_types_and_coroutines_defined_by(self, def_id: Self::LocalDefId) -> Self::LocalDefIds {
@ -2048,7 +2142,7 @@ impl<'db> Interner for DbInterner<'db> {
}
});
SolverDefIds::new_from_iter(self, result)
SolverDefIds::new_from_slice(&result)
}
fn alias_has_const_conditions(self, _def_id: Self::DefId) -> bool {
@ -2093,10 +2187,10 @@ impl<'db> Interner for DbInterner<'db> {
let impl_trait_id = self.db().lookup_intern_impl_trait_id(opaque);
match impl_trait_id {
crate::ImplTraitId::ReturnTypeImplTrait(func, idx) => {
crate::opaques::rpit_hidden_types(self.db, func)[idx]
crate::opaques::rpit_hidden_types(self.db, func)[idx].get()
}
crate::ImplTraitId::TypeAliasImplTrait(type_alias, idx) => {
crate::opaques::tait_hidden_types(self.db, type_alias)[idx]
crate::opaques::tait_hidden_types(self.db, type_alias)[idx].get()
}
}
}
@ -2167,6 +2261,18 @@ impl<'db> Interner for DbInterner<'db> {
Some(SolverTraitLangItem::Sized | SolverTraitLangItem::MetaSized)
)
}
/// Returns the constant associated with a `static` or `const` item as an
/// unevaluated constant (evaluation is deferred to the const machinery).
fn const_of_item(self, def_id: Self::DefId) -> rustc_type_ir::EarlyBinder<Self, Self::Const> {
    let id = match def_id {
        SolverDefId::StaticId(id) => id.into(),
        SolverDefId::ConstId(id) => id.into(),
        // Only statics and consts carry an item constant.
        _ => unreachable!(),
    };
    // Statics/consts take no generic arguments of their own, hence empty args.
    EarlyBinder::bind(Const::new_unevaluated(
        self,
        UnevaluatedConst { def: GeneralConstIdWrapper(id), args: GenericArgs::empty(self) },
    ))
}
}
impl<'db> DbInterner<'db> {
@ -2273,6 +2379,11 @@ macro_rules! TrivialTypeTraversalImpls {
<F::Result as rustc_ast_ir::visit::VisitorResult>::output()
}
}
impl<V> rustc_type_ir::GenericTypeVisitable<V> for $ty {
#[inline]
fn generic_visit_with(&self, _visitor: &mut V) {}
}
)+
};
}
@ -2287,17 +2398,22 @@ TrivialTypeTraversalImpls! {
AdtIdWrapper,
ImplIdWrapper,
GeneralConstIdWrapper,
Pattern<'db>,
Safety,
FnAbi,
Span,
ParamConst,
ParamTy,
BoundRegion,
BoundVar,
Placeholder<BoundRegion>,
Placeholder<BoundTy>,
Placeholder<BoundVar>,
Placeholder<BoundConst>,
BoundVarKind,
EarlyParamRegion,
LateParamRegion,
AdtDef,
BoundTy,
BoundConst,
}
mod tls_db {
@ -2464,3 +2580,110 @@ mod tls_cache {
GLOBAL_CACHE.with_borrow_mut(|handle| *handle = None);
}
}
/// The garbage collector acts as a `WorldExposer`: visiting a value marks
/// every interned object reachable from it as alive. The returned
/// `ControlFlow` decides whether the visitable should descend into children
/// (already-marked objects need not be re-traversed).
impl WorldExposer for intern::GarbageCollector {
    fn on_interned<T: intern::Internable>(
        &mut self,
        interned: InternedRef<'_, T>,
    ) -> ControlFlow<()> {
        self.mark_interned_alive(interned)
    }
    fn on_interned_slice<T: intern::SliceInternable>(
        &mut self,
        interned: InternedSliceRef<'_, T>,
    ) -> ControlFlow<()> {
        self.mark_interned_slice_alive(interned)
    }
}
/// Runs a mark-and-sweep collection over all interned type-system storages.
///
/// # Safety
///
/// This cannot be called if there are some not-yet-recorded type values. Generally, if you have a mutable
/// reference to the database, and there are no other database - then you can call this safely, but you
/// also need to make sure to maintain the mutable reference while this is running.
pub unsafe fn collect_ty_garbage() {
    let mut gc = intern::GarbageCollector::default();
    // Register every single-value interned storage...
    gc.add_storage::<super::consts::ConstInterned>();
    gc.add_storage::<super::consts::ValtreeInterned>();
    gc.add_storage::<PatternInterned>();
    gc.add_storage::<super::opaques::ExternalConstraintsInterned>();
    gc.add_storage::<super::predicate::PredicateInterned>();
    gc.add_storage::<super::region::RegionInterned>();
    gc.add_storage::<super::ty::TyInterned>();
    // ...and every interned-slice storage.
    gc.add_slice_storage::<super::predicate::ClausesStorage>();
    gc.add_slice_storage::<super::generic_arg::GenericArgsStorage>();
    gc.add_slice_storage::<BoundVarKindsStorage>();
    gc.add_slice_storage::<VariancesOfStorage>();
    gc.add_slice_storage::<CanonicalVarsStorage>();
    gc.add_slice_storage::<PatListStorage>();
    gc.add_slice_storage::<super::opaques::PredefinedOpaquesStorage>();
    gc.add_slice_storage::<super::opaques::SolverDefIdsStorage>();
    gc.add_slice_storage::<super::predicate::BoundExistentialPredicatesStorage>();
    gc.add_slice_storage::<super::region::RegionAssumptionsStorage>();
    gc.add_slice_storage::<super::ty::TysStorage>();
    // SAFETY:
    // - By our precondition, there are no unrecorded types.
    // - We implement `GcInternedVisit` and `GcInternedSliceVisit` correctly for all types.
    // - We added all storages (FIXME: it's too easy to forget to add a new storage here).
    unsafe { gc.collect() };
}
/// Implements `GcInternedVisit` for each listed type by forwarding to its
/// `GenericTypeVisitable` implementation with the collector as the visitor.
macro_rules! impl_gc_visit {
    ( $($ty:ty),* $(,)? ) => {
        $(
            impl ::intern::GcInternedVisit for $ty {
                #[inline]
                fn visit_with(&self, gc: &mut ::intern::GarbageCollector) {
                    self.generic_visit_with(gc);
                }
            }
        )*
    };
}
// Every single-value interned payload type must appear here so the GC can
// trace through it (keep in sync with the `add_storage` calls above).
impl_gc_visit!(
    super::consts::ConstInterned,
    super::consts::ValtreeInterned,
    PatternInterned,
    super::opaques::ExternalConstraintsInterned,
    super::predicate::PredicateInterned,
    super::region::RegionInterned,
    super::ty::TyInterned,
    super::predicate::ClausesCachedTypeInfo,
);
/// Implements `GcInternedSliceVisit` for each listed slice storage: both the
/// slice header and the elements are traced via `GenericTypeVisitable`.
macro_rules! impl_gc_visit_slice {
    ( $($ty:ty),* $(,)? ) => {
        $(
            impl ::intern::GcInternedSliceVisit for $ty {
                #[inline]
                fn visit_header(header: &<Self as ::intern::SliceInternable>::Header, gc: &mut ::intern::GarbageCollector) {
                    header.generic_visit_with(gc);
                }
                // NOTE(review): the parameter is named `header` but actually
                // holds the slice elements — consider renaming to `slice`.
                #[inline]
                fn visit_slice(header: &[<Self as ::intern::SliceInternable>::SliceType], gc: &mut ::intern::GarbageCollector) {
                    header.generic_visit_with(gc);
                }
            }
        )*
    };
}
// Every interned-slice storage must appear here so the GC can trace through
// it (keep in sync with the `add_slice_storage` calls above).
impl_gc_visit_slice!(
    super::predicate::ClausesStorage,
    super::generic_arg::GenericArgsStorage,
    BoundVarKindsStorage,
    VariancesOfStorage,
    CanonicalVarsStorage,
    PatListStorage,
    super::opaques::PredefinedOpaquesStorage,
    super::opaques::SolverDefIdsStorage,
    super::predicate::BoundExistentialPredicatesStorage,
    super::region::RegionAssumptionsStorage,
    super::ty::TysStorage,
);

View file

@ -2,7 +2,6 @@
use std::any::type_name_of_val;
use rustc_type_ir::inherent::SliceLike;
use rustc_type_ir::{self as ty, ir_print::IrPrint};
use super::SolverDefId;

View file

@ -1,37 +1,74 @@
//! Things related to opaques in the next-trait-solver.
use intern::{Interned, InternedRef, impl_internable};
use macros::GenericTypeVisitable;
use rustc_ast_ir::try_visit;
use rustc_type_ir::inherent::SliceLike;
use super::{DbInterner, SolverDefId, Ty, interned_vec_db, interned_vec_nolifetime_salsa};
use crate::next_solver::{impl_foldable_for_interned_slice, interned_slice};
use super::{DbInterner, SolverDefId, Ty};
pub type OpaqueTypeKey<'db> = rustc_type_ir::OpaqueTypeKey<DbInterner<'db>>;
type PredefinedOpaque<'db> = (OpaqueTypeKey<'db>, Ty<'db>);
interned_vec_db!(PredefinedOpaques, PredefinedOpaque);
interned_slice!(
PredefinedOpaquesStorage,
PredefinedOpaques,
StoredPredefinedOpaques,
predefined_opaques,
PredefinedOpaque<'db>,
PredefinedOpaque<'static>,
);
impl_foldable_for_interned_slice!(PredefinedOpaques);
pub type ExternalConstraintsData<'db> =
rustc_type_ir::solve::ExternalConstraintsData<DbInterner<'db>>;
interned_vec_nolifetime_salsa!(SolverDefIds, SolverDefId);
interned_slice!(
SolverDefIdsStorage,
SolverDefIds,
StoredSolverDefIds,
def_ids,
SolverDefId,
SolverDefId,
);
impl_foldable_for_interned_slice!(SolverDefIds);
#[salsa::interned(constructor = new_, debug)]
#[derive(Clone, Copy, PartialEq, Eq, Hash)]
pub struct ExternalConstraints<'db> {
#[returns(ref)]
kind_: rustc_type_ir::solve::ExternalConstraintsData<DbInterner<'db>>,
interned: InternedRef<'db, ExternalConstraintsInterned>,
}
#[derive(PartialEq, Eq, Hash, GenericTypeVisitable)]
pub(super) struct ExternalConstraintsInterned(ExternalConstraintsData<'static>);
impl_internable!(gc; ExternalConstraintsInterned);
// Compile-time proof that `ExternalConstraints` remains `Copy`.
const _: () = {
    const fn assert_copy<T: Copy>() {}
    assert_copy::<ExternalConstraints<'static>>();
};
impl<'db> ExternalConstraints<'db> {
pub fn new(interner: DbInterner<'db>, data: ExternalConstraintsData<'db>) -> Self {
ExternalConstraints::new_(interner.db(), data)
#[inline]
pub fn new(_interner: DbInterner<'db>, data: ExternalConstraintsData<'db>) -> Self {
let data = unsafe {
std::mem::transmute::<ExternalConstraintsData<'db>, ExternalConstraintsData<'static>>(
data,
)
};
Self { interned: Interned::new_gc(ExternalConstraintsInterned(data)) }
}
#[inline]
pub fn inner(&self) -> &ExternalConstraintsData<'db> {
crate::with_attached_db(|db| {
let inner = self.kind_(db);
// SAFETY: ¯\_(ツ)_/¯
unsafe { std::mem::transmute(inner) }
})
let inner = &self.interned.0;
unsafe {
std::mem::transmute::<&ExternalConstraintsData<'static>, &ExternalConstraintsData<'db>>(
inner,
)
}
}
}
@ -43,6 +80,12 @@ impl<'db> std::ops::Deref for ExternalConstraints<'db> {
}
}
impl std::fmt::Debug for ExternalConstraints<'_> {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
self.inner().fmt(f)
}
}
impl<'db> rustc_type_ir::TypeVisitable<DbInterner<'db>> for ExternalConstraints<'db> {
fn visit_with<V: rustc_type_ir::TypeVisitor<DbInterner<'db>>>(
&self,

View file

@ -2,20 +2,25 @@
use std::cmp::Ordering;
use macros::{TypeFoldable, TypeVisitable};
use intern::{
Interned, InternedRef, InternedSlice, InternedSliceRef, impl_internable, impl_slice_internable,
};
use macros::{GenericTypeVisitable, TypeFoldable, TypeVisitable};
use rustc_type_ir::{
self as ty, CollectAndApply, DebruijnIndex, EarlyBinder, FlagComputation, Flags,
PredicatePolarity, TypeFlags, TypeFoldable, TypeSuperFoldable, TypeSuperVisitable,
TypeVisitable, Upcast, UpcastFrom, WithCachedTypeInfo,
self as ty, CollectAndApply, EarlyBinder, FlagComputation, Flags, GenericTypeVisitable,
PredicatePolarity, TypeFoldable, TypeSuperFoldable, TypeSuperVisitable, TypeVisitable, Upcast,
UpcastFrom, WithCachedTypeInfo,
elaborate::Elaboratable,
error::{ExpectedFound, TypeError},
inherent::{IntoKind, SliceLike},
};
use smallvec::SmallVec;
use crate::next_solver::{GenericArg, InternedWrapperNoDebug, TraitIdWrapper};
use crate::next_solver::{
GenericArg, TraitIdWrapper, impl_foldable_for_interned_slice, impl_stored_interned_slice,
interned_slice,
};
use super::{Binder, BoundVarKinds, DbInterner, Region, Ty, interned_vec_db};
use super::{Binder, BoundVarKinds, DbInterner, Region, Ty};
pub type BoundExistentialPredicate<'db> = Binder<'db, ExistentialPredicate<'db>>;
@ -68,7 +73,15 @@ fn stable_cmp_existential_predicate<'db>(
(ExistentialPredicate::AutoTrait(_), _) => Ordering::Greater,
}
}
interned_vec_db!(BoundExistentialPredicates, BoundExistentialPredicate);
interned_slice!(
BoundExistentialPredicatesStorage,
BoundExistentialPredicates,
StoredBoundExistentialPredicates,
bound_existential_predicates,
BoundExistentialPredicate<'db>,
BoundExistentialPredicate<'static>,
);
impl_foldable_for_interned_slice!(BoundExistentialPredicates);
impl<'db> rustc_type_ir::inherent::BoundExistentialPredicates<DbInterner<'db>>
for BoundExistentialPredicates<'db>
@ -82,7 +95,7 @@ impl<'db> rustc_type_ir::inherent::BoundExistentialPredicates<DbInterner<'db>>
) -> Option<
rustc_type_ir::Binder<DbInterner<'db>, rustc_type_ir::ExistentialTraitRef<DbInterner<'db>>>,
> {
self.inner()[0]
self[0]
.map_bound(|this| match this {
ExistentialPredicate::Trait(tr) => Some(tr),
_ => None,
@ -166,74 +179,50 @@ impl<'db> rustc_type_ir::relate::Relate<DbInterner<'db>> for BoundExistentialPre
},
);
CollectAndApply::collect_and_apply(v, |g| {
BoundExistentialPredicates::new_from_iter(interner, g.iter().cloned())
})
BoundExistentialPredicates::new_from_iter(interner, v)
}
}
#[salsa::interned(constructor = new_)]
#[derive(Clone, Copy, PartialEq, Eq, Hash)]
pub struct Predicate<'db> {
#[returns(ref)]
kind_: InternedWrapperNoDebug<WithCachedTypeInfo<Binder<'db, PredicateKind<'db>>>>,
interned: InternedRef<'db, PredicateInterned>,
}
impl<'db> std::fmt::Debug for Predicate<'db> {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
self.inner().internee.fmt(f)
}
}
#[derive(PartialEq, Eq, Hash, GenericTypeVisitable)]
pub(super) struct PredicateInterned(WithCachedTypeInfo<Binder<'static, PredicateKind<'static>>>);
impl<'db> std::fmt::Debug
for InternedWrapperNoDebug<WithCachedTypeInfo<Binder<'db, PredicateKind<'db>>>>
{
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
write!(f, "Binder<")?;
match self.0.internee.skip_binder() {
rustc_type_ir::PredicateKind::Clause(clause_kind) => {
write!(f, "{clause_kind:?}")
}
rustc_type_ir::PredicateKind::DynCompatible(trait_def_id) => {
write!(f, "the trait `{trait_def_id:?}` is dyn-compatible")
}
rustc_type_ir::PredicateKind::Subtype(subtype_predicate) => {
write!(f, "{subtype_predicate:?}")
}
rustc_type_ir::PredicateKind::Coerce(coerce_predicate) => {
write!(f, "{coerce_predicate:?}")
}
rustc_type_ir::PredicateKind::ConstEquate(c1, c2) => {
write!(f, "the constant `{c1:?}` equals `{c2:?}`")
}
rustc_type_ir::PredicateKind::Ambiguous => write!(f, "ambiguous"),
rustc_type_ir::PredicateKind::NormalizesTo(data) => write!(f, "{data:?}"),
rustc_type_ir::PredicateKind::AliasRelate(t1, t2, dir) => {
write!(f, "{t1:?} {dir:?} {t2:?}")
}
}?;
write!(f, ", [{:?}]>", self.0.internee.bound_vars())?;
Ok(())
}
}
impl_internable!(gc; PredicateInterned);
// Compile-time proof that `Predicate` remains `Copy`.
const _: () = {
    const fn assert_copy<T: Copy>() {}
    assert_copy::<Predicate<'static>>();
};
impl<'db> Predicate<'db> {
pub fn new(interner: DbInterner<'db>, kind: Binder<'db, PredicateKind<'db>>) -> Self {
pub fn new(_interner: DbInterner<'db>, kind: Binder<'db, PredicateKind<'db>>) -> Self {
let kind = unsafe {
std::mem::transmute::<
Binder<'db, PredicateKind<'db>>,
Binder<'static, PredicateKind<'static>>,
>(kind)
};
let flags = FlagComputation::for_predicate(kind);
let cached = WithCachedTypeInfo {
internee: kind,
flags: flags.flags,
outer_exclusive_binder: flags.outer_exclusive_binder,
};
Predicate::new_(interner.db(), InternedWrapperNoDebug(cached))
Self { interned: Interned::new_gc(PredicateInterned(cached)) }
}
pub fn inner(&self) -> &WithCachedTypeInfo<Binder<'db, PredicateKind<'db>>> {
crate::with_attached_db(|db| {
let inner = &self.kind_(db).0;
// SAFETY: The caller already has access to a `Predicate<'db>`, so borrowchecking will
// make sure that our returned value is valid for the lifetime `'db`.
unsafe { std::mem::transmute(inner) }
})
let inner = &self.interned.0;
unsafe {
std::mem::transmute::<
&WithCachedTypeInfo<Binder<'static, PredicateKind<'static>>>,
&WithCachedTypeInfo<Binder<'db, PredicateKind<'db>>>,
>(inner)
}
}
/// Flips the polarity of a Predicate.
@ -259,110 +248,135 @@ impl<'db> Predicate<'db> {
}
}
// FIXME: should make a "header" in interned_vec
#[derive(Debug, Clone)]
pub struct InternedClausesWrapper<'db>(SmallVec<[Clause<'db>; 2]>, TypeFlags, DebruijnIndex);
impl<'db> PartialEq for InternedClausesWrapper<'db> {
fn eq(&self, other: &Self) -> bool {
self.0.eq(&other.0)
impl<'db> std::fmt::Debug for Predicate<'db> {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        // Render as the underlying bound predicate kind.
        std::fmt::Debug::fmt(&self.kind(), f)
    }
}
impl<'db> Eq for InternedClausesWrapper<'db> {}
#[derive(Clone, Copy, PartialEq, Eq, Hash, GenericTypeVisitable)]
pub struct ClausesCachedTypeInfo(WithCachedTypeInfo<()>);
impl<'db> std::hash::Hash for InternedClausesWrapper<'db> {
fn hash<H: std::hash::Hasher>(&self, state: &mut H) {
self.0.hash(state)
}
}
impl_slice_internable!(gc; ClausesStorage, ClausesCachedTypeInfo, Clause<'static>);
impl_stored_interned_slice!(ClausesStorage, Clauses, StoredClauses);
#[salsa::interned(constructor = new_)]
#[derive(Clone, Copy, PartialEq, Eq, Hash)]
pub struct Clauses<'db> {
#[returns(ref)]
inner_: InternedClausesWrapper<'db>,
}
impl<'db> Clauses<'db> {
pub fn new_from_iter(
interner: DbInterner<'db>,
data: impl IntoIterator<Item = Clause<'db>>,
) -> Self {
let clauses: SmallVec<_> = data.into_iter().collect();
let flags = FlagComputation::<DbInterner<'db>>::for_clauses(&clauses);
let wrapper = InternedClausesWrapper(clauses, flags.flags, flags.outer_exclusive_binder);
Clauses::new_(interner.db(), wrapper)
}
pub fn inner(&self) -> &InternedClausesWrapper<'db> {
crate::with_attached_db(|db| {
let inner = self.inner_(db);
// SAFETY: The caller already has access to a `Clauses<'db>`, so borrowchecking will
// make sure that our returned value is valid for the lifetime `'db`.
unsafe { std::mem::transmute(inner) }
})
}
interned: InternedSliceRef<'db, ClausesStorage>,
}
impl<'db> std::fmt::Debug for Clauses<'db> {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
self.inner().0.fmt(f)
fn fmt(&self, fmt: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
self.as_slice().fmt(fmt)
}
}
impl<'db> rustc_type_ir::inherent::Clauses<DbInterner<'db>> for Clauses<'db> {}
impl<'db> rustc_type_ir::inherent::SliceLike for Clauses<'db> {
type Item = Clause<'db>;
type IntoIter = <smallvec::SmallVec<[Clause<'db>; 2]> as IntoIterator>::IntoIter;
fn iter(self) -> Self::IntoIter {
self.inner().0.clone().into_iter()
impl<'db> Clauses<'db> {
#[inline]
pub fn empty(_interner: DbInterner<'db>) -> Self {
// FIXME: Get from a static.
Self::new_from_slice(&[])
}
fn as_slice(&self) -> &[Self::Item] {
self.inner().0.as_slice()
#[inline]
pub fn new_from_slice(slice: &[Clause<'db>]) -> Self {
let slice = unsafe { ::std::mem::transmute::<&[Clause<'db>], &[Clause<'static>]>(slice) };
let flags = FlagComputation::<DbInterner<'db>>::for_clauses(slice);
let flags = ClausesCachedTypeInfo(WithCachedTypeInfo {
internee: (),
flags: flags.flags,
outer_exclusive_binder: flags.outer_exclusive_binder,
});
Self { interned: InternedSlice::from_header_and_slice(flags, slice) }
}
#[inline]
pub fn new_from_iter<I, T>(_interner: DbInterner<'db>, args: I) -> T::Output
where
I: IntoIterator<Item = T>,
T: CollectAndApply<Clause<'db>, Self>,
{
CollectAndApply::collect_and_apply(args.into_iter(), Self::new_from_slice)
}
#[inline]
pub fn as_slice(self) -> &'db [Clause<'db>] {
let slice = &self.interned.get().slice;
unsafe { ::std::mem::transmute::<&[Clause<'static>], &[Clause<'db>]>(slice) }
}
#[inline]
pub fn iter(self) -> ::std::iter::Copied<::std::slice::Iter<'db, Clause<'db>>> {
self.as_slice().iter().copied()
}
#[inline]
pub fn len(self) -> usize {
self.as_slice().len()
}
#[inline]
pub fn is_empty(self) -> bool {
self.as_slice().is_empty()
}
}
impl<'db> IntoIterator for Clauses<'db> {
type IntoIter = ::std::iter::Copied<::std::slice::Iter<'db, Clause<'db>>>;
type Item = Clause<'db>;
type IntoIter = <Self as rustc_type_ir::inherent::SliceLike>::IntoIter;
#[inline]
fn into_iter(self) -> Self::IntoIter {
rustc_type_ir::inherent::SliceLike::iter(self)
self.iter()
}
}
impl<'db> std::ops::Deref for Clauses<'db> {
type Target = [Clause<'db>];
#[inline]
fn deref(&self) -> &Self::Target {
(*self).as_slice()
}
}
impl<'db> rustc_type_ir::inherent::SliceLike for Clauses<'db> {
type Item = Clause<'db>;
type IntoIter = ::std::iter::Copied<::std::slice::Iter<'db, Clause<'db>>>;
#[inline]
fn iter(self) -> Self::IntoIter {
self.iter()
}
#[inline]
fn as_slice(&self) -> &[Self::Item] {
(*self).as_slice()
}
}
impl<'db> Default for Clauses<'db> {
#[inline]
fn default() -> Self {
Clauses::new_from_iter(DbInterner::conjure(), [])
Clauses::empty(DbInterner::conjure())
}
}
impl<'db> rustc_type_ir::inherent::Clauses<DbInterner<'db>> for Clauses<'db> {}
impl<'db> rustc_type_ir::TypeSuperFoldable<DbInterner<'db>> for Clauses<'db> {
fn try_super_fold_with<F: rustc_type_ir::FallibleTypeFolder<DbInterner<'db>>>(
self,
folder: &mut F,
) -> Result<Self, F::Error> {
let mut clauses: SmallVec<[_; 2]> = SmallVec::with_capacity(self.inner().0.len());
for c in self {
clauses.push(c.try_fold_with(folder)?);
}
Ok(Clauses::new_from_iter(folder.cx(), clauses))
Clauses::new_from_iter(folder.cx(), self.iter().map(|clause| clause.try_fold_with(folder)))
}
fn super_fold_with<F: rustc_type_ir::TypeFolder<DbInterner<'db>>>(
self,
folder: &mut F,
) -> Self {
let mut clauses: SmallVec<[_; 2]> = SmallVec::with_capacity(self.inner().0.len());
for c in self {
clauses.push(c.fold_with(folder));
}
Clauses::new_from_iter(folder.cx(), clauses)
Clauses::new_from_iter(folder.cx(), self.iter().map(|clause| clause.fold_with(folder)))
}
}
@ -371,15 +385,10 @@ impl<'db> rustc_type_ir::TypeFoldable<DbInterner<'db>> for Clauses<'db> {
self,
folder: &mut F,
) -> Result<Self, F::Error> {
use rustc_type_ir::inherent::SliceLike as _;
let inner: smallvec::SmallVec<[_; 2]> =
self.iter().map(|v| v.try_fold_with(folder)).collect::<Result<_, _>>()?;
Ok(Clauses::new_from_iter(folder.cx(), inner))
self.try_super_fold_with(folder)
}
fn fold_with<F: rustc_type_ir::TypeFolder<DbInterner<'db>>>(self, folder: &mut F) -> Self {
use rustc_type_ir::inherent::SliceLike as _;
let inner: smallvec::SmallVec<[_; 2]> = self.iter().map(|v| v.fold_with(folder)).collect();
Clauses::new_from_iter(folder.cx(), inner)
self.super_fold_with(folder)
}
}
@ -389,19 +398,28 @@ impl<'db> rustc_type_ir::TypeVisitable<DbInterner<'db>> for Clauses<'db> {
visitor: &mut V,
) -> V::Result {
use rustc_ast_ir::visit::VisitorResult;
use rustc_type_ir::inherent::SliceLike as _;
rustc_ast_ir::walk_visitable_list!(visitor, self.as_slice().iter());
rustc_ast_ir::walk_visitable_list!(visitor, self.iter());
V::Result::output()
}
}
impl<'db, V: super::WorldExposer> rustc_type_ir::GenericTypeVisitable<V> for Clauses<'db> {
    fn generic_visit_with(&self, visitor: &mut V) {
        // Report the interned slice itself; recurse into its elements only
        // when the visitor signals `Continue`.
        if !visitor.on_interned_slice(self.interned).is_continue() {
            return;
        }
        for clause in self.as_slice() {
            clause.generic_visit_with(visitor);
        }
    }
}
impl<'db> rustc_type_ir::Flags for Clauses<'db> {
#[inline]
fn flags(&self) -> rustc_type_ir::TypeFlags {
self.inner().1
self.interned.header.header.0.flags
}
#[inline]
fn outer_exclusive_binder(&self) -> rustc_type_ir::DebruijnIndex {
self.inner().2
self.interned.header.header.0.outer_exclusive_binder
}
}
@ -414,18 +432,20 @@ impl<'db> rustc_type_ir::TypeSuperVisitable<DbInterner<'db>> for Clauses<'db> {
}
}
#[derive(Copy, Clone, PartialEq, Eq, Hash, Debug)] // TODO implement Debug by hand
#[derive(Copy, Clone, PartialEq, Eq, Hash, Debug, GenericTypeVisitable)] // TODO implement Debug by hand
pub struct Clause<'db>(pub(crate) Predicate<'db>);
// We could cram the reveal into the clauses like rustc does, probably
#[derive(Copy, Clone, Debug, Hash, PartialEq, Eq, TypeVisitable, TypeFoldable)]
#[derive(
Copy, Clone, Debug, Hash, PartialEq, Eq, TypeVisitable, TypeFoldable, GenericTypeVisitable,
)]
pub struct ParamEnv<'db> {
pub(crate) clauses: Clauses<'db>,
}
impl<'db> ParamEnv<'db> {
pub fn empty() -> Self {
ParamEnv { clauses: Clauses::new_from_iter(DbInterner::conjure(), []) }
ParamEnv { clauses: Clauses::empty(DbInterner::conjure()) }
}
pub fn clauses(self) -> Clauses<'db> {
@ -460,6 +480,14 @@ impl<'db> TypeVisitable<DbInterner<'db>> for Predicate<'db> {
}
}
impl<'db, V: super::WorldExposer> GenericTypeVisitable<V> for Predicate<'db> {
    fn generic_visit_with(&self, visitor: &mut V) {
        // Report the interned payload; descend into the predicate kind only
        // when the visitor signals `Continue`.
        if !visitor.on_interned(self.interned).is_continue() {
            return;
        }
        self.kind().generic_visit_with(visitor);
    }
}
impl<'db> TypeSuperVisitable<DbInterner<'db>> for Predicate<'db> {
fn super_visit_with<V: rustc_type_ir::TypeVisitor<DbInterner<'db>>>(
&self,

View file

@ -1,47 +1,53 @@
//! Things related to regions.
use hir_def::LifetimeParamId;
use intern::Symbol;
use intern::{Interned, InternedRef, Symbol, impl_internable};
use macros::GenericTypeVisitable;
use rustc_type_ir::{
BoundVar, BoundVarIndexKind, DebruijnIndex, Flags, INNERMOST, RegionVid, TypeFlags,
TypeFoldable, TypeVisitable,
BoundVar, BoundVarIndexKind, DebruijnIndex, Flags, GenericTypeVisitable, INNERMOST, RegionVid,
TypeFlags, TypeFoldable, TypeVisitable,
inherent::{IntoKind, PlaceholderLike, SliceLike},
relate::Relate,
};
use crate::next_solver::{GenericArg, OutlivesPredicate};
use crate::next_solver::{
GenericArg, OutlivesPredicate, impl_foldable_for_interned_slice, impl_stored_interned,
interned_slice,
};
use super::{
ErrorGuaranteed, SolverDefId, interned_vec_db,
SolverDefId,
interner::{BoundVarKind, DbInterner, Placeholder},
};
pub type RegionKind<'db> = rustc_type_ir::RegionKind<DbInterner<'db>>;
#[salsa::interned(constructor = new_)]
#[derive(Clone, Copy, PartialEq, Eq, Hash)]
pub struct Region<'db> {
#[returns(ref)]
kind_: RegionKind<'db>,
pub(super) interned: InternedRef<'db, RegionInterned>,
}
impl std::fmt::Debug for Region<'_> {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
self.kind().fmt(f)
}
}
#[derive(PartialEq, Eq, Hash, GenericTypeVisitable)]
#[repr(align(4))] // Required for `GenericArg` bit-tagging.
pub(super) struct RegionInterned(RegionKind<'static>);
impl_internable!(gc; RegionInterned);
impl_stored_interned!(RegionInterned, Region, StoredRegion);
// Compile-time proof that `Region` remains `Copy`.
const _: () = {
    const fn assert_copy<T: Copy>() {}
    assert_copy::<Region<'static>>();
};
impl<'db> Region<'db> {
pub fn new(interner: DbInterner<'db>, kind: RegionKind<'db>) -> Self {
Region::new_(interner.db(), kind)
pub fn new(_interner: DbInterner<'db>, kind: RegionKind<'db>) -> Self {
let kind = unsafe { std::mem::transmute::<RegionKind<'db>, RegionKind<'static>>(kind) };
Self { interned: Interned::new_gc(RegionInterned(kind)) }
}
pub fn inner(&self) -> &RegionKind<'db> {
crate::with_attached_db(|db| {
let inner = self.kind_(db);
// SAFETY: The caller already has access to a `Region<'db>`, so borrowchecking will
// make sure that our returned value is valid for the lifetime `'db`.
unsafe { std::mem::transmute::<&RegionKind<'_>, &RegionKind<'db>>(inner) }
})
let inner = &self.interned.0;
unsafe { std::mem::transmute::<&RegionKind<'static>, &RegionKind<'db>>(inner) }
}
pub fn new_early_param(
@ -60,7 +66,7 @@ impl<'db> Region<'db> {
}
pub fn new_erased(interner: DbInterner<'db>) -> Region<'db> {
Region::new(interner, RegionKind::ReErased)
interner.default_types().regions.erased
}
pub fn new_bound(
@ -92,7 +98,7 @@ impl<'db> Region<'db> {
}
pub fn error(interner: DbInterner<'db>) -> Self {
Region::new(interner, RegionKind::ReError(ErrorGuaranteed))
interner.default_types().regions.error
}
pub fn type_flags(&self) -> TypeFlags {
@ -256,6 +262,12 @@ impl BoundRegionKind {
}
}
impl std::fmt::Debug for Region<'_> {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        // Render as the underlying region kind.
        write!(f, "{:?}", self.kind())
    }
}
impl<'db> IntoKind for Region<'db> {
type Kind = RegionKind<'db>;
@ -342,7 +354,7 @@ impl<'db> rustc_type_ir::inherent::Region<DbInterner<'db>> for Region<'db> {
}
fn new_static(interner: DbInterner<'db>) -> Self {
Region::new(interner, RegionKind::ReStatic)
interner.default_types().regions.statik
}
fn new_placeholder(
@ -377,6 +389,22 @@ impl<'db> PlaceholderLike<DbInterner<'db>> for PlaceholderRegion {
}
}
impl<'db, V: super::WorldExposer> GenericTypeVisitable<V> for Region<'db> {
    fn generic_visit_with(&self, visitor: &mut V) {
        // Report the interned payload; descend into the region kind only
        // when the visitor signals `Continue`.
        if !visitor.on_interned(self.interned).is_continue() {
            return;
        }
        self.kind().generic_visit_with(visitor);
    }
}
type GenericArgOutlivesPredicate<'db> = OutlivesPredicate<'db, GenericArg<'db>>;
interned_vec_db!(RegionAssumptions, GenericArgOutlivesPredicate);
interned_slice!(
RegionAssumptionsStorage,
RegionAssumptions,
StoredRegionAssumptions,
region_assumptions,
GenericArgOutlivesPredicate<'db>,
GenericArgOutlivesPredicate<'static>,
);
impl_foldable_for_interned_slice!(RegionAssumptions);

View file

@ -5,7 +5,7 @@ use rustc_next_trait_solver::delegate::SolverDelegate;
use rustc_type_ir::{
AliasTyKind, GenericArgKind, InferCtxtLike, Interner, PredicatePolarity, TypeFlags,
TypeVisitableExt,
inherent::{IntoKind, SliceLike, Term as _, Ty as _},
inherent::{IntoKind, Term as _, Ty as _},
lang_items::SolverTraitLangItem,
solve::{Certainty, NoSolution},
};

View file

@ -7,13 +7,15 @@ use hir_def::{
hir::generics::{TypeOrConstParamData, TypeParamProvenance},
};
use hir_def::{TraitId, type_ref::Rawness};
use intern::{Interned, InternedRef, impl_internable};
use macros::GenericTypeVisitable;
use rustc_abi::{Float, Integer, Size};
use rustc_ast_ir::{Mutability, try_visit, visit::VisitorResult};
use rustc_type_ir::{
AliasTyKind, BoundVar, BoundVarIndexKind, ClosureKind, CoroutineArgs, CoroutineArgsParts,
DebruijnIndex, FlagComputation, Flags, FloatTy, FloatVid, InferTy, IntTy, IntVid, Interner,
TyVid, TypeFoldable, TypeSuperFoldable, TypeSuperVisitable, TypeVisitable, TypeVisitableExt,
TypeVisitor, UintTy, Upcast, WithCachedTypeInfo,
DebruijnIndex, FlagComputation, Flags, FloatTy, FloatVid, GenericTypeVisitable, InferTy, IntTy,
IntVid, Interner, TyVid, TypeFoldable, TypeSuperFoldable, TypeSuperVisitable, TypeVisitable,
TypeVisitableExt, TypeVisitor, UintTy, Upcast, WithCachedTypeInfo,
inherent::{
AdtDef as _, BoundExistentialPredicates, BoundVarLike, Const as _, GenericArgs as _,
IntoKind, ParamLike, PlaceholderLike, Safety as _, SliceLike, Ty as _,
@ -28,15 +30,15 @@ use crate::{
lower::GenericPredicates,
next_solver::{
AdtDef, AliasTy, Binder, CallableIdWrapper, Clause, ClauseKind, ClosureIdWrapper, Const,
CoroutineIdWrapper, FnSig, GenericArg, PolyFnSig, Region, TraitRef, TypeAliasIdWrapper,
CoroutineIdWrapper, FnSig, GenericArgKind, PolyFnSig, Region, TraitRef, TypeAliasIdWrapper,
abi::Safety,
interner::InternedWrapperNoDebug,
impl_foldable_for_interned_slice, impl_stored_interned, interned_slice,
util::{CoroutineArgsExt, IntegerTypeExt},
},
};
use super::{
BoundVarKind, DbInterner, GenericArgs, Placeholder, SolverDefId, interned_vec_db,
BoundVarKind, DbInterner, GenericArgs, Placeholder, SolverDefId,
util::{FloatExt, IntegerExt},
};
@ -44,35 +46,45 @@ pub type SimplifiedType = rustc_type_ir::fast_reject::SimplifiedType<SolverDefId
pub type TyKind<'db> = rustc_type_ir::TyKind<DbInterner<'db>>;
pub type FnHeader<'db> = rustc_type_ir::FnHeader<DbInterner<'db>>;
#[salsa::interned(constructor = new_)]
#[derive(Clone, Copy, PartialEq, Eq, Hash)]
pub struct Ty<'db> {
#[returns(ref)]
kind_: InternedWrapperNoDebug<WithCachedTypeInfo<TyKind<'db>>>,
pub(super) interned: InternedRef<'db, TyInterned>,
}
#[derive(PartialEq, Eq, Hash, GenericTypeVisitable)]
#[repr(align(4))] // Required for `GenericArg` bit-tagging.
pub(super) struct TyInterned(WithCachedTypeInfo<TyKind<'static>>);
impl_internable!(gc; TyInterned);
impl_stored_interned!(TyInterned, Ty, StoredTy);
// Compile-time proof that `Ty` remains `Copy`.
const _: () = {
    const fn assert_copy<T: Copy>() {}
    assert_copy::<Ty<'static>>();
};
impl<'db> Ty<'db> {
pub fn new(interner: DbInterner<'db>, kind: TyKind<'db>) -> Self {
#[inline]
pub fn new(_interner: DbInterner<'db>, kind: TyKind<'db>) -> Self {
let kind = unsafe { std::mem::transmute::<TyKind<'db>, TyKind<'static>>(kind) };
let flags = FlagComputation::for_kind(&kind);
let cached = WithCachedTypeInfo {
internee: kind,
flags: flags.flags,
outer_exclusive_binder: flags.outer_exclusive_binder,
};
Ty::new_(interner.db(), InternedWrapperNoDebug(cached))
Self { interned: Interned::new_gc(TyInterned(cached)) }
}
#[inline]
pub fn inner(&self) -> &WithCachedTypeInfo<TyKind<'db>> {
crate::with_attached_db(|db| {
let inner = &self.kind_(db).0;
// SAFETY: The caller already has access to a `Ty<'db>`, so borrowchecking will
// make sure that our returned value is valid for the lifetime `'db`.
unsafe { std::mem::transmute(inner) }
})
let inner = &self.interned.0;
unsafe {
std::mem::transmute::<
&WithCachedTypeInfo<TyKind<'static>>,
&WithCachedTypeInfo<TyKind<'db>>,
>(inner)
}
}
pub fn new_adt(interner: DbInterner<'db>, adt_id: AdtId, args: GenericArgs<'db>) -> Self {
@ -99,16 +111,39 @@ impl<'db> Ty<'db> {
Ty::new_infer(interner, InferTy::FloatVar(v))
}
#[inline]
pub fn new_int(interner: DbInterner<'db>, i: IntTy) -> Self {
Ty::new(interner, TyKind::Int(i))
let types = interner.default_types();
match i {
IntTy::Isize => types.types.isize,
IntTy::I8 => types.types.i8,
IntTy::I16 => types.types.i16,
IntTy::I32 => types.types.i32,
IntTy::I64 => types.types.i64,
IntTy::I128 => types.types.i128,
}
}
pub fn new_uint(interner: DbInterner<'db>, ui: UintTy) -> Self {
Ty::new(interner, TyKind::Uint(ui))
let types = interner.default_types();
match ui {
UintTy::Usize => types.types.usize,
UintTy::U8 => types.types.u8,
UintTy::U16 => types.types.u16,
UintTy::U32 => types.types.u32,
UintTy::U64 => types.types.u64,
UintTy::U128 => types.types.u128,
}
}
pub fn new_float(interner: DbInterner<'db>, f: FloatTy) -> Self {
Ty::new(interner, TyKind::Float(f))
let types = interner.default_types();
match f {
FloatTy::F16 => types.types.f16,
FloatTy::F32 => types.types.f32,
FloatTy::F64 => types.types.f64,
FloatTy::F128 => types.types.f128,
}
}
pub fn new_fresh(interner: DbInterner<'db>, n: u32) -> Self {
@ -124,7 +159,7 @@ impl<'db> Ty<'db> {
}
pub fn new_empty_tuple(interner: DbInterner<'db>) -> Self {
Ty::new_tup(interner, &[])
interner.default_types().types.unit
}
pub fn new_imm_ptr(interner: DbInterner<'db>, ty: Ty<'db>) -> Self {
@ -383,7 +418,7 @@ impl<'db> Ty<'db> {
#[inline]
pub fn is_unit(self) -> bool {
matches!(self.kind(), TyKind::Tuple(tys) if tys.inner().is_empty())
matches!(self.kind(), TyKind::Tuple(tys) if tys.is_empty())
}
#[inline]
@ -555,34 +590,34 @@ impl<'db> Ty<'db> {
interner: DbInterner<'db>,
ty: hir_def::builtin_type::BuiltinType,
) -> Ty<'db> {
let kind = match ty {
hir_def::builtin_type::BuiltinType::Char => TyKind::Char,
hir_def::builtin_type::BuiltinType::Bool => TyKind::Bool,
hir_def::builtin_type::BuiltinType::Str => TyKind::Str,
hir_def::builtin_type::BuiltinType::Int(int) => TyKind::Int(match int {
hir_def::builtin_type::BuiltinInt::Isize => rustc_type_ir::IntTy::Isize,
hir_def::builtin_type::BuiltinInt::I8 => rustc_type_ir::IntTy::I8,
hir_def::builtin_type::BuiltinInt::I16 => rustc_type_ir::IntTy::I16,
hir_def::builtin_type::BuiltinInt::I32 => rustc_type_ir::IntTy::I32,
hir_def::builtin_type::BuiltinInt::I64 => rustc_type_ir::IntTy::I64,
hir_def::builtin_type::BuiltinInt::I128 => rustc_type_ir::IntTy::I128,
}),
hir_def::builtin_type::BuiltinType::Uint(uint) => TyKind::Uint(match uint {
hir_def::builtin_type::BuiltinUint::Usize => rustc_type_ir::UintTy::Usize,
hir_def::builtin_type::BuiltinUint::U8 => rustc_type_ir::UintTy::U8,
hir_def::builtin_type::BuiltinUint::U16 => rustc_type_ir::UintTy::U16,
hir_def::builtin_type::BuiltinUint::U32 => rustc_type_ir::UintTy::U32,
hir_def::builtin_type::BuiltinUint::U64 => rustc_type_ir::UintTy::U64,
hir_def::builtin_type::BuiltinUint::U128 => rustc_type_ir::UintTy::U128,
}),
hir_def::builtin_type::BuiltinType::Float(float) => TyKind::Float(match float {
hir_def::builtin_type::BuiltinFloat::F16 => rustc_type_ir::FloatTy::F16,
hir_def::builtin_type::BuiltinFloat::F32 => rustc_type_ir::FloatTy::F32,
hir_def::builtin_type::BuiltinFloat::F64 => rustc_type_ir::FloatTy::F64,
hir_def::builtin_type::BuiltinFloat::F128 => rustc_type_ir::FloatTy::F128,
}),
};
Ty::new(interner, kind)
let types = interner.default_types();
match ty {
hir_def::builtin_type::BuiltinType::Char => types.types.char,
hir_def::builtin_type::BuiltinType::Bool => types.types.bool,
hir_def::builtin_type::BuiltinType::Str => types.types.str,
hir_def::builtin_type::BuiltinType::Int(int) => match int {
hir_def::builtin_type::BuiltinInt::Isize => types.types.isize,
hir_def::builtin_type::BuiltinInt::I8 => types.types.i8,
hir_def::builtin_type::BuiltinInt::I16 => types.types.i16,
hir_def::builtin_type::BuiltinInt::I32 => types.types.i32,
hir_def::builtin_type::BuiltinInt::I64 => types.types.i64,
hir_def::builtin_type::BuiltinInt::I128 => types.types.i128,
},
hir_def::builtin_type::BuiltinType::Uint(uint) => match uint {
hir_def::builtin_type::BuiltinUint::Usize => types.types.usize,
hir_def::builtin_type::BuiltinUint::U8 => types.types.u8,
hir_def::builtin_type::BuiltinUint::U16 => types.types.u16,
hir_def::builtin_type::BuiltinUint::U32 => types.types.u32,
hir_def::builtin_type::BuiltinUint::U64 => types.types.u64,
hir_def::builtin_type::BuiltinUint::U128 => types.types.u128,
},
hir_def::builtin_type::BuiltinType::Float(float) => match float {
hir_def::builtin_type::BuiltinFloat::F16 => types.types.f16,
hir_def::builtin_type::BuiltinFloat::F32 => types.types.f32,
hir_def::builtin_type::BuiltinFloat::F64 => types.types.f64,
hir_def::builtin_type::BuiltinFloat::F128 => types.types.f128,
},
}
}
pub fn as_builtin(self) -> Option<hir_def::builtin_type::BuiltinType> {
@ -661,10 +696,10 @@ impl<'db> Ty<'db> {
// This is only used by type walking.
// Parameters will be walked outside, and projection predicate is not used.
// So just provide the Future trait.
let impl_bound = TraitRef::new(
let impl_bound = TraitRef::new_from_args(
interner,
future_trait.into(),
GenericArgs::new_from_iter(interner, []),
GenericArgs::empty(interner),
)
.upcast(interner);
Some(vec![impl_bound])
@ -730,20 +765,23 @@ impl<'db> std::fmt::Debug for Ty<'db> {
}
}
impl<'db> std::fmt::Debug for InternedWrapperNoDebug<WithCachedTypeInfo<TyKind<'db>>> {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
self.0.internee.fmt(f)
}
}
impl<'db> IntoKind for Ty<'db> {
    type Kind = TyKind<'db>;

    #[inline]
    fn kind(self) -> Self::Kind {
        // Copy the kind out of the cached interned payload.
        let cached = self.inner();
        cached.internee
    }
}
impl<'db, V: super::WorldExposer> GenericTypeVisitable<V> for Ty<'db> {
    fn generic_visit_with(&self, visitor: &mut V) {
        // Report the interned payload; descend into the type kind only when
        // the visitor signals `Continue`.
        if !visitor.on_interned(self.interned).is_continue() {
            return;
        }
        self.kind().generic_visit_with(visitor);
    }
}
impl<'db> TypeVisitable<DbInterner<'db>> for Ty<'db> {
fn visit_with<V: rustc_type_ir::TypeVisitor<DbInterner<'db>>>(
&self,
@ -942,19 +980,19 @@ impl<'db> Flags for Ty<'db> {
impl<'db> rustc_type_ir::inherent::Ty<DbInterner<'db>> for Ty<'db> {
fn new_unit(interner: DbInterner<'db>) -> Self {
Ty::new(interner, TyKind::Tuple(Default::default()))
interner.default_types().types.unit
}
fn new_bool(interner: DbInterner<'db>) -> Self {
Ty::new(interner, TyKind::Bool)
interner.default_types().types.bool
}
fn new_u8(interner: DbInterner<'db>) -> Self {
Ty::new(interner, TyKind::Uint(rustc_type_ir::UintTy::U8))
interner.default_types().types.u8
}
fn new_usize(interner: DbInterner<'db>) -> Self {
Ty::new(interner, TyKind::Uint(rustc_type_ir::UintTy::Usize))
interner.default_types().types.usize
}
fn new_infer(interner: DbInterner<'db>, var: rustc_type_ir::InferTy) -> Self {
@ -1068,9 +1106,9 @@ impl<'db> rustc_type_ir::inherent::Ty<DbInterner<'db>> for Ty<'db> {
// to unnecessary overflows in async code. See the issue:
// <https://github.com/rust-lang/rust/issues/145151>.
let coroutine_args = interner.mk_args_from_iter(coroutine_args.iter().map(|arg| {
match arg {
GenericArg::Ty(_) | GenericArg::Const(_) => arg,
GenericArg::Lifetime(_) => {
match arg.kind() {
GenericArgKind::Type(_) | GenericArgKind::Const(_) => arg,
GenericArgKind::Lifetime(_) => {
crate::next_solver::Region::new(interner, rustc_type_ir::RegionKind::ReErased)
.into()
}
@ -1105,7 +1143,7 @@ impl<'db> rustc_type_ir::inherent::Ty<DbInterner<'db>> for Ty<'db> {
}
fn new_tup(interner: DbInterner<'db>, tys: &[<DbInterner<'db> as Interner>::Ty]) -> Self {
Ty::new(interner, TyKind::Tuple(Tys::new_from_iter(interner, tys.iter().cloned())))
Ty::new(interner, TyKind::Tuple(Tys::new_from_slice(tys)))
}
fn new_tup_from_iter<It, T>(interner: DbInterner<'db>, iter: It) -> T::Output
@ -1177,10 +1215,11 @@ impl<'db> rustc_type_ir::inherent::Ty<DbInterner<'db>> for Ty<'db> {
}
fn from_closure_kind(interner: DbInterner<'db>, kind: rustc_type_ir::ClosureKind) -> Self {
let types = interner.default_types();
match kind {
ClosureKind::Fn => Ty::new(interner, TyKind::Int(IntTy::I8)),
ClosureKind::FnMut => Ty::new(interner, TyKind::Int(IntTy::I16)),
ClosureKind::FnOnce => Ty::new(interner, TyKind::Int(IntTy::I32)),
ClosureKind::Fn => types.types.i8,
ClosureKind::FnMut => types.types.i16,
ClosureKind::FnOnce => types.types.i32,
}
}
@ -1188,9 +1227,10 @@ impl<'db> rustc_type_ir::inherent::Ty<DbInterner<'db>> for Ty<'db> {
interner: DbInterner<'db>,
kind: rustc_type_ir::ClosureKind,
) -> Self {
let types = interner.default_types();
match kind {
ClosureKind::Fn | ClosureKind::FnMut => Ty::new(interner, TyKind::Int(IntTy::I16)),
ClosureKind::FnOnce => Ty::new(interner, TyKind::Int(IntTy::I32)),
ClosureKind::Fn | ClosureKind::FnMut => types.types.i16,
ClosureKind::FnOnce => types.types.i32,
}
}
@ -1237,7 +1277,7 @@ impl<'db> rustc_type_ir::inherent::Ty<DbInterner<'db>> for Ty<'db> {
| TyKind::Tuple(_)
| TyKind::Error(_)
| TyKind::Infer(InferTy::IntVar(_) | InferTy::FloatVar(_)) => {
Ty::new(interner, TyKind::Uint(UintTy::U8))
interner.default_types().types.u8
}
TyKind::Bound(..)
@ -1254,20 +1294,19 @@ impl<'db> rustc_type_ir::inherent::Ty<DbInterner<'db>> for Ty<'db> {
}
}
interned_vec_db!(Tys, Ty);
interned_slice!(TysStorage, Tys, StoredTys, tys, Ty<'db>, Ty<'static>);
impl_foldable_for_interned_slice!(Tys);
impl<'db> Tys<'db> {
pub fn inputs(&self) -> &[Ty<'db>] {
#[inline]
pub fn inputs(self) -> &'db [Ty<'db>] {
self.as_slice().split_last().unwrap().1
}
}
impl<'db> rustc_type_ir::inherent::Tys<DbInterner<'db>> for Tys<'db> {
fn inputs(self) -> <DbInterner<'db> as Interner>::FnInputTys {
Tys::new_from_iter(
DbInterner::conjure(),
self.as_slice().split_last().unwrap().1.iter().copied(),
)
self.as_slice().split_last().unwrap().1
}
fn output(self) -> <DbInterner<'db> as Interner>::Ty {
@ -1323,6 +1362,10 @@ pub enum BoundTyKind {
#[derive(Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Hash, Debug)]
pub struct ErrorGuaranteed;
impl<V> GenericTypeVisitable<V> for ErrorGuaranteed {
fn generic_visit_with(&self, _visitor: &mut V) {}
}
impl<'db> TypeVisitable<DbInterner<'db>> for ErrorGuaranteed {
fn visit_with<V: rustc_type_ir::TypeVisitor<DbInterner<'db>>>(
&self,

View file

@ -77,9 +77,10 @@ pub trait IntegerTypeExt {
impl IntegerTypeExt for IntegerType {
fn to_ty<'db>(&self, interner: DbInterner<'db>) -> Ty<'db> {
let types = interner.default_types();
match self {
IntegerType::Pointer(true) => Ty::new(interner, TyKind::Int(IntTy::Isize)),
IntegerType::Pointer(false) => Ty::new(interner, TyKind::Uint(UintTy::Usize)),
IntegerType::Pointer(true) => types.types.isize,
IntegerType::Pointer(false) => types.types.usize,
IntegerType::Fixed(i, s) => i.to_ty(interner, *s),
}
}
@ -120,17 +121,18 @@ impl IntegerExt for Integer {
#[inline]
fn to_ty<'db>(&self, interner: DbInterner<'db>, signed: bool) -> Ty<'db> {
use Integer::*;
let types = interner.default_types();
match (*self, signed) {
(I8, false) => Ty::new(interner, TyKind::Uint(UintTy::U8)),
(I16, false) => Ty::new(interner, TyKind::Uint(UintTy::U16)),
(I32, false) => Ty::new(interner, TyKind::Uint(UintTy::U32)),
(I64, false) => Ty::new(interner, TyKind::Uint(UintTy::U64)),
(I128, false) => Ty::new(interner, TyKind::Uint(UintTy::U128)),
(I8, true) => Ty::new(interner, TyKind::Int(IntTy::I8)),
(I16, true) => Ty::new(interner, TyKind::Int(IntTy::I16)),
(I32, true) => Ty::new(interner, TyKind::Int(IntTy::I32)),
(I64, true) => Ty::new(interner, TyKind::Int(IntTy::I64)),
(I128, true) => Ty::new(interner, TyKind::Int(IntTy::I128)),
(I8, false) => types.types.u8,
(I16, false) => types.types.u16,
(I32, false) => types.types.u32,
(I64, false) => types.types.u64,
(I128, false) => types.types.u128,
(I8, true) => types.types.i8,
(I16, true) => types.types.i16,
(I32, true) => types.types.i32,
(I64, true) => types.types.i64,
(I128, true) => types.types.i128,
}
}
@ -214,11 +216,12 @@ impl FloatExt for Float {
#[inline]
fn to_ty<'db>(&self, interner: DbInterner<'db>) -> Ty<'db> {
use Float::*;
let types = interner.default_types();
match *self {
F16 => Ty::new(interner, TyKind::Float(FloatTy::F16)),
F32 => Ty::new(interner, TyKind::Float(FloatTy::F32)),
F64 => Ty::new(interner, TyKind::Float(FloatTy::F64)),
F128 => Ty::new(interner, TyKind::Float(FloatTy::F128)),
F16 => types.types.f16,
F32 => types.types.f32,
F64 => types.types.f64,
F128 => types.types.f128,
}
}
@ -244,13 +247,7 @@ impl PrimitiveExt for Primitive {
match *self {
Primitive::Int(i, signed) => i.to_ty(interner, signed),
Primitive::Float(f) => f.to_ty(interner),
Primitive::Pointer(_) => Ty::new(
interner,
TyKind::RawPtr(
Ty::new(interner, TyKind::Tuple(Default::default())),
rustc_ast_ir::Mutability::Mut,
),
),
Primitive::Pointer(_) => interner.default_types().types.mut_unit_ptr,
}
}
@ -283,7 +280,7 @@ impl<'db> CoroutineArgsExt<'db> for CoroutineArgs<DbInterner<'db>> {
/// The type of the state discriminant used in the coroutine type.
#[inline]
fn discr_ty(&self, interner: DbInterner<'db>) -> Ty<'db> {
Ty::new(interner, TyKind::Uint(UintTy::U32))
interner.default_types().types.u32
}
}

View file

@ -13,7 +13,7 @@ use crate::{
db::{HirDatabase, InternedOpaqueTyId},
lower::{ImplTraitIdx, ImplTraits},
next_solver::{
DbInterner, EarlyBinder, ErrorGuaranteed, SolverDefId, Ty, TypingMode,
DbInterner, ErrorGuaranteed, SolverDefId, StoredEarlyBinder, StoredTy, Ty, TypingMode,
infer::{DbInternerInferExt, traits::ObligationCause},
obligation_ctxt::ObligationCtxt,
},
@ -72,10 +72,10 @@ pub(crate) fn opaque_types_defined_by(
// FIXME: Collect opaques from `#[define_opaque]`.
fn extend_with_opaques<'db>(
db: &'db dyn HirDatabase,
opaques: &Option<Box<EarlyBinder<'db, ImplTraits<'db>>>>,
mut make_impl_trait: impl FnMut(ImplTraitIdx<'db>) -> ImplTraitId<'db>,
fn extend_with_opaques(
db: &dyn HirDatabase,
opaques: &Option<Box<StoredEarlyBinder<ImplTraits>>>,
mut make_impl_trait: impl FnMut(ImplTraitIdx) -> ImplTraitId,
result: &mut Vec<SolverDefId>,
) {
if let Some(opaques) = opaques {
@ -89,25 +89,25 @@ pub(crate) fn opaque_types_defined_by(
// These are firewall queries to prevent drawing dependencies between infers:
#[salsa::tracked(returns(ref), unsafe(non_update_return_type))]
#[salsa::tracked(returns(ref))]
pub(crate) fn rpit_hidden_types<'db>(
db: &'db dyn HirDatabase,
function: FunctionId,
) -> ArenaMap<ImplTraitIdx<'db>, EarlyBinder<'db, Ty<'db>>> {
) -> ArenaMap<ImplTraitIdx, StoredEarlyBinder<StoredTy>> {
let infer = InferenceResult::for_body(db, function.into());
let mut result = ArenaMap::new();
for (opaque, hidden_type) in infer.return_position_impl_trait_types(db) {
result.insert(opaque, EarlyBinder::bind(hidden_type));
result.insert(opaque, StoredEarlyBinder::bind(hidden_type.store()));
}
result.shrink_to_fit();
result
}
#[salsa::tracked(returns(ref), unsafe(non_update_return_type))]
#[salsa::tracked(returns(ref))]
pub(crate) fn tait_hidden_types<'db>(
db: &'db dyn HirDatabase,
type_alias: TypeAliasId,
) -> ArenaMap<ImplTraitIdx<'db>, EarlyBinder<'db, Ty<'db>>> {
) -> ArenaMap<ImplTraitIdx, StoredEarlyBinder<StoredTy>> {
// Call this first, to not perform redundant work if there are no TAITs.
let Some(taits_count) = ImplTraits::type_alias_impl_traits(db, type_alias)
.as_deref()
@ -129,7 +129,7 @@ pub(crate) fn tait_hidden_types<'db>(
let mut result = ArenaMap::with_capacity(taits_count);
for defining_body in defining_bodies {
let infer = InferenceResult::for_body(db, defining_body);
for (&opaque, &hidden_type) in &infer.type_of_opaque {
for (&opaque, hidden_type) in &infer.type_of_opaque {
let ImplTraitId::TypeAliasImplTrait(opaque_owner, opaque_idx) = opaque.loc(db) else {
continue;
};
@ -138,13 +138,18 @@ pub(crate) fn tait_hidden_types<'db>(
}
// In the presence of errors, we attempt to create a unified type from all
// types. rustc doesn't do that, but this should improve the experience.
let hidden_type = infcx.insert_type_vars(hidden_type);
let hidden_type = infcx.insert_type_vars(hidden_type.as_ref());
match result.entry(opaque_idx) {
la_arena::Entry::Vacant(entry) => {
entry.insert(EarlyBinder::bind(hidden_type));
entry.insert(StoredEarlyBinder::bind(hidden_type.store()));
}
la_arena::Entry::Occupied(entry) => {
_ = ocx.eq(&cause, param_env, entry.get().instantiate_identity(), hidden_type);
_ = ocx.eq(
&cause,
param_env,
entry.get().get().instantiate_identity(),
hidden_type,
);
}
}
}
@ -157,12 +162,15 @@ pub(crate) fn tait_hidden_types<'db>(
let idx = la_arena::Idx::from_raw(la_arena::RawIdx::from_u32(idx as u32));
match result.entry(idx) {
la_arena::Entry::Vacant(entry) => {
entry.insert(EarlyBinder::bind(Ty::new_error(interner, ErrorGuaranteed)));
entry.insert(StoredEarlyBinder::bind(
Ty::new_error(interner, ErrorGuaranteed).store(),
));
}
la_arena::Entry::Occupied(mut entry) => {
*entry.get_mut() = entry.get().map_bound(|hidden_type| {
infcx.resolve_vars_if_possible(hidden_type).replace_infer_with_error(interner)
});
let hidden_type = entry.get().get().skip_binder();
let hidden_type =
infcx.resolve_vars_if_possible(hidden_type).replace_infer_with_error(interner);
*entry.get_mut() = StoredEarlyBinder::bind(hidden_type.store());
}
}
}

View file

@ -2,7 +2,6 @@
use hir_def::{HasModule, ImplId, nameres::crate_def_map};
use intern::sym;
use rustc_type_ir::inherent::SliceLike;
use tracing::debug;
use crate::{
@ -22,6 +21,7 @@ use crate::{
// cannot create a cycle, but a cycle handler is required nevertheless.
fn specializes_query_cycle(
_db: &dyn HirDatabase,
_: salsa::Id,
_specializing_impl_def_id: ImplId,
_parent_impl_def_id: ImplId,
) -> bool {

View file

@ -149,9 +149,10 @@ fn check_impl(
let (body, body_source_map) = db.body_with_source_map(def);
let inference_result = InferenceResult::for_body(&db, def);
for (pat, mut ty) in inference_result.type_of_pat.iter() {
for (pat, ty) in inference_result.type_of_pat.iter() {
let mut ty = ty.as_ref();
if let Pat::Bind { id, .. } = body[pat] {
ty = &inference_result.type_of_binding[id];
ty = inference_result.type_of_binding[id].as_ref();
}
let node = match pat_node(&body_source_map, pat, &db) {
Some(value) => value,
@ -169,6 +170,7 @@ fn check_impl(
}
for (expr, ty) in inference_result.type_of_expr.iter() {
let ty = ty.as_ref();
let node = match expr_node(&body_source_map, expr, &db) {
Some(value) => value,
None => continue,
@ -209,8 +211,8 @@ fn check_impl(
let range = node.as_ref().original_file_range_rooted(&db);
let actual = format!(
"expected {}, got {}",
mismatch.expected.display_test(&db, display_target),
mismatch.actual.display_test(&db, display_target)
mismatch.expected.as_ref().display_test(&db, display_target),
mismatch.actual.as_ref().display_test(&db, display_target)
);
match mismatches.remove(&range) {
Some(annotation) => assert_eq!(actual, annotation),
@ -318,20 +320,20 @@ fn infer_with_mismatches(content: &str, include_mismatches: bool) -> String {
crate::attach_db(&db, || {
let mut buf = String::new();
let mut infer_def = |inference_result: &InferenceResult<'_>,
let mut infer_def = |inference_result: &InferenceResult,
body: Arc<Body>,
body_source_map: Arc<BodySourceMap>,
krate: Crate| {
let display_target = DisplayTarget::from_crate(&db, krate);
let mut types: Vec<(InFile<SyntaxNode>, &Ty<'_>)> = Vec::new();
let mut mismatches: Vec<(InFile<SyntaxNode>, &TypeMismatch<'_>)> = Vec::new();
let mut types: Vec<(InFile<SyntaxNode>, Ty<'_>)> = Vec::new();
let mut mismatches: Vec<(InFile<SyntaxNode>, &TypeMismatch)> = Vec::new();
if let Some(self_param) = body.self_param {
let ty = &inference_result.type_of_binding[self_param];
if let Some(syntax_ptr) = body_source_map.self_param_syntax() {
let root = db.parse_or_expand(syntax_ptr.file_id);
let node = syntax_ptr.map(|ptr| ptr.to_node(&root).syntax().clone());
types.push((node, ty));
types.push((node, ty.as_ref()));
}
}
@ -346,7 +348,7 @@ fn infer_with_mismatches(content: &str, include_mismatches: bool) -> String {
}
Err(SyntheticSyntax) => continue,
};
types.push((node.clone(), ty));
types.push((node.clone(), ty.as_ref()));
if let Some(mismatch) = inference_result.type_mismatch_for_pat(pat) {
mismatches.push((node, mismatch));
}
@ -360,7 +362,7 @@ fn infer_with_mismatches(content: &str, include_mismatches: bool) -> String {
}
Err(SyntheticSyntax) => continue,
};
types.push((node.clone(), ty));
types.push((node.clone(), ty.as_ref()));
if let Some(mismatch) = inference_result.type_mismatch_for_expr(expr) {
mismatches.push((node, mismatch));
}
@ -401,8 +403,8 @@ fn infer_with_mismatches(content: &str, include_mismatches: bool) -> String {
"{}{:?}: expected {}, got {}\n",
macro_prefix,
range,
mismatch.expected.display_test(&db, display_target),
mismatch.actual.display_test(&db, display_target),
mismatch.expected.as_ref().display_test(&db, display_target),
mismatch.actual.as_ref().display_test(&db, display_target),
);
}
}

View file

@ -74,6 +74,7 @@ fn check_closure_captures(#[rust_analyzer::rust_fixture] ra_fixture: &str, expec
let place = capture.display_place(closure.0, db);
let capture_ty = capture
.ty
.get()
.skip_binder()
.display_test(db, DisplayTarget::from_crate(db, module.krate(db)))
.to_string();

Some files were not shown because too many files have changed in this diff Show more