Auto merge of #147675 - lnicola:sync-from-ra, r=lnicola

`rust-analyzer` subtree update

Subtree update of `rust-analyzer` to 6d4b23478d.

Created using https://github.com/rust-lang/josh-sync.

r? `@ghost`
This commit is contained in:
bors 2025-10-14 17:45:05 +00:00
commit 844264adda
219 changed files with 11113 additions and 9727 deletions

View file

@ -1021,7 +1021,7 @@ impl RustAnalyzer {
}
impl RustAnalyzer {
pub const ALLOW_FEATURES: &'static str = "rustc_private,proc_macro_internals,proc_macro_diagnostic,proc_macro_span,proc_macro_span_shrink,proc_macro_def_site";
pub const ALLOW_FEATURES: &'static str = "rustc_private,proc_macro_internals,proc_macro_diagnostic,proc_macro_span,proc_macro_span_shrink,proc_macro_def_site,new_zeroed_alloc";
}
impl Step for RustAnalyzer {

View file

@ -293,7 +293,7 @@ jobs:
timeout-minutes: 10
env:
FORCE_COLOR: 1
TYPOS_VERSION: v1.28.3
TYPOS_VERSION: v1.38.1
steps:
- name: download typos
run: curl -LsSf https://github.com/crate-ci/typos/releases/download/$TYPOS_VERSION/typos-$TYPOS_VERSION-x86_64-unknown-linux-musl.tar.gz | tar zxf - -C ${CARGO_HOME:-~/.cargo}/bin

View file

@ -16,7 +16,7 @@ env:
RUSTFLAGS: "-D warnings -W unreachable-pub"
RUSTUP_MAX_RETRIES: 10
FETCH_DEPTH: 0 # pull in the tags for the version string
MACOSX_DEPLOYMENT_TARGET: 13.0
MACOSX_DEPLOYMENT_TARGET: 14.0
ZIG_VERSION: 0.13.0
ZIGBUILD_VERSION: 0.19.8
@ -52,7 +52,7 @@ jobs:
target: arm-unknown-linux-gnueabihf
zig_target: arm-unknown-linux-gnueabihf.2.28
code-target: linux-armhf
- os: macos-13
- os: macos-14
target: x86_64-apple-darwin
code-target: darwin-x64
pgo: clap-rs/clap@v4.5.36

View file

@ -32,6 +32,7 @@ makro = "makro"
trivias = "trivias"
thir = "thir"
jod = "jod"
tructure = "tructure"
[default.extend-identifiers]
anc = "anc"

File diff suppressed because it is too large Load diff

View file

@ -52,6 +52,7 @@ debug = 2
[workspace.dependencies]
# local crates
macros = { path = "./crates/macros", version = "0.0.0" }
base-db = { path = "./crates/base-db", version = "0.0.0" }
cfg = { path = "./crates/cfg", version = "0.0.0", features = ["tt"] }
hir = { path = "./crates/hir", version = "0.0.0" }
@ -87,14 +88,14 @@ vfs-notify = { path = "./crates/vfs-notify", version = "0.0.0" }
vfs = { path = "./crates/vfs", version = "0.0.0" }
edition = { path = "./crates/edition", version = "0.0.0" }
ra-ap-rustc_lexer = { version = "0.132", default-features = false }
ra-ap-rustc_parse_format = { version = "0.132", default-features = false }
ra-ap-rustc_index = { version = "0.132", default-features = false }
ra-ap-rustc_abi = { version = "0.132", default-features = false }
ra-ap-rustc_pattern_analysis = { version = "0.132", default-features = false }
ra-ap-rustc_ast_ir = { version = "0.132", default-features = false }
ra-ap-rustc_type_ir = { version = "0.132", default-features = false }
ra-ap-rustc_next_trait_solver = { version = "0.132", default-features = false }
ra-ap-rustc_lexer = { version = "0.133", default-features = false }
ra-ap-rustc_parse_format = { version = "0.133", default-features = false }
ra-ap-rustc_index = { version = "0.133", default-features = false }
ra-ap-rustc_abi = { version = "0.133", default-features = false }
ra-ap-rustc_pattern_analysis = { version = "0.133", default-features = false }
ra-ap-rustc_ast_ir = { version = "0.133", default-features = false }
ra-ap-rustc_type_ir = { version = "0.133", default-features = false }
ra-ap-rustc_next_trait_solver = { version = "0.133", default-features = false }
# local crates that aren't published to crates.io. These should not have versions.
@ -137,12 +138,12 @@ rayon = "1.10.0"
rowan = "=0.15.15"
# Ideally we'd not enable the macros feature but unfortunately the `tracked` attribute does not work
# on impls without it
salsa = { version = "0.23.0", default-features = true, features = [
salsa = { version = "0.24.0", default-features = true, features = [
"rayon",
"salsa_unstable",
"macros",
] }
salsa-macros = "0.23.0"
salsa-macros = "0.24.0"
semver = "1.0.26"
serde = { version = "1.0.219" }
serde_derive = { version = "1.0.219" }

View file

@ -61,4 +61,4 @@ https://rust-lang.zulipchat.com/#narrow/stream/185405-t-compiler.2Frust-analyzer
rust-analyzer is primarily distributed under the terms of both the MIT
license and the Apache License (Version 2.0).
See LICENSE-APACHE and LICENSE-MIT for details.
See [LICENSE-APACHE](LICENSE-APACHE) and [LICENSE-MIT](LICENSE-MIT) for details.

View file

@ -23,11 +23,7 @@ intern.workspace = true
[dev-dependencies]
expect-test = "1.5.1"
oorandom = "11.1.5"
# We depend on both individually instead of using `features = ["derive"]` to microoptimize the
# build graph: if the feature was enabled, syn would be built early on in the graph if `smolstr`
# supports `arbitrary`. This way, we avoid feature unification.
arbitrary = "1.4.1"
derive_arbitrary = "1.4.1"
arbitrary = { version = "1.4.1", features = ["derive"] }
# local deps
syntax-bridge.workspace = true

View file

@ -47,7 +47,7 @@ impl fmt::Display for CfgAtom {
}
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
#[cfg_attr(test, derive(derive_arbitrary::Arbitrary))]
#[cfg_attr(test, derive(arbitrary::Arbitrary))]
pub enum CfgExpr {
Invalid,
Atom(CfgAtom),

View file

@ -88,7 +88,7 @@ pub struct AssociatedTypeBinding {
}
/// A single generic argument.
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub enum GenericArg {
Type(TypeRefId),
Lifetime(LifetimeRefId),

View file

@ -349,6 +349,7 @@ bitflags::bitflags! {
#[derive(Debug, Clone, Copy, Eq, PartialEq, Default)]
pub struct ImplFlags: u8 {
const NEGATIVE = 1 << 1;
const DEFAULT = 1 << 2;
const UNSAFE = 1 << 3;
}
}
@ -374,6 +375,9 @@ impl ImplSignature {
if src.value.excl_token().is_some() {
flags.insert(ImplFlags::NEGATIVE);
}
if src.value.default_token().is_some() {
flags.insert(ImplFlags::DEFAULT);
}
let (store, source_map, self_ty, target_trait, generic_params) =
crate::expr_store::lower::lower_impl(db, loc.container, src, id);
@ -389,6 +393,16 @@ impl ImplSignature {
Arc::new(source_map),
)
}
#[inline]
pub fn is_negative(&self) -> bool {
self.flags.contains(ImplFlags::NEGATIVE)
}
#[inline]
pub fn is_default(&self) -> bool {
self.flags.contains(ImplFlags::DEFAULT)
}
}
bitflags::bitflags! {

View file

@ -7,7 +7,7 @@ use base_db::{
SourceDatabase, SourceRoot, SourceRootId, SourceRootInput,
};
use hir_expand::{InFile, files::FilePosition};
use salsa::{AsDynDatabase, Durability};
use salsa::Durability;
use span::FileId;
use syntax::{AstNode, algo, ast};
use triomphe::Arc;
@ -303,8 +303,7 @@ impl TestDB {
// This is pretty horrible, but `Debug` is the only way to inspect
// QueryDescriptor at the moment.
salsa::EventKind::WillExecute { database_key } => {
let ingredient = self
.as_dyn_database()
let ingredient = (self as &dyn salsa::Database)
.ingredient_debug_name(database_key.ingredient_index());
Some(ingredient.to_string())
}

View file

@ -50,6 +50,7 @@ tracing-tree.workspace = true
# local deps
stdx.workspace = true
macros.workspace = true
intern.workspace = true
hir-def.workspace = true
hir-expand.workspace = true

View file

@ -5,20 +5,21 @@
use std::fmt;
use hir_def::{TypeAliasId, lang_item::LangItem};
use hir_def::{TraitId, TypeAliasId, lang_item::LangItem};
use rustc_type_ir::inherent::{IntoKind, Ty as _};
use tracing::debug;
use triomphe::Arc;
use crate::next_solver::infer::InferOk;
use crate::{
TraitEnvironment,
db::HirDatabase,
infer::unify::InferenceTable,
next_solver::{
Ty, TyKind,
infer::traits::{ObligationCause, PredicateObligations},
mapping::{ChalkToNextSolver, NextSolverToChalk},
Canonical, TraitRef, Ty, TyKind,
infer::{
InferOk,
traits::{Obligation, ObligationCause, PredicateObligations},
},
obligation_ctxt::ObligationCtxt,
},
};
@ -35,17 +36,16 @@ const AUTODEREF_RECURSION_LIMIT: usize = 20;
pub fn autoderef<'db>(
db: &'db dyn HirDatabase,
env: Arc<TraitEnvironment<'db>>,
ty: crate::Canonical<crate::Ty>,
) -> impl Iterator<Item = crate::Ty> + use<> {
ty: Canonical<'db, Ty<'db>>,
) -> impl Iterator<Item = Ty<'db>> + use<'db> {
let mut table = InferenceTable::new(db, env);
let interner = table.interner;
let ty = table.instantiate_canonical(ty);
let mut autoderef = Autoderef::new_no_tracking(&mut table, ty.to_nextsolver(interner));
let mut autoderef = Autoderef::new_no_tracking(&mut table, ty);
let mut v = Vec::new();
while let Some((ty, _steps)) = autoderef.next() {
// `ty` may contain unresolved inference variables. Since there's no chance they would be
// resolved, just replace with fallback type.
let resolved = autoderef.table.resolve_completely(ty.to_chalk(interner));
let resolved = autoderef.table.resolve_completely(ty);
// If the deref chain contains a cycle (e.g. `A` derefs to `B` and `B` derefs to `A`), we
// would revisit some already visited types. Stop here to avoid duplication.
@ -101,6 +101,7 @@ struct AutoderefSnapshot<'db, Steps> {
#[derive(Clone, Copy)]
struct AutoderefTraits {
trait_: TraitId,
trait_target: TypeAliasId,
}
@ -215,16 +216,26 @@ impl<'a, 'db, Steps: TrackAutoderefSteps<'db>> Autoderef<'a, 'db, Steps> {
Some(it) => Some(*it),
None => {
let traits = if self.use_receiver_trait {
AutoderefTraits {
trait_target: LangItem::ReceiverTarget
.resolve_type_alias(self.table.db, self.table.trait_env.krate)
.or_else(|| {
LangItem::DerefTarget
.resolve_type_alias(self.table.db, self.table.trait_env.krate)
})?,
}
(|| {
Some(AutoderefTraits {
trait_: LangItem::Receiver
.resolve_trait(self.table.db, self.table.trait_env.krate)?,
trait_target: LangItem::ReceiverTarget
.resolve_type_alias(self.table.db, self.table.trait_env.krate)?,
})
})()
.or_else(|| {
Some(AutoderefTraits {
trait_: LangItem::Deref
.resolve_trait(self.table.db, self.table.trait_env.krate)?,
trait_target: LangItem::DerefTarget
.resolve_type_alias(self.table.db, self.table.trait_env.krate)?,
})
})?
} else {
AutoderefTraits {
trait_: LangItem::Deref
.resolve_trait(self.table.db, self.table.trait_env.krate)?,
trait_target: LangItem::DerefTarget
.resolve_type_alias(self.table.db, self.table.trait_env.krate)?,
}
@ -236,10 +247,22 @@ impl<'a, 'db, Steps: TrackAutoderefSteps<'db>> Autoderef<'a, 'db, Steps> {
fn overloaded_deref_ty(&mut self, ty: Ty<'db>) -> Option<Ty<'db>> {
debug!("overloaded_deref_ty({:?})", ty);
let interner = self.table.interner;
let interner = self.table.interner();
// <ty as Deref>, or whatever the equivalent trait is that we've been asked to walk.
let AutoderefTraits { trait_target } = self.autoderef_traits()?;
let AutoderefTraits { trait_, trait_target } = self.autoderef_traits()?;
let trait_ref = TraitRef::new(interner, trait_.into(), [ty]);
let obligation =
Obligation::new(interner, ObligationCause::new(), self.table.trait_env.env, trait_ref);
// We detect whether the self type implements `Deref` before trying to
// structurally normalize. We use `predicate_may_hold_opaque_types_jank`
// to support not-yet-defined opaque types. It will succeed for `impl Deref`
// but fail for `impl OtherTrait`.
if !self.table.infer_ctxt.predicate_may_hold_opaque_types_jank(&obligation) {
debug!("overloaded_deref_ty: cannot match obligation");
return None;
}
let (normalized_ty, obligations) = structurally_normalize_ty(
self.table,
@ -316,7 +339,7 @@ pub(crate) fn overloaded_deref_ty<'db>(
table: &InferenceTable<'db>,
ty: Ty<'db>,
) -> Option<InferOk<'db, Ty<'db>>> {
let interner = table.interner;
let interner = table.interner();
let trait_target = LangItem::DerefTarget.resolve_type_alias(table.db, table.trait_env.krate)?;

View file

@ -15,7 +15,10 @@ use crate::{
error_lifetime,
generics::generics,
infer::unify::InferenceTable,
next_solver::{DbInterner, EarlyBinder, mapping::ChalkToNextSolver},
next_solver::{
DbInterner, EarlyBinder,
mapping::{ChalkToNextSolver, NextSolverToChalk},
},
primitive, to_assoc_type_id, to_chalk_trait_id,
};
@ -127,11 +130,14 @@ impl<D> TyBuilder<D> {
}
pub fn fill_with_unknown(self) -> Self {
let interner = DbInterner::conjure();
// self.fill is inlined to make borrow checker happy
let mut this = self;
let filler = this.param_kinds[this.vec.len()..].iter().map(|x| match x {
ParamKind::Type => TyKind::Error.intern(Interner).cast(Interner),
ParamKind::Const(ty) => unknown_const_as_generic(ty.clone()),
ParamKind::Const(ty) => {
unknown_const_as_generic(ty.to_nextsolver(interner)).to_chalk(interner)
}
ParamKind::Lifetime => error_lifetime().cast(Interner),
});
this.vec.extend(filler.casted(Interner));
@ -141,10 +147,13 @@ impl<D> TyBuilder<D> {
#[tracing::instrument(skip_all)]
pub(crate) fn fill_with_inference_vars(self, table: &mut InferenceTable<'_>) -> Self {
self.fill(|x| match x {
ParamKind::Type => table.new_type_var().cast(Interner),
ParamKind::Const(ty) => table.new_const_var(ty.clone()).cast(Interner),
ParamKind::Lifetime => table.new_lifetime_var().cast(Interner),
self.fill(|x| {
match x {
ParamKind::Type => crate::next_solver::GenericArg::Ty(table.next_ty_var()),
ParamKind::Const(_) => table.next_const_var().into(),
ParamKind::Lifetime => table.next_region_var().into(),
}
.to_chalk(table.interner())
})
}
@ -213,13 +222,16 @@ impl TyBuilder<()> {
}
pub fn unknown_subst(db: &dyn HirDatabase, def: impl Into<GenericDefId>) -> Substitution {
let interner = DbInterner::conjure();
let params = generics(db, def.into());
Substitution::from_iter(
Interner,
params.iter_id().map(|id| match id {
GenericParamId::TypeParamId(_) => TyKind::Error.intern(Interner).cast(Interner),
GenericParamId::ConstParamId(id) => {
unknown_const_as_generic(db.const_param_ty(id)).cast(Interner)
unknown_const_as_generic(db.const_param_ty_ns(id))
.to_chalk(interner)
.cast(Interner)
}
GenericParamId::LifetimeParamId(_) => error_lifetime().cast(Interner),
}),
@ -261,6 +273,7 @@ impl TyBuilder<hir_def::AdtId> {
db: &dyn HirDatabase,
mut fallback: impl FnMut() -> Ty,
) -> Self {
let interner = DbInterner::conjure();
// Note that we're building ADT, so we never have parent generic parameters.
let defaults = db.generic_defaults(self.data.into());
@ -281,7 +294,9 @@ impl TyBuilder<hir_def::AdtId> {
// The defaults may be missing if no param has default, so fill that.
let filler = self.param_kinds[self.vec.len()..].iter().map(|x| match x {
ParamKind::Type => fallback().cast(Interner),
ParamKind::Const(ty) => unknown_const_as_generic(ty.clone()),
ParamKind::Const(ty) => {
unknown_const_as_generic(ty.to_nextsolver(interner)).to_chalk(interner)
}
ParamKind::Lifetime => error_lifetime().cast(Interner),
});
self.vec.extend(filler.casted(Interner));

View file

@ -2,7 +2,7 @@
//! about the code that Chalk needs.
use hir_def::{CallableDefId, GenericDefId};
use crate::{Interner, Substitution, db::HirDatabase, mapping::from_chalk};
use crate::{Interner, db::HirDatabase, mapping::from_chalk};
pub(crate) type AssocTypeId = chalk_ir::AssocTypeId<Interner>;
pub(crate) type TraitId = chalk_ir::TraitId<Interner>;
@ -53,16 +53,3 @@ pub(crate) fn adt_variance_query(db: &dyn HirDatabase, adt_id: hir_def::AdtId) -
}),
)
}
/// Returns instantiated predicates.
pub(super) fn convert_where_clauses(
db: &dyn HirDatabase,
def: GenericDefId,
substs: &Substitution,
) -> Vec<chalk_ir::QuantifiedWhereClause<Interner>> {
db.generic_predicates(def)
.iter()
.cloned()
.map(|pred| pred.substitute(Interner, substs))
.collect()
}

View file

@ -1,66 +1,32 @@
//! Various extensions traits for Chalk types.
use chalk_ir::{
FloatTy, IntTy, Mutability, Scalar, TyVariableKind, TypeOutlives, UintTy, cast::Cast,
};
use hir_def::{
DefWithBodyId, FunctionId, GenericDefId, HasModule, ItemContainerId, Lookup, TraitId,
builtin_type::{BuiltinFloat, BuiltinInt, BuiltinType, BuiltinUint},
hir::generics::{TypeOrConstParamData, TypeParamProvenance},
lang_item::LangItem,
type_ref::Rawness,
};
use chalk_ir::Mutability;
use hir_def::{FunctionId, ItemContainerId, Lookup, TraitId};
use crate::{
AdtId, AliasEq, AliasTy, Binders, CallableDefId, CallableSig, Canonical, CanonicalVarKinds,
ClosureId, DynTy, FnPointer, ImplTraitId, InEnvironment, Interner, Lifetime, ProjectionTy,
QuantifiedWhereClause, Substitution, ToChalk, TraitRef, Ty, TyBuilder, TyKind, TypeFlags,
WhereClause,
db::HirDatabase,
from_assoc_type_id, from_chalk_trait_id, from_foreign_def_id, from_placeholder_idx,
generics::generics,
next_solver::{DbInterner, mapping::NextSolverToChalk},
to_chalk_trait_id,
AdtId, Binders, CallableDefId, CallableSig, DynTy, Interner, Lifetime, ProjectionTy,
Substitution, ToChalk, TraitRef, Ty, TyKind, TypeFlags, WhereClause, db::HirDatabase,
from_assoc_type_id, from_chalk_trait_id, generics::generics, to_chalk_trait_id,
utils::ClosureSubst,
};
pub trait TyExt {
pub(crate) trait TyExt {
fn is_unit(&self) -> bool;
fn is_integral(&self) -> bool;
fn is_scalar(&self) -> bool;
fn is_floating_point(&self) -> bool;
fn is_never(&self) -> bool;
fn is_str(&self) -> bool;
fn is_unknown(&self) -> bool;
fn contains_unknown(&self) -> bool;
fn is_ty_var(&self) -> bool;
fn is_union(&self) -> bool;
fn as_adt(&self) -> Option<(hir_def::AdtId, &Substitution)>;
fn as_builtin(&self) -> Option<BuiltinType>;
fn as_tuple(&self) -> Option<&Substitution>;
fn as_closure(&self) -> Option<ClosureId>;
fn as_fn_def(&self, db: &dyn HirDatabase) -> Option<FunctionId>;
fn as_reference(&self) -> Option<(&Ty, Lifetime, Mutability)>;
fn as_raw_ptr(&self) -> Option<(&Ty, Mutability)>;
fn as_reference_or_ptr(&self) -> Option<(&Ty, Rawness, Mutability)>;
fn as_generic_def(&self, db: &dyn HirDatabase) -> Option<GenericDefId>;
fn callable_def(&self, db: &dyn HirDatabase) -> Option<CallableDefId>;
fn callable_sig(&self, db: &dyn HirDatabase) -> Option<CallableSig>;
fn strip_references(&self) -> &Ty;
fn strip_reference(&self) -> &Ty;
/// If this is a `dyn Trait`, returns that trait.
fn dyn_trait(&self) -> Option<TraitId>;
fn impl_trait_bounds(&self, db: &dyn HirDatabase) -> Option<Vec<QuantifiedWhereClause>>;
fn associated_type_parent_trait(&self, db: &dyn HirDatabase) -> Option<TraitId>;
fn is_copy(self, db: &dyn HirDatabase, owner: DefWithBodyId) -> bool;
/// FIXME: Get rid of this, it's not a good abstraction
fn equals_ctor(&self, other: &Ty) -> bool;
}
impl TyExt for Ty {
@ -68,33 +34,6 @@ impl TyExt for Ty {
matches!(self.kind(Interner), TyKind::Tuple(0, _))
}
fn is_integral(&self) -> bool {
matches!(
self.kind(Interner),
TyKind::Scalar(Scalar::Int(_) | Scalar::Uint(_))
| TyKind::InferenceVar(_, TyVariableKind::Integer)
)
}
fn is_scalar(&self) -> bool {
matches!(self.kind(Interner), TyKind::Scalar(_))
}
fn is_floating_point(&self) -> bool {
matches!(
self.kind(Interner),
TyKind::Scalar(Scalar::Float(_)) | TyKind::InferenceVar(_, TyVariableKind::Float)
)
}
fn is_never(&self) -> bool {
matches!(self.kind(Interner), TyKind::Never)
}
fn is_str(&self) -> bool {
matches!(self.kind(Interner), TyKind::Str)
}
fn is_unknown(&self) -> bool {
matches!(self.kind(Interner), TyKind::Error)
}
@ -103,14 +42,6 @@ impl TyExt for Ty {
self.data(Interner).flags.contains(TypeFlags::HAS_ERROR)
}
fn is_ty_var(&self) -> bool {
matches!(self.kind(Interner), TyKind::InferenceVar(_, _))
}
fn is_union(&self) -> bool {
matches!(self.adt_id(Interner), Some(AdtId(hir_def::AdtId::UnionId(_))))
}
fn as_adt(&self) -> Option<(hir_def::AdtId, &Substitution)> {
match self.kind(Interner) {
TyKind::Adt(AdtId(adt), parameters) => Some((*adt, parameters)),
@ -118,37 +49,6 @@ impl TyExt for Ty {
}
}
fn as_builtin(&self) -> Option<BuiltinType> {
match self.kind(Interner) {
TyKind::Str => Some(BuiltinType::Str),
TyKind::Scalar(Scalar::Bool) => Some(BuiltinType::Bool),
TyKind::Scalar(Scalar::Char) => Some(BuiltinType::Char),
TyKind::Scalar(Scalar::Float(fty)) => Some(BuiltinType::Float(match fty {
FloatTy::F128 => BuiltinFloat::F128,
FloatTy::F64 => BuiltinFloat::F64,
FloatTy::F32 => BuiltinFloat::F32,
FloatTy::F16 => BuiltinFloat::F16,
})),
TyKind::Scalar(Scalar::Int(ity)) => Some(BuiltinType::Int(match ity {
IntTy::Isize => BuiltinInt::Isize,
IntTy::I8 => BuiltinInt::I8,
IntTy::I16 => BuiltinInt::I16,
IntTy::I32 => BuiltinInt::I32,
IntTy::I64 => BuiltinInt::I64,
IntTy::I128 => BuiltinInt::I128,
})),
TyKind::Scalar(Scalar::Uint(ity)) => Some(BuiltinType::Uint(match ity {
UintTy::Usize => BuiltinUint::Usize,
UintTy::U8 => BuiltinUint::U8,
UintTy::U16 => BuiltinUint::U16,
UintTy::U32 => BuiltinUint::U32,
UintTy::U64 => BuiltinUint::U64,
UintTy::U128 => BuiltinUint::U128,
})),
_ => None,
}
}
fn as_tuple(&self) -> Option<&Substitution> {
match self.kind(Interner) {
TyKind::Tuple(_, substs) => Some(substs),
@ -156,13 +56,6 @@ impl TyExt for Ty {
}
}
fn as_closure(&self) -> Option<ClosureId> {
match self.kind(Interner) {
TyKind::Closure(id, _) => Some(*id),
_ => None,
}
}
fn as_fn_def(&self, db: &dyn HirDatabase) -> Option<FunctionId> {
match self.callable_def(db) {
Some(CallableDefId::FunctionId(func)) => Some(func),
@ -177,33 +70,6 @@ impl TyExt for Ty {
}
}
fn as_raw_ptr(&self) -> Option<(&Ty, Mutability)> {
match self.kind(Interner) {
TyKind::Raw(mutability, ty) => Some((ty, *mutability)),
_ => None,
}
}
fn as_reference_or_ptr(&self) -> Option<(&Ty, Rawness, Mutability)> {
match self.kind(Interner) {
TyKind::Ref(mutability, _, ty) => Some((ty, Rawness::Ref, *mutability)),
TyKind::Raw(mutability, ty) => Some((ty, Rawness::RawPtr, *mutability)),
_ => None,
}
}
fn as_generic_def(&self, db: &dyn HirDatabase) -> Option<GenericDefId> {
match *self.kind(Interner) {
TyKind::Adt(AdtId(adt), ..) => Some(adt.into()),
TyKind::FnDef(callable, ..) => {
Some(GenericDefId::from_callable(db, ToChalk::from_chalk(db, callable)))
}
TyKind::AssociatedType(type_alias, ..) => Some(from_assoc_type_id(type_alias).into()),
TyKind::Foreign(type_alias, ..) => Some(from_foreign_def_id(type_alias).into()),
_ => None,
}
}
fn callable_def(&self, db: &dyn HirDatabase) -> Option<CallableDefId> {
match self.kind(Interner) {
&TyKind::FnDef(def, ..) => Some(ToChalk::from_chalk(db, def)),
@ -244,177 +110,6 @@ impl TyExt for Ty {
}
t
}
fn strip_reference(&self) -> &Ty {
self.as_reference().map_or(self, |(ty, _, _)| ty)
}
fn impl_trait_bounds(&self, db: &dyn HirDatabase) -> Option<Vec<QuantifiedWhereClause>> {
let handle_async_block_type_impl_trait = |def: DefWithBodyId| {
let krate = def.module(db).krate();
if let Some(future_trait) = LangItem::Future.resolve_trait(db, krate) {
// This is only used by type walking.
// Parameters will be walked outside, and projection predicate is not used.
// So just provide the Future trait.
let impl_bound = Binders::empty(
Interner,
WhereClause::Implemented(TraitRef {
trait_id: to_chalk_trait_id(future_trait),
substitution: Substitution::empty(Interner),
}),
);
Some(vec![impl_bound])
} else {
None
}
};
match self.kind(Interner) {
TyKind::OpaqueType(opaque_ty_id, subst) => {
match db.lookup_intern_impl_trait_id((*opaque_ty_id).into()) {
ImplTraitId::AsyncBlockTypeImplTrait(def, _expr) => {
handle_async_block_type_impl_trait(def)
}
ImplTraitId::ReturnTypeImplTrait(func, idx) => {
db.return_type_impl_traits(func).map(|it| {
let data =
(*it).as_ref().map(|rpit| rpit.impl_traits[idx].bounds.clone());
data.substitute(Interner, &subst).into_value_and_skipped_binders().0
})
}
ImplTraitId::TypeAliasImplTrait(alias, idx) => {
db.type_alias_impl_traits(alias).map(|it| {
let data =
(*it).as_ref().map(|rpit| rpit.impl_traits[idx].bounds.clone());
data.substitute(Interner, &subst).into_value_and_skipped_binders().0
})
}
}
}
TyKind::Alias(AliasTy::Opaque(opaque_ty)) => {
let predicates = match db.lookup_intern_impl_trait_id(opaque_ty.opaque_ty_id.into())
{
ImplTraitId::ReturnTypeImplTrait(func, idx) => {
db.return_type_impl_traits(func).map(|it| {
let data =
(*it).as_ref().map(|rpit| rpit.impl_traits[idx].bounds.clone());
data.substitute(Interner, &opaque_ty.substitution)
})
}
ImplTraitId::TypeAliasImplTrait(alias, idx) => {
db.type_alias_impl_traits(alias).map(|it| {
let data =
(*it).as_ref().map(|rpit| rpit.impl_traits[idx].bounds.clone());
data.substitute(Interner, &opaque_ty.substitution)
})
}
ImplTraitId::AsyncBlockTypeImplTrait(def, _) => {
return handle_async_block_type_impl_trait(def);
}
};
predicates.map(|it| it.into_value_and_skipped_binders().0)
}
TyKind::Placeholder(idx) => {
let id = from_placeholder_idx(db, *idx).0;
let generic_params = db.generic_params(id.parent);
let param_data = &generic_params[id.local_id];
match param_data {
TypeOrConstParamData::TypeParamData(p) => match p.provenance {
TypeParamProvenance::ArgumentImplTrait => {
let substs = TyBuilder::placeholder_subst(db, id.parent);
let predicates = db
.generic_predicates(id.parent)
.iter()
.map(|pred| pred.clone().substitute(Interner, &substs))
.filter(|wc| match wc.skip_binders() {
WhereClause::Implemented(tr) => {
&tr.self_type_parameter(Interner) == self
}
WhereClause::AliasEq(AliasEq {
alias: AliasTy::Projection(proj),
ty: _,
}) => &proj.self_type_parameter(db) == self,
WhereClause::TypeOutlives(TypeOutlives { ty, lifetime: _ }) => {
ty == self
}
_ => false,
})
.collect::<Vec<_>>();
Some(predicates)
}
_ => None,
},
_ => None,
}
}
_ => None,
}
}
fn associated_type_parent_trait(&self, db: &dyn HirDatabase) -> Option<TraitId> {
match self.kind(Interner) {
TyKind::AssociatedType(id, ..) => match from_assoc_type_id(*id).lookup(db).container {
ItemContainerId::TraitId(trait_id) => Some(trait_id),
_ => None,
},
TyKind::Alias(AliasTy::Projection(projection_ty)) => {
match from_assoc_type_id(projection_ty.associated_ty_id).lookup(db).container {
ItemContainerId::TraitId(trait_id) => Some(trait_id),
_ => None,
}
}
_ => None,
}
}
fn is_copy(self, db: &dyn HirDatabase, owner: DefWithBodyId) -> bool {
let crate_id = owner.module(db).krate();
let Some(copy_trait) = LangItem::Copy.resolve_trait(db, crate_id) else {
return false;
};
let trait_ref = TyBuilder::trait_ref(db, copy_trait).push(self).build();
let env = db.trait_environment_for_body(owner);
let goal = Canonical {
value: InEnvironment::new(
&env.env.to_chalk(DbInterner::new_with(db, Some(env.krate), env.block)),
trait_ref.cast(Interner),
),
binders: CanonicalVarKinds::empty(Interner),
};
!db.trait_solve(crate_id, None, goal).no_solution()
}
fn equals_ctor(&self, other: &Ty) -> bool {
match (self.kind(Interner), other.kind(Interner)) {
(TyKind::Adt(adt, ..), TyKind::Adt(adt2, ..)) => adt == adt2,
(TyKind::Slice(_), TyKind::Slice(_)) | (TyKind::Array(_, _), TyKind::Array(_, _)) => {
true
}
(TyKind::FnDef(def_id, ..), TyKind::FnDef(def_id2, ..)) => def_id == def_id2,
(TyKind::OpaqueType(ty_id, ..), TyKind::OpaqueType(ty_id2, ..)) => ty_id == ty_id2,
(TyKind::AssociatedType(ty_id, ..), TyKind::AssociatedType(ty_id2, ..)) => {
ty_id == ty_id2
}
(TyKind::Foreign(ty_id, ..), TyKind::Foreign(ty_id2, ..)) => ty_id == ty_id2,
(TyKind::Closure(id1, _), TyKind::Closure(id2, _)) => id1 == id2,
(TyKind::Ref(mutability, ..), TyKind::Ref(mutability2, ..))
| (TyKind::Raw(mutability, ..), TyKind::Raw(mutability2, ..)) => {
mutability == mutability2
}
(
TyKind::Function(FnPointer { num_binders, sig, .. }),
TyKind::Function(FnPointer { num_binders: num_binders2, sig: sig2, .. }),
) => num_binders == num_binders2 && sig == sig2,
(TyKind::Tuple(cardinality, _), TyKind::Tuple(cardinality2, _)) => {
cardinality == cardinality2
}
(TyKind::Str, TyKind::Str) | (TyKind::Never, TyKind::Never) => true,
(TyKind::Scalar(scalar), TyKind::Scalar(scalar2)) => scalar == scalar2,
_ => false,
}
}
}
pub trait ProjectionTyExt {
@ -445,9 +140,8 @@ impl ProjectionTyExt for ProjectionTy {
}
}
pub trait DynTyExt {
pub(crate) trait DynTyExt {
fn principal(&self) -> Option<Binders<Binders<&TraitRef>>>;
fn principal_id(&self) -> Option<chalk_ir::TraitId<Interner>>;
}
impl DynTyExt for DynTy {
@ -461,13 +155,6 @@ impl DynTyExt for DynTy {
})
})
}
fn principal_id(&self) -> Option<chalk_ir::TraitId<Interner>> {
self.bounds.skip_binders().interned().first().and_then(|b| match b.skip_binders() {
crate::WhereClause::Implemented(trait_ref) => Some(trait_ref.trait_id),
_ => None,
})
}
}
pub trait TraitRefExt {

View file

@ -1,67 +1,95 @@
//! Constant evaluation details
#[cfg(test)]
mod tests;
use base_db::Crate;
use chalk_ir::{BoundVar, DebruijnIndex, cast::Cast};
use hir_def::{
EnumVariantId, GeneralConstId, HasModule as _, StaticId,
EnumVariantId, GeneralConstId,
expr_store::{Body, HygieneId, path::Path},
hir::{Expr, ExprId},
resolver::{Resolver, ValueNs},
type_ref::LiteralConstRef,
};
use hir_def::{HasModule, StaticId};
use hir_expand::Lookup;
use rustc_type_ir::{UnevaluatedConst, inherent::IntoKind};
use stdx::never;
use triomphe::Arc;
use crate::{
Const, ConstData, ConstScalar, ConstValue, GenericArg, Interner, MemoryMap, Substitution,
TraitEnvironment, Ty, TyBuilder,
MemoryMap, TraitEnvironment,
db::HirDatabase,
display::DisplayTarget,
generics::Generics,
infer::InferenceContext,
lower::ParamLoweringMode,
next_solver::{DbInterner, mapping::ChalkToNextSolver},
to_placeholder_idx,
mir::{MirEvalError, MirLowerError},
next_solver::{
Const, ConstBytes, ConstKind, DbInterner, ErrorGuaranteed, GenericArg, GenericArgs,
ParamConst, SolverDefId, Ty, ValueConst,
},
};
use super::mir::{MirEvalError, MirLowerError, interpret_mir, lower_to_mir, pad16};
use super::mir::{interpret_mir, lower_to_mir, pad16};
/// Extension trait for [`Const`]
pub trait ConstExt {
/// Is a [`Const`] unknown?
fn is_unknown(&self) -> bool;
}
impl ConstExt for Const {
fn is_unknown(&self) -> bool {
match self.data(Interner).value {
// interned Unknown
chalk_ir::ConstValue::Concrete(chalk_ir::ConcreteConst {
interned: ConstScalar::Unknown,
}) => true,
// interned concrete anything else
chalk_ir::ConstValue::Concrete(..) => false,
_ => {
tracing::error!(
"is_unknown was called on a non-concrete constant value! {:?}",
self
);
true
pub(crate) fn path_to_const<'a, 'g>(
db: &'a dyn HirDatabase,
resolver: &Resolver<'a>,
path: &Path,
args: impl FnOnce() -> &'g Generics,
_expected_ty: Ty<'a>,
) -> Option<Const<'a>> {
let interner = DbInterner::new_with(db, Some(resolver.krate()), None);
match resolver.resolve_path_in_value_ns_fully(db, path, HygieneId::ROOT) {
Some(ValueNs::GenericParam(p)) => {
let args = args();
match args
.type_or_const_param(p.into())
.and_then(|(idx, p)| p.const_param().map(|p| (idx, p.clone())))
{
Some((idx, _param)) => {
Some(Const::new_param(interner, ParamConst { index: idx as u32, id: p }))
}
None => {
never!(
"Generic list doesn't contain this param: {:?}, {:?}, {:?}",
args,
path,
p
);
None
}
}
}
Some(ValueNs::ConstId(c)) => {
let args = GenericArgs::new_from_iter(interner, []);
Some(Const::new(
interner,
rustc_type_ir::ConstKind::Unevaluated(UnevaluatedConst::new(
SolverDefId::ConstId(c),
args,
)),
))
}
_ => None,
}
}
#[derive(Debug, Clone, PartialEq, Eq)]
pub enum ConstEvalError {
MirLowerError(MirLowerError),
MirEvalError(MirEvalError),
pub fn unknown_const<'db>(_ty: Ty<'db>) -> Const<'db> {
Const::new(DbInterner::conjure(), rustc_type_ir::ConstKind::Error(ErrorGuaranteed))
}
impl ConstEvalError {
pub fn unknown_const_as_generic<'db>(ty: Ty<'db>) -> GenericArg<'db> {
unknown_const(ty).into()
}
#[derive(Debug, Clone, PartialEq, Eq)]
pub enum ConstEvalError<'db> {
MirLowerError(MirLowerError<'db>),
MirEvalError(MirEvalError<'db>),
}
impl ConstEvalError<'_> {
pub fn pretty_print(
&self,
f: &mut String,
@ -80,8 +108,8 @@ impl ConstEvalError {
}
}
impl From<MirLowerError> for ConstEvalError {
fn from(value: MirLowerError) -> Self {
impl<'db> From<MirLowerError<'db>> for ConstEvalError<'db> {
fn from(value: MirLowerError<'db>) -> Self {
match value {
MirLowerError::ConstEvalError(_, e) => *e,
_ => ConstEvalError::MirLowerError(value),
@ -89,201 +117,118 @@ impl From<MirLowerError> for ConstEvalError {
}
}
impl From<MirEvalError> for ConstEvalError {
fn from(value: MirEvalError) -> Self {
impl<'db> From<MirEvalError<'db>> for ConstEvalError<'db> {
fn from(value: MirEvalError<'db>) -> Self {
ConstEvalError::MirEvalError(value)
}
}
pub(crate) fn path_to_const<'g>(
db: &dyn HirDatabase,
resolver: &Resolver<'_>,
path: &Path,
mode: ParamLoweringMode,
args: impl FnOnce() -> &'g Generics,
debruijn: DebruijnIndex,
expected_ty: Ty,
) -> Option<Const> {
match resolver.resolve_path_in_value_ns_fully(db, path, HygieneId::ROOT) {
Some(ValueNs::GenericParam(p)) => {
let ty = db.const_param_ty(p);
let args = args();
let value = match mode {
ParamLoweringMode::Placeholder => {
let idx = args.type_or_const_param_idx(p.into()).unwrap();
ConstValue::Placeholder(to_placeholder_idx(db, p.into(), idx as u32))
}
ParamLoweringMode::Variable => match args.type_or_const_param_idx(p.into()) {
Some(it) => ConstValue::BoundVar(BoundVar::new(debruijn, it)),
None => {
never!(
"Generic list doesn't contain this param: {:?}, {:?}, {:?}",
args,
path,
p
);
return None;
}
},
};
Some(ConstData { ty, value }.intern(Interner))
}
Some(ValueNs::ConstId(c)) => Some(intern_const_scalar(
ConstScalar::UnevaluatedConst(c.into(), Substitution::empty(Interner)),
expected_ty,
)),
// FIXME: With feature(adt_const_params), we also need to consider other things here, e.g. struct constructors.
_ => None,
}
}
pub fn unknown_const(ty: Ty) -> Const {
ConstData {
ty,
value: ConstValue::Concrete(chalk_ir::ConcreteConst { interned: ConstScalar::Unknown }),
}
.intern(Interner)
}
pub fn unknown_const_as_generic(ty: Ty) -> GenericArg {
unknown_const(ty).cast(Interner)
}
/// Interns a constant scalar with the given type
pub fn intern_const_scalar(value: ConstScalar, ty: Ty) -> Const {
ConstData { ty, value: ConstValue::Concrete(chalk_ir::ConcreteConst { interned: value }) }
.intern(Interner)
}
/// Interns a constant scalar with the given type
pub fn intern_const_ref(
db: &dyn HirDatabase,
pub fn intern_const_ref<'a>(
db: &'a dyn HirDatabase,
value: &LiteralConstRef,
ty: Ty,
ty: Ty<'a>,
krate: Crate,
) -> Const {
) -> Const<'a> {
let interner = DbInterner::new_with(db, Some(krate), None);
let layout = || db.layout_of_ty(ty.to_nextsolver(interner), TraitEnvironment::empty(krate));
let bytes = match value {
let layout = db.layout_of_ty(ty, TraitEnvironment::empty(krate));
let kind = match value {
LiteralConstRef::Int(i) => {
// FIXME: We should handle failure of layout better.
let size = layout().map(|it| it.size.bytes_usize()).unwrap_or(16);
ConstScalar::Bytes(i.to_le_bytes()[0..size].into(), MemoryMap::default())
let size = layout.map(|it| it.size.bytes_usize()).unwrap_or(16);
rustc_type_ir::ConstKind::Value(ValueConst::new(
ty,
ConstBytes {
memory: i.to_le_bytes()[0..size].into(),
memory_map: MemoryMap::default(),
},
))
}
LiteralConstRef::UInt(i) => {
let size = layout().map(|it| it.size.bytes_usize()).unwrap_or(16);
ConstScalar::Bytes(i.to_le_bytes()[0..size].into(), MemoryMap::default())
let size = layout.map(|it| it.size.bytes_usize()).unwrap_or(16);
rustc_type_ir::ConstKind::Value(ValueConst::new(
ty,
ConstBytes {
memory: i.to_le_bytes()[0..size].into(),
memory_map: MemoryMap::default(),
},
))
}
LiteralConstRef::Bool(b) => ConstScalar::Bytes(Box::new([*b as u8]), MemoryMap::default()),
LiteralConstRef::Char(c) => {
ConstScalar::Bytes((*c as u32).to_le_bytes().into(), MemoryMap::default())
}
LiteralConstRef::Unknown => ConstScalar::Unknown,
LiteralConstRef::Bool(b) => rustc_type_ir::ConstKind::Value(ValueConst::new(
ty,
ConstBytes { memory: Box::new([*b as u8]), memory_map: MemoryMap::default() },
)),
LiteralConstRef::Char(c) => rustc_type_ir::ConstKind::Value(ValueConst::new(
ty,
ConstBytes {
memory: (*c as u32).to_le_bytes().into(),
memory_map: MemoryMap::default(),
},
)),
LiteralConstRef::Unknown => rustc_type_ir::ConstKind::Error(ErrorGuaranteed),
};
intern_const_scalar(bytes, ty)
Const::new(interner, kind)
}
/// Interns a possibly-unknown target usize
pub fn usize_const(db: &dyn HirDatabase, value: Option<u128>, krate: Crate) -> Const {
pub fn usize_const<'db>(db: &'db dyn HirDatabase, value: Option<u128>, krate: Crate) -> Const<'db> {
intern_const_ref(
db,
&value.map_or(LiteralConstRef::Unknown, LiteralConstRef::UInt),
TyBuilder::usize(),
Ty::new_uint(DbInterner::new_with(db, Some(krate), None), rustc_type_ir::UintTy::Usize),
krate,
)
}
pub fn try_const_usize(db: &dyn HirDatabase, c: &Const) -> Option<u128> {
match &c.data(Interner).value {
chalk_ir::ConstValue::BoundVar(_) => None,
chalk_ir::ConstValue::InferenceVar(_) => None,
chalk_ir::ConstValue::Placeholder(_) => None,
chalk_ir::ConstValue::Concrete(c) => match &c.interned {
ConstScalar::Bytes(it, _) => Some(u128::from_le_bytes(pad16(it, false))),
ConstScalar::UnevaluatedConst(c, subst) => {
let ec = db.const_eval(*c, subst.clone(), None).ok()?;
try_const_usize(db, &ec)
}
_ => None,
},
/// Tries to read `c` as a concrete unsigned integer (e.g. an array length or
/// enum discriminant).
///
/// - Abstract consts (params, inference/bound/placeholder vars) yield `None`.
/// - Unevaluated consts are evaluated through `db.const_eval` first and the
///   result is inspected recursively.
/// - Concrete values are zero-extended to 16 bytes (`pad16(_, false)`) and
///   read as a little-endian `u128`.
pub fn try_const_usize<'db>(db: &'db dyn HirDatabase, c: Const<'db>) -> Option<u128> {
    match c.kind() {
        ConstKind::Param(_) => None,
        ConstKind::Infer(_) => None,
        ConstKind::Bound(_, _) => None,
        ConstKind::Placeholder(_) => None,
        ConstKind::Unevaluated(unevaluated_const) => {
            // Map the solver-level def id back to a `GeneralConstId` so the
            // const-eval query can be invoked. Other `SolverDefId` variants
            // cannot appear as unevaluated consts here.
            let c = match unevaluated_const.def {
                SolverDefId::ConstId(id) => GeneralConstId::ConstId(id),
                SolverDefId::StaticId(id) => GeneralConstId::StaticId(id),
                _ => unreachable!(),
            };
            let subst = unevaluated_const.args;
            let ec = db.const_eval(c, subst, None).ok()?;
            try_const_usize(db, ec)
        }
        ConstKind::Value(val) => Some(u128::from_le_bytes(pad16(&val.value.inner().memory, false))),
        ConstKind::Error(_) => None,
        ConstKind::Expr(_) => None,
    }
}
pub fn try_const_isize(db: &dyn HirDatabase, c: &Const) -> Option<i128> {
match &c.data(Interner).value {
chalk_ir::ConstValue::BoundVar(_) => None,
chalk_ir::ConstValue::InferenceVar(_) => None,
chalk_ir::ConstValue::Placeholder(_) => None,
chalk_ir::ConstValue::Concrete(c) => match &c.interned {
ConstScalar::Bytes(it, _) => Some(i128::from_le_bytes(pad16(it, true))),
ConstScalar::UnevaluatedConst(c, subst) => {
let ec = db.const_eval(*c, subst.clone(), None).ok()?;
try_const_isize(db, &ec)
}
_ => None,
},
/// Signed counterpart of `try_const_usize`: tries to read `c` as a concrete
/// signed integer.
///
/// Concrete values are sign-extended to 16 bytes (`pad16(_, true)`) and read
/// as a little-endian `i128`; unevaluated consts are evaluated via
/// `db.const_eval` first, and all abstract/error kinds yield `None`.
pub fn try_const_isize<'db>(db: &'db dyn HirDatabase, c: &Const<'db>) -> Option<i128> {
    match (*c).kind() {
        ConstKind::Param(_) => None,
        ConstKind::Infer(_) => None,
        ConstKind::Bound(_, _) => None,
        ConstKind::Placeholder(_) => None,
        ConstKind::Unevaluated(unevaluated_const) => {
            // Translate the solver def id into a `GeneralConstId` for the
            // const-eval query; other variants are unreachable here.
            let c = match unevaluated_const.def {
                SolverDefId::ConstId(id) => GeneralConstId::ConstId(id),
                SolverDefId::StaticId(id) => GeneralConstId::StaticId(id),
                _ => unreachable!(),
            };
            let subst = unevaluated_const.args;
            let ec = db.const_eval(c, subst, None).ok()?;
            try_const_isize(db, &ec)
        }
        ConstKind::Value(val) => Some(i128::from_le_bytes(pad16(&val.value.inner().memory, true))),
        ConstKind::Error(_) => None,
        ConstKind::Expr(_) => None,
    }
}
pub(crate) fn const_eval_cycle_result(
_: &dyn HirDatabase,
_: GeneralConstId,
_: Substitution,
_: Option<Arc<TraitEnvironment<'_>>>,
) -> Result<Const, ConstEvalError> {
Err(ConstEvalError::MirLowerError(MirLowerError::Loop))
}
pub(crate) fn const_eval_static_cycle_result(
_: &dyn HirDatabase,
_: StaticId,
) -> Result<Const, ConstEvalError> {
Err(ConstEvalError::MirLowerError(MirLowerError::Loop))
}
pub(crate) fn const_eval_discriminant_cycle_result(
_: &dyn HirDatabase,
_: EnumVariantId,
) -> Result<i128, ConstEvalError> {
Err(ConstEvalError::MirLowerError(MirLowerError::Loop))
}
pub(crate) fn const_eval_query(
db: &dyn HirDatabase,
def: GeneralConstId,
subst: Substitution,
trait_env: Option<Arc<TraitEnvironment<'_>>>,
) -> Result<Const, ConstEvalError> {
let body = match def {
GeneralConstId::ConstId(c) => {
db.monomorphized_mir_body(c.into(), subst, db.trait_environment(c.into()))?
}
GeneralConstId::StaticId(s) => {
let krate = s.module(db).krate();
db.monomorphized_mir_body(s.into(), subst, TraitEnvironment::empty(krate))?
}
};
let c = interpret_mir(db, body, false, trait_env)?.0?;
Ok(c)
}
pub(crate) fn const_eval_static_query(
db: &dyn HirDatabase,
def: StaticId,
) -> Result<Const, ConstEvalError> {
let body = db.monomorphized_mir_body(
def.into(),
Substitution::empty(Interner),
db.trait_environment_for_body(def.into()),
)?;
let c = interpret_mir(db, body, false, None)?.0?;
Ok(c)
}
pub(crate) fn const_eval_discriminant_variant(
db: &dyn HirDatabase,
pub(crate) fn const_eval_discriminant_variant<'db>(
db: &'db dyn HirDatabase,
variant_id: EnumVariantId,
) -> Result<i128, ConstEvalError> {
) -> Result<i128, ConstEvalError<'db>> {
let interner = DbInterner::new_with(db, None, None);
let def = variant_id.into();
let body = db.body(def);
let loc = variant_id.lookup(db);
@ -305,14 +250,14 @@ pub(crate) fn const_eval_discriminant_variant(
let mir_body = db.monomorphized_mir_body(
def,
Substitution::empty(Interner),
GenericArgs::new_from_iter(interner, []),
db.trait_environment_for_body(def),
)?;
let c = interpret_mir(db, mir_body, false, None)?.0?;
let c = if is_signed {
try_const_isize(db, &c).unwrap()
} else {
try_const_usize(db, &c).unwrap() as i128
try_const_usize(db, c).unwrap() as i128
};
Ok(c)
}
@ -320,13 +265,7 @@ pub(crate) fn const_eval_discriminant_variant(
// FIXME: Ideally constants in const eval should have separate body (issue #7434), and this function should
// get an `InferenceResult` instead of an `InferenceContext`. And we should remove `ctx.clone().resolve_all()` here
// and make this function private. See the fixme comment on `InferenceContext::resolve_all`.
pub(crate) fn eval_to_const(
expr: ExprId,
mode: ParamLoweringMode,
ctx: &mut InferenceContext<'_>,
debruijn: DebruijnIndex,
) -> Const {
let db = ctx.db;
pub(crate) fn eval_to_const<'db>(expr: ExprId, ctx: &mut InferenceContext<'_, 'db>) -> Const<'db> {
let infer = ctx.fixme_resolve_all_clone();
fn has_closure(body: &Body, expr: ExprId) -> bool {
if matches!(body[expr], Expr::Closure { .. }) {
@ -338,23 +277,74 @@ pub(crate) fn eval_to_const(
}
if has_closure(ctx.body, expr) {
// Type checking closures needs an isolated body (see the above FIXME). Bail out early to prevent panic.
return unknown_const(infer[expr].clone());
return unknown_const(infer[expr]);
}
if let Expr::Path(p) = &ctx.body[expr] {
let resolver = &ctx.resolver;
if let Some(c) =
path_to_const(db, resolver, p, mode, || ctx.generics(), debruijn, infer[expr].clone())
{
if let Some(c) = path_to_const(ctx.db, resolver, p, || ctx.generics(), infer[expr]) {
return c;
}
}
if let Ok(mir_body) = lower_to_mir(ctx.db, ctx.owner, ctx.body, &infer, expr)
&& let Ok((Ok(result), _)) = interpret_mir(db, Arc::new(mir_body), true, None)
&& let Ok((Ok(result), _)) = interpret_mir(ctx.db, Arc::new(mir_body), true, None)
{
return result;
}
unknown_const(infer[expr].clone())
unknown_const(infer[expr])
}
#[cfg(test)]
mod tests;
/// Salsa cycle recovery for the `const_eval` query: a constant whose
/// evaluation depends on itself cannot be computed, so report
/// `MirLowerError::Loop` instead of diverging.
pub(crate) fn const_eval_cycle_result<'db>(
    _: &'db dyn HirDatabase,
    _: GeneralConstId,
    _: GenericArgs<'db>,
    _: Option<Arc<TraitEnvironment<'db>>>,
) -> Result<Const<'db>, ConstEvalError<'db>> {
    Err(ConstEvalError::MirLowerError(MirLowerError::Loop))
}
/// Salsa cycle recovery for the `const_eval_static` query: a self-referential
/// static cannot be evaluated, so report `MirLowerError::Loop`.
pub(crate) fn const_eval_static_cycle_result<'db>(
    _: &'db dyn HirDatabase,
    _: StaticId,
) -> Result<Const<'db>, ConstEvalError<'db>> {
    Err(ConstEvalError::MirLowerError(MirLowerError::Loop))
}
/// Salsa cycle recovery for the `const_eval_discriminant` query: a variant
/// discriminant that depends on itself cannot be evaluated, so report
/// `MirLowerError::Loop`.
pub(crate) fn const_eval_discriminant_cycle_result<'db>(
    _: &'db dyn HirDatabase,
    _: EnumVariantId,
) -> Result<i128, ConstEvalError<'db>> {
    Err(ConstEvalError::MirLowerError(MirLowerError::Loop))
}
/// Implementation of the `const_eval` query: evaluates a constant or static
/// by lowering it to (monomorphized) MIR and interpreting that body.
///
/// For a `ConstId` the item's own trait environment is used; for a `StaticId`
/// an empty environment for the static's crate is used. Errors from MIR
/// lowering or evaluation are propagated as `ConstEvalError`.
pub(crate) fn const_eval_query<'db>(
    db: &'db dyn HirDatabase,
    def: GeneralConstId,
    subst: GenericArgs<'db>,
    trait_env: Option<Arc<TraitEnvironment<'db>>>,
) -> Result<Const<'db>, ConstEvalError<'db>> {
    let body = match def {
        GeneralConstId::ConstId(c) => {
            db.monomorphized_mir_body(c.into(), subst, db.trait_environment(c.into()))?
        }
        GeneralConstId::StaticId(s) => {
            let krate = s.module(db).krate();
            db.monomorphized_mir_body(s.into(), subst, TraitEnvironment::empty(krate))?
        }
    };
    // `interpret_mir` returns (Result<Const, _>, MirOutput-ish extra); only the
    // evaluated constant is needed here.
    let c = interpret_mir(db, body, false, trait_env)?.0?;
    Ok(c)
}
/// Implementation of the `const_eval_static` query: evaluates the initializer
/// of a static item.
///
/// Statics take no generic arguments, so the MIR body is monomorphized with an
/// empty `GenericArgs` list and the static's own trait environment.
pub(crate) fn const_eval_static_query<'db>(
    db: &'db dyn HirDatabase,
    def: StaticId,
) -> Result<Const<'db>, ConstEvalError<'db>> {
    let interner = DbInterner::new_with(db, None, None);
    let body = db.monomorphized_mir_body(
        def.into(),
        GenericArgs::new_from_iter(interner, []),
        db.trait_environment_for_body(def.into()),
    )?;
    let c = interpret_mir(db, body, false, None)?.0?;
    Ok(c)
}

View file

@ -1,17 +1,23 @@
use base_db::RootQueryDb;
use chalk_ir::Substitution;
use hir_def::db::DefDatabase;
use hir_expand::EditionedFileId;
use rustc_apfloat::{
Float,
ieee::{Half as f16, Quad as f128},
};
use rustc_type_ir::inherent::IntoKind;
use test_fixture::WithFixture;
use test_utils::skip_slow_tests;
use crate::{
Const, ConstScalar, Interner, MemoryMap, consteval::try_const_usize, db::HirDatabase,
display::DisplayTarget, mir::pad16, setup_tracing, test_db::TestDB,
MemoryMap,
consteval::try_const_usize,
db::HirDatabase,
display::DisplayTarget,
mir::pad16,
next_solver::{Const, ConstBytes, ConstKind, DbInterner, GenericArgs},
setup_tracing,
test_db::TestDB,
};
use super::{
@ -21,7 +27,7 @@ use super::{
mod intrinsics;
fn simplify(e: ConstEvalError) -> ConstEvalError {
fn simplify(e: ConstEvalError<'_>) -> ConstEvalError<'_> {
match e {
ConstEvalError::MirEvalError(MirEvalError::InFunction(e, _)) => {
simplify(ConstEvalError::MirEvalError(*e))
@ -33,10 +39,10 @@ fn simplify(e: ConstEvalError) -> ConstEvalError {
#[track_caller]
fn check_fail(
#[rust_analyzer::rust_fixture] ra_fixture: &str,
error: impl FnOnce(ConstEvalError) -> bool,
error: impl FnOnce(ConstEvalError<'_>) -> bool,
) {
let (db, file_id) = TestDB::with_single_file(ra_fixture);
salsa::attach(&db, || match eval_goal(&db, file_id) {
crate::attach_db(&db, || match eval_goal(&db, file_id) {
Ok(_) => panic!("Expected fail, but it succeeded"),
Err(e) => {
assert!(error(simplify(e.clone())), "Actual error was: {}", pretty_print_err(e, &db))
@ -79,7 +85,7 @@ fn check_answer(
check: impl FnOnce(&[u8], &MemoryMap<'_>),
) {
let (db, file_ids) = TestDB::with_many_files(ra_fixture);
salsa::attach(&db, || {
crate::attach_db(&db, || {
let file_id = *file_ids.last().unwrap();
let r = match eval_goal(&db, file_id) {
Ok(t) => t,
@ -88,19 +94,17 @@ fn check_answer(
panic!("Error in evaluating goal: {err}");
}
};
match &r.data(Interner).value {
chalk_ir::ConstValue::Concrete(c) => match &c.interned {
ConstScalar::Bytes(b, mm) => {
check(b, mm);
}
x => panic!("Expected number but found {x:?}"),
},
_ => panic!("result of const eval wasn't a concrete const"),
match r.kind() {
ConstKind::Value(value) => {
let ConstBytes { memory, memory_map } = value.value.inner();
check(memory, memory_map);
}
_ => panic!("Expected number but found {r:?}"),
}
});
}
fn pretty_print_err(e: ConstEvalError, db: &TestDB) -> String {
fn pretty_print_err(e: ConstEvalError<'_>, db: &TestDB) -> String {
let mut err = String::new();
let span_formatter = |file, range| format!("{file:?} {range:?}");
let display_target =
@ -117,8 +121,9 @@ fn pretty_print_err(e: ConstEvalError, db: &TestDB) -> String {
err
}
fn eval_goal(db: &TestDB, file_id: EditionedFileId) -> Result<Const, ConstEvalError> {
fn eval_goal(db: &TestDB, file_id: EditionedFileId) -> Result<Const<'_>, ConstEvalError<'_>> {
let _tracing = setup_tracing();
let interner = DbInterner::new_with(db, None, None);
let module_id = db.module_for_file(file_id.file_id(db));
let def_map = module_id.def_map(db);
let scope = &def_map[module_id.local_id].scope;
@ -137,7 +142,7 @@ fn eval_goal(db: &TestDB, file_id: EditionedFileId) -> Result<Const, ConstEvalEr
_ => None,
})
.expect("No const named GOAL found in the test");
db.const_eval(const_id.into(), Substitution::empty(Interner), None)
db.const_eval(const_id.into(), GenericArgs::new_from_iter(interner, []), None)
}
#[test]
@ -2506,8 +2511,10 @@ fn enums() {
const GOAL: E = E::A;
"#,
);
let r = eval_goal(&db, file_id).unwrap();
assert_eq!(try_const_usize(&db, &r), Some(1));
crate::attach_db(&db, || {
let r = eval_goal(&db, file_id).unwrap();
assert_eq!(try_const_usize(&db, r), Some(1));
})
}
#[test]

View file

@ -354,8 +354,9 @@ fn overflowing_add() {
fn needs_drop() {
check_number(
r#"
//- minicore: drop, manually_drop, copy, sized
//- minicore: drop, manually_drop, copy, sized, phantom_data
use core::mem::ManuallyDrop;
use core::marker::PhantomData;
extern "rust-intrinsic" {
pub fn needs_drop<T: ?Sized>() -> bool;
}
@ -380,17 +381,19 @@ fn needs_drop() {
const fn opaque_copy() -> impl Sized + Copy {
|| {}
}
struct RecursiveType(RecursiveType);
trait Everything {}
impl<T> Everything for T {}
const GOAL: bool = !needs_drop::<i32>() && !needs_drop::<X>()
&& needs_drop::<NeedsDrop>() && !needs_drop::<ManuallyDrop<NeedsDrop>>()
&& needs_drop::<[NeedsDrop; 1]>() && !needs_drop::<[NeedsDrop; 0]>()
&& needs_drop::<(X, NeedsDrop)>()
&& needs_drop::<(X, NeedsDrop)>() && !needs_drop::<PhantomData<NeedsDrop>>()
&& needs_drop::<Enum<NeedsDrop>>() && !needs_drop::<Enum<X>>()
&& closure_needs_drop()
&& !val_needs_drop(opaque()) && !val_needs_drop(opaque_copy())
&& needs_drop::<[NeedsDrop]>() && needs_drop::<dyn Everything>()
&& !needs_drop::<&dyn Everything>() && !needs_drop::<str>();
&& !needs_drop::<&dyn Everything>() && !needs_drop::<str>()
&& !needs_drop::<RecursiveType>();
"#,
1,
);

View file

@ -0,0 +1,108 @@
//! Constant evaluation details
use base_db::Crate;
use chalk_ir::{BoundVar, DebruijnIndex, cast::Cast};
use hir_def::{
expr_store::{HygieneId, path::Path},
resolver::{Resolver, ValueNs},
type_ref::LiteralConstRef,
};
use stdx::never;
use crate::{
Const, ConstData, ConstScalar, ConstValue, GenericArg, Interner, MemoryMap, Substitution,
TraitEnvironment, Ty,
db::HirDatabase,
generics::Generics,
lower::ParamLoweringMode,
next_solver::{DbInterner, mapping::ChalkToNextSolver},
to_placeholder_idx,
};
/// Resolves `path` in value namespace and, if it names something usable as a
/// constant, lowers it to a chalk `Const`.
///
/// - A generic const parameter becomes either a placeholder or a bound
///   variable, depending on `mode`.
/// - A `const` item becomes an unevaluated const with empty substitutions.
/// - Anything else yields `None`.
pub(crate) fn path_to_const<'g>(
    db: &dyn HirDatabase,
    resolver: &Resolver<'_>,
    path: &Path,
    mode: ParamLoweringMode,
    args: impl FnOnce() -> &'g Generics,
    debruijn: DebruijnIndex,
    expected_ty: Ty,
) -> Option<Const> {
    match resolver.resolve_path_in_value_ns_fully(db, path, HygieneId::ROOT) {
        Some(ValueNs::GenericParam(p)) => {
            let ty = db.const_param_ty(p);
            // `args` is computed lazily: it is only needed for the generic
            // parameter case.
            let args = args();
            let value = match mode {
                ParamLoweringMode::Placeholder => {
                    let idx = args.type_or_const_param_idx(p.into()).unwrap();
                    ConstValue::Placeholder(to_placeholder_idx(db, p.into(), idx as u32))
                }
                ParamLoweringMode::Variable => match args.type_or_const_param_idx(p.into()) {
                    Some(it) => ConstValue::BoundVar(BoundVar::new(debruijn, it)),
                    None => {
                        // Should not happen for well-formed input; report and
                        // bail instead of panicking.
                        never!(
                            "Generic list doesn't contain this param: {:?}, {:?}, {:?}",
                            args,
                            path,
                            p
                        );
                        return None;
                    }
                },
            };
            Some(ConstData { ty, value }.intern(Interner))
        }
        Some(ValueNs::ConstId(c)) => Some(intern_const_scalar(
            ConstScalar::UnevaluatedConst(c.into(), Substitution::empty(Interner)),
            expected_ty,
        )),
        // FIXME: With feature(adt_const_params), we also need to consider other things here, e.g. struct constructors.
        _ => None,
    }
}
/// Builds an "unknown" chalk const of the given type, used wherever a constant
/// could not be determined.
pub(crate) fn unknown_const(ty: Ty) -> Const {
    ConstData {
        ty,
        value: ConstValue::Concrete(chalk_ir::ConcreteConst { interned: ConstScalar::Unknown }),
    }
    .intern(Interner)
}
/// Like `unknown_const`, but casts the result into a `GenericArg` for use in
/// substitution lists.
pub(crate) fn unknown_const_as_generic(ty: Ty) -> GenericArg {
    unknown_const(ty).cast(Interner)
}
/// Interns a constant scalar with the given type, wrapping it as a concrete
/// chalk const value.
pub(crate) fn intern_const_scalar(value: ConstScalar, ty: Ty) -> Const {
    ConstData { ty, value: ConstValue::Concrete(chalk_ir::ConcreteConst { interned: value }) }
        .intern(Interner)
}
/// Interns a constant scalar with the given type, converting a
/// `LiteralConstRef` (int/uint/bool/char literal or unknown) into a chalk
/// const.
///
/// Integer literals are truncated to the type's layout size (little-endian
/// bytes); if layout computation fails, the full 16 bytes are kept.
pub(crate) fn intern_const_ref(
    db: &dyn HirDatabase,
    value: &LiteralConstRef,
    ty: Ty,
    krate: Crate,
) -> Const {
    let interner = DbInterner::new_with(db, Some(krate), None);
    // Layout is only needed for integer literals, hence the closure.
    let layout = || db.layout_of_ty(ty.to_nextsolver(interner), TraitEnvironment::empty(krate));
    let bytes = match value {
        LiteralConstRef::Int(i) => {
            // FIXME: We should handle failure of layout better.
            let size = layout().map(|it| it.size.bytes_usize()).unwrap_or(16);
            ConstScalar::Bytes(i.to_le_bytes()[0..size].into(), MemoryMap::default())
        }
        LiteralConstRef::UInt(i) => {
            let size = layout().map(|it| it.size.bytes_usize()).unwrap_or(16);
            ConstScalar::Bytes(i.to_le_bytes()[0..size].into(), MemoryMap::default())
        }
        LiteralConstRef::Bool(b) => ConstScalar::Bytes(Box::new([*b as u8]), MemoryMap::default()),
        LiteralConstRef::Char(c) => {
            // A char is stored as its 4-byte little-endian scalar value.
            ConstScalar::Bytes((*c as u32).to_le_bytes().into(), MemoryMap::default())
        }
        LiteralConstRef::Unknown => ConstScalar::Unknown,
    };
    intern_const_scalar(bytes, ty)
}

View file

@ -1,256 +0,0 @@
//! Constant evaluation details
// FIXME(next-solver): this should get removed as things get moved to rustc_type_ir from chalk_ir
#![allow(unused)]
use base_db::Crate;
use hir_def::{
EnumVariantId, GeneralConstId,
expr_store::{Body, HygieneId, path::Path},
hir::{Expr, ExprId},
resolver::{Resolver, ValueNs},
type_ref::LiteralConstRef,
};
use hir_expand::Lookup;
use rustc_type_ir::{
UnevaluatedConst,
inherent::{IntoKind, SliceLike},
};
use stdx::never;
use triomphe::Arc;
use crate::{
ConstScalar, Interner, MemoryMap, Substitution, TraitEnvironment,
consteval::ConstEvalError,
db::HirDatabase,
generics::Generics,
infer::InferenceContext,
next_solver::{
Const, ConstBytes, ConstKind, DbInterner, ErrorGuaranteed, GenericArg, GenericArgs,
ParamConst, SolverDefId, Ty, ValueConst,
mapping::{ChalkToNextSolver, NextSolverToChalk, convert_binder_to_early_binder},
},
};
use super::mir::{interpret_mir, lower_to_mir, pad16};
/// Resolves `path` in value namespace and, if it names something usable as a
/// constant, lowers it to a next-solver `Const`.
///
/// A generic const parameter becomes `ConstKind::Param`; a `const` item
/// becomes an unevaluated const with empty generic args; anything else yields
/// `None`.
///
/// NOTE(review): `expected_ty` is currently unused here (the file carries
/// `#![allow(unused)]`).
pub(crate) fn path_to_const<'a, 'g>(
    db: &'a dyn HirDatabase,
    resolver: &Resolver<'a>,
    path: &Path,
    args: impl FnOnce() -> &'g Generics,
    expected_ty: Ty<'a>,
) -> Option<Const<'a>> {
    let interner = DbInterner::new_with(db, Some(resolver.krate()), None);
    match resolver.resolve_path_in_value_ns_fully(db, path, HygieneId::ROOT) {
        Some(ValueNs::GenericParam(p)) => {
            // `args` is computed lazily: only the generic-parameter case
            // needs the generics list.
            let args = args();
            match args
                .type_or_const_param(p.into())
                .and_then(|(idx, p)| p.const_param().map(|p| (idx, p.clone())))
            {
                Some((idx, _param)) => {
                    Some(Const::new_param(interner, ParamConst { index: idx as u32, id: p }))
                }
                None => {
                    // Should not happen for well-formed input; report and bail
                    // instead of panicking.
                    never!(
                        "Generic list doesn't contain this param: {:?}, {:?}, {:?}",
                        args,
                        path,
                        p
                    );
                    None
                }
            }
        }
        Some(ValueNs::ConstId(c)) => {
            let args = GenericArgs::new_from_iter(interner, []);
            Some(Const::new(
                interner,
                rustc_type_ir::ConstKind::Unevaluated(UnevaluatedConst::new(
                    SolverDefId::ConstId(c),
                    args,
                )),
            ))
        }
        _ => None,
    }
}
/// Builds an "unknown" next-solver const, represented as an error const.
/// The `ty` parameter is accepted for interface parity but not used.
pub fn unknown_const<'db>(ty: Ty<'db>) -> Const<'db> {
    Const::new(DbInterner::conjure(), rustc_type_ir::ConstKind::Error(ErrorGuaranteed))
}
/// Like `unknown_const`, but converts the result into a `GenericArg` for use
/// in generic argument lists.
pub fn unknown_const_as_generic<'db>(ty: Ty<'db>) -> GenericArg<'db> {
    unknown_const(ty).into()
}
/// Interns a constant scalar with the given type, converting a
/// `LiteralConstRef` (int/uint/bool/char literal or unknown) into a
/// next-solver `Const`.
///
/// Integer literals are truncated to the type's layout size (little-endian
/// bytes); if layout computation fails, the full 16 bytes are kept.
pub fn intern_const_ref<'a>(
    db: &'a dyn HirDatabase,
    value: &LiteralConstRef,
    ty: Ty<'a>,
    krate: Crate,
) -> Const<'a> {
    let interner = DbInterner::new_with(db, Some(krate), None);
    let layout = db.layout_of_ty(ty, TraitEnvironment::empty(krate));
    let kind = match value {
        LiteralConstRef::Int(i) => {
            // FIXME: We should handle failure of layout better.
            let size = layout.map(|it| it.size.bytes_usize()).unwrap_or(16);
            rustc_type_ir::ConstKind::Value(ValueConst::new(
                ty,
                ConstBytes(i.to_le_bytes()[0..size].into(), MemoryMap::default()),
            ))
        }
        LiteralConstRef::UInt(i) => {
            let size = layout.map(|it| it.size.bytes_usize()).unwrap_or(16);
            rustc_type_ir::ConstKind::Value(ValueConst::new(
                ty,
                ConstBytes(i.to_le_bytes()[0..size].into(), MemoryMap::default()),
            ))
        }
        LiteralConstRef::Bool(b) => rustc_type_ir::ConstKind::Value(ValueConst::new(
            ty,
            ConstBytes(Box::new([*b as u8]), MemoryMap::default()),
        )),
        LiteralConstRef::Char(c) => rustc_type_ir::ConstKind::Value(ValueConst::new(
            ty,
            // A char is stored as its 4-byte little-endian scalar value.
            ConstBytes((*c as u32).to_le_bytes().into(), MemoryMap::default()),
        )),
        LiteralConstRef::Unknown => rustc_type_ir::ConstKind::Error(ErrorGuaranteed),
    };
    Const::new(interner, kind)
}
/// Interns a possibly-unknown target usize: `Some(v)` becomes a concrete
/// `usize` constant, `None` becomes an unknown/error constant.
pub fn usize_const<'db>(db: &'db dyn HirDatabase, value: Option<u128>, krate: Crate) -> Const<'db> {
    intern_const_ref(
        db,
        &value.map_or(LiteralConstRef::Unknown, LiteralConstRef::UInt),
        Ty::new_uint(DbInterner::new_with(db, Some(krate), None), rustc_type_ir::UintTy::Usize),
        krate,
    )
}
/// Tries to read `c` as a concrete unsigned integer.
///
/// Unevaluated consts are converted to chalk substitutions, evaluated through
/// `db.const_eval`, converted back, and inspected recursively; concrete values
/// are zero-extended to 16 bytes (`pad16(_, false)`) and read as little-endian
/// `u128`. All abstract/error kinds yield `None`.
pub fn try_const_usize<'db>(db: &'db dyn HirDatabase, c: Const<'db>) -> Option<u128> {
    let interner = DbInterner::new_with(db, None, None);
    match c.kind() {
        ConstKind::Param(_) => None,
        ConstKind::Infer(_) => None,
        ConstKind::Bound(_, _) => None,
        ConstKind::Placeholder(_) => None,
        ConstKind::Unevaluated(unevaluated_const) => {
            // Map the solver-level def id back to a `GeneralConstId` for the
            // const-eval query; other variants are unreachable here.
            let c = match unevaluated_const.def {
                SolverDefId::ConstId(id) => GeneralConstId::ConstId(id),
                SolverDefId::StaticId(id) => GeneralConstId::StaticId(id),
                _ => unreachable!(),
            };
            let subst = unevaluated_const.args.to_chalk(interner);
            let ec = db.const_eval(c, subst, None).ok()?.to_nextsolver(interner);
            try_const_usize(db, ec)
        }
        ConstKind::Value(val) => Some(u128::from_le_bytes(pad16(&val.value.inner().0, false))),
        ConstKind::Error(_) => None,
        ConstKind::Expr(_) => None,
    }
}
/// Signed counterpart of `try_const_usize`: tries to read `c` as a concrete
/// signed integer, sign-extending concrete values to 16 bytes
/// (`pad16(_, true)`) and reading them as little-endian `i128`.
pub fn try_const_isize<'db>(db: &'db dyn HirDatabase, c: &Const<'db>) -> Option<i128> {
    let interner = DbInterner::new_with(db, None, None);
    match (*c).kind() {
        ConstKind::Param(_) => None,
        ConstKind::Infer(_) => None,
        ConstKind::Bound(_, _) => None,
        ConstKind::Placeholder(_) => None,
        ConstKind::Unevaluated(unevaluated_const) => {
            // Map the solver-level def id back to a `GeneralConstId` for the
            // const-eval query; other variants are unreachable here.
            let c = match unevaluated_const.def {
                SolverDefId::ConstId(id) => GeneralConstId::ConstId(id),
                SolverDefId::StaticId(id) => GeneralConstId::StaticId(id),
                _ => unreachable!(),
            };
            let subst = unevaluated_const.args.to_chalk(interner);
            let ec = db.const_eval(c, subst, None).ok()?.to_nextsolver(interner);
            try_const_isize(db, &ec)
        }
        ConstKind::Value(val) => Some(i128::from_le_bytes(pad16(&val.value.inner().0, true))),
        ConstKind::Error(_) => None,
        ConstKind::Expr(_) => None,
    }
}
/// Computes the discriminant value of an enum variant.
///
/// If the variant has no explicit discriminant expression (`Expr::Missing`),
/// the value is the previous variant's discriminant plus one (or `0` for the
/// first variant). Otherwise the discriminant body is lowered to MIR,
/// interpreted, and read back as a signed or unsigned integer depending on the
/// enum's `repr`.
pub(crate) fn const_eval_discriminant_variant(
    db: &dyn HirDatabase,
    variant_id: EnumVariantId,
) -> Result<i128, ConstEvalError> {
    let interner = DbInterner::new_with(db, None, None);
    let def = variant_id.into();
    let body = db.body(def);
    let loc = variant_id.lookup(db);
    if matches!(body[body.body_expr], Expr::Missing) {
        // No explicit discriminant: derive it from the preceding variant.
        let prev_idx = loc.index.checked_sub(1);
        let value = match prev_idx {
            Some(prev_idx) => {
                1 + db.const_eval_discriminant(
                    loc.parent.enum_variants(db).variants[prev_idx as usize].0,
                )?
            }
            _ => 0,
        };
        return Ok(value);
    }
    // Signedness defaults to signed unless the repr specifies an unsigned int.
    let repr = db.enum_signature(loc.parent).repr;
    let is_signed = repr.and_then(|repr| repr.int).is_none_or(|int| int.is_signed());
    let mir_body = db.monomorphized_mir_body(
        def,
        Substitution::empty(Interner),
        db.trait_environment_for_body(def),
    )?;
    let c = interpret_mir(db, mir_body, false, None)?.0?;
    let c = c.to_nextsolver(interner);
    let c = if is_signed {
        try_const_isize(db, &c).unwrap()
    } else {
        try_const_usize(db, c).unwrap() as i128
    };
    Ok(c)
}
// FIXME: Ideally constants in const eval should have separate body (issue #7434), and this function should
// get an `InferenceResult` instead of an `InferenceContext`. And we should remove `ctx.clone().resolve_all()` here
// and make this function private. See the fixme comment on `InferenceContext::resolve_all`.
/// Evaluates an expression from an inference context to a `Const`, falling
/// back to an unknown const when evaluation is not possible:
/// 1. bail out for bodies containing closures (see FIXME above),
/// 2. try resolving a plain path (const param / const item) directly,
/// 3. otherwise lower the expression to MIR and interpret it.
pub(crate) fn eval_to_const<'db>(expr: ExprId, ctx: &mut InferenceContext<'db>) -> Const<'db> {
    let interner = DbInterner::new_with(ctx.db, None, None);
    let infer = ctx.fixme_resolve_all_clone();
    // Recursively checks whether `expr` contains any closure expression.
    fn has_closure(body: &Body, expr: ExprId) -> bool {
        if matches!(body[expr], Expr::Closure { .. }) {
            return true;
        }
        let mut r = false;
        body.walk_child_exprs(expr, |idx| r |= has_closure(body, idx));
        r
    }
    if has_closure(ctx.body, expr) {
        // Type checking closures needs an isolated body (see the above FIXME). Bail out early to prevent panic.
        return unknown_const(infer[expr].clone().to_nextsolver(interner));
    }
    if let Expr::Path(p) = &ctx.body[expr] {
        let resolver = &ctx.resolver;
        if let Some(c) = path_to_const(
            ctx.db,
            resolver,
            p,
            || ctx.generics(),
            infer[expr].to_nextsolver(interner),
        ) {
            return c;
        }
    }
    if let Ok(mir_body) = lower_to_mir(ctx.db, ctx.owner, ctx.body, &infer, expr)
        && let Ok((Ok(result), _)) = interpret_mir(ctx.db, Arc::new(mir_body), true, None)
    {
        return result.to_nextsolver(interner);
    }
    unknown_const(infer[expr].to_nextsolver(interner))
}

View file

@ -16,10 +16,9 @@ use smallvec::SmallVec;
use triomphe::Arc;
use crate::{
Binders, Const, ImplTraitId, ImplTraits, InferenceResult, Substitution, TraitEnvironment, Ty,
TyDefId, ValueTyDefId, chalk_db,
Binders, ImplTraitId, ImplTraits, InferenceResult, TraitEnvironment, Ty, TyDefId, ValueTyDefId,
chalk_db,
consteval::ConstEvalError,
drop::DropGlue,
dyn_compatibility::DynCompatibilityViolation,
layout::{Layout, LayoutError},
lower::{Diagnostics, GenericDefaults, GenericPredicates},
@ -32,62 +31,77 @@ use crate::{
pub trait HirDatabase: DefDatabase + std::fmt::Debug {
#[salsa::invoke(crate::infer::infer_query)]
#[salsa::cycle(cycle_result = crate::infer::infer_cycle_result)]
fn infer(&self, def: DefWithBodyId) -> Arc<InferenceResult>;
fn infer<'db>(&'db self, def: DefWithBodyId) -> Arc<InferenceResult<'db>>;
// region:mir
#[salsa::invoke(crate::mir::mir_body_query)]
#[salsa::cycle(cycle_result = crate::mir::mir_body_cycle_result)]
fn mir_body(&self, def: DefWithBodyId) -> Result<Arc<MirBody>, MirLowerError>;
fn mir_body<'db>(
&'db self,
def: DefWithBodyId,
) -> Result<Arc<MirBody<'db>>, MirLowerError<'db>>;
#[salsa::invoke(crate::mir::mir_body_for_closure_query)]
fn mir_body_for_closure(&self, def: InternedClosureId) -> Result<Arc<MirBody>, MirLowerError>;
fn mir_body_for_closure<'db>(
&'db self,
def: InternedClosureId,
) -> Result<Arc<MirBody<'db>>, MirLowerError<'db>>;
#[salsa::invoke(crate::mir::monomorphized_mir_body_query)]
#[salsa::cycle(cycle_result = crate::mir::monomorphized_mir_body_cycle_result)]
fn monomorphized_mir_body(
&self,
fn monomorphized_mir_body<'db>(
&'db self,
def: DefWithBodyId,
subst: Substitution,
env: Arc<TraitEnvironment<'_>>,
) -> Result<Arc<MirBody>, MirLowerError>;
subst: crate::next_solver::GenericArgs<'db>,
env: Arc<TraitEnvironment<'db>>,
) -> Result<Arc<MirBody<'db>>, MirLowerError<'db>>;
#[salsa::invoke(crate::mir::monomorphized_mir_body_for_closure_query)]
fn monomorphized_mir_body_for_closure(
&self,
fn monomorphized_mir_body_for_closure<'db>(
&'db self,
def: InternedClosureId,
subst: Substitution,
env: Arc<TraitEnvironment<'_>>,
) -> Result<Arc<MirBody>, MirLowerError>;
subst: crate::next_solver::GenericArgs<'db>,
env: Arc<TraitEnvironment<'db>>,
) -> Result<Arc<MirBody<'db>>, MirLowerError<'db>>;
#[salsa::invoke(crate::mir::borrowck_query)]
#[salsa::lru(2024)]
fn borrowck(&self, def: DefWithBodyId) -> Result<Arc<[BorrowckResult]>, MirLowerError>;
fn borrowck<'db>(
&'db self,
def: DefWithBodyId,
) -> Result<Arc<[BorrowckResult<'db>]>, MirLowerError<'db>>;
#[salsa::invoke(crate::consteval::const_eval_query)]
#[salsa::cycle(cycle_result = crate::consteval::const_eval_cycle_result)]
fn const_eval(
&self,
fn const_eval<'db>(
&'db self,
def: GeneralConstId,
subst: Substitution,
trait_env: Option<Arc<TraitEnvironment<'_>>>,
) -> Result<Const, ConstEvalError>;
subst: crate::next_solver::GenericArgs<'db>,
trait_env: Option<Arc<TraitEnvironment<'db>>>,
) -> Result<crate::next_solver::Const<'db>, ConstEvalError<'db>>;
#[salsa::invoke(crate::consteval::const_eval_static_query)]
#[salsa::cycle(cycle_result = crate::consteval::const_eval_static_cycle_result)]
fn const_eval_static(&self, def: StaticId) -> Result<Const, ConstEvalError>;
fn const_eval_static<'db>(
&'db self,
def: StaticId,
) -> Result<crate::next_solver::Const<'db>, ConstEvalError<'db>>;
#[salsa::invoke(crate::consteval::const_eval_discriminant_variant)]
#[salsa::cycle(cycle_result = crate::consteval::const_eval_discriminant_cycle_result)]
fn const_eval_discriminant(&self, def: EnumVariantId) -> Result<i128, ConstEvalError>;
fn const_eval_discriminant<'db>(
&'db self,
def: EnumVariantId,
) -> Result<i128, ConstEvalError<'db>>;
#[salsa::invoke(crate::method_resolution::lookup_impl_method_query)]
fn lookup_impl_method(
&self,
env: Arc<TraitEnvironment<'_>>,
fn lookup_impl_method<'db>(
&'db self,
env: Arc<TraitEnvironment<'db>>,
func: FunctionId,
fn_subst: Substitution,
) -> (FunctionId, Substitution);
fn_subst: crate::next_solver::GenericArgs<'db>,
) -> (FunctionId, crate::next_solver::GenericArgs<'db>);
// endregion:mir
@ -325,10 +339,6 @@ pub trait HirDatabase: DefDatabase + std::fmt::Debug {
goal: crate::Canonical<crate::InEnvironment<crate::Goal>>,
) -> NextTraitSolveResult;
#[salsa::invoke(crate::drop::has_drop_glue)]
#[salsa::cycle(cycle_result = crate::drop::has_drop_glue_cycle_result)]
fn has_drop_glue(&self, ty: Ty, env: Arc<TraitEnvironment<'_>>) -> DropGlue;
// next trait solver
#[salsa::invoke(crate::lower_nextsolver::const_param_ty_query)]
@ -370,6 +380,23 @@ pub trait HirDatabase: DefDatabase + std::fmt::Debug {
&'db self,
def: GenericDefId,
) -> crate::lower_nextsolver::GenericPredicates<'db>;
#[salsa::invoke(crate::lower_nextsolver::generic_defaults_with_diagnostics_query)]
#[salsa::cycle(cycle_result = crate::lower_nextsolver::generic_defaults_with_diagnostics_cycle_result)]
fn generic_defaults_ns_with_diagnostics<'db>(
&'db self,
def: GenericDefId,
) -> (crate::lower_nextsolver::GenericDefaults<'db>, Diagnostics);
/// This returns an empty list if no parameter has default.
///
/// The binders of the returned defaults are only up to (not including) this parameter.
#[salsa::invoke(crate::lower_nextsolver::generic_defaults_query)]
#[salsa::transparent]
fn generic_defaults_ns<'db>(
&'db self,
def: GenericDefId,
) -> crate::lower_nextsolver::GenericDefaults<'db>;
}
#[test]

View file

@ -23,6 +23,8 @@ use tracing::debug;
use triomphe::Arc;
use typed_arena::Arena;
use crate::next_solver::DbInterner;
use crate::next_solver::mapping::NextSolverToChalk;
use crate::{
Adjust, InferenceResult, Interner, TraitEnvironment, Ty, TyExt, TyKind,
db::HirDatabase,
@ -74,8 +76,16 @@ impl BodyValidationDiagnostic {
let infer = db.infer(owner);
let body = db.body(owner);
let env = db.trait_environment_for_body(owner);
let mut validator =
ExprValidator { owner, body, infer, diagnostics: Vec::new(), validate_lints, env };
let interner = DbInterner::new_with(db, Some(env.krate), env.block);
let mut validator = ExprValidator {
owner,
body,
infer,
diagnostics: Vec::new(),
validate_lints,
env,
interner,
};
validator.validate_body(db);
validator.diagnostics
}
@ -84,10 +94,11 @@ impl BodyValidationDiagnostic {
struct ExprValidator<'db> {
owner: DefWithBodyId,
body: Arc<Body>,
infer: Arc<InferenceResult>,
infer: Arc<InferenceResult<'db>>,
env: Arc<TraitEnvironment<'db>>,
diagnostics: Vec<BodyValidationDiagnostic>,
validate_lints: bool,
interner: DbInterner<'db>,
}
impl<'db> ExprValidator<'db> {
@ -175,7 +186,7 @@ impl<'db> ExprValidator<'db> {
}
if let Some(receiver_ty) = self.infer.type_of_expr_with_adjust(*receiver) {
checker.prev_receiver_ty = Some(receiver_ty.clone());
checker.prev_receiver_ty = Some(receiver_ty.to_chalk(self.interner));
}
}
}
@ -190,6 +201,7 @@ impl<'db> ExprValidator<'db> {
let Some(scrut_ty) = self.infer.type_of_expr_with_adjust(scrutinee_expr) else {
return;
};
let scrut_ty = scrut_ty.to_chalk(self.interner);
if scrut_ty.contains_unknown() {
return;
}
@ -205,6 +217,7 @@ impl<'db> ExprValidator<'db> {
let Some(pat_ty) = self.infer.type_of_pat_with_adjust(arm.pat) else {
return;
};
let pat_ty = pat_ty.to_chalk(self.interner);
if pat_ty.contains_unknown() {
return;
}
@ -222,7 +235,7 @@ impl<'db> ExprValidator<'db> {
if (pat_ty == scrut_ty
|| scrut_ty
.as_reference()
.map(|(match_expr_ty, ..)| match_expr_ty == pat_ty)
.map(|(match_expr_ty, ..)| *match_expr_ty == pat_ty)
.unwrap_or(false))
&& types_of_subpatterns_do_match(arm.pat, &self.body, &self.infer)
{
@ -264,7 +277,7 @@ impl<'db> ExprValidator<'db> {
match_expr,
uncovered_patterns: missing_match_arms(
&cx,
scrut_ty,
&scrut_ty,
witnesses,
m_arms.is_empty(),
self.owner.krate(db),
@ -298,10 +311,12 @@ impl<'db> ExprValidator<'db> {
);
value_or_partial.is_none_or(|v| !matches!(v, ValueNs::StaticId(_)))
}
Expr::Field { expr, .. } => match self.infer.type_of_expr[*expr].kind(Interner) {
TyKind::Adt(adt, ..) if matches!(adt.0, AdtId::UnionId(_)) => false,
_ => self.is_known_valid_scrutinee(*expr, db),
},
Expr::Field { expr, .. } => {
match self.infer.type_of_expr[*expr].to_chalk(self.interner).kind(Interner) {
TyKind::Adt(adt, ..) if matches!(adt.0, AdtId::UnionId(_)) => false,
_ => self.is_known_valid_scrutinee(*expr, db),
}
}
Expr::Index { base, .. } => self.is_known_valid_scrutinee(*base, db),
Expr::Cast { expr, .. } => self.is_known_valid_scrutinee(*expr, db),
Expr::Missing => false,
@ -327,6 +342,7 @@ impl<'db> ExprValidator<'db> {
}
let Some(initializer) = initializer else { continue };
let Some(ty) = self.infer.type_of_expr_with_adjust(initializer) else { continue };
let ty = ty.to_chalk(self.interner);
if ty.contains_unknown() {
continue;
}
@ -357,7 +373,7 @@ impl<'db> ExprValidator<'db> {
pat,
uncovered_patterns: missing_match_arms(
&cx,
ty,
&ty,
witnesses,
false,
self.owner.krate(db),
@ -542,7 +558,7 @@ impl FilterMapNextChecker {
pub fn record_literal_missing_fields(
db: &dyn HirDatabase,
infer: &InferenceResult,
infer: &InferenceResult<'_>,
id: ExprId,
expr: &Expr,
) -> Option<(VariantId, Vec<LocalFieldId>, /*exhaustive*/ bool)> {
@ -572,7 +588,7 @@ pub fn record_literal_missing_fields(
pub fn record_pattern_missing_fields(
db: &dyn HirDatabase,
infer: &InferenceResult,
infer: &InferenceResult<'_>,
id: PatId,
pat: &Pat,
) -> Option<(VariantId, Vec<LocalFieldId>, /*exhaustive*/ bool)> {
@ -600,8 +616,8 @@ pub fn record_pattern_missing_fields(
Some((variant_def, missed_fields, exhaustive))
}
fn types_of_subpatterns_do_match(pat: PatId, body: &Body, infer: &InferenceResult) -> bool {
fn walk(pat: PatId, body: &Body, infer: &InferenceResult, has_type_mismatches: &mut bool) {
fn types_of_subpatterns_do_match(pat: PatId, body: &Body, infer: &InferenceResult<'_>) -> bool {
fn walk(pat: PatId, body: &Body, infer: &InferenceResult<'_>, has_type_mismatches: &mut bool) {
match infer.type_mismatch_for_pat(pat) {
Some(_) => *has_type_mismatches = true,
None if *has_type_mismatches => (),

View file

@ -20,6 +20,8 @@ use hir_expand::name::Name;
use span::Edition;
use stdx::{always, never};
use crate::next_solver::DbInterner;
use crate::next_solver::mapping::NextSolverToChalk;
use crate::{
InferenceResult, Interner, Substitution, Ty, TyExt, TyKind,
db::HirDatabase,
@ -93,16 +95,21 @@ pub(crate) enum PatKind {
},
}
pub(crate) struct PatCtxt<'a> {
db: &'a dyn HirDatabase,
infer: &'a InferenceResult,
body: &'a Body,
pub(crate) struct PatCtxt<'db> {
db: &'db dyn HirDatabase,
infer: &'db InferenceResult<'db>,
body: &'db Body,
pub(crate) errors: Vec<PatternError>,
interner: DbInterner<'db>,
}
impl<'a> PatCtxt<'a> {
pub(crate) fn new(db: &'a dyn HirDatabase, infer: &'a InferenceResult, body: &'a Body) -> Self {
Self { db, infer, body, errors: Vec::new() }
pub(crate) fn new(
db: &'a dyn HirDatabase,
infer: &'a InferenceResult<'a>,
body: &'a Body,
) -> Self {
Self { db, infer, body, errors: Vec::new(), interner: DbInterner::new_with(db, None, None) }
}
pub(crate) fn lower_pattern(&mut self, pat: PatId) -> Pat {
@ -115,14 +122,14 @@ impl<'a> PatCtxt<'a> {
self.infer.pat_adjustments.get(&pat).map(|it| &**it).unwrap_or_default().iter().rev().fold(
unadjusted_pat,
|subpattern, ref_ty| Pat {
ty: ref_ty.clone(),
ty: ref_ty.to_chalk(self.interner).clone(),
kind: Box::new(PatKind::Deref { subpattern }),
},
)
}
fn lower_pattern_unadjusted(&mut self, pat: PatId) -> Pat {
let mut ty = &self.infer[pat];
let mut ty = self.infer[pat].to_chalk(self.interner);
let variant = self.infer.variant_resolution_for_pat(pat);
let kind = match self.body[pat] {
@ -140,7 +147,7 @@ impl<'a> PatCtxt<'a> {
_ => {
never!("unexpected type for tuple pattern: {:?}", ty);
self.errors.push(PatternError::UnexpectedType);
return Pat { ty: ty.clone(), kind: PatKind::Wild.into() };
return Pat { ty, kind: PatKind::Wild.into() };
}
};
let subpatterns = self.lower_tuple_subpats(args, arity, ellipsis);
@ -149,10 +156,10 @@ impl<'a> PatCtxt<'a> {
hir_def::hir::Pat::Bind { id, subpat, .. } => {
let bm = self.infer.binding_modes[pat];
ty = &self.infer[id];
ty = self.infer[id].to_chalk(self.interner);
let name = &self.body[id].name;
match (bm, ty.kind(Interner)) {
(BindingMode::Ref(_), TyKind::Ref(.., rty)) => ty = rty,
(BindingMode::Ref(_), TyKind::Ref(.., rty)) => ty = rty.clone(),
(BindingMode::Ref(_), _) => {
never!(
"`ref {}` has wrong type {:?}",
@ -170,7 +177,7 @@ impl<'a> PatCtxt<'a> {
hir_def::hir::Pat::TupleStruct { ref args, ellipsis, .. } if variant.is_some() => {
let expected_len = variant.unwrap().fields(self.db).fields().len();
let subpatterns = self.lower_tuple_subpats(args, expected_len, ellipsis);
self.lower_variant_or_leaf(pat, ty, subpatterns)
self.lower_variant_or_leaf(pat, &ty, subpatterns)
}
hir_def::hir::Pat::Record { ref args, .. } if variant.is_some() => {
@ -186,7 +193,7 @@ impl<'a> PatCtxt<'a> {
})
.collect();
match subpatterns {
Some(subpatterns) => self.lower_variant_or_leaf(pat, ty, subpatterns),
Some(subpatterns) => self.lower_variant_or_leaf(pat, &ty, subpatterns),
None => {
self.errors.push(PatternError::MissingField);
PatKind::Wild
@ -271,12 +278,12 @@ impl<'a> PatCtxt<'a> {
}
fn lower_path(&mut self, pat: PatId, _path: &Path) -> Pat {
let ty = &self.infer[pat];
let ty = self.infer[pat].to_chalk(self.interner);
let pat_from_kind = |kind| Pat { ty: ty.clone(), kind: Box::new(kind) };
match self.infer.variant_resolution_for_pat(pat) {
Some(_) => pat_from_kind(self.lower_variant_or_leaf(pat, ty, Vec::new())),
Some(_) => pat_from_kind(self.lower_variant_or_leaf(pat, &ty, Vec::new())),
None => {
self.errors.push(PatternError::UnresolvedVariant);
pat_from_kind(PatKind::Wild)

View file

@ -14,6 +14,8 @@ use hir_def::{
};
use span::Edition;
use crate::next_solver::DbInterner;
use crate::next_solver::mapping::NextSolverToChalk;
use crate::utils::TargetFeatureIsSafeInTarget;
use crate::{
InferenceResult, Interner, TargetFeatures, TyExt, TyKind,
@ -96,9 +98,9 @@ enum UnsafeDiagnostic {
DeprecatedSafe2024 { node: ExprId, inside_unsafe_block: InsideUnsafeBlock },
}
pub fn unsafe_operations_for_body(
db: &dyn HirDatabase,
infer: &InferenceResult,
pub fn unsafe_operations_for_body<'db>(
db: &'db dyn HirDatabase,
infer: &InferenceResult<'db>,
def: DefWithBodyId,
body: &Body,
callback: &mut dyn FnMut(ExprOrPatId),
@ -115,9 +117,9 @@ pub fn unsafe_operations_for_body(
}
}
pub fn unsafe_operations(
db: &dyn HirDatabase,
infer: &InferenceResult,
pub fn unsafe_operations<'db>(
db: &'db dyn HirDatabase,
infer: &InferenceResult<'db>,
def: DefWithBodyId,
body: &Body,
current: ExprId,
@ -135,7 +137,7 @@ pub fn unsafe_operations(
struct UnsafeVisitor<'db> {
db: &'db dyn HirDatabase,
infer: &'db InferenceResult,
infer: &'db InferenceResult<'db>,
body: &'db Body,
resolver: Resolver<'db>,
def: DefWithBodyId,
@ -149,12 +151,13 @@ struct UnsafeVisitor<'db> {
/// On some targets (WASM), calling safe functions with `#[target_feature]` is always safe, even when
/// the target feature is not enabled. This flag encodes that.
target_feature_is_safe: TargetFeatureIsSafeInTarget,
interner: DbInterner<'db>,
}
impl<'db> UnsafeVisitor<'db> {
fn new(
db: &'db dyn HirDatabase,
infer: &'db InferenceResult,
infer: &'db InferenceResult<'db>,
body: &'db Body,
def: DefWithBodyId,
unsafe_expr_cb: &'db mut dyn FnMut(UnsafeDiagnostic),
@ -183,6 +186,7 @@ impl<'db> UnsafeVisitor<'db> {
def_target_features,
edition,
target_feature_is_safe,
interner: DbInterner::new_with(db, None, None),
}
}
@ -285,7 +289,7 @@ impl<'db> UnsafeVisitor<'db> {
let inside_assignment = mem::replace(&mut self.inside_assignment, false);
match expr {
&Expr::Call { callee, .. } => {
let callee = &self.infer[callee];
let callee = self.infer[callee].to_chalk(self.interner);
if let Some(func) = callee.as_fn_def(self.db) {
self.check_call(current, func);
}
@ -338,7 +342,7 @@ impl<'db> UnsafeVisitor<'db> {
}
}
Expr::UnaryOp { expr, op: UnaryOp::Deref } => {
if let TyKind::Raw(..) = &self.infer[*expr].kind(Interner) {
if let TyKind::Raw(..) = &self.infer[*expr].to_chalk(self.interner).kind(Interner) {
self.on_unsafe_op(current.into(), UnsafetyReason::RawPtrDeref);
}
}

View file

@ -52,7 +52,7 @@ use crate::{
AliasEq, AliasTy, Binders, CallableDefId, CallableSig, ConcreteConst, Const, ConstScalar,
ConstValue, DomainGoal, FnAbi, GenericArg, ImplTraitId, Interner, Lifetime, LifetimeData,
LifetimeOutlives, MemoryMap, OpaqueTy, ProjectionTy, ProjectionTyExt, QuantifiedWhereClause,
TraitEnvironment, TraitRef, TraitRefExt, Ty, TyExt, WhereClause, consteval_nextsolver,
TraitEnvironment, TraitRef, TraitRefExt, Ty, TyExt, WhereClause, consteval,
db::{HirDatabase, InternedClosure},
from_assoc_type_id, from_placeholder_idx,
generics::generics,
@ -750,8 +750,8 @@ impl<'db> HirDisplay for crate::next_solver::Const<'db> {
}
rustc_type_ir::ConstKind::Value(const_bytes) => render_const_scalar_ns(
f,
&const_bytes.value.inner().0,
&const_bytes.value.inner().1,
&const_bytes.value.inner().memory,
&const_bytes.value.inner().memory_map,
const_bytes.ty,
),
rustc_type_ir::ConstKind::Unevaluated(unev) => {
@ -1025,7 +1025,7 @@ fn render_const_scalar_inner<'db>(
ty.hir_fmt(f)
}
TyKind::Array(ty, len) => {
let Some(len) = consteval_nextsolver::try_const_usize(f.db, len) else {
let Some(len) = consteval::try_const_usize(f.db, len) else {
return f.write_str("<unknown-array-len>");
};
let Ok(layout) = f.db.layout_of_ty(ty, trait_env) else {
@ -1545,14 +1545,17 @@ impl<'db> HirDisplay for crate::next_solver::Ty<'db> {
never!("Only `impl Fn` is valid for displaying closures in source code");
}
}
let chalk_id: chalk_ir::ClosureId<_> = id.into();
match f.closure_style {
ClosureStyle::Hide => return write!(f, "{TYPE_HINT_TRUNCATION}"),
ClosureStyle::ClosureWithId => {
return write!(f, "{{closure#{:?}}}", chalk_id.0.index());
return write!(
f,
"{{closure#{:?}}}",
salsa::plumbing::AsId::as_id(&id).index()
);
}
ClosureStyle::ClosureWithSubst => {
write!(f, "{{closure#{:?}}}", chalk_id.0.index())?;
write!(f, "{{closure#{:?}}}", salsa::plumbing::AsId::as_id(&id).index())?;
return hir_fmt_generics(f, substs.as_slice(Interner), None, None);
}
_ => (),
@ -1561,7 +1564,7 @@ impl<'db> HirDisplay for crate::next_solver::Ty<'db> {
if let Some(sig) = sig {
let InternedClosure(def, _) = db.lookup_intern_closure(id);
let infer = db.infer(def);
let (_, kind) = infer.closure_info(&chalk_id);
let (_, kind) = infer.closure_info(id);
match f.closure_style {
ClosureStyle::ImplFn => write!(f, "impl {kind:?}(")?,
ClosureStyle::RANotation => write!(f, "|")?,

View file

@ -1,18 +1,20 @@
//! Utilities for computing drop info about types.
use chalk_ir::cast::Cast;
use hir_def::AdtId;
use hir_def::lang_item::LangItem;
use hir_def::signatures::StructFlags;
use hir_def::{AdtId, lang_item::LangItem, signatures::StructFlags};
use rustc_hash::FxHashSet;
use rustc_type_ir::inherent::{AdtDef, IntoKind, SliceLike};
use stdx::never;
use triomphe::Arc;
use crate::next_solver::DbInterner;
use crate::next_solver::mapping::NextSolverToChalk;
use crate::{
AliasTy, Canonical, CanonicalVarKinds, ConcreteConst, ConstScalar, ConstValue, InEnvironment,
Interner, ProjectionTy, TraitEnvironment, Ty, TyBuilder, TyKind, db::HirDatabase,
TraitEnvironment, consteval,
db::HirDatabase,
method_resolution::TyFingerprint,
next_solver::{
Ty, TyKind,
infer::{InferCtxt, traits::ObligationCause},
obligation_ctxt::ObligationCtxt,
},
};
fn has_destructor(db: &dyn HirDatabase, adt: AdtId) -> bool {
@ -45,27 +47,52 @@ pub enum DropGlue {
HasDropGlue,
}
pub(crate) fn has_drop_glue(
db: &dyn HirDatabase,
ty: Ty,
env: Arc<TraitEnvironment<'_>>,
pub fn has_drop_glue<'db>(
infcx: &InferCtxt<'db>,
ty: Ty<'db>,
env: Arc<TraitEnvironment<'db>>,
) -> DropGlue {
match ty.kind(Interner) {
TyKind::Adt(adt, subst) => {
if has_destructor(db, adt.0) {
has_drop_glue_impl(infcx, ty, env, &mut FxHashSet::default())
}
fn has_drop_glue_impl<'db>(
infcx: &InferCtxt<'db>,
ty: Ty<'db>,
env: Arc<TraitEnvironment<'db>>,
visited: &mut FxHashSet<Ty<'db>>,
) -> DropGlue {
let mut ocx = ObligationCtxt::new(infcx);
let ty = ocx.structurally_normalize_ty(&ObligationCause::dummy(), env.env, ty).unwrap_or(ty);
if !visited.insert(ty) {
// Recursive type.
return DropGlue::None;
}
let db = infcx.interner.db;
match ty.kind() {
TyKind::Adt(adt_def, subst) => {
let adt_id = adt_def.def_id().0;
if has_destructor(db, adt_id) {
return DropGlue::HasDropGlue;
}
match adt.0 {
match adt_id {
AdtId::StructId(id) => {
if db.struct_signature(id).flags.contains(StructFlags::IS_MANUALLY_DROP) {
if db
.struct_signature(id)
.flags
.intersects(StructFlags::IS_MANUALLY_DROP | StructFlags::IS_PHANTOM_DATA)
{
return DropGlue::None;
}
db.field_types(id.into())
db.field_types_ns(id.into())
.iter()
.map(|(_, field_ty)| {
db.has_drop_glue(
field_ty.clone().substitute(Interner, subst),
has_drop_glue_impl(
infcx,
field_ty.instantiate(infcx.interner, subst),
env.clone(),
visited,
)
})
.max()
@ -78,12 +105,14 @@ pub(crate) fn has_drop_glue(
.variants
.iter()
.map(|&(variant, _, _)| {
db.field_types(variant.into())
db.field_types_ns(variant.into())
.iter()
.map(|(_, field_ty)| {
db.has_drop_glue(
field_ty.clone().substitute(Interner, subst),
has_drop_glue_impl(
infcx,
field_ty.instantiate(infcx.interner, subst),
env.clone(),
visited,
)
})
.max()
@ -93,116 +122,70 @@ pub(crate) fn has_drop_glue(
.unwrap_or(DropGlue::None),
}
}
TyKind::Tuple(_, subst) => subst
.iter(Interner)
.map(|ty| ty.assert_ty_ref(Interner))
.map(|ty| db.has_drop_glue(ty.clone(), env.clone()))
TyKind::Tuple(tys) => tys
.iter()
.map(|ty| has_drop_glue_impl(infcx, ty, env.clone(), visited))
.max()
.unwrap_or(DropGlue::None),
TyKind::Array(ty, len) => {
if let ConstValue::Concrete(ConcreteConst { interned: ConstScalar::Bytes(len, _) }) =
&len.data(Interner).value
{
match (&**len).try_into() {
Ok(len) => {
let len = usize::from_le_bytes(len);
if len == 0 {
// Arrays of size 0 don't have drop glue.
return DropGlue::None;
}
}
Err(_) => {
never!("const array size with non-usize len");
}
}
if consteval::try_const_usize(db, len) == Some(0) {
// Arrays of size 0 don't have drop glue.
return DropGlue::None;
}
db.has_drop_glue(ty.clone(), env)
has_drop_glue_impl(infcx, ty, env, visited)
}
TyKind::Slice(ty) => db.has_drop_glue(ty.clone(), env),
TyKind::Slice(ty) => has_drop_glue_impl(infcx, ty, env, visited),
TyKind::Closure(closure_id, subst) => {
let owner = db.lookup_intern_closure((*closure_id).into()).0;
let owner = db.lookup_intern_closure(closure_id.0).0;
let infer = db.infer(owner);
let (captures, _) = infer.closure_info(closure_id);
let (captures, _) = infer.closure_info(closure_id.0);
let env = db.trait_environment_for_body(owner);
captures
.iter()
.map(|capture| db.has_drop_glue(capture.ty(db, subst), env.clone()))
.map(|capture| {
has_drop_glue_impl(infcx, capture.ty(db, subst), env.clone(), visited)
})
.max()
.unwrap_or(DropGlue::None)
}
// FIXME: Handle coroutines.
TyKind::Coroutine(..) | TyKind::CoroutineWitness(..) => DropGlue::None,
TyKind::Coroutine(..) | TyKind::CoroutineWitness(..) | TyKind::CoroutineClosure(..) => {
DropGlue::None
}
TyKind::Ref(..)
| TyKind::Raw(..)
| TyKind::RawPtr(..)
| TyKind::FnDef(..)
| TyKind::Str
| TyKind::Never
| TyKind::Scalar(_)
| TyKind::Function(_)
| TyKind::Bool
| TyKind::Char
| TyKind::Int(_)
| TyKind::Uint(_)
| TyKind::Float(_)
| TyKind::FnPtr(..)
| TyKind::Foreign(_)
| TyKind::Error => DropGlue::None,
TyKind::Dyn(_) => DropGlue::HasDropGlue,
TyKind::AssociatedType(assoc_type_id, subst) => projection_has_drop_glue(
db,
env,
ProjectionTy { associated_ty_id: *assoc_type_id, substitution: subst.clone() },
ty,
),
TyKind::Alias(AliasTy::Projection(projection)) => {
projection_has_drop_glue(db, env, projection.clone(), ty)
}
TyKind::OpaqueType(..) | TyKind::Alias(AliasTy::Opaque(_)) => {
if is_copy(db, ty, env) {
| TyKind::Error(_)
| TyKind::Bound(..)
| TyKind::Placeholder(..) => DropGlue::None,
TyKind::Dynamic(..) => DropGlue::HasDropGlue,
TyKind::Alias(..) => {
if infcx.type_is_copy_modulo_regions(env.env, ty) {
DropGlue::None
} else {
DropGlue::HasDropGlue
}
}
TyKind::Placeholder(_) | TyKind::BoundVar(_) => {
if is_copy(db, ty, env) {
TyKind::Param(_) => {
if infcx.type_is_copy_modulo_regions(env.env, ty) {
DropGlue::None
} else {
DropGlue::DependOnParams
}
}
TyKind::InferenceVar(..) => unreachable!("inference vars shouldn't exist out of inference"),
}
}
fn projection_has_drop_glue(
db: &dyn HirDatabase,
env: Arc<TraitEnvironment<'_>>,
projection: ProjectionTy,
ty: Ty,
) -> DropGlue {
let normalized = db.normalize_projection(projection, env.clone());
match normalized.kind(Interner) {
TyKind::Alias(AliasTy::Projection(_)) | TyKind::AssociatedType(..) => {
if is_copy(db, ty, env) { DropGlue::None } else { DropGlue::DependOnParams }
TyKind::Infer(..) => unreachable!("inference vars shouldn't exist out of inference"),
TyKind::Pat(..) | TyKind::UnsafeBinder(..) => {
never!("we do not handle pattern and unsafe binder types");
DropGlue::None
}
_ => db.has_drop_glue(normalized, env),
}
}
fn is_copy(db: &dyn HirDatabase, ty: Ty, env: Arc<TraitEnvironment<'_>>) -> bool {
let Some(copy_trait) = LangItem::Copy.resolve_trait(db, env.krate) else {
return false;
};
let trait_ref = TyBuilder::trait_ref(db, copy_trait).push(ty).build();
let goal = Canonical {
value: InEnvironment::new(
&env.env.to_chalk(DbInterner::new_with(db, Some(env.krate), env.block)),
trait_ref.cast(Interner),
),
binders: CanonicalVarKinds::empty(Interner),
};
db.trait_solve(env.krate, env.block, goal).certain()
}
pub(crate) fn has_drop_glue_cycle_result(
_db: &dyn HirDatabase,
_ty: Ty,
_env: Arc<TraitEnvironment<'_>>,
) -> DropGlue {
DropGlue::None
}

View file

@ -57,7 +57,7 @@ fn check_dyn_compatibility<'a>(
};
let mut osvs = FxHashSet::default();
let db = &db;
salsa::attach(db, || {
crate::attach_db(db, || {
_ = dyn_compatibility_with_callback(db, trait_id, &mut |osv| {
osvs.insert(match osv {
DynCompatibilityViolation::SizedSelf => SizedSelf,

File diff suppressed because it is too large Load diff

View file

@ -2,6 +2,8 @@
use std::iter;
use rustc_ast_ir::Mutability;
use crate::{
Adjust, Adjustment, OverloadedDeref,
autoderef::{Autoderef, AutoderefKind},
@ -9,7 +11,6 @@ use crate::{
next_solver::{
Ty,
infer::{InferOk, traits::PredicateObligations},
mapping::NextSolverToChalk,
},
};
@ -21,12 +22,12 @@ impl<'db> InferenceTable<'db> {
impl<'db> Autoderef<'_, 'db> {
/// Returns the adjustment steps.
pub(crate) fn adjust_steps(mut self) -> Vec<Adjustment> {
pub(crate) fn adjust_steps(mut self) -> Vec<Adjustment<'db>> {
let infer_ok = self.adjust_steps_as_infer_ok();
self.table.register_infer_ok(infer_ok)
}
pub(crate) fn adjust_steps_as_infer_ok(&mut self) -> InferOk<'db, Vec<Adjustment>> {
pub(crate) fn adjust_steps_as_infer_ok(&mut self) -> InferOk<'db, Vec<Adjustment<'db>>> {
let steps = self.steps();
if steps.is_empty() {
return InferOk { obligations: PredicateObligations::new(), value: vec![] };
@ -37,16 +38,13 @@ impl<'db> Autoderef<'_, 'db> {
.iter()
.map(|&(_source, kind)| {
if let AutoderefKind::Overloaded = kind {
Some(OverloadedDeref(Some(chalk_ir::Mutability::Not)))
Some(OverloadedDeref(Some(Mutability::Not)))
} else {
None
}
})
.zip(targets)
.map(|(autoderef, target)| Adjustment {
kind: Adjust::Deref(autoderef),
target: target.to_chalk(self.table.interner),
})
.map(|(autoderef, target)| Adjustment { kind: Adjust::Deref(autoderef), target })
.collect();
InferOk { obligations: self.take_obligations(), value: steps }

View file

@ -1,17 +1,18 @@
//! Type cast logic. Basically coercion + additional casts.
use chalk_ir::{Mutability, Scalar, TyVariableKind, UintTy};
use hir_def::{AdtId, hir::ExprId, signatures::TraitFlags};
use rustc_ast_ir::Mutability;
use rustc_type_ir::{
Flags, InferTy, TypeFlags, UintTy,
inherent::{AdtDef, BoundExistentialPredicates as _, IntoKind, SliceLike, Ty as _},
};
use stdx::never;
use crate::infer::coerce::CoerceNever;
use crate::{
Binders, DynTy, InferenceDiagnostic, Interner, PlaceholderIndex, QuantifiedWhereClauses, Ty,
TyExt, TyKind, TypeFlags, WhereClause,
InferenceDiagnostic,
db::HirDatabase,
from_chalk_trait_id,
infer::{AllowTwoPhase, InferenceContext},
next_solver::mapping::ChalkToNextSolver,
infer::{AllowTwoPhase, InferenceContext, coerce::CoerceNever},
next_solver::{BoundExistentialPredicates, DbInterner, ParamTy, Ty, TyKind},
};
#[derive(Debug)]
@ -25,24 +26,24 @@ pub(crate) enum Int {
}
#[derive(Debug)]
pub(crate) enum CastTy {
pub(crate) enum CastTy<'db> {
Int(Int),
Float,
FnPtr,
Ptr(Ty, Mutability),
Ptr(Ty<'db>, Mutability),
// `DynStar` is Not supported yet in r-a
}
impl CastTy {
pub(crate) fn from_ty(db: &dyn HirDatabase, t: &Ty) -> Option<Self> {
match t.kind(Interner) {
TyKind::Scalar(Scalar::Bool) => Some(Self::Int(Int::Bool)),
TyKind::Scalar(Scalar::Char) => Some(Self::Int(Int::Char)),
TyKind::Scalar(Scalar::Int(_)) => Some(Self::Int(Int::I)),
TyKind::Scalar(Scalar::Uint(it)) => Some(Self::Int(Int::U(*it))),
TyKind::InferenceVar(_, TyVariableKind::Integer) => Some(Self::Int(Int::InferenceVar)),
TyKind::InferenceVar(_, TyVariableKind::Float) => Some(Self::Float),
TyKind::Scalar(Scalar::Float(_)) => Some(Self::Float),
impl<'db> CastTy<'db> {
pub(crate) fn from_ty(db: &dyn HirDatabase, t: Ty<'db>) -> Option<Self> {
match t.kind() {
TyKind::Bool => Some(Self::Int(Int::Bool)),
TyKind::Char => Some(Self::Int(Int::Char)),
TyKind::Int(_) => Some(Self::Int(Int::I)),
TyKind::Uint(it) => Some(Self::Int(Int::U(it))),
TyKind::Infer(InferTy::IntVar(_)) => Some(Self::Int(Int::InferenceVar)),
TyKind::Infer(InferTy::FloatVar(_)) => Some(Self::Float),
TyKind::Float(_) => Some(Self::Float),
TyKind::Adt(..) => {
let (AdtId::EnumId(id), _) = t.as_adt()? else {
return None;
@ -50,8 +51,8 @@ impl CastTy {
let enum_data = id.enum_variants(db);
if enum_data.is_payload_free(db) { Some(Self::Int(Int::CEnum)) } else { None }
}
TyKind::Raw(m, ty) => Some(Self::Ptr(ty.clone(), *m)),
TyKind::Function(_) => Some(Self::FnPtr),
TyKind::RawPtr(ty, m) => Some(Self::Ptr(ty, m)),
TyKind::FnPtr(..) => Some(Self::FnPtr),
_ => None,
}
}
@ -77,37 +78,47 @@ pub enum CastError {
}
impl CastError {
fn into_diagnostic(self, expr: ExprId, expr_ty: Ty, cast_ty: Ty) -> InferenceDiagnostic {
fn into_diagnostic<'db>(
self,
expr: ExprId,
expr_ty: Ty<'db>,
cast_ty: Ty<'db>,
) -> InferenceDiagnostic<'db> {
InferenceDiagnostic::InvalidCast { expr, error: self, expr_ty, cast_ty }
}
}
#[derive(Clone, Debug)]
pub(super) struct CastCheck {
pub(super) struct CastCheck<'db> {
expr: ExprId,
source_expr: ExprId,
expr_ty: Ty,
cast_ty: Ty,
expr_ty: Ty<'db>,
cast_ty: Ty<'db>,
}
impl CastCheck {
pub(super) fn new(expr: ExprId, source_expr: ExprId, expr_ty: Ty, cast_ty: Ty) -> Self {
impl<'db> CastCheck<'db> {
pub(super) fn new(
expr: ExprId,
source_expr: ExprId,
expr_ty: Ty<'db>,
cast_ty: Ty<'db>,
) -> Self {
Self { expr, source_expr, expr_ty, cast_ty }
}
pub(super) fn check(
&mut self,
ctx: &mut InferenceContext<'_>,
) -> Result<(), InferenceDiagnostic> {
self.expr_ty = ctx.table.eagerly_normalize_and_resolve_shallow_in(self.expr_ty.clone());
self.cast_ty = ctx.table.eagerly_normalize_and_resolve_shallow_in(self.cast_ty.clone());
ctx: &mut InferenceContext<'_, 'db>,
) -> Result<(), InferenceDiagnostic<'db>> {
self.expr_ty = ctx.table.eagerly_normalize_and_resolve_shallow_in(self.expr_ty);
self.cast_ty = ctx.table.eagerly_normalize_and_resolve_shallow_in(self.cast_ty);
// This should always come first so that we apply the coercion, which impacts infer vars.
if ctx
.coerce(
self.source_expr.into(),
self.expr_ty.to_nextsolver(ctx.table.interner),
self.cast_ty.to_nextsolver(ctx.table.interner),
self.expr_ty,
self.cast_ty,
AllowTwoPhase::No,
CoerceNever::Yes,
)
@ -117,83 +128,82 @@ impl CastCheck {
return Ok(());
}
if self.expr_ty.contains_unknown() || self.cast_ty.contains_unknown() {
if self.expr_ty.references_non_lt_error() || self.cast_ty.references_non_lt_error() {
return Ok(());
}
if !self.cast_ty.data(Interner).flags.contains(TypeFlags::HAS_TY_INFER)
&& !ctx.table.is_sized(&self.cast_ty)
if !self.cast_ty.flags().contains(TypeFlags::HAS_TY_INFER)
&& !ctx.table.is_sized(self.cast_ty)
{
return Err(InferenceDiagnostic::CastToUnsized {
expr: self.expr,
cast_ty: self.cast_ty.clone(),
cast_ty: self.cast_ty,
});
}
// Chalk doesn't support trait upcasting and fails to solve some obvious goals
// when the trait environment contains some recursive traits (See issue #18047)
// We skip cast checks for such cases for now, until the next-gen solver.
if contains_dyn_trait(&self.cast_ty) {
if contains_dyn_trait(self.cast_ty) {
return Ok(());
}
self.do_check(ctx)
.map_err(|e| e.into_diagnostic(self.expr, self.expr_ty.clone(), self.cast_ty.clone()))
self.do_check(ctx).map_err(|e| e.into_diagnostic(self.expr, self.expr_ty, self.cast_ty))
}
fn do_check(&self, ctx: &mut InferenceContext<'_>) -> Result<(), CastError> {
let (t_from, t_cast) = match (
CastTy::from_ty(ctx.db, &self.expr_ty),
CastTy::from_ty(ctx.db, &self.cast_ty),
) {
(Some(t_from), Some(t_cast)) => (t_from, t_cast),
(None, Some(t_cast)) => match self.expr_ty.kind(Interner) {
TyKind::FnDef(..) => {
let sig = self.expr_ty.callable_sig(ctx.db).expect("FnDef had no sig");
let sig = ctx.table.eagerly_normalize_and_resolve_shallow_in(sig);
let fn_ptr = TyKind::Function(sig.to_fn_ptr()).intern(Interner);
if ctx
.coerce(
self.source_expr.into(),
self.expr_ty.to_nextsolver(ctx.table.interner),
fn_ptr.to_nextsolver(ctx.table.interner),
AllowTwoPhase::No,
CoerceNever::Yes,
)
.is_ok()
{
} else {
return Err(CastError::IllegalCast);
}
(CastTy::FnPtr, t_cast)
}
TyKind::Ref(mutbl, _, inner_ty) => {
return match t_cast {
CastTy::Int(_) | CastTy::Float => match inner_ty.kind(Interner) {
TyKind::Scalar(Scalar::Int(_) | Scalar::Uint(_) | Scalar::Float(_))
| TyKind::InferenceVar(
_,
TyVariableKind::Integer | TyVariableKind::Float,
) => Err(CastError::NeedDeref),
_ => Err(CastError::NeedViaPtr),
},
// array-ptr-cast
CastTy::Ptr(t, m) => {
let t = ctx.table.eagerly_normalize_and_resolve_shallow_in(t);
if !ctx.table.is_sized(&t) {
return Err(CastError::IllegalCast);
}
self.check_ref_cast(ctx, inner_ty, *mutbl, &t, m)
fn do_check(&self, ctx: &mut InferenceContext<'_, 'db>) -> Result<(), CastError> {
let (t_from, t_cast) =
match (CastTy::from_ty(ctx.db, self.expr_ty), CastTy::from_ty(ctx.db, self.cast_ty)) {
(Some(t_from), Some(t_cast)) => (t_from, t_cast),
(None, Some(t_cast)) => match self.expr_ty.kind() {
TyKind::FnDef(..) => {
let sig =
self.expr_ty.callable_sig(ctx.interner()).expect("FnDef had no sig");
let sig = ctx.table.eagerly_normalize_and_resolve_shallow_in(sig);
let fn_ptr = Ty::new_fn_ptr(ctx.interner(), sig);
if ctx
.coerce(
self.source_expr.into(),
self.expr_ty,
fn_ptr,
AllowTwoPhase::No,
CoerceNever::Yes,
)
.is_ok()
{
} else {
return Err(CastError::IllegalCast);
}
_ => Err(CastError::NonScalar),
};
}
(CastTy::FnPtr, t_cast)
}
TyKind::Ref(_, inner_ty, mutbl) => {
return match t_cast {
CastTy::Int(_) | CastTy::Float => match inner_ty.kind() {
TyKind::Int(_)
| TyKind::Uint(_)
| TyKind::Float(_)
| TyKind::Infer(InferTy::IntVar(_) | InferTy::FloatVar(_)) => {
Err(CastError::NeedDeref)
}
_ => Err(CastError::NeedViaPtr),
},
// array-ptr-cast
CastTy::Ptr(t, m) => {
let t = ctx.table.eagerly_normalize_and_resolve_shallow_in(t);
if !ctx.table.is_sized(t) {
return Err(CastError::IllegalCast);
}
self.check_ref_cast(ctx, inner_ty, mutbl, t, m)
}
_ => Err(CastError::NonScalar),
};
}
_ => return Err(CastError::NonScalar),
},
_ => return Err(CastError::NonScalar),
},
_ => return Err(CastError::NonScalar),
};
};
// rustc checks whether the `expr_ty` is foreign adt with `non_exhaustive` sym
@ -207,10 +217,10 @@ impl CastCheck {
}
(CastTy::Int(Int::Bool | Int::CEnum | Int::Char) | CastTy::Float, CastTy::Ptr(..))
| (CastTy::Ptr(..) | CastTy::FnPtr, CastTy::Float) => Err(CastError::IllegalCast),
(CastTy::Ptr(src, _), CastTy::Ptr(dst, _)) => self.check_ptr_ptr_cast(ctx, &src, &dst),
(CastTy::Ptr(src, _), CastTy::Int(_)) => self.check_ptr_addr_cast(ctx, &src),
(CastTy::Int(_), CastTy::Ptr(dst, _)) => self.check_addr_ptr_cast(ctx, &dst),
(CastTy::FnPtr, CastTy::Ptr(dst, _)) => self.check_fptr_ptr_cast(ctx, &dst),
(CastTy::Ptr(src, _), CastTy::Ptr(dst, _)) => self.check_ptr_ptr_cast(ctx, src, dst),
(CastTy::Ptr(src, _), CastTy::Int(_)) => self.check_ptr_addr_cast(ctx, src),
(CastTy::Int(_), CastTy::Ptr(dst, _)) => self.check_addr_ptr_cast(ctx, dst),
(CastTy::FnPtr, CastTy::Ptr(dst, _)) => self.check_fptr_ptr_cast(ctx, dst),
(CastTy::Int(Int::CEnum), CastTy::Int(_)) => Ok(()),
(CastTy::Int(Int::Char | Int::Bool), CastTy::Int(_)) => Ok(()),
(CastTy::Int(_) | CastTy::Float, CastTy::Int(_) | CastTy::Float) => Ok(()),
@ -220,23 +230,23 @@ impl CastCheck {
fn check_ref_cast(
&self,
ctx: &mut InferenceContext<'_>,
t_expr: &Ty,
ctx: &mut InferenceContext<'_, 'db>,
t_expr: Ty<'db>,
m_expr: Mutability,
t_cast: &Ty,
t_cast: Ty<'db>,
m_cast: Mutability,
) -> Result<(), CastError> {
// Mutability order is opposite to rustc. `Mut < Not`
if m_expr <= m_cast
&& let TyKind::Array(ety, _) = t_expr.kind(Interner)
&& let TyKind::Array(ety, _) = t_expr.kind()
{
// Coerce to a raw pointer so that we generate RawPtr in MIR.
let array_ptr_type = TyKind::Raw(m_expr, t_expr.clone()).intern(Interner);
let array_ptr_type = Ty::new_ptr(ctx.interner(), t_expr, m_expr);
if ctx
.coerce(
self.source_expr.into(),
self.expr_ty.to_nextsolver(ctx.table.interner),
array_ptr_type.to_nextsolver(ctx.table.interner),
self.expr_ty,
array_ptr_type,
AllowTwoPhase::No,
CoerceNever::Yes,
)
@ -253,13 +263,7 @@ impl CastCheck {
// This is a less strict condition than rustc's `demand_eqtype`,
// but false negative is better than false positive
if ctx
.coerce(
self.source_expr.into(),
ety.to_nextsolver(ctx.table.interner),
t_cast.to_nextsolver(ctx.table.interner),
AllowTwoPhase::No,
CoerceNever::Yes,
)
.coerce(self.source_expr.into(), ety, t_cast, AllowTwoPhase::No, CoerceNever::Yes)
.is_ok()
{
return Ok(());
@ -271,9 +275,9 @@ impl CastCheck {
fn check_ptr_ptr_cast(
&self,
ctx: &mut InferenceContext<'_>,
src: &Ty,
dst: &Ty,
ctx: &mut InferenceContext<'_, 'db>,
src: Ty<'db>,
dst: Ty<'db>,
) -> Result<(), CastError> {
let src_kind = pointer_kind(src, ctx).map_err(|_| CastError::Unknown)?;
let dst_kind = pointer_kind(dst, ctx).map_err(|_| CastError::Unknown)?;
@ -286,24 +290,13 @@ impl CastCheck {
(_, Some(PointerKind::Thin)) => Ok(()),
(Some(PointerKind::Thin), _) => Err(CastError::SizedUnsizedCast),
(Some(PointerKind::VTable(src_tty)), Some(PointerKind::VTable(dst_tty))) => {
let principal = |tty: &Binders<QuantifiedWhereClauses>| {
tty.skip_binders().as_slice(Interner).first().and_then(|pred| {
if let WhereClause::Implemented(tr) = pred.skip_binders() {
Some(tr.trait_id)
} else {
None
}
})
};
match (principal(&src_tty), principal(&dst_tty)) {
match (src_tty.principal_def_id(), dst_tty.principal_def_id()) {
(Some(src_principal), Some(dst_principal)) => {
if src_principal == dst_principal {
return Ok(());
}
let src_principal =
ctx.db.trait_signature(from_chalk_trait_id(src_principal));
let dst_principal =
ctx.db.trait_signature(from_chalk_trait_id(dst_principal));
let src_principal = ctx.db.trait_signature(src_principal.0);
let dst_principal = ctx.db.trait_signature(dst_principal.0);
if src_principal.flags.contains(TraitFlags::AUTO)
&& dst_principal.flags.contains(TraitFlags::AUTO)
{
@ -322,8 +315,8 @@ impl CastCheck {
fn check_ptr_addr_cast(
&self,
ctx: &mut InferenceContext<'_>,
expr_ty: &Ty,
ctx: &mut InferenceContext<'_, 'db>,
expr_ty: Ty<'db>,
) -> Result<(), CastError> {
match pointer_kind(expr_ty, ctx).map_err(|_| CastError::Unknown)? {
// None => Err(CastError::UnknownExprPtrKind),
@ -336,8 +329,8 @@ impl CastCheck {
fn check_addr_ptr_cast(
&self,
ctx: &mut InferenceContext<'_>,
cast_ty: &Ty,
ctx: &mut InferenceContext<'_, 'db>,
cast_ty: Ty<'db>,
) -> Result<(), CastError> {
match pointer_kind(cast_ty, ctx).map_err(|_| CastError::Unknown)? {
// None => Err(CastError::UnknownCastPtrKind),
@ -352,8 +345,8 @@ impl CastCheck {
fn check_fptr_ptr_cast(
&self,
ctx: &mut InferenceContext<'_>,
cast_ty: &Ty,
ctx: &mut InferenceContext<'_, 'db>,
cast_ty: Ty<'db>,
) -> Result<(), CastError> {
match pointer_kind(cast_ty, ctx).map_err(|_| CastError::Unknown)? {
// None => Err(CastError::UnknownCastPtrKind),
@ -366,30 +359,34 @@ impl CastCheck {
}
#[derive(Debug, PartialEq, Eq)]
enum PointerKind {
enum PointerKind<'db> {
// thin pointer
Thin,
// trait object
VTable(Binders<QuantifiedWhereClauses>),
VTable(BoundExistentialPredicates<'db>),
// slice
Length,
OfAlias,
OfParam(PlaceholderIndex),
OfParam(ParamTy),
Error,
}
fn pointer_kind(ty: &Ty, ctx: &mut InferenceContext<'_>) -> Result<Option<PointerKind>, ()> {
let ty = ctx.table.eagerly_normalize_and_resolve_shallow_in(ty.clone());
fn pointer_kind<'db>(
ty: Ty<'db>,
ctx: &mut InferenceContext<'_, 'db>,
) -> Result<Option<PointerKind<'db>>, ()> {
let ty = ctx.table.eagerly_normalize_and_resolve_shallow_in(ty);
if ctx.table.is_sized(&ty) {
if ctx.table.is_sized(ty) {
return Ok(Some(PointerKind::Thin));
}
match ty.kind(Interner) {
match ty.kind() {
TyKind::Slice(_) | TyKind::Str => Ok(Some(PointerKind::Length)),
TyKind::Dyn(DynTy { bounds, .. }) => Ok(Some(PointerKind::VTable(bounds.clone()))),
TyKind::Adt(chalk_ir::AdtId(id), subst) => {
let AdtId::StructId(id) = *id else {
TyKind::Dynamic(bounds, _) => Ok(Some(PointerKind::VTable(bounds))),
TyKind::Adt(adt_def, subst) => {
let id = adt_def.def_id().0;
let AdtId::StructId(id) = id else {
never!("`{:?}` should be sized but is not?", ty);
return Err(());
};
@ -397,69 +394,63 @@ fn pointer_kind(ty: &Ty, ctx: &mut InferenceContext<'_>) -> Result<Option<Pointe
let struct_data = id.fields(ctx.db);
if let Some((last_field, _)) = struct_data.fields().iter().last() {
let last_field_ty =
ctx.db.field_types(id.into())[last_field].clone().substitute(Interner, subst);
pointer_kind(&last_field_ty, ctx)
ctx.db.field_types_ns(id.into())[last_field].instantiate(ctx.interner(), subst);
pointer_kind(last_field_ty, ctx)
} else {
Ok(Some(PointerKind::Thin))
}
}
TyKind::Tuple(_, subst) => {
match subst.iter(Interner).last().and_then(|arg| arg.ty(Interner)) {
None => Ok(Some(PointerKind::Thin)),
Some(ty) => pointer_kind(ty, ctx),
}
}
TyKind::Tuple(subst) => match subst.iter().next_back() {
None => Ok(Some(PointerKind::Thin)),
Some(ty) => pointer_kind(ty, ctx),
},
TyKind::Foreign(_) => Ok(Some(PointerKind::Thin)),
TyKind::Alias(_) | TyKind::AssociatedType(..) | TyKind::OpaqueType(..) => {
Ok(Some(PointerKind::OfAlias))
}
TyKind::Error => Ok(Some(PointerKind::Error)),
TyKind::Placeholder(idx) => Ok(Some(PointerKind::OfParam(*idx))),
TyKind::BoundVar(_) | TyKind::InferenceVar(..) => Ok(None),
TyKind::Scalar(_)
TyKind::Alias(..) => Ok(Some(PointerKind::OfAlias)),
TyKind::Error(_) => Ok(Some(PointerKind::Error)),
TyKind::Param(idx) => Ok(Some(PointerKind::OfParam(idx))),
TyKind::Bound(..) | TyKind::Placeholder(..) | TyKind::Infer(..) => Ok(None),
TyKind::Int(_)
| TyKind::Uint(_)
| TyKind::Float(_)
| TyKind::Bool
| TyKind::Char
| TyKind::Array(..)
| TyKind::CoroutineWitness(..)
| TyKind::Raw(..)
| TyKind::RawPtr(..)
| TyKind::Ref(..)
| TyKind::FnDef(..)
| TyKind::Function(_)
| TyKind::FnPtr(..)
| TyKind::Closure(..)
| TyKind::Coroutine(..)
| TyKind::CoroutineClosure(..)
| TyKind::Never => {
never!("`{:?}` should be sized but is not?", ty);
Err(())
}
TyKind::UnsafeBinder(..) | TyKind::Pat(..) => {
never!("we don't produce these types: {ty:?}");
Err(())
}
}
}
fn contains_dyn_trait(ty: &Ty) -> bool {
fn contains_dyn_trait<'db>(ty: Ty<'db>) -> bool {
use std::ops::ControlFlow;
use chalk_ir::{
DebruijnIndex,
visit::{TypeSuperVisitable, TypeVisitable, TypeVisitor},
};
use rustc_type_ir::{TypeSuperVisitable, TypeVisitable, TypeVisitor};
struct DynTraitVisitor;
impl TypeVisitor<Interner> for DynTraitVisitor {
type BreakTy = ();
impl<'db> TypeVisitor<DbInterner<'db>> for DynTraitVisitor {
type Result = ControlFlow<()>;
fn as_dyn(&mut self) -> &mut dyn TypeVisitor<Interner, BreakTy = Self::BreakTy> {
self
}
fn interner(&self) -> Interner {
Interner
}
fn visit_ty(&mut self, ty: &Ty, outer_binder: DebruijnIndex) -> ControlFlow<Self::BreakTy> {
match ty.kind(Interner) {
TyKind::Dyn(_) => ControlFlow::Break(()),
_ => ty.super_visit_with(self.as_dyn(), outer_binder),
fn visit_ty(&mut self, ty: Ty<'db>) -> ControlFlow<()> {
match ty.kind() {
TyKind::Dynamic(..) => ControlFlow::Break(()),
_ => ty.super_visit_with(self),
}
}
}
ty.visit_with(DynTraitVisitor.as_dyn(), DebruijnIndex::INNERMOST).is_break()
ty.visit_with(&mut DynTraitVisitor).is_break()
}

View file

@ -2,8 +2,7 @@
pub(crate) mod analysis;
use std::ops::ControlFlow;
use std::{iter, mem};
use std::{iter, mem, ops::ControlFlow};
use hir_def::{
TraitId,
@ -18,7 +17,6 @@ use rustc_type_ir::{
};
use tracing::debug;
use crate::traits::FnTrait;
use crate::{
FnAbi,
db::{InternedClosure, InternedCoroutine},
@ -31,25 +29,25 @@ use crate::{
BoundRegionConversionTime, DefineOpaqueTypes, InferOk, InferResult,
traits::{ObligationCause, PredicateObligations},
},
mapping::{ChalkToNextSolver, NextSolverToChalk},
util::explicit_item_bounds,
},
traits::FnTrait,
};
use super::{Expectation, InferenceContext};
#[derive(Debug)]
struct ClosureSignatures<'tcx> {
struct ClosureSignatures<'db> {
/// The signature users of the closure see.
bound_sig: PolyFnSig<'tcx>,
bound_sig: PolyFnSig<'db>,
/// The signature within the function body.
/// This mostly differs in the sense that lifetimes are now early bound and any
/// opaque types from the signature expectation are overridden in case there are
/// explicit hidden types written by the user in the closure signature.
liberated_sig: FnSig<'tcx>,
liberated_sig: FnSig<'db>,
}
impl<'db> InferenceContext<'db> {
impl<'db> InferenceContext<'_, 'db> {
pub(super) fn infer_closure(
&mut self,
body: ExprId,
@ -58,15 +56,13 @@ impl<'db> InferenceContext<'db> {
arg_types: &[Option<TypeRefId>],
closure_kind: ClosureKind,
tgt_expr: ExprId,
expected: &Expectation,
) -> crate::Ty {
expected: &Expectation<'db>,
) -> Ty<'db> {
assert_eq!(args.len(), arg_types.len());
let interner = self.table.interner;
let interner = self.interner();
let (expected_sig, expected_kind) = match expected.to_option(&mut self.table) {
Some(expected_ty) => {
self.deduce_closure_signature(expected_ty.to_nextsolver(interner), closure_kind)
}
Some(expected_ty) => self.deduce_closure_signature(expected_ty, closure_kind),
None => (None, None),
};
@ -79,10 +75,7 @@ impl<'db> InferenceContext<'db> {
let (id, ty, resume_yield_tys) = match closure_kind {
ClosureKind::Coroutine(_) => {
let yield_ty = self.table.next_ty_var();
let resume_ty = liberated_sig
.inputs()
.get(0)
.unwrap_or(self.result.standard_types.unit.to_nextsolver(interner));
let resume_ty = liberated_sig.inputs().get(0).unwrap_or(self.types.unit);
// FIXME: Infer the upvars later.
let parts = CoroutineArgsParts {
@ -102,11 +95,7 @@ impl<'db> InferenceContext<'db> {
CoroutineArgs::new(interner, parts).args,
);
(
None,
coroutine_ty,
Some((resume_ty.to_chalk(interner), yield_ty.to_chalk(interner))),
)
(None, coroutine_ty, Some((resume_ty, yield_ty)))
}
// FIXME(next-solver): `ClosureKind::Async` should really be a separate arm that creates a `CoroutineClosure`.
// But for now we treat it as a closure.
@ -115,7 +104,7 @@ impl<'db> InferenceContext<'db> {
match expected_kind {
Some(kind) => {
self.result.closure_info.insert(
closure_id.into(),
closure_id,
(
Vec::new(),
match kind {
@ -151,13 +140,13 @@ impl<'db> InferenceContext<'db> {
// Now go through the argument patterns
for (arg_pat, arg_ty) in args.iter().zip(bound_sig.skip_binder().inputs()) {
self.infer_top_pat(*arg_pat, &arg_ty.to_chalk(interner), None);
self.infer_top_pat(*arg_pat, arg_ty, None);
}
// FIXME: lift these out into a struct
let prev_diverges = mem::replace(&mut self.diverges, Diverges::Maybe);
let prev_closure = mem::replace(&mut self.current_closure, id);
let prev_ret_ty = mem::replace(&mut self.return_ty, body_ret_ty.to_chalk(interner));
let prev_ret_ty = mem::replace(&mut self.return_ty, body_ret_ty);
let prev_ret_coercion = self.return_coercion.replace(CoerceMany::new(body_ret_ty));
let prev_resume_yield_tys = mem::replace(&mut self.resume_yield_tys, resume_yield_tys);
@ -171,7 +160,7 @@ impl<'db> InferenceContext<'db> {
self.current_closure = prev_closure;
self.resume_yield_tys = prev_resume_yield_tys;
ty.to_chalk(interner)
ty
}
fn fn_trait_kind_from_def_id(&self, trait_id: TraitId) -> Option<rustc_type_ir::ClosureKind> {
@ -209,14 +198,13 @@ impl<'db> InferenceContext<'db> {
.deduce_closure_signature_from_predicates(
expected_ty,
closure_kind,
explicit_item_bounds(self.table.interner, def_id)
.iter_instantiated(self.table.interner, args)
explicit_item_bounds(self.interner(), def_id)
.iter_instantiated(self.interner(), args)
.map(|clause| clause.as_predicate()),
),
TyKind::Dynamic(object_type, ..) => {
let sig = object_type.projection_bounds().into_iter().find_map(|pb| {
let pb =
pb.with_self_ty(self.table.interner, Ty::new_unit(self.table.interner));
let pb = pb.with_self_ty(self.interner(), Ty::new_unit(self.interner()));
self.deduce_sig_from_projection(closure_kind, pb)
});
let kind = object_type
@ -226,7 +214,7 @@ impl<'db> InferenceContext<'db> {
}
TyKind::Infer(rustc_type_ir::TyVar(vid)) => self
.deduce_closure_signature_from_predicates(
Ty::new_var(self.table.interner, self.table.infer_ctxt.root_var(vid)),
Ty::new_var(self.interner(), self.table.infer_ctxt.root_var(vid)),
closure_kind,
self.table.obligations_for_self_ty(vid).into_iter().map(|obl| obl.predicate),
),
@ -251,7 +239,7 @@ impl<'db> InferenceContext<'db> {
let mut expected_kind = None;
for pred in rustc_type_ir::elaborate::elaborate(
self.table.interner,
self.interner(),
// Reverse the obligations here, since `elaborate_*` uses a stack,
// and we want to keep inference generally in the same order of
// the registered obligations.
@ -313,7 +301,7 @@ impl<'db> InferenceContext<'db> {
// even though the normalized form may not name `expected_ty`. However, this matches the existing
// behaviour of the old solver and would be technically a breaking change to fix.
let generalized_fnptr_sig = self.table.next_ty_var();
let inferred_fnptr_sig = Ty::new_fn_ptr(self.table.interner, inferred_sig);
let inferred_fnptr_sig = Ty::new_fn_ptr(self.interner(), inferred_sig);
// FIXME: Report diagnostics.
_ = self
.table
@ -326,7 +314,7 @@ impl<'db> InferenceContext<'db> {
self.table.infer_ctxt.resolve_vars_if_possible(generalized_fnptr_sig);
if resolved_sig.visit_with(&mut MentionsTy { expected_ty }).is_continue() {
expected_sig = Some(resolved_sig.fn_sig(self.table.interner));
expected_sig = Some(resolved_sig.fn_sig(self.interner()));
}
} else if inferred_sig.visit_with(&mut MentionsTy { expected_ty }).is_continue() {
expected_sig = inferred_sig;
@ -339,7 +327,7 @@ impl<'db> InferenceContext<'db> {
// many viable options, so pick the most restrictive.
let trait_def_id = match bound_predicate.skip_binder() {
PredicateKind::Clause(ClauseKind::Projection(data)) => {
Some(data.projection_term.trait_def_id(self.table.interner).0)
Some(data.projection_term.trait_def_id(self.interner()).0)
}
PredicateKind::Clause(ClauseKind::Trait(data)) => Some(data.def_id().0),
_ => None,
@ -427,7 +415,7 @@ impl<'db> InferenceContext<'db> {
let ret_param_ty = projection.skip_binder().term.expect_type();
debug!(?ret_param_ty);
let sig = projection.rebind(self.table.interner.mk_fn_sig(
let sig = projection.rebind(self.interner().mk_fn_sig(
input_tys,
ret_param_ty,
false,
@ -515,7 +503,7 @@ impl<'db> InferenceContext<'db> {
// that does not misuse a `FnSig` type, but that can be done separately.
let return_ty = return_ty.unwrap_or_else(|| self.table.next_ty_var());
let sig = projection.rebind(self.table.interner.mk_fn_sig(
let sig = projection.rebind(self.interner().mk_fn_sig(
input_tys,
return_ty,
false,
@ -619,7 +607,7 @@ impl<'db> InferenceContext<'db> {
// in this binder we are creating.
assert!(!expected_sig.skip_binder().has_vars_bound_above(rustc_type_ir::INNERMOST));
let bound_sig = expected_sig.map_bound(|sig| {
self.table.interner.mk_fn_sig(
self.interner().mk_fn_sig(
sig.inputs(),
sig.output(),
sig.c_variadic,
@ -631,7 +619,7 @@ impl<'db> InferenceContext<'db> {
// `deduce_expectations_from_expected_type` introduces
// late-bound lifetimes defined elsewhere, which we now
// anonymize away, so as not to confuse the user.
let bound_sig = self.table.interner.anonymize_bound_vars(bound_sig);
let bound_sig = self.interner().anonymize_bound_vars(bound_sig);
let closure_sigs = self.closure_sigs(bound_sig);
@ -723,7 +711,7 @@ impl<'db> InferenceContext<'db> {
.into_iter()
.map(|ty| table.infer_ctxt.resolve_vars_if_possible(ty));
expected_sigs.liberated_sig = table.interner.mk_fn_sig(
expected_sigs.liberated_sig = table.interner().mk_fn_sig(
inputs,
supplied_output_ty,
expected_sigs.liberated_sig.c_variadic,
@ -744,12 +732,12 @@ impl<'db> InferenceContext<'db> {
decl_inputs: &[Option<TypeRefId>],
decl_output: Option<TypeRefId>,
) -> PolyFnSig<'db> {
let interner = self.table.interner;
let interner = self.interner();
let supplied_return = match decl_output {
Some(output) => {
let output = self.make_body_ty(output);
self.process_user_written_ty(output).to_nextsolver(interner)
self.process_user_written_ty(output)
}
None => self.table.next_ty_var(),
};
@ -757,7 +745,7 @@ impl<'db> InferenceContext<'db> {
let supplied_arguments = decl_inputs.iter().map(|&input| match input {
Some(input) => {
let input = self.make_body_ty(input);
self.process_user_written_ty(input).to_nextsolver(interner)
self.process_user_written_ty(input)
}
None => self.table.next_ty_var(),
});
@ -779,7 +767,7 @@ impl<'db> InferenceContext<'db> {
decl_inputs: &[Option<TypeRefId>],
decl_output: Option<TypeRefId>,
) -> PolyFnSig<'db> {
let interner = self.table.interner;
let interner = self.interner();
let err_ty = Ty::new_error(interner, ErrorGuaranteed);
if let Some(output) = decl_output {

View file

@ -2,10 +2,6 @@
use std::{cmp, convert::Infallible, mem};
use chalk_ir::{
BoundVar, DebruijnIndex, Mutability, TyKind,
fold::{FallibleTypeFolder, TypeFoldable},
};
use either::Either;
use hir_def::{
DefWithBodyId, FieldId, HasModule, TupleFieldId, TupleId, VariantId,
@ -20,39 +16,37 @@ use hir_def::{
};
use hir_expand::name::Name;
use intern::sym;
use rustc_ast_ir::Mutability;
use rustc_hash::{FxHashMap, FxHashSet};
use rustc_type_ir::inherent::{IntoKind, SliceLike, Ty as _};
use smallvec::{SmallVec, smallvec};
use stdx::{format_to, never};
use syntax::utils::is_raw_identifier;
use crate::db::InternedClosureId;
use crate::infer::InferenceContext;
use crate::{
Adjust, Adjustment, Binders, BindingMode, ClosureId, Interner, Substitution, Ty, TyExt,
db::{HirDatabase, InternedClosure},
error_lifetime, from_placeholder_idx,
generics::Generics,
make_binders,
Adjust, Adjustment, BindingMode,
db::{HirDatabase, InternedClosure, InternedClosureId},
infer::InferenceContext,
mir::{BorrowKind, MirSpan, MutBorrowKind, ProjectionElem},
next_solver::{DbInterner, EarlyBinder, GenericArgs, Ty, TyKind},
traits::FnTrait,
utils,
};
// The below functions handle capture and closure kind (Fn, FnMut, ..)
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub(crate) struct HirPlace {
pub(crate) struct HirPlace<'db> {
pub(crate) local: BindingId,
pub(crate) projections: Vec<ProjectionElem<Infallible, Ty>>,
pub(crate) projections: Vec<ProjectionElem<Infallible, Ty<'db>>>,
}
impl HirPlace {
fn ty(&self, ctx: &mut InferenceContext<'_>) -> Ty {
let mut ty = ctx.table.resolve_completely(ctx.result[self.local].clone());
impl<'db> HirPlace<'db> {
fn ty(&self, ctx: &mut InferenceContext<'_, 'db>) -> Ty<'db> {
let mut ty = ctx.table.resolve_completely(ctx.result[self.local]);
for p in &self.projections {
ty = p.projected_ty(
&ctx.table.infer_ctxt,
ty,
ctx.db,
|_, _, _| {
unreachable!("Closure field only happens in MIR");
},
@ -86,8 +80,8 @@ pub enum CaptureKind {
}
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct CapturedItem {
pub(crate) place: HirPlace,
pub struct CapturedItem<'db> {
pub(crate) place: HirPlace<'db>,
pub(crate) kind: CaptureKind,
/// The inner vec is the stacks; the outer vec is for each capture reference.
///
@ -96,10 +90,10 @@ pub struct CapturedItem {
/// copy all captures of the inner closure to the outer closure, and then we may
/// truncate them, and we want the correct span to be reported.
span_stacks: SmallVec<[SmallVec<[MirSpan; 3]>; 3]>,
pub(crate) ty: Binders<Ty>,
pub(crate) ty: EarlyBinder<'db, Ty<'db>>,
}
impl CapturedItem {
impl<'db> CapturedItem<'db> {
pub fn local(&self) -> BindingId {
self.place.local
}
@ -109,8 +103,9 @@ impl CapturedItem {
self.place.projections.iter().any(|it| !matches!(it, ProjectionElem::Deref))
}
pub fn ty(&self, db: &dyn HirDatabase, subst: &Substitution) -> Ty {
self.ty.clone().substitute(Interner, &utils::ClosureSubst(subst).parent_subst(db))
pub fn ty(&self, db: &'db dyn HirDatabase, subst: GenericArgs<'db>) -> Ty<'db> {
let interner = DbInterner::new_with(db, None, None);
self.ty.instantiate(interner, subst.split_closure_args_untupled().parent_args)
}
pub fn kind(&self) -> CaptureKind {
@ -279,15 +274,15 @@ impl CapturedItem {
}
#[derive(Debug, Clone, PartialEq, Eq)]
pub(crate) struct CapturedItemWithoutTy {
pub(crate) place: HirPlace,
pub(crate) struct CapturedItemWithoutTy<'db> {
pub(crate) place: HirPlace<'db>,
pub(crate) kind: CaptureKind,
/// The inner vec is the stacks; the outer vec is for each capture reference.
pub(crate) span_stacks: SmallVec<[SmallVec<[MirSpan; 3]>; 3]>,
}
impl CapturedItemWithoutTy {
fn with_ty(self, ctx: &mut InferenceContext<'_>) -> CapturedItem {
impl<'db> CapturedItemWithoutTy<'db> {
fn with_ty(self, ctx: &mut InferenceContext<'_, 'db>) -> CapturedItem<'db> {
let ty = self.place.ty(ctx);
let ty = match &self.kind {
CaptureKind::ByValue => ty,
@ -296,66 +291,20 @@ impl CapturedItemWithoutTy {
BorrowKind::Mut { .. } => Mutability::Mut,
_ => Mutability::Not,
};
TyKind::Ref(m, error_lifetime(), ty).intern(Interner)
Ty::new_ref(ctx.interner(), ctx.types.re_error, ty, m)
}
};
return CapturedItem {
CapturedItem {
place: self.place,
kind: self.kind,
span_stacks: self.span_stacks,
ty: replace_placeholder_with_binder(ctx, ty),
};
fn replace_placeholder_with_binder(ctx: &mut InferenceContext<'_>, ty: Ty) -> Binders<Ty> {
struct Filler<'a> {
db: &'a dyn HirDatabase,
generics: &'a Generics,
}
impl FallibleTypeFolder<Interner> for Filler<'_> {
type Error = ();
fn as_dyn(&mut self) -> &mut dyn FallibleTypeFolder<Interner, Error = Self::Error> {
self
}
fn interner(&self) -> Interner {
Interner
}
fn try_fold_free_placeholder_const(
&mut self,
ty: chalk_ir::Ty<Interner>,
idx: chalk_ir::PlaceholderIndex,
outer_binder: DebruijnIndex,
) -> Result<chalk_ir::Const<Interner>, Self::Error> {
let x = from_placeholder_idx(self.db, idx).0;
let Some(idx) = self.generics.type_or_const_param_idx(x) else {
return Err(());
};
Ok(BoundVar::new(outer_binder, idx).to_const(Interner, ty))
}
fn try_fold_free_placeholder_ty(
&mut self,
idx: chalk_ir::PlaceholderIndex,
outer_binder: DebruijnIndex,
) -> std::result::Result<Ty, Self::Error> {
let x = from_placeholder_idx(self.db, idx).0;
let Some(idx) = self.generics.type_or_const_param_idx(x) else {
return Err(());
};
Ok(BoundVar::new(outer_binder, idx).to_ty(Interner))
}
}
let filler = &mut Filler { db: ctx.db, generics: ctx.generics() };
let result = ty.clone().try_fold_with(filler, DebruijnIndex::INNERMOST).unwrap_or(ty);
make_binders(ctx.db, filler.generics, result)
ty: EarlyBinder::bind(ty),
}
}
}
impl InferenceContext<'_> {
fn place_of_expr(&mut self, tgt_expr: ExprId) -> Option<HirPlace> {
impl<'db> InferenceContext<'_, 'db> {
fn place_of_expr(&mut self, tgt_expr: ExprId) -> Option<HirPlace<'db>> {
let r = self.place_of_expr_without_adjust(tgt_expr)?;
let adjustments =
self.result.expr_adjustments.get(&tgt_expr).map(|it| &**it).unwrap_or_default();
@ -363,7 +312,7 @@ impl InferenceContext<'_> {
}
/// Pushes the span into `current_capture_span_stack`, *without clearing it first*.
fn path_place(&mut self, path: &Path, id: ExprOrPatId) -> Option<HirPlace> {
fn path_place(&mut self, path: &Path, id: ExprOrPatId) -> Option<HirPlace<'db>> {
if path.type_anchor().is_some() {
return None;
}
@ -384,7 +333,7 @@ impl InferenceContext<'_> {
}
/// Changes `current_capture_span_stack` to contain the stack of spans for this expr.
fn place_of_expr_without_adjust(&mut self, tgt_expr: ExprId) -> Option<HirPlace> {
fn place_of_expr_without_adjust(&mut self, tgt_expr: ExprId) -> Option<HirPlace<'db>> {
self.current_capture_span_stack.clear();
match &self.body[tgt_expr] {
Expr::Path(p) => {
@ -403,8 +352,8 @@ impl InferenceContext<'_> {
}
Expr::UnaryOp { expr, op: UnaryOp::Deref } => {
if matches!(
self.expr_ty_after_adjustments(*expr).kind(Interner),
TyKind::Ref(..) | TyKind::Raw(..)
self.expr_ty_after_adjustments(*expr).kind(),
TyKind::Ref(..) | TyKind::RawPtr(..)
) {
let mut place = self.place_of_expr(*expr)?;
self.current_capture_span_stack.push(MirSpan::ExprId(tgt_expr));
@ -417,7 +366,7 @@ impl InferenceContext<'_> {
None
}
fn push_capture(&mut self, place: HirPlace, kind: CaptureKind) {
fn push_capture(&mut self, place: HirPlace<'db>, kind: CaptureKind) {
self.current_captures.push(CapturedItemWithoutTy {
place,
kind,
@ -425,7 +374,11 @@ impl InferenceContext<'_> {
});
}
fn truncate_capture_spans(&self, capture: &mut CapturedItemWithoutTy, mut truncate_to: usize) {
fn truncate_capture_spans(
&self,
capture: &mut CapturedItemWithoutTy<'db>,
mut truncate_to: usize,
) {
// The first span is the identifier, and it must always remain.
truncate_to += 1;
for span_stack in &mut capture.span_stacks {
@ -450,14 +403,14 @@ impl InferenceContext<'_> {
}
}
fn ref_expr(&mut self, expr: ExprId, place: Option<HirPlace>) {
fn ref_expr(&mut self, expr: ExprId, place: Option<HirPlace<'db>>) {
if let Some(place) = place {
self.add_capture(place, CaptureKind::ByRef(BorrowKind::Shared));
}
self.walk_expr(expr);
}
fn add_capture(&mut self, place: HirPlace, kind: CaptureKind) {
fn add_capture(&mut self, place: HirPlace<'db>, kind: CaptureKind) {
if self.is_upvar(&place) {
self.push_capture(place, kind);
}
@ -473,7 +426,7 @@ impl InferenceContext<'_> {
}
}
fn mutate_expr(&mut self, expr: ExprId, place: Option<HirPlace>) {
fn mutate_expr(&mut self, expr: ExprId, place: Option<HirPlace<'db>>) {
if let Some(place) = place {
self.add_capture(
place,
@ -490,7 +443,7 @@ impl InferenceContext<'_> {
self.walk_expr(expr);
}
fn consume_place(&mut self, place: HirPlace) {
fn consume_place(&mut self, place: HirPlace<'db>) {
if self.is_upvar(&place) {
let ty = place.ty(self);
let kind = if self.is_ty_copy(ty) {
@ -502,7 +455,7 @@ impl InferenceContext<'_> {
}
}
fn walk_expr_with_adjust(&mut self, tgt_expr: ExprId, adjustment: &[Adjustment]) {
fn walk_expr_with_adjust(&mut self, tgt_expr: ExprId, adjustment: &[Adjustment<'db>]) {
if let Some((last, rest)) = adjustment.split_last() {
match &last.kind {
Adjust::NeverToAny | Adjust::Deref(None) | Adjust::Pointer(_) => {
@ -523,7 +476,12 @@ impl InferenceContext<'_> {
}
}
fn ref_capture_with_adjusts(&mut self, m: Mutability, tgt_expr: ExprId, rest: &[Adjustment]) {
fn ref_capture_with_adjusts(
&mut self,
m: Mutability,
tgt_expr: ExprId,
rest: &[Adjustment<'db>],
) {
let capture_kind = match m {
Mutability::Mut => CaptureKind::ByRef(BorrowKind::Mut { kind: MutBorrowKind::Default }),
Mutability::Not => CaptureKind::ByRef(BorrowKind::Shared),
@ -652,8 +610,8 @@ impl InferenceContext<'_> {
Expr::Field { expr, name: _ } => self.select_from_expr(*expr),
Expr::UnaryOp { expr, op: UnaryOp::Deref } => {
if matches!(
self.expr_ty_after_adjustments(*expr).kind(Interner),
TyKind::Ref(..) | TyKind::Raw(..)
self.expr_ty_after_adjustments(*expr).kind(),
TyKind::Ref(..) | TyKind::RawPtr(..)
) {
self.select_from_expr(*expr);
} else if let Some((f, _)) = self.result.method_resolution(tgt_expr) {
@ -728,12 +686,12 @@ impl InferenceContext<'_> {
}
Expr::Closure { .. } => {
let ty = self.expr_ty(tgt_expr);
let TyKind::Closure(id, _) = ty.kind(Interner) else {
let TyKind::Closure(id, _) = ty.kind() else {
never!("closure type is always closure");
return;
};
let (captures, _) =
self.result.closure_info.get(id).expect(
self.result.closure_info.get(&id.0).expect(
"We sort closures, so we should always have data for inner closures",
);
let mut cc = mem::take(&mut self.current_captures);
@ -830,7 +788,7 @@ impl InferenceContext<'_> {
}
Pat::Bind { id, .. } => match self.result.binding_modes[p] {
crate::BindingMode::Move => {
if self.is_ty_copy(self.result.type_of_binding[*id].clone()) {
if self.is_ty_copy(self.result.type_of_binding[*id]) {
update_result(CaptureKind::ByRef(BorrowKind::Shared));
} else {
update_result(CaptureKind::ByValue);
@ -848,21 +806,21 @@ impl InferenceContext<'_> {
self.body.walk_pats_shallow(p, |p| self.walk_pat_inner(p, update_result, for_mut));
}
fn expr_ty(&self, expr: ExprId) -> Ty {
self.result[expr].clone()
fn expr_ty(&self, expr: ExprId) -> Ty<'db> {
self.result[expr]
}
fn expr_ty_after_adjustments(&self, e: ExprId) -> Ty {
fn expr_ty_after_adjustments(&self, e: ExprId) -> Ty<'db> {
let mut ty = None;
if let Some(it) = self.result.expr_adjustments.get(&e)
&& let Some(it) = it.last()
{
ty = Some(it.target.clone());
ty = Some(it.target);
}
ty.unwrap_or_else(|| self.expr_ty(e))
}
fn is_upvar(&self, place: &HirPlace) -> bool {
fn is_upvar(&self, place: &HirPlace<'db>) -> bool {
if let Some(c) = self.current_closure {
let InternedClosure(_, root) = self.db.lookup_intern_closure(c);
return self.body.is_binding_upvar(place.local, root);
@ -870,14 +828,20 @@ impl InferenceContext<'_> {
false
}
fn is_ty_copy(&mut self, ty: Ty) -> bool {
if let TyKind::Closure(id, _) = ty.kind(Interner) {
fn is_ty_copy(&mut self, ty: Ty<'db>) -> bool {
if let TyKind::Closure(id, _) = ty.kind() {
// FIXME: We handle closure as a special case, since chalk consider every closure as copy. We
// should probably let chalk know which closures are copy, but I don't know how doing it
// without creating query cycles.
return self.result.closure_info.get(id).map(|it| it.1 == FnTrait::Fn).unwrap_or(true);
return self
.result
.closure_info
.get(&id.0)
.map(|it| it.1 == FnTrait::Fn)
.unwrap_or(true);
}
self.table.resolve_completely(ty).is_copy(self.db, self.owner)
let ty = self.table.resolve_completely(ty);
self.table.type_is_copy_modulo_regions(ty)
}
fn select_from_expr(&mut self, expr: ExprId) {
@ -888,8 +852,8 @@ impl InferenceContext<'_> {
// FIXME: Borrow checker problems without this.
let mut current_captures = std::mem::take(&mut self.current_captures);
for capture in &mut current_captures {
let mut ty = self.table.resolve_completely(self.result[capture.place.local].clone());
if ty.as_raw_ptr().is_some() || ty.is_union() {
let mut ty = self.table.resolve_completely(self.result[capture.place.local]);
if ty.is_raw_ptr() || ty.is_union() {
capture.kind = CaptureKind::ByRef(BorrowKind::Shared);
self.truncate_capture_spans(capture, 0);
capture.place.projections.truncate(0);
@ -897,14 +861,14 @@ impl InferenceContext<'_> {
}
for (i, p) in capture.place.projections.iter().enumerate() {
ty = p.projected_ty(
&self.table.infer_ctxt,
ty,
self.db,
|_, _, _| {
unreachable!("Closure field only happens in MIR");
},
self.owner.module(self.db).krate(),
);
if ty.as_raw_ptr().is_some() || ty.is_union() {
if ty.is_raw_ptr() || ty.is_union() {
capture.kind = CaptureKind::ByRef(BorrowKind::Shared);
self.truncate_capture_spans(capture, i + 1);
capture.place.projections.truncate(i + 1);
@ -932,7 +896,7 @@ impl InferenceContext<'_> {
fn minimize_captures(&mut self) {
self.current_captures.sort_unstable_by_key(|it| it.place.projections.len());
let mut hash_map = FxHashMap::<HirPlace, usize>::default();
let mut hash_map = FxHashMap::<HirPlace<'db>, usize>::default();
let result = mem::take(&mut self.current_captures);
for mut item in result {
let mut lookup_place = HirPlace { local: item.place.local, projections: vec![] };
@ -967,7 +931,7 @@ impl InferenceContext<'_> {
}
}
fn consume_with_pat(&mut self, mut place: HirPlace, tgt_pat: PatId) {
fn consume_with_pat(&mut self, mut place: HirPlace<'db>, tgt_pat: PatId) {
let adjustments_count =
self.result.pat_adjustments.get(&tgt_pat).map(|it| it.len()).unwrap_or_default();
place.projections.extend((0..adjustments_count).map(|_| ProjectionElem::Deref));
@ -978,8 +942,8 @@ impl InferenceContext<'_> {
Pat::Missing | Pat::Wild => (),
Pat::Tuple { args, ellipsis } => {
let (al, ar) = args.split_at(ellipsis.map_or(args.len(), |it| it as usize));
let field_count = match self.result[tgt_pat].kind(Interner) {
TyKind::Tuple(_, s) => s.len(Interner),
let field_count = match self.result[tgt_pat].kind() {
TyKind::Tuple(s) => s.len(),
_ => break 'reset_span_stack,
};
let fields = 0..field_count;
@ -1125,9 +1089,9 @@ impl InferenceContext<'_> {
r
}
fn analyze_closure(&mut self, closure: ClosureId) -> FnTrait {
let InternedClosure(_, root) = self.db.lookup_intern_closure(closure.into());
self.current_closure = Some(closure.into());
fn analyze_closure(&mut self, closure: InternedClosureId) -> FnTrait {
let InternedClosure(_, root) = self.db.lookup_intern_closure(closure);
self.current_closure = Some(closure);
let Expr::Closure { body, capture_by, .. } = &self.body[root] else {
unreachable!("Closure expression id is always closure");
};
@ -1193,9 +1157,9 @@ impl InferenceContext<'_> {
self.result.expr_adjustments.remove(&callee).unwrap_or_default().into_vec();
self.write_fn_trait_method_resolution(
kind,
&derefed_callee,
derefed_callee,
&mut adjustments,
&callee_ty,
callee_ty,
&params,
expr,
);
@ -1213,27 +1177,26 @@ impl InferenceContext<'_> {
///
/// These dependencies are collected in the main inference. We do a topological sort in this function. It
/// will consume the `deferred_closures` field and return its content in a sorted vector.
fn sort_closures(&mut self) -> Vec<(ClosureId, Vec<(Ty, Ty, Vec<Ty>, ExprId)>)> {
fn sort_closures(
&mut self,
) -> Vec<(InternedClosureId, Vec<(Ty<'db>, Ty<'db>, Vec<Ty<'db>>, ExprId)>)> {
let mut deferred_closures = mem::take(&mut self.deferred_closures);
let mut dependents_count: FxHashMap<ClosureId, usize> =
deferred_closures.keys().map(|it| ((*it).into(), 0)).collect();
let mut dependents_count: FxHashMap<InternedClosureId, usize> =
deferred_closures.keys().map(|it| (*it, 0)).collect();
for deps in self.closure_dependencies.values() {
for dep in deps {
*dependents_count.entry((*dep).into()).or_default() += 1;
*dependents_count.entry(*dep).or_default() += 1;
}
}
let mut queue: Vec<_> = deferred_closures
.keys()
.copied()
.filter(|&it| dependents_count[&it.into()] == 0)
.collect();
let mut queue: Vec<_> =
deferred_closures.keys().copied().filter(|&it| dependents_count[&it] == 0).collect();
let mut result = vec![];
while let Some(it) = queue.pop() {
if let Some(d) = deferred_closures.remove(&it) {
result.push((it.into(), d));
result.push((it, d));
}
for &dep in self.closure_dependencies.get(&it).into_iter().flat_map(|it| it.iter()) {
let cnt = dependents_count.get_mut(&dep.into()).unwrap();
let cnt = dependents_count.get_mut(&dep).unwrap();
*cnt -= 1;
if *cnt == 0 {
queue.push(dep);
@ -1279,11 +1242,11 @@ impl InferenceContext<'_> {
}
/// Call this only when the last span in the stack isn't a split.
fn apply_adjusts_to_place(
fn apply_adjusts_to_place<'db>(
current_capture_span_stack: &mut Vec<MirSpan>,
mut r: HirPlace,
adjustments: &[Adjustment],
) -> Option<HirPlace> {
mut r: HirPlace<'db>,
adjustments: &[Adjustment<'db>],
) -> Option<HirPlace<'db>> {
let span = *current_capture_span_stack.last().expect("empty capture span stack");
for adj in adjustments {
match &adj.kind {

View file

@ -35,7 +35,6 @@
//! // and are then unable to coerce `&7i32` to `&mut i32`.
//! ```
use chalk_ir::cast::Cast;
use hir_def::{
CallableDefId,
hir::{ExprId, ExprOrPatId},
@ -45,29 +44,30 @@ use hir_def::{
use intern::sym;
use rustc_ast_ir::Mutability;
use rustc_type_ir::{
TypeAndMut,
BoundVar, TypeAndMut,
error::TypeError,
inherent::{IntoKind, Safety, Ty as _},
inherent::{Const as _, GenericArg as _, IntoKind, Safety, SliceLike, Ty as _},
};
use smallvec::{SmallVec, smallvec};
use tracing::{debug, instrument};
use triomphe::Arc;
use crate::{
Adjust, Adjustment, AutoBorrow, Interner, PointerCast, TargetFeatures, TraitEnvironment,
Adjust, Adjustment, AutoBorrow, PointerCast, TargetFeatures, TraitEnvironment,
autoderef::Autoderef,
db::{HirDatabase, InternedClosureId},
infer::{AllowTwoPhase, InferenceContext, TypeMismatch, unify::InferenceTable},
next_solver::{
Binder, CallableIdWrapper, ClauseKind, CoercePredicate, DbInterner, ErrorGuaranteed,
GenericArgs, PolyFnSig, PredicateKind, Region, SolverDefId, TraitRef, Ty, TyKind,
Binder, BoundConst, BoundRegion, BoundRegionKind, BoundTy, BoundTyKind, CallableIdWrapper,
Canonical, ClauseKind, CoercePredicate, Const, ConstKind, DbInterner, ErrorGuaranteed,
GenericArgs, PolyFnSig, PredicateKind, Region, RegionKind, SolverDefId, TraitRef, Ty,
TyKind,
infer::{
DefineOpaqueTypes, InferCtxt, InferOk, InferResult,
relate::RelateResult,
select::{ImplSource, SelectionError},
traits::{Obligation, ObligationCause, PredicateObligation, PredicateObligations},
},
mapping::{ChalkToNextSolver, NextSolverToChalk},
obligation_ctxt::ObligationCtxt,
},
utils::TargetFeatureIsSafeInTarget,
@ -93,7 +93,7 @@ struct Coerce<'a, 'b, 'db> {
cause: ObligationCause,
}
type CoerceResult<'db> = InferResult<'db, (Vec<Adjustment>, Ty<'db>)>;
type CoerceResult<'db> = InferResult<'db, (Vec<Adjustment<'db>>, Ty<'db>)>;
/// Coercing a mutable reference to an immutable works, while
/// coercing `&T` to `&mut T` should be forbidden.
@ -103,7 +103,7 @@ fn coerce_mutbls<'db>(from_mutbl: Mutability, to_mutbl: Mutability) -> RelateRes
/// This always returns `Ok(...)`.
fn success<'db>(
adj: Vec<Adjustment>,
adj: Vec<Adjustment<'db>>,
target: Ty<'db>,
obligations: PredicateObligations<'db>,
) -> CoerceResult<'db> {
@ -118,7 +118,7 @@ impl<'a, 'b, 'db> Coerce<'a, 'b, 'db> {
#[inline]
fn interner(&self) -> DbInterner<'db> {
self.table.interner
self.table.interner()
}
#[inline]
@ -182,17 +182,14 @@ impl<'a, 'b, 'db> Coerce<'a, 'b, 'db> {
&mut self,
a: Ty<'db>,
b: Ty<'db>,
adjustments: impl IntoIterator<Item = Adjustment>,
final_adjustment: Adjust,
adjustments: impl IntoIterator<Item = Adjustment<'db>>,
final_adjustment: Adjust<'db>,
) -> CoerceResult<'db> {
self.unify_raw(a, b).and_then(|InferOk { value: ty, obligations }| {
success(
adjustments
.into_iter()
.chain(std::iter::once(Adjustment {
target: ty.to_chalk(self.interner()),
kind: final_adjustment,
}))
.chain(std::iter::once(Adjustment { target: ty, kind: final_adjustment }))
.collect(),
ty,
obligations,
@ -216,10 +213,7 @@ impl<'a, 'b, 'db> Coerce<'a, 'b, 'db> {
if self.coerce_never {
return success(
vec![Adjustment {
kind: Adjust::NeverToAny,
target: b.to_chalk(self.interner()),
}],
vec![Adjustment { kind: Adjust::NeverToAny, target: b }],
b,
PredicateObligations::new(),
);
@ -241,10 +235,9 @@ impl<'a, 'b, 'db> Coerce<'a, 'b, 'db> {
&& let TyKind::Alias(rustc_type_ir::Opaque, opaque_ty) = b.kind()
&& let SolverDefId::InternedOpaqueTyId(opaque_ty_id) = opaque_ty.def_id
&& !matches!(a.kind(), TyKind::Infer(..) | TyKind::Alias(rustc_type_ir::Opaque, _))
&& let Some(ty) = tait_table.get(&opaque_ty_id.into())
&& let Some(ty) = tait_table.get(&opaque_ty_id)
{
b = ty.to_nextsolver(self.interner());
b = self.table.shallow_resolve(b);
b = self.table.shallow_resolve(*ty);
}
let b = b;
@ -474,7 +467,7 @@ impl<'a, 'b, 'db> Coerce<'a, 'b, 'db> {
r_borrow_var.unwrap()
};
let derefd_ty_a = Ty::new_ref(
autoderef.table.interner,
autoderef.table.interner(),
r,
referent_ty,
mutbl_b, // [1] above
@ -547,11 +540,8 @@ impl<'a, 'b, 'db> Coerce<'a, 'b, 'db> {
panic!("expected a ref type, got {:?}", ty);
};
adjustments.push(Adjustment {
kind: Adjust::Borrow(AutoBorrow::Ref(
region.to_chalk(self.interner()),
mutbl_b.to_chalk(self.interner()),
)),
target: ty.to_chalk(self.interner()),
kind: Adjust::Borrow(AutoBorrow::Ref(region, mutbl_b)),
target: ty,
});
debug!("coerce_borrowed_pointer: succeeded ty={:?} adjustments={:?}", ty, adjustments);
@ -655,20 +645,13 @@ impl<'a, 'b, 'db> Coerce<'a, 'b, 'db> {
// implementation. If it happens that this coercion is a function argument,
// the reborrow in coerce_borrowed_ptr will pick it up.
// let mutbl = AutoBorrowMutability::new(mutbl_b, AllowTwoPhase::No);
let mutbl = mutbl_b.to_chalk(self.interner());
let mutbl = mutbl_b;
Some((
Adjustment { kind: Adjust::Deref(None), target: ty_a },
Adjustment {
kind: Adjust::Deref(None),
target: ty_a.to_chalk(self.interner()),
},
Adjustment {
kind: Adjust::Borrow(AutoBorrow::Ref(
r_borrow.to_chalk(self.interner()),
mutbl,
)),
target: Ty::new_ref(self.interner(), r_borrow, ty_a, mutbl_b)
.to_chalk(self.interner()),
kind: Adjust::Borrow(AutoBorrow::Ref(r_borrow, mutbl)),
target: Ty::new_ref(self.interner(), r_borrow, ty_a, mutbl_b),
},
))
}
@ -676,20 +659,16 @@ impl<'a, 'b, 'db> Coerce<'a, 'b, 'db> {
coerce_mutbls(mt_a, mt_b)?;
Some((
Adjustment { kind: Adjust::Deref(None), target: ty_a },
Adjustment {
kind: Adjust::Deref(None),
target: ty_a.to_chalk(self.interner()),
},
Adjustment {
kind: Adjust::Borrow(AutoBorrow::RawPtr(mt_b.to_chalk(self.interner()))),
target: Ty::new_ptr(self.interner(), ty_a, mt_b).to_chalk(self.interner()),
kind: Adjust::Borrow(AutoBorrow::RawPtr(mt_b)),
target: Ty::new_ptr(self.interner(), ty_a, mt_b),
},
))
}
_ => None,
};
let coerce_source =
reborrow.as_ref().map_or(source, |(_, r)| r.target.to_nextsolver(self.interner()));
let coerce_source = reborrow.as_ref().map_or(source, |(_, r)| r.target);
// Setup either a subtyping or a LUB relationship between
// the `CoerceUnsized` target type and the expected type.
@ -834,7 +813,7 @@ impl<'a, 'b, 'db> Coerce<'a, 'b, 'db> {
&mut self,
fn_ty_a: PolyFnSig<'db>,
b: Ty<'db>,
adjustment: Option<Adjust>,
adjustment: Option<Adjust<'db>>,
) -> CoerceResult<'db> {
debug_assert!(self.table.shallow_resolve(b) == b);
@ -849,7 +828,7 @@ impl<'a, 'b, 'db> Coerce<'a, 'b, 'db> {
b,
adjustment.map(|kind| Adjustment {
kind,
target: Ty::new_fn_ptr(this.interner(), fn_ty_a).to_chalk(this.interner()),
target: Ty::new_fn_ptr(this.interner(), fn_ty_a),
}),
Adjust::Pointer(PointerCast::UnsafeFnPointer),
)
@ -961,9 +940,7 @@ impl<'a, 'b, 'db> Coerce<'a, 'b, 'db> {
pointer_ty,
b,
[],
Adjust::Pointer(PointerCast::ClosureFnPointer(
safety.to_chalk(self.interner()),
)),
Adjust::Pointer(PointerCast::ClosureFnPointer(safety)),
)
}
_ => self.unify(a, b),
@ -991,11 +968,8 @@ impl<'a, 'b, 'db> Coerce<'a, 'b, 'db> {
self.unify_and(
a_raw,
b,
[Adjustment {
kind: Adjust::Deref(None),
target: mt_a.ty.to_chalk(self.interner()),
}],
Adjust::Borrow(AutoBorrow::RawPtr(mutbl_b.to_chalk(self.interner()))),
[Adjustment { kind: Adjust::Deref(None), target: mt_a.ty }],
Adjust::Borrow(AutoBorrow::RawPtr(mutbl_b)),
)
} else if mt_a.mutbl != mutbl_b {
self.unify_and(a_raw, b, [], Adjust::Pointer(PointerCast::MutToConstPointer))
@ -1011,7 +985,7 @@ pub(crate) enum CoerceNever {
Yes,
}
impl<'db> InferenceContext<'db> {
impl<'db> InferenceContext<'_, 'db> {
/// Attempt to coerce an expression to a type, and return the
/// adjusted type of the expression, if successful.
/// Adjustments are only recorded if the coercion succeeded.
@ -1128,13 +1102,13 @@ impl<'db> InferenceContext<'db> {
// We have a LUB of prev_ty and new_ty, just return it.
Ok(ok) => return Ok(self.table.register_infer_ok(ok)),
Err(_) => (
Some(prev_ty.fn_sig(self.table.interner)),
Some(new_ty.fn_sig(self.table.interner)),
Some(prev_ty.fn_sig(self.table.interner())),
Some(new_ty.fn_sig(self.table.interner())),
),
}
}
(TyKind::Closure(_, args), TyKind::FnDef(..)) => {
let b_sig = new_ty.fn_sig(self.table.interner);
let b_sig = new_ty.fn_sig(self.table.interner());
let a_sig = args.closure_sig_untupled().map_bound(|mut sig| {
sig.safety = b_sig.safety();
sig
@ -1142,7 +1116,7 @@ impl<'db> InferenceContext<'db> {
(Some(a_sig), Some(b_sig))
}
(TyKind::FnDef(..), TyKind::Closure(_, args)) => {
let a_sig = prev_ty.fn_sig(self.table.interner);
let a_sig = prev_ty.fn_sig(self.table.interner());
let b_sig = args.closure_sig_untupled().map_bound(|mut sig| {
sig.safety = a_sig.safety();
sig
@ -1166,36 +1140,30 @@ impl<'db> InferenceContext<'db> {
.map(|ok| self.table.register_infer_ok(ok))?;
// Reify both sides and return the reified fn pointer type.
let fn_ptr = Ty::new_fn_ptr(self.table.interner, sig);
let fn_ptr = Ty::new_fn_ptr(self.table.interner(), sig);
let prev_adjustment = match prev_ty.kind() {
TyKind::Closure(..) => Adjust::Pointer(PointerCast::ClosureFnPointer(
a_sig.safety().to_chalk(self.table.interner),
)),
TyKind::Closure(..) => {
Adjust::Pointer(PointerCast::ClosureFnPointer(a_sig.safety()))
}
TyKind::FnDef(..) => Adjust::Pointer(PointerCast::ReifyFnPointer),
_ => panic!("should not try to coerce a {prev_ty:?} to a fn pointer"),
};
let next_adjustment = match new_ty.kind() {
TyKind::Closure(..) => Adjust::Pointer(PointerCast::ClosureFnPointer(
b_sig.safety().to_chalk(self.table.interner),
)),
TyKind::Closure(..) => {
Adjust::Pointer(PointerCast::ClosureFnPointer(b_sig.safety()))
}
TyKind::FnDef(..) => Adjust::Pointer(PointerCast::ReifyFnPointer),
_ => panic!("should not try to coerce a {new_ty:?} to a fn pointer"),
};
for &expr in exprs {
self.write_expr_adj(
expr,
Box::new([Adjustment {
kind: prev_adjustment.clone(),
target: fn_ptr.to_chalk(self.table.interner),
}]),
Box::new([Adjustment { kind: prev_adjustment.clone(), target: fn_ptr }]),
);
}
self.write_expr_adj(
new,
Box::new([Adjustment {
kind: next_adjustment,
target: fn_ptr.to_chalk(self.table.interner),
}]),
Box::new([Adjustment { kind: next_adjustment, target: fn_ptr }]),
);
return Ok(fn_ptr);
}
@ -1382,7 +1350,7 @@ impl<'db, 'exprs> CoerceMany<'db, 'exprs> {
/// if necessary.
pub(crate) fn coerce(
&mut self,
icx: &mut InferenceContext<'db>,
icx: &mut InferenceContext<'_, 'db>,
cause: &ObligationCause,
expression: ExprId,
expression_ty: Ty<'db>,
@ -1404,19 +1372,12 @@ impl<'db, 'exprs> CoerceMany<'db, 'exprs> {
/// removing a `;`).
pub(crate) fn coerce_forced_unit(
&mut self,
icx: &mut InferenceContext<'db>,
icx: &mut InferenceContext<'_, 'db>,
expr: ExprId,
cause: &ObligationCause,
label_unit_as_expected: bool,
) {
self.coerce_inner(
icx,
cause,
expr,
icx.result.standard_types.unit.to_nextsolver(icx.table.interner),
true,
label_unit_as_expected,
)
self.coerce_inner(icx, cause, expr, icx.types.unit, true, label_unit_as_expected)
}
/// The inner coercion "engine". If `expression` is `None`, this
@ -1424,7 +1385,7 @@ impl<'db, 'exprs> CoerceMany<'db, 'exprs> {
/// `Nil`.
pub(crate) fn coerce_inner(
&mut self,
icx: &mut InferenceContext<'db>,
icx: &mut InferenceContext<'_, 'db>,
cause: &ObligationCause,
expression: ExprId,
mut expression_ty: Ty<'db>,
@ -1533,20 +1494,14 @@ impl<'db, 'exprs> CoerceMany<'db, 'exprs> {
// emit or provide suggestions on how to fix the initial error.
icx.set_tainted_by_errors();
self.final_ty = Some(Ty::new_error(icx.table.interner, ErrorGuaranteed));
self.final_ty = Some(icx.types.error);
icx.result.type_mismatches.insert(
expression.into(),
if label_expression_as_expected {
TypeMismatch {
expected: found.to_chalk(icx.table.interner),
actual: expected.to_chalk(icx.table.interner),
}
TypeMismatch { expected: found, actual: expected }
} else {
TypeMismatch {
expected: expected.to_chalk(icx.table.interner),
actual: found.to_chalk(icx.table.interner),
}
TypeMismatch { expected, actual: found }
},
);
}
@ -1555,14 +1510,14 @@ impl<'db, 'exprs> CoerceMany<'db, 'exprs> {
self.pushed += 1;
}
pub(crate) fn complete(self, icx: &mut InferenceContext<'db>) -> Ty<'db> {
pub(crate) fn complete(self, icx: &mut InferenceContext<'_, 'db>) -> Ty<'db> {
if let Some(final_ty) = self.final_ty {
final_ty
} else {
// If we only had inputs that were of type `!` (or no
// inputs at all), then the final type is `!`.
assert_eq!(self.pushed, 0);
icx.result.standard_types.never.to_nextsolver(icx.table.interner)
icx.types.never
}
}
}
@ -1570,7 +1525,7 @@ impl<'db, 'exprs> CoerceMany<'db, 'exprs> {
pub fn could_coerce<'db>(
db: &'db dyn HirDatabase,
env: Arc<TraitEnvironment<'db>>,
tys: &crate::Canonical<(crate::Ty, crate::Ty)>,
tys: &Canonical<'db, (Ty<'db>, Ty<'db>)>,
) -> bool {
coerce(db, env, tys).is_ok()
}
@ -1578,12 +1533,11 @@ pub fn could_coerce<'db>(
fn coerce<'db>(
db: &'db dyn HirDatabase,
env: Arc<TraitEnvironment<'db>>,
tys: &crate::Canonical<(crate::Ty, crate::Ty)>,
) -> Result<(Vec<Adjustment>, crate::Ty), TypeError<DbInterner<'db>>> {
tys: &Canonical<'db, (Ty<'db>, Ty<'db>)>,
) -> Result<(Vec<Adjustment<'db>>, Ty<'db>), TypeError<DbInterner<'db>>> {
let mut table = InferenceTable::new(db, env);
let vars = table.fresh_subst(tys.binders.as_slice(Interner));
let ty1_with_vars = vars.apply(tys.value.0.clone(), Interner);
let ty2_with_vars = vars.apply(tys.value.1.clone(), Interner);
let interner = table.interner();
let ((ty1_with_vars, ty2_with_vars), vars) = table.infer_ctxt.instantiate_canonical(tys);
let cause = ObligationCause::new();
// FIXME: Target features.
@ -1597,36 +1551,67 @@ fn coerce<'db>(
use_lub: false,
target_features: &mut || (&target_features, TargetFeatureIsSafeInTarget::No),
};
let InferOk { value: (adjustments, ty), obligations } = coerce.coerce(
ty1_with_vars.to_nextsolver(coerce.table.interner),
ty2_with_vars.to_nextsolver(coerce.table.interner),
)?;
let InferOk { value: (adjustments, ty), obligations } =
coerce.coerce(ty1_with_vars, ty2_with_vars)?;
table.register_predicates(obligations);
// default any type vars that weren't unified back to their original bound vars
// (kind of hacky)
let find_var = |iv| {
vars.iter(Interner).position(|v| match v.interned() {
chalk_ir::GenericArgData::Ty(ty) => ty.inference_var(Interner),
chalk_ir::GenericArgData::Lifetime(lt) => lt.inference_var(Interner),
chalk_ir::GenericArgData::Const(c) => c.inference_var(Interner),
} == Some(iv))
let mut fallback_ty = |debruijn, infer| {
let var = vars.var_values.iter().position(|arg| {
arg.as_type().is_some_and(|ty| match ty.kind() {
TyKind::Infer(it) => infer == it,
_ => false,
})
});
var.map_or_else(
|| Ty::new_error(interner, ErrorGuaranteed),
|i| {
Ty::new_bound(
interner,
debruijn,
BoundTy { kind: BoundTyKind::Anon, var: BoundVar::from_usize(i) },
)
},
)
};
let fallback = |iv, kind, binder| match kind {
chalk_ir::VariableKind::Ty(_ty_kind) => find_var(iv).map_or_else(
|| chalk_ir::TyKind::Error.intern(Interner).cast(Interner),
|i| crate::BoundVar::new(binder, i).to_ty(Interner).cast(Interner),
),
chalk_ir::VariableKind::Lifetime => find_var(iv).map_or_else(
|| crate::LifetimeData::Error.intern(Interner).cast(Interner),
|i| crate::BoundVar::new(binder, i).to_lifetime(Interner).cast(Interner),
),
chalk_ir::VariableKind::Const(ty) => find_var(iv).map_or_else(
|| crate::unknown_const(ty.clone()).cast(Interner),
|i| crate::BoundVar::new(binder, i).to_const(Interner, ty.clone()).cast(Interner),
),
let mut fallback_const = |debruijn, infer| {
let var = vars.var_values.iter().position(|arg| {
arg.as_const().is_some_and(|ty| match ty.kind() {
ConstKind::Infer(it) => infer == it,
_ => false,
})
});
var.map_or_else(
|| Const::new_error(interner, ErrorGuaranteed),
|i| Const::new_bound(interner, debruijn, BoundConst { var: BoundVar::from_usize(i) }),
)
};
let mut fallback_region = |debruijn, infer| {
let var = vars.var_values.iter().position(|arg| {
arg.as_region().is_some_and(|ty| match ty.kind() {
RegionKind::ReVar(it) => infer == it,
_ => false,
})
});
var.map_or_else(
|| Region::error(interner),
|i| {
Region::new_bound(
interner,
debruijn,
BoundRegion { kind: BoundRegionKind::Anon, var: BoundVar::from_usize(i) },
)
},
)
};
// FIXME also map the types in the adjustments
// FIXME: We don't fallback correctly since this is done on `InferenceContext` and we only have `InferenceTable`.
Ok((adjustments, table.resolve_with_fallback(ty.to_chalk(table.interner), &fallback)))
let ty = table.resolve_with_fallback(
ty,
&mut fallback_ty,
&mut fallback_const,
&mut fallback_region,
);
Ok((adjustments, ty))
}

View file

@ -12,11 +12,11 @@ use hir_def::expr_store::path::Path;
use hir_def::{hir::ExprOrPatId, resolver::Resolver};
use la_arena::{Idx, RawIdx};
use crate::lower::LifetimeElisionKind;
use crate::{
InferenceDiagnostic, InferenceTyDiagnosticSource, TyLoweringContext, TyLoweringDiagnostic,
InferenceDiagnostic, InferenceTyDiagnosticSource, TyLoweringDiagnostic,
db::HirDatabase,
lower::path::{PathDiagnosticCallback, PathLoweringContext},
lower_nextsolver::path::{PathDiagnosticCallback, PathLoweringContext},
lower_nextsolver::{LifetimeElisionKind, TyLoweringContext},
};
// Unfortunately, this struct needs to use interior mutability (but we encapsulate it)
@ -24,10 +24,10 @@ use crate::{
// to our resolver and so we cannot have mutable reference, but we really want to have
// ability to dispatch diagnostics during this work otherwise the code becomes a complete mess.
#[derive(Debug, Default, Clone)]
pub(super) struct Diagnostics(RefCell<Vec<InferenceDiagnostic>>);
pub(super) struct Diagnostics<'db>(RefCell<Vec<InferenceDiagnostic<'db>>>);
impl Diagnostics {
pub(super) fn push(&self, diagnostic: InferenceDiagnostic) {
impl<'db> Diagnostics<'db> {
pub(super) fn push(&self, diagnostic: InferenceDiagnostic<'db>) {
self.0.borrow_mut().push(diagnostic);
}
@ -41,32 +41,32 @@ impl Diagnostics {
);
}
pub(super) fn finish(self) -> Vec<InferenceDiagnostic> {
pub(super) fn finish(self) -> Vec<InferenceDiagnostic<'db>> {
self.0.into_inner()
}
}
pub(crate) struct PathDiagnosticCallbackData<'a> {
pub(crate) struct PathDiagnosticCallbackData<'a, 'db> {
node: ExprOrPatId,
diagnostics: &'a Diagnostics,
diagnostics: &'a Diagnostics<'db>,
}
pub(super) struct InferenceTyLoweringContext<'a> {
ctx: TyLoweringContext<'a>,
diagnostics: &'a Diagnostics,
pub(super) struct InferenceTyLoweringContext<'db, 'a> {
ctx: TyLoweringContext<'db, 'a>,
diagnostics: &'a Diagnostics<'db>,
source: InferenceTyDiagnosticSource,
}
impl<'a> InferenceTyLoweringContext<'a> {
impl<'db, 'a> InferenceTyLoweringContext<'db, 'a> {
#[inline]
pub(super) fn new(
db: &'a dyn HirDatabase,
resolver: &'a Resolver<'_>,
db: &'db dyn HirDatabase,
resolver: &'a Resolver<'db>,
store: &'a ExpressionStore,
diagnostics: &'a Diagnostics,
diagnostics: &'a Diagnostics<'db>,
source: InferenceTyDiagnosticSource,
generic_def: GenericDefId,
lifetime_elision: LifetimeElisionKind,
lifetime_elision: LifetimeElisionKind<'db>,
) -> Self {
Self {
ctx: TyLoweringContext::new(db, resolver, store, generic_def, lifetime_elision),
@ -80,7 +80,7 @@ impl<'a> InferenceTyLoweringContext<'a> {
&'b mut self,
path: &'b Path,
node: ExprOrPatId,
) -> PathLoweringContext<'b, 'a> {
) -> PathLoweringContext<'b, 'a, 'db> {
let on_diagnostic = PathDiagnosticCallback {
data: Either::Right(PathDiagnosticCallbackData { diagnostics: self.diagnostics, node }),
callback: |data, _, diag| {
@ -96,7 +96,7 @@ impl<'a> InferenceTyLoweringContext<'a> {
pub(super) fn at_path_forget_diagnostics<'b>(
&'b mut self,
path: &'b Path,
) -> PathLoweringContext<'b, 'a> {
) -> PathLoweringContext<'b, 'a, 'db> {
let on_diagnostic = PathDiagnosticCallback {
data: Either::Right(PathDiagnosticCallbackData {
diagnostics: self.diagnostics,
@ -113,8 +113,8 @@ impl<'a> InferenceTyLoweringContext<'a> {
}
}
impl<'a> Deref for InferenceTyLoweringContext<'a> {
type Target = TyLoweringContext<'a>;
impl<'db, 'a> Deref for InferenceTyLoweringContext<'db, 'a> {
type Target = TyLoweringContext<'db, 'a>;
#[inline]
fn deref(&self) -> &Self::Target {
@ -122,14 +122,14 @@ impl<'a> Deref for InferenceTyLoweringContext<'a> {
}
}
impl DerefMut for InferenceTyLoweringContext<'_> {
impl DerefMut for InferenceTyLoweringContext<'_, '_> {
#[inline]
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.ctx
}
}
impl Drop for InferenceTyLoweringContext<'_> {
impl Drop for InferenceTyLoweringContext<'_, '_> {
#[inline]
fn drop(&mut self) {
self.diagnostics

File diff suppressed because it is too large Load diff

View file

@ -28,7 +28,7 @@ pub(crate) enum DivergingFallbackBehavior {
ToNever,
}
impl<'db> InferenceContext<'db> {
impl<'db> InferenceContext<'_, 'db> {
pub(super) fn type_inference_fallback(&mut self) {
debug!(
"type-inference-fallback start obligations: {:#?}",
@ -324,7 +324,7 @@ impl<'db> InferenceContext<'db> {
FxHashMap::with_capacity_and_hasher(diverging_vids.len(), FxBuildHasher);
for &diverging_vid in &diverging_vids {
let diverging_ty = Ty::new_var(self.table.interner, diverging_vid);
let diverging_ty = Ty::new_var(self.interner(), diverging_vid);
let root_vid = self.table.infer_ctxt.root_var(diverging_vid);
let can_reach_non_diverging = Dfs::new(&coercion_graph, root_vid.as_u32().into())
.iter(&coercion_graph)

View file

@ -1,7 +1,6 @@
//! Finds if an expression is an immutable context or a mutable context, which is used in selecting
//! between `Deref` and `DerefMut` or `Index` and `IndexMut` or similar.
use chalk_ir::{Mutability, cast::Cast};
use hir_def::{
hir::{
Array, AsmOperand, BinaryOp, BindingAnnotation, Expr, ExprId, Pat, PatId, Statement,
@ -11,14 +10,19 @@ use hir_def::{
};
use hir_expand::name::Name;
use intern::sym;
use rustc_ast_ir::Mutability;
use rustc_type_ir::inherent::IntoKind;
use crate::next_solver::infer::traits::{Obligation, ObligationCause};
use crate::next_solver::{GenericArgs, TraitRef};
use crate::{
Adjust, Adjustment, AutoBorrow, Interner, OverloadedDeref, TyBuilder, TyKind,
Adjust, Adjustment, AutoBorrow, OverloadedDeref,
infer::{Expectation, InferenceContext, expr::ExprIsRead},
lower::lower_to_chalk_mutability,
lower_nextsolver::lower_mutability,
next_solver::TyKind,
};
impl InferenceContext<'_> {
impl<'db> InferenceContext<'_, 'db> {
pub(crate) fn infer_mut_body(&mut self) {
self.infer_mut_expr(self.body.body_expr, Mutability::Not);
}
@ -141,8 +145,8 @@ impl InferenceContext<'_> {
target,
}) = base_adjustments
{
if let TyKind::Ref(_, _, ty) = target.kind(Interner) {
base_ty = Some(ty.clone());
if let TyKind::Ref(_, ty, _) = target.kind() {
base_ty = Some(ty);
}
*mutability = Mutability::Mut;
}
@ -150,15 +154,24 @@ impl InferenceContext<'_> {
// Apply `IndexMut` obligation for non-assignee expr
if let Some(base_ty) = base_ty {
let index_ty = if let Some(ty) = self.result.type_of_expr.get(index) {
ty.clone()
*ty
} else {
self.infer_expr(index, &Expectation::none(), ExprIsRead::Yes)
};
let trait_ref = TyBuilder::trait_ref(self.db, index_trait)
.push(base_ty)
.fill(|_| index_ty.clone().cast(Interner))
.build();
self.push_obligation(trait_ref.cast(Interner));
let trait_ref = TraitRef::new(
self.interner(),
index_trait.into(),
GenericArgs::new_from_iter(
self.interner(),
[base_ty.into(), index_ty.into()],
),
);
self.table.register_predicate(Obligation::new(
self.interner(),
ObligationCause::new(),
self.table.trait_env.env,
trait_ref,
));
}
}
self.infer_mut_expr(base, mutability);
@ -173,8 +186,8 @@ impl InferenceContext<'_> {
{
let ty = self.result.type_of_expr.get(*expr);
let is_mut_ptr = ty.is_some_and(|ty| {
let ty = self.table.resolve_ty_shallow(ty);
matches!(ty.kind(Interner), chalk_ir::TyKind::Raw(Mutability::Mut, _))
let ty = self.table.shallow_resolve(*ty);
matches!(ty.kind(), TyKind::RawPtr(_, Mutability::Mut))
});
if is_mut_ptr {
mutability = Mutability::Not;
@ -200,7 +213,7 @@ impl InferenceContext<'_> {
self.infer_mut_expr(*expr, Mutability::Not);
}
Expr::Ref { expr, rawness: _, mutability } => {
let mutability = lower_to_chalk_mutability(*mutability);
let mutability = lower_mutability(*mutability);
self.infer_mut_expr(*expr, mutability);
}
Expr::BinaryOp { lhs, rhs, op: Some(BinaryOp::Assignment { .. }) } => {

View file

@ -8,37 +8,35 @@ use hir_def::{
hir::{Binding, BindingAnnotation, BindingId, Expr, ExprId, Literal, Pat, PatId},
};
use hir_expand::name::Name;
use rustc_ast_ir::Mutability;
use rustc_type_ir::inherent::{GenericArg as _, GenericArgs as _, IntoKind, SliceLike, Ty as _};
use stdx::TupleExt;
use crate::infer::AllowTwoPhase;
use crate::next_solver::mapping::{ChalkToNextSolver, NextSolverToChalk};
use crate::{
DeclContext, DeclOrigin, InferenceDiagnostic, Interner, Mutability, Scalar, Substitution, Ty,
TyBuilder, TyExt, TyKind,
DeclContext, DeclOrigin, InferenceDiagnostic,
consteval::{self, try_const_usize, usize_const},
infer::{
BindingMode, Expectation, InferenceContext, TypeMismatch, coerce::CoerceNever,
expr::ExprIsRead,
AllowTwoPhase, BindingMode, Expectation, InferenceContext, TypeMismatch,
coerce::CoerceNever, expr::ExprIsRead,
},
lower::lower_to_chalk_mutability,
primitive::UintTy,
static_lifetime,
lower_nextsolver::lower_mutability,
next_solver::{GenericArgs, Ty, TyKind},
};
impl InferenceContext<'_> {
impl<'db> InferenceContext<'_, 'db> {
/// Infers type for tuple struct pattern or its corresponding assignee expression.
///
/// Ellipses found in the original pattern or expression must be filtered out.
pub(super) fn infer_tuple_struct_pat_like(
&mut self,
path: Option<&Path>,
expected: &Ty,
expected: Ty<'db>,
default_bm: BindingMode,
id: PatId,
ellipsis: Option<u32>,
subs: &[PatId],
decl: Option<DeclContext>,
) -> Ty {
) -> Ty<'db> {
let (ty, def) = self.resolve_variant(id.into(), path, true);
let var_data = def.map(|it| it.fields(self.db));
if let Some(variant) = def {
@ -56,12 +54,12 @@ impl InferenceContext<'_> {
}
}
self.unify(&ty, expected);
self.unify(ty, expected);
match def {
_ if subs.is_empty() => {}
Some(def) => {
let field_types = self.db.field_types(def);
let field_types = self.db.field_types_ns(def);
let variant_data = def.fields(self.db);
let visibilities = self.db.field_visibilities(def);
@ -85,10 +83,10 @@ impl InferenceContext<'_> {
{
// FIXME(DIAGNOSE): private tuple field
}
let f = field_types[local_id].clone();
let f = field_types[local_id];
let expected_ty = match substs {
Some(substs) => f.substitute(Interner, substs),
None => f.substitute(Interner, &Substitution::empty(Interner)),
Some(substs) => f.instantiate(self.interner(), substs),
None => f.instantiate(self.interner(), &[]),
};
self.process_remote_user_written_ty(expected_ty)
}
@ -96,13 +94,13 @@ impl InferenceContext<'_> {
}
};
self.infer_pat(subpat, &expected_ty, default_bm, decl);
self.infer_pat(subpat, expected_ty, default_bm, decl);
}
}
None => {
let err_ty = self.err_ty();
for &inner in subs {
self.infer_pat(inner, &err_ty, default_bm, decl);
self.infer_pat(inner, err_ty, default_bm, decl);
}
}
}
@ -114,23 +112,23 @@ impl InferenceContext<'_> {
pub(super) fn infer_record_pat_like(
&mut self,
path: Option<&Path>,
expected: &Ty,
expected: Ty<'db>,
default_bm: BindingMode,
id: PatId,
subs: impl ExactSizeIterator<Item = (Name, PatId)>,
decl: Option<DeclContext>,
) -> Ty {
) -> Ty<'db> {
let (ty, def) = self.resolve_variant(id.into(), path, false);
if let Some(variant) = def {
self.write_variant_resolution(id.into(), variant);
}
self.unify(&ty, expected);
self.unify(ty, expected);
match def {
_ if subs.len() == 0 => {}
Some(def) => {
let field_types = self.db.field_types(def);
let field_types = self.db.field_types_ns(def);
let variant_data = def.fields(self.db);
let visibilities = self.db.field_visibilities(def);
@ -149,10 +147,10 @@ impl InferenceContext<'_> {
variant: def,
});
}
let f = field_types[local_id].clone();
let f = field_types[local_id];
let expected_ty = match substs {
Some(substs) => f.substitute(Interner, substs),
None => f.substitute(Interner, &Substitution::empty(Interner)),
Some(substs) => f.instantiate(self.interner(), substs),
None => f.instantiate(self.interner(), &[]),
};
self.process_remote_user_written_ty(expected_ty)
}
@ -167,13 +165,13 @@ impl InferenceContext<'_> {
}
};
self.infer_pat(inner, &expected_ty, default_bm, decl);
self.infer_pat(inner, expected_ty, default_bm, decl);
}
}
None => {
let err_ty = self.err_ty();
for (_, inner) in subs {
self.infer_pat(inner, &err_ty, default_bm, decl);
self.infer_pat(inner, err_ty, default_bm, decl);
}
}
}
@ -186,16 +184,16 @@ impl InferenceContext<'_> {
/// Ellipses found in the original pattern or expression must be filtered out.
pub(super) fn infer_tuple_pat_like(
&mut self,
expected: &Ty,
expected: Ty<'db>,
default_bm: BindingMode,
ellipsis: Option<u32>,
subs: &[PatId],
decl: Option<DeclContext>,
) -> Ty {
) -> Ty<'db> {
let expected = self.table.structurally_resolve_type(expected);
let expectations = match expected.as_tuple() {
Some(parameters) => parameters.as_slice(Interner),
_ => &[],
let expectations = match expected.kind() {
TyKind::Tuple(parameters) => parameters,
_ => self.types.empty_tys,
};
let ((pre, post), n_uncovered_patterns) = match ellipsis {
@ -204,10 +202,8 @@ impl InferenceContext<'_> {
}
None => ((subs, &[][..]), 0),
};
let mut expectations_iter = expectations
.iter()
.map(|a| a.assert_ty_ref(Interner).clone())
.chain(repeat_with(|| self.table.new_type_var()));
let mut expectations_iter =
expectations.iter().chain(repeat_with(|| self.table.next_ty_var()));
let mut inner_tys = Vec::with_capacity(n_uncovered_patterns + subs.len());
@ -215,31 +211,35 @@ impl InferenceContext<'_> {
// Process pre
for (ty, pat) in inner_tys.iter_mut().zip(pre) {
*ty = self.infer_pat(*pat, ty, default_bm, decl);
*ty = self.infer_pat(*pat, *ty, default_bm, decl);
}
// Process post
for (ty, pat) in inner_tys.iter_mut().skip(pre.len() + n_uncovered_patterns).zip(post) {
*ty = self.infer_pat(*pat, ty, default_bm, decl);
*ty = self.infer_pat(*pat, *ty, default_bm, decl);
}
TyKind::Tuple(inner_tys.len(), Substitution::from_iter(Interner, inner_tys))
.intern(Interner)
Ty::new_tup_from_iter(self.interner(), inner_tys.into_iter())
}
/// The resolver needs to be updated to the surrounding expression when inside assignment
/// (because there, `Pat::Path` can refer to a variable).
pub(super) fn infer_top_pat(&mut self, pat: PatId, expected: &Ty, decl: Option<DeclContext>) {
pub(super) fn infer_top_pat(
&mut self,
pat: PatId,
expected: Ty<'db>,
decl: Option<DeclContext>,
) {
self.infer_pat(pat, expected, BindingMode::default(), decl);
}
fn infer_pat(
&mut self,
pat: PatId,
expected: &Ty,
expected: Ty<'db>,
mut default_bm: BindingMode,
decl: Option<DeclContext>,
) -> Ty {
) -> Ty<'db> {
let mut expected = self.table.structurally_resolve_type(expected);
if matches!(&self.body[pat], Pat::Ref { .. }) || self.inside_assignment {
@ -251,9 +251,9 @@ impl InferenceContext<'_> {
default_bm = BindingMode::Move;
} else if self.is_non_ref_pat(self.body, pat) {
let mut pat_adjustments = Vec::new();
while let Some((inner, _lifetime, mutability)) = expected.as_reference() {
pat_adjustments.push(expected.clone());
expected = self.table.structurally_resolve_type(inner);
while let TyKind::Ref(_lifetime, inner, mutability) = expected.kind() {
pat_adjustments.push(expected);
expected = self.table.try_structurally_resolve_type(inner);
default_bm = match default_bm {
BindingMode::Move => BindingMode::Ref(mutability),
BindingMode::Ref(Mutability::Not) => BindingMode::Ref(Mutability::Not),
@ -273,25 +273,21 @@ impl InferenceContext<'_> {
let ty = match &self.body[pat] {
Pat::Tuple { args, ellipsis } => {
self.infer_tuple_pat_like(&expected, default_bm, *ellipsis, args, decl)
self.infer_tuple_pat_like(expected, default_bm, *ellipsis, args, decl)
}
Pat::Or(pats) => {
for pat in pats.iter() {
self.infer_pat(*pat, &expected, default_bm, decl);
self.infer_pat(*pat, expected, default_bm, decl);
}
expected.clone()
expected
}
&Pat::Ref { pat, mutability } => {
self.infer_ref_pat(pat, lower_mutability(mutability), expected, default_bm, decl)
}
&Pat::Ref { pat, mutability } => self.infer_ref_pat(
pat,
lower_to_chalk_mutability(mutability),
&expected,
default_bm,
decl,
),
Pat::TupleStruct { path: p, args: subpats, ellipsis } => self
.infer_tuple_struct_pat_like(
p.as_deref(),
&expected,
expected,
default_bm,
pat,
*ellipsis,
@ -300,29 +296,26 @@ impl InferenceContext<'_> {
),
Pat::Record { path: p, args: fields, ellipsis: _ } => {
let subs = fields.iter().map(|f| (f.name.clone(), f.pat));
self.infer_record_pat_like(p.as_deref(), &expected, default_bm, pat, subs, decl)
self.infer_record_pat_like(p.as_deref(), expected, default_bm, pat, subs, decl)
}
Pat::Path(path) => {
let ty = self.infer_path(path, pat.into()).unwrap_or_else(|| self.err_ty());
let ty_inserted_vars = self.insert_type_vars_shallow(ty.clone());
let ty_inserted_vars = self.insert_type_vars_shallow(ty);
match self.coerce(
pat.into(),
expected.to_nextsolver(self.table.interner),
ty_inserted_vars.to_nextsolver(self.table.interner),
expected,
ty_inserted_vars,
AllowTwoPhase::No,
CoerceNever::Yes,
) {
Ok(coerced_ty) => {
self.write_pat_ty(pat, coerced_ty.to_chalk(self.table.interner));
self.write_pat_ty(pat, coerced_ty);
return self.pat_ty_after_adjustment(pat);
}
Err(_) => {
self.result.type_mismatches.insert(
pat.into(),
TypeMismatch {
expected: expected.clone(),
actual: ty_inserted_vars.clone(),
},
TypeMismatch { expected, actual: ty_inserted_vars },
);
self.write_pat_ty(pat, ty);
// We return `expected` to prevent cascading errors. I guess an alternative is to
@ -332,81 +325,77 @@ impl InferenceContext<'_> {
}
}
Pat::Bind { id, subpat } => {
return self.infer_bind_pat(pat, *id, default_bm, *subpat, &expected, decl);
return self.infer_bind_pat(pat, *id, default_bm, *subpat, expected, decl);
}
Pat::Slice { prefix, slice, suffix } => {
self.infer_slice_pat(&expected, prefix, slice, suffix, default_bm, decl)
self.infer_slice_pat(expected, prefix, *slice, suffix, default_bm, decl)
}
Pat::Wild => expected.clone(),
Pat::Wild => expected,
Pat::Range { .. } => {
// FIXME: do some checks here.
expected.clone()
expected
}
&Pat::Lit(expr) => {
// Don't emit type mismatches again, the expression lowering already did that.
let ty = self.infer_lit_pat(expr, &expected);
let ty = self.infer_lit_pat(expr, expected);
self.write_pat_ty(pat, ty);
return self.pat_ty_after_adjustment(pat);
}
Pat::Box { inner } => match self.resolve_boxed_box() {
Some(box_adt) => {
let (inner_ty, alloc_ty) = match expected.as_adt() {
Some((adt, subst)) if adt == box_adt => (
subst.at(Interner, 0).assert_ty_ref(Interner).clone(),
subst.as_slice(Interner).get(1).and_then(|a| a.ty(Interner).cloned()),
),
_ => (self.result.standard_types.unknown.clone(), None),
Some((adt, subst)) if adt == box_adt => {
(subst.type_at(0), subst.as_slice().get(1).and_then(|a| a.as_type()))
}
_ => (self.types.error, None),
};
let inner_ty = self.infer_pat(*inner, &inner_ty, default_bm, decl);
let mut b = TyBuilder::adt(self.db, box_adt).push(inner_ty);
if let Some(alloc_ty) = alloc_ty {
b = b.push(alloc_ty);
}
b.fill_with_defaults(self.db, || self.table.new_type_var()).build()
let inner_ty = self.infer_pat(*inner, inner_ty, default_bm, decl);
Ty::new_adt(
self.interner(),
box_adt,
GenericArgs::fill_with_defaults(
self.interner(),
box_adt.into(),
std::iter::once(inner_ty.into()).chain(alloc_ty.map(Into::into)),
|_, _, id, _| self.table.next_var_for_param(id),
),
)
}
None => self.err_ty(),
},
Pat::ConstBlock(expr) => {
let old_inside_assign = std::mem::replace(&mut self.inside_assignment, false);
let result = self.infer_expr(
*expr,
&Expectation::has_type(expected.clone()),
ExprIsRead::Yes,
);
let result =
self.infer_expr(*expr, &Expectation::has_type(expected), ExprIsRead::Yes);
self.inside_assignment = old_inside_assign;
result
}
Pat::Expr(expr) => {
let old_inside_assign = std::mem::replace(&mut self.inside_assignment, false);
// LHS of assignment doesn't constitute reads.
let result = self.infer_expr_coerce(
*expr,
&Expectation::has_type(expected.clone()),
ExprIsRead::No,
);
let result =
self.infer_expr_coerce(*expr, &Expectation::has_type(expected), ExprIsRead::No);
// We are returning early to avoid the unifiability check below.
let lhs_ty = self.insert_type_vars_shallow(result);
let ty = match self.coerce(
pat.into(),
expected.to_nextsolver(self.table.interner),
lhs_ty.to_nextsolver(self.table.interner),
expected,
lhs_ty,
AllowTwoPhase::No,
CoerceNever::Yes,
) {
Ok(ty) => ty.to_chalk(self.table.interner),
Ok(ty) => ty,
Err(_) => {
self.result.type_mismatches.insert(
pat.into(),
TypeMismatch { expected: expected.clone(), actual: lhs_ty.clone() },
);
self.result
.type_mismatches
.insert(pat.into(), TypeMismatch { expected, actual: lhs_ty });
// `rhs_ty` is returned so no further type mismatches are
// reported because of this mismatch.
expected
}
};
self.write_pat_ty(pat, ty.clone());
self.write_pat_ty(pat, ty);
self.inside_assignment = old_inside_assign;
return ty;
}
@ -415,46 +404,43 @@ impl InferenceContext<'_> {
// use a new type variable if we got error type here
let ty = self.insert_type_vars_shallow(ty);
// FIXME: This never check is odd, but required with out we do inference right now
if !expected.is_never() && !self.unify(&ty, &expected) {
self.result
.type_mismatches
.insert(pat.into(), TypeMismatch { expected, actual: ty.clone() });
if !expected.is_never() && !self.unify(ty, expected) {
self.result.type_mismatches.insert(pat.into(), TypeMismatch { expected, actual: ty });
}
self.write_pat_ty(pat, ty);
self.pat_ty_after_adjustment(pat)
}
fn pat_ty_after_adjustment(&self, pat: PatId) -> Ty {
self.result
fn pat_ty_after_adjustment(&self, pat: PatId) -> Ty<'db> {
*self
.result
.pat_adjustments
.get(&pat)
.and_then(|it| it.first())
.unwrap_or(&self.result.type_of_pat[pat])
.clone()
}
fn infer_ref_pat(
&mut self,
inner_pat: PatId,
mutability: Mutability,
expected: &Ty,
expected: Ty<'db>,
default_bm: BindingMode,
decl: Option<DeclContext>,
) -> Ty {
let (expectation_type, expectation_lt) = match expected.as_reference() {
Some((inner_ty, lifetime, _exp_mut)) => (inner_ty.clone(), lifetime),
None => {
let inner_ty = self.table.new_type_var();
let inner_lt = self.table.new_lifetime_var();
let ref_ty =
TyKind::Ref(mutability, inner_lt.clone(), inner_ty.clone()).intern(Interner);
) -> Ty<'db> {
let (expectation_type, expectation_lt) = match expected.kind() {
TyKind::Ref(lifetime, inner_ty, _exp_mut) => (inner_ty, lifetime),
_ => {
let inner_ty = self.table.next_ty_var();
let inner_lt = self.table.next_region_var();
let ref_ty = Ty::new_ref(self.interner(), inner_lt, inner_ty, mutability);
// Unification failure will be reported by the caller.
self.unify(&ref_ty, expected);
self.unify(ref_ty, expected);
(inner_ty, inner_lt)
}
};
let subty = self.infer_pat(inner_pat, &expectation_type, default_bm, decl);
TyKind::Ref(mutability, expectation_lt, subty).intern(Interner)
let subty = self.infer_pat(inner_pat, expectation_type, default_bm, decl);
Ty::new_ref(self.interner(), expectation_lt, subty, mutability)
}
fn infer_bind_pat(
@ -463,9 +449,9 @@ impl InferenceContext<'_> {
binding: BindingId,
default_bm: BindingMode,
subpat: Option<PatId>,
expected: &Ty,
expected: Ty<'db>,
decl: Option<DeclContext>,
) -> Ty {
) -> Ty<'db> {
let Binding { mode, .. } = self.body[binding];
let mode = if mode == BindingAnnotation::Unannotated {
default_bm
@ -476,31 +462,31 @@ impl InferenceContext<'_> {
let inner_ty = match subpat {
Some(subpat) => self.infer_pat(subpat, expected, default_bm, decl),
None => expected.clone(),
None => expected,
};
let inner_ty = self.insert_type_vars_shallow(inner_ty);
let bound_ty = match mode {
BindingMode::Ref(mutability) => {
let inner_lt = self.table.new_lifetime_var();
TyKind::Ref(mutability, inner_lt, inner_ty.clone()).intern(Interner)
let inner_lt = self.table.next_region_var();
Ty::new_ref(self.interner(), inner_lt, inner_ty, mutability)
}
BindingMode::Move => inner_ty.clone(),
BindingMode::Move => inner_ty,
};
self.write_pat_ty(pat, inner_ty.clone());
self.write_pat_ty(pat, inner_ty);
self.write_binding_ty(binding, bound_ty);
inner_ty
}
fn infer_slice_pat(
&mut self,
expected: &Ty,
expected: Ty<'db>,
prefix: &[PatId],
slice: &Option<PatId>,
slice: Option<PatId>,
suffix: &[PatId],
default_bm: BindingMode,
decl: Option<DeclContext>,
) -> Ty {
) -> Ty<'db> {
let expected = self.table.structurally_resolve_type(expected);
// If `expected` is an infer ty, we try to equate it to an array if the given pattern
@ -510,56 +496,61 @@ impl InferenceContext<'_> {
&& let Some(resolved_array_ty) =
self.try_resolve_slice_ty_to_array_ty(prefix, suffix, slice)
{
self.unify(&expected, &resolved_array_ty);
self.unify(expected, resolved_array_ty);
}
let expected = self.table.structurally_resolve_type(&expected);
let elem_ty = match expected.kind(Interner) {
TyKind::Array(st, _) | TyKind::Slice(st) => st.clone(),
let expected = self.table.try_structurally_resolve_type(expected);
let elem_ty = match expected.kind() {
TyKind::Array(st, _) | TyKind::Slice(st) => st,
_ => self.err_ty(),
};
for &pat_id in prefix.iter().chain(suffix.iter()) {
self.infer_pat(pat_id, &elem_ty, default_bm, decl);
self.infer_pat(pat_id, elem_ty, default_bm, decl);
}
if let &Some(slice_pat_id) = slice {
let rest_pat_ty = match expected.kind(Interner) {
if let Some(slice_pat_id) = slice {
let rest_pat_ty = match expected.kind() {
TyKind::Array(_, length) => {
let len = try_const_usize(self.db, length);
let len =
len.and_then(|len| len.checked_sub((prefix.len() + suffix.len()) as u128));
TyKind::Array(elem_ty.clone(), usize_const(self.db, len, self.resolver.krate()))
Ty::new_array_with_const_len(
self.interner(),
elem_ty,
usize_const(self.db, len, self.resolver.krate()),
)
}
_ => TyKind::Slice(elem_ty.clone()),
}
.intern(Interner);
self.infer_pat(slice_pat_id, &rest_pat_ty, default_bm, decl);
_ => Ty::new_slice(self.interner(), elem_ty),
};
self.infer_pat(slice_pat_id, rest_pat_ty, default_bm, decl);
}
match expected.kind(Interner) {
TyKind::Array(_, const_) => TyKind::Array(elem_ty, const_.clone()),
_ => TyKind::Slice(elem_ty),
match expected.kind() {
TyKind::Array(_, const_) => {
Ty::new_array_with_const_len(self.interner(), elem_ty, const_)
}
_ => Ty::new_slice(self.interner(), elem_ty),
}
.intern(Interner)
}
fn infer_lit_pat(&mut self, expr: ExprId, expected: &Ty) -> Ty {
fn infer_lit_pat(&mut self, expr: ExprId, expected: Ty<'db>) -> Ty<'db> {
// Like slice patterns, byte string patterns can denote both `&[u8; N]` and `&[u8]`.
if let Expr::Literal(Literal::ByteString(_)) = self.body[expr]
&& let Some((inner, ..)) = expected.as_reference()
&& let TyKind::Ref(_, inner, _) = expected.kind()
{
let inner = self.table.structurally_resolve_type(inner);
if matches!(inner.kind(Interner), TyKind::Slice(_)) {
let elem_ty = TyKind::Scalar(Scalar::Uint(UintTy::U8)).intern(Interner);
let slice_ty = TyKind::Slice(elem_ty).intern(Interner);
let ty = TyKind::Ref(Mutability::Not, static_lifetime(), slice_ty).intern(Interner);
self.write_expr_ty(expr, ty.clone());
let inner = self.table.try_structurally_resolve_type(inner);
if matches!(inner.kind(), TyKind::Slice(_)) {
let elem_ty = self.types.u8;
let slice_ty = Ty::new_slice(self.interner(), elem_ty);
let ty =
Ty::new_ref(self.interner(), self.types.re_static, slice_ty, Mutability::Not);
self.write_expr_ty(expr, ty);
return ty;
}
}
self.infer_expr(expr, &Expectation::has_type(expected.clone()), ExprIsRead::Yes)
self.infer_expr(expr, &Expectation::has_type(expected), ExprIsRead::Yes)
}
fn is_non_ref_pat(&mut self, body: &hir_def::expr_store::Body, pat: PatId) -> bool {
@ -593,17 +584,17 @@ impl InferenceContext<'_> {
&mut self,
before: &[PatId],
suffix: &[PatId],
slice: &Option<PatId>,
) -> Option<Ty> {
if !slice.is_none() {
slice: Option<PatId>,
) -> Option<Ty<'db>> {
if slice.is_some() {
return None;
}
let len = before.len() + suffix.len();
let size = consteval::usize_const(self.db, Some(len as u128), self.owner.krate(self.db));
let elem_ty = self.table.new_type_var();
let array_ty = TyKind::Array(elem_ty, size).intern(Interner);
let elem_ty = self.table.next_ty_var();
let array_ty = Ty::new_array_with_const_len(self.interner(), elem_ty, size);
Some(array_ty)
}

View file

@ -1,52 +1,50 @@
//! Path expression resolution.
use chalk_ir::cast::Cast;
use hir_def::{
AdtId, AssocItemId, GenericDefId, ItemContainerId, Lookup,
expr_store::path::{Path, PathSegment},
resolver::{ResolveValueResult, TypeNs, ValueNs},
};
use hir_expand::name::Name;
use rustc_type_ir::inherent::{SliceLike, Ty as _};
use stdx::never;
use crate::{
InferenceDiagnostic, Interner, LifetimeElisionKind, Substitution, TraitRef, TraitRefExt, Ty,
TyBuilder, TyExt, TyKind, ValueTyDefId,
builder::ParamKind,
consteval, error_lifetime,
InferenceDiagnostic, ValueTyDefId,
generics::generics,
infer::diagnostics::InferenceTyLoweringContext as TyLoweringContext,
lower_nextsolver::LifetimeElisionKind,
method_resolution::{self, VisibleFromModule},
next_solver::{
DbInterner,
mapping::{ChalkToNextSolver, NextSolverToChalk},
GenericArg, GenericArgs, TraitRef, Ty,
infer::traits::{Obligation, ObligationCause},
},
to_chalk_trait_id,
};
use super::{ExprOrPatId, InferenceContext, InferenceTyDiagnosticSource};
impl<'db> InferenceContext<'db> {
pub(super) fn infer_path(&mut self, path: &Path, id: ExprOrPatId) -> Option<Ty> {
impl<'db> InferenceContext<'_, 'db> {
pub(super) fn infer_path(&mut self, path: &Path, id: ExprOrPatId) -> Option<Ty<'db>> {
let (value_def, generic_def, substs) = match self.resolve_value_path(path, id)? {
ValuePathResolution::GenericDef(value_def, generic_def, substs) => {
(value_def, generic_def, substs)
}
ValuePathResolution::NonGeneric(ty) => return Some(ty),
};
let substs =
self.process_remote_user_written_ty::<_, crate::next_solver::GenericArgs<'db>>(substs);
let args = self.process_remote_user_written_ty(substs);
self.add_required_obligations_for_value_path(generic_def, &substs);
self.add_required_obligations_for_value_path(generic_def, args);
let interner = DbInterner::new_with(self.db, None, None);
let args: crate::next_solver::GenericArgs<'_> = substs.to_nextsolver(interner);
let ty = self.db.value_ty(value_def)?.instantiate(interner, args).to_chalk(interner);
let ty = self.db.value_ty(value_def)?.instantiate(self.interner(), args);
let ty = self.process_remote_user_written_ty(ty);
Some(ty)
}
fn resolve_value_path(&mut self, path: &Path, id: ExprOrPatId) -> Option<ValuePathResolution> {
fn resolve_value_path(
&mut self,
path: &Path,
id: ExprOrPatId,
) -> Option<ValuePathResolution<'db>> {
let (value, self_subst) = self.resolve_value_path_inner(path, id, false)?;
let value_def: ValueTyDefId = match value {
@ -65,7 +63,7 @@ impl<'db> InferenceContext<'db> {
}
ValueNs::LocalBinding(pat) => {
return match self.result.type_of_binding.get(pat) {
Some(ty) => Some(ValuePathResolution::NonGeneric(ty.clone())),
Some(ty) => Some(ValuePathResolution::NonGeneric(*ty)),
None => {
never!("uninferred pattern?");
None
@ -73,17 +71,12 @@ impl<'db> InferenceContext<'db> {
};
}
ValueNs::ImplSelf(impl_id) => {
let generics = crate::generics::generics(self.db, impl_id.into());
let interner = DbInterner::new_with(self.db, None, None);
let substs = generics.placeholder_subst(self.db);
let args: crate::next_solver::GenericArgs<'_> = substs.to_nextsolver(interner);
let ty =
self.db.impl_self_ty(impl_id).instantiate(interner, args).to_chalk(interner);
let ty = self.db.impl_self_ty(impl_id).instantiate_identity();
return if let Some((AdtId::StructId(struct_id), substs)) = ty.as_adt() {
Some(ValuePathResolution::GenericDef(
struct_id.into(),
struct_id.into(),
substs.clone(),
substs,
))
} else {
// FIXME: report error, invalid Self reference
@ -91,67 +84,42 @@ impl<'db> InferenceContext<'db> {
};
}
ValueNs::GenericParam(it) => {
return Some(ValuePathResolution::NonGeneric(self.db.const_param_ty(it)));
return Some(ValuePathResolution::NonGeneric(self.db.const_param_ty_ns(it)));
}
};
let generic_def = value_def.to_generic_def_id(self.db);
if let GenericDefId::StaticId(_) = generic_def {
let interner = DbInterner::new_with(self.db, None, None);
// `Static` is the kind of item that can never be generic currently. We can just skip the binders to get its type.
let ty = self.db.value_ty(value_def)?.skip_binder().to_chalk(interner);
let ty = self.db.value_ty(value_def)?.skip_binder();
return Some(ValuePathResolution::NonGeneric(ty));
};
let substs = self.with_body_ty_lowering(|ctx| {
let mut path_ctx = ctx.at_path(path, id);
let last_segment = path.segments().len().checked_sub(1);
if let Some(last_segment) = last_segment {
path_ctx.set_current_segment(last_segment)
}
path_ctx.substs_from_path(value_def, true, false)
});
let substs = substs.as_slice(Interner);
if let ValueNs::EnumVariantId(_) = value {
let mut it = substs
.iter()
.chain(self_subst.as_ref().map_or(&[][..], |s| s.as_slice(Interner)))
.cloned();
let builder = TyBuilder::subst_for_def(self.db, generic_def, None);
let substs = builder
.fill(|x| {
it.next().unwrap_or_else(|| match x {
ParamKind::Type => {
self.result.standard_types.unknown.clone().cast(Interner)
}
ParamKind::Const(ty) => consteval::unknown_const_as_generic(ty.clone()),
ParamKind::Lifetime => error_lifetime().cast(Interner),
})
})
.build();
return Some(ValuePathResolution::GenericDef(value_def, generic_def, substs));
}
let parent_substs = self_subst.or_else(|| {
let generics = generics(self.db, generic_def);
let parent_params_len = generics.parent_generics()?.len();
let parent_args = &substs[..parent_params_len];
Some(Substitution::from_iter(Interner, parent_args))
});
let parent_substs_len = parent_substs.as_ref().map_or(0, |s| s.len(Interner));
let mut it = substs.iter().skip(parent_substs_len).cloned();
let builder = TyBuilder::subst_for_def(self.db, generic_def, parent_substs);
let substs = builder
.fill(|x| {
it.next().unwrap_or_else(|| match x {
ParamKind::Type => self.result.standard_types.unknown.clone().cast(Interner),
ParamKind::Const(ty) => consteval::unknown_const_as_generic(ty.clone()),
ParamKind::Lifetime => error_lifetime().cast(Interner),
})
let substs = if self_subst.is_some_and(|it| !it.is_empty())
&& matches!(value_def, ValueTyDefId::EnumVariantId(_))
{
// This is something like `TypeAlias::<Args>::EnumVariant`. Do not call `substs_from_path()`,
// as it'll try to re-lower the previous segment assuming it refers to the enum, but it refers
// to the type alias and they may have different generics.
self.types.empty_args
} else {
self.with_body_ty_lowering(|ctx| {
let mut path_ctx = ctx.at_path(path, id);
let last_segment = path.segments().len().checked_sub(1);
if let Some(last_segment) = last_segment {
path_ctx.set_current_segment(last_segment)
}
path_ctx.substs_from_path(value_def, true, false)
})
.build();
};
let parent_substs_len = self_subst.map_or(0, |it| it.len());
let substs = GenericArgs::fill_rest(
self.interner(),
generic_def.into(),
self_subst.iter().flat_map(|it| it.iter()).chain(substs.iter().skip(parent_substs_len)),
|_, _, id, _| GenericArg::error_from_id(self.interner(), id),
);
Some(ValuePathResolution::GenericDef(value_def, generic_def, substs))
}
@ -161,7 +129,7 @@ impl<'db> InferenceContext<'db> {
path: &Path,
id: ExprOrPatId,
no_diagnostics: bool,
) -> Option<(ValueNs, Option<chalk_ir::Substitution<Interner>>)> {
) -> Option<(ValueNs, Option<GenericArgs<'db>>)> {
// Don't use `self.make_ty()` here as we need `orig_ns`.
let mut ctx = TyLoweringContext::new(
self.db,
@ -211,7 +179,7 @@ impl<'db> InferenceContext<'db> {
let (resolution, substs) = match (def, is_before_last) {
(TypeNs::TraitId(trait_), true) => {
let self_ty = self.table.new_type_var();
let self_ty = self.table.next_ty_var();
let trait_ref =
path_ctx.lower_trait_ref_from_resolved_path(trait_, self_ty, true);
drop_ctx(ctx, no_diagnostics);
@ -225,7 +193,7 @@ impl<'db> InferenceContext<'db> {
path_ctx.ignore_last_segment();
let (ty, _) = path_ctx.lower_partly_resolved_path(def, true);
drop_ctx(ctx, no_diagnostics);
if ty.is_unknown() {
if ty.is_ty_error() {
return None;
}
@ -241,21 +209,25 @@ impl<'db> InferenceContext<'db> {
return Some((value, self_subst));
#[inline]
fn drop_ctx(mut ctx: TyLoweringContext<'_>, no_diagnostics: bool) {
fn drop_ctx(mut ctx: TyLoweringContext<'_, '_>, no_diagnostics: bool) {
if no_diagnostics {
ctx.forget_diagnostics();
}
}
}
fn add_required_obligations_for_value_path(&mut self, def: GenericDefId, subst: &Substitution) {
let predicates = self.db.generic_predicates(def);
for predicate in predicates.iter() {
let (predicate, binders) =
predicate.clone().substitute(Interner, &subst).into_value_and_skipped_binders();
// Quantified where clauses are not yet handled.
stdx::always!(binders.is_empty(Interner));
self.push_obligation(predicate.cast(Interner));
fn add_required_obligations_for_value_path(
&mut self,
def: GenericDefId,
subst: GenericArgs<'db>,
) {
let predicates = self.db.generic_predicates_ns(def);
let interner = self.interner();
let param_env = self.table.trait_env.env;
if let Some(predicates) = predicates.instantiate(self.interner(), subst) {
self.table.register_predicates(predicates.map(|predicate| {
Obligation::new(interner, ObligationCause::new(), param_env, predicate)
}));
}
// We need to add `Self: Trait` obligation when `def` is a trait assoc item.
@ -267,21 +239,27 @@ impl<'db> InferenceContext<'db> {
if let ItemContainerId::TraitId(trait_) = container {
let parent_len = generics(self.db, def).parent_generics().map_or(0, |g| g.len_self());
let parent_subst =
Substitution::from_iter(Interner, subst.iter(Interner).take(parent_len));
let trait_ref =
TraitRef { trait_id: to_chalk_trait_id(trait_), substitution: parent_subst };
self.push_obligation(trait_ref.cast(Interner));
let parent_subst = GenericArgs::new_from_iter(
interner,
subst.as_slice()[..parent_len].iter().copied(),
);
let trait_ref = TraitRef::new(interner, trait_.into(), parent_subst);
self.table.register_predicate(Obligation::new(
interner,
ObligationCause::new(),
param_env,
trait_ref,
));
}
}
fn resolve_trait_assoc_item(
&mut self,
trait_ref: TraitRef,
trait_ref: TraitRef<'db>,
segment: PathSegment<'_>,
id: ExprOrPatId,
) -> Option<(ValueNs, Substitution)> {
let trait_ = trait_ref.hir_trait_id();
) -> Option<(ValueNs, GenericArgs<'db>)> {
let trait_ = trait_ref.def_id.0;
let item =
trait_.trait_items(self.db).items.iter().map(|(_name, id)| *id).find_map(|item| {
match item {
@ -309,25 +287,25 @@ impl<'db> InferenceContext<'db> {
AssocItemId::TypeAliasId(_) => unreachable!(),
};
self.write_assoc_resolution(id, item, trait_ref.substitution.clone());
Some((def, trait_ref.substitution))
self.write_assoc_resolution(id, item, trait_ref.args);
Some((def, trait_ref.args))
}
fn resolve_ty_assoc_item(
&mut self,
ty: Ty,
ty: Ty<'db>,
name: &Name,
id: ExprOrPatId,
) -> Option<(ValueNs, Substitution)> {
if let TyKind::Error = ty.kind(Interner) {
) -> Option<(ValueNs, GenericArgs<'db>)> {
if ty.is_ty_error() {
return None;
}
if let Some(result) = self.resolve_enum_variant_on_ty(&ty, name, id) {
if let Some(result) = self.resolve_enum_variant_on_ty(ty, name, id) {
return Some(result);
}
let canonical_ty = self.canonicalize(ty.clone().to_nextsolver(self.table.interner));
let canonical_ty = self.canonicalize(ty);
let mut not_visible = None;
let res = method_resolution::iterate_method_candidates(
@ -362,24 +340,28 @@ impl<'db> InferenceContext<'db> {
};
let substs = match container {
ItemContainerId::ImplId(impl_id) => {
let interner = DbInterner::new_with(self.db, None, None);
let impl_substs = TyBuilder::subst_for_def(self.db, impl_id, None)
.fill_with_inference_vars(&mut self.table)
.build();
let args: crate::next_solver::GenericArgs<'_> = impl_substs.to_nextsolver(interner);
let impl_substs = self.table.fresh_args_for_item(impl_id.into());
let impl_self_ty =
self.db.impl_self_ty(impl_id).instantiate(interner, args).to_chalk(interner);
self.unify(&impl_self_ty, &ty);
self.db.impl_self_ty(impl_id).instantiate(self.interner(), impl_substs);
self.unify(impl_self_ty, ty);
impl_substs
}
ItemContainerId::TraitId(trait_) => {
// we're picking this method
let trait_ref = TyBuilder::trait_ref(self.db, trait_)
.push(ty.clone())
.fill_with_inference_vars(&mut self.table)
.build();
self.push_obligation(trait_ref.clone().cast(Interner));
trait_ref.substitution
let args = GenericArgs::fill_rest(
self.interner(),
trait_.into(),
[ty.into()],
|_, _, id, _| self.table.next_var_for_param(id),
);
let trait_ref = TraitRef::new(self.interner(), trait_.into(), args);
self.table.register_predicate(Obligation::new(
self.interner(),
ObligationCause::new(),
self.table.trait_env.env,
trait_ref,
));
args
}
ItemContainerId::ModuleId(_) | ItemContainerId::ExternBlockId(_) => {
never!("assoc item contained in module/extern block");
@ -387,7 +369,7 @@ impl<'db> InferenceContext<'db> {
}
};
self.write_assoc_resolution(id, item, substs.clone());
self.write_assoc_resolution(id, item, substs);
if !visible {
self.push_diagnostic(InferenceDiagnostic::PrivateAssocItem { id, item });
}
@ -396,11 +378,11 @@ impl<'db> InferenceContext<'db> {
fn resolve_enum_variant_on_ty(
&mut self,
ty: &Ty,
ty: Ty<'db>,
name: &Name,
id: ExprOrPatId,
) -> Option<(ValueNs, Substitution)> {
let ty = self.table.structurally_resolve_type(ty);
) -> Option<(ValueNs, GenericArgs<'db>)> {
let ty = self.table.try_structurally_resolve_type(ty);
let (enum_id, subst) = match ty.as_adt() {
Some((AdtId::EnumId(e), subst)) => (e, subst),
_ => return None,
@ -408,14 +390,14 @@ impl<'db> InferenceContext<'db> {
let enum_data = enum_id.enum_variants(self.db);
let variant = enum_data.variant(name)?;
self.write_variant_resolution(id, variant.into());
Some((ValueNs::EnumVariantId(variant), subst.clone()))
Some((ValueNs::EnumVariantId(variant), subst))
}
}
#[derive(Debug)]
enum ValuePathResolution {
enum ValuePathResolution<'db> {
// It's awkward to wrap a single ID in two enums, but we need both and this saves fallible
// conversion between them + `unwrap()`.
GenericDef(ValueTyDefId, GenericDefId, Substitution),
NonGeneric(Ty),
GenericDef(ValueTyDefId, GenericDefId, GenericArgs<'db>),
NonGeneric(Ty<'db>),
}

File diff suppressed because it is too large Load diff

View file

@ -11,7 +11,9 @@ use triomphe::Arc;
use crate::{
AliasTy, Binders, Interner, Substitution, TraitEnvironment, Ty, TyKind,
consteval::try_const_usize, db::HirDatabase,
consteval::try_const_usize,
db::HirDatabase,
next_solver::{DbInterner, mapping::ChalkToNextSolver},
};
// FIXME: Turn this into a query, it can be quite slow
@ -79,14 +81,17 @@ impl TypeVisitor<Interner> for UninhabitedFrom<'_> {
}
self.recursive_ty.insert(ty.clone());
self.max_depth -= 1;
let interner = DbInterner::new_with(self.db, None, None);
let r = match ty.kind(Interner) {
TyKind::Adt(adt, subst) => self.visit_adt(adt.0, subst),
TyKind::Never => BREAK_VISIBLY_UNINHABITED,
TyKind::Tuple(..) => ty.super_visit_with(self, outer_binder),
TyKind::Array(item_ty, len) => match try_const_usize(self.db, len) {
Some(0) | None => CONTINUE_OPAQUELY_INHABITED,
Some(1..) => item_ty.super_visit_with(self, outer_binder),
},
TyKind::Array(item_ty, len) => {
match try_const_usize(self.db, len.to_nextsolver(interner)) {
Some(0) | None => CONTINUE_OPAQUELY_INHABITED,
Some(1..) => item_ty.super_visit_with(self, outer_binder),
}
}
TyKind::Alias(AliasTy::Projection(projection)) => {
// FIXME: I think this currently isn't used for monomorphized bodies, so there is no need to handle
// `TyKind::AssociatedType`, but perhaps in the future it will.

View file

@ -19,15 +19,13 @@ use rustc_type_ir::{
};
use triomphe::Arc;
use crate::utils::ClosureSubst;
use crate::{
Interner, TraitEnvironment,
consteval_nextsolver::try_const_usize,
TraitEnvironment,
consteval::try_const_usize,
db::HirDatabase,
next_solver::{
DbInterner, GenericArgs, ParamEnv, Ty, TyKind, TypingMode,
infer::{DbInternerInferExt, traits::ObligationCause},
mapping::{ChalkToNextSolver, convert_args_for_result},
},
};
@ -325,19 +323,12 @@ pub fn layout_of_ty_query<'db>(
TyKind::Closure(id, args) => {
let def = db.lookup_intern_closure(id.0);
let infer = db.infer(def.0);
let (captures, _) = infer.closure_info(&id.0.into());
let (captures, _) = infer.closure_info(id.0);
let fields = captures
.iter()
.map(|it| {
let ty = it
.ty
.clone()
.substitute(
Interner,
&ClosureSubst(&convert_args_for_result(interner, args.inner()))
.parent_subst(db),
)
.to_nextsolver(interner);
let ty =
it.ty.instantiate(interner, args.split_closure_args_untupled().parent_args);
db.layout_of_ty(ty, trait_env.clone())
})
.collect::<Result<Vec<_>, _>>()?;
@ -394,7 +385,7 @@ fn struct_tail_erasing_lifetimes<'a>(db: &'a dyn HirDatabase, pointee: Ty<'a>) -
}
}
TyKind::Tuple(tys) => {
if let Some(last_field_ty) = tys.iter().last() {
if let Some(last_field_ty) = tys.iter().next_back() {
struct_tail_erasing_lifetimes(db, last_field_ty)
} else {
pointee

View file

@ -39,7 +39,7 @@ pub fn target_data_layout_query(
target,
} => format!(r#"inconsistent target specification: "data-layout" claims pointers are {pointer_size}-bit, while "target-pointer-width" is `{target}`"#),
TargetDataLayoutErrors::InvalidBitsSize { err } => err,
TargetDataLayoutErrors::UnknownPointerSpecification { err } => format!(r#"use of unknown pointer specifer in "data-layout": {err}"#),
TargetDataLayoutErrors::UnknownPointerSpecification { err } => format!(r#"use of unknown pointer specifier in "data-layout": {err}"#),
}.into())
}
},

View file

@ -3,7 +3,7 @@ use either::Either;
use hir_def::db::DefDatabase;
use project_model::{Sysroot, toolchain_info::QueryConfig};
use rustc_hash::FxHashMap;
use rustc_type_ir::inherent::{GenericArgs as _, Ty as _};
use rustc_type_ir::inherent::GenericArgs as _;
use syntax::ToSmolStr;
use test_fixture::WithFixture;
use triomphe::Arc;
@ -11,7 +11,7 @@ use triomphe::Arc;
use crate::{
db::HirDatabase,
layout::{Layout, LayoutError},
next_solver::{AdtDef, DbInterner, GenericArgs, mapping::ChalkToNextSolver},
next_solver::{DbInterner, GenericArgs},
setup_tracing,
test_db::TestDB,
};
@ -79,12 +79,12 @@ fn eval_goal(
Some(adt_or_type_alias_id)
})
.unwrap();
salsa::attach(&db, || {
crate::attach_db(&db, || {
let interner = DbInterner::new_with(&db, None, None);
let goal_ty = match adt_or_type_alias_id {
Either::Left(adt_id) => crate::next_solver::Ty::new_adt(
interner,
AdtDef::new(adt_id, interner),
adt_id,
GenericArgs::identity_for_item(interner, adt_id.into()),
),
Either::Right(ty_id) => db.ty(ty_id.into()).instantiate_identity(),
@ -112,31 +112,33 @@ fn eval_expr(
);
let (db, file_id) = TestDB::with_single_file(&ra_fixture);
let module_id = db.module_for_file(file_id.file_id(&db));
let def_map = module_id.def_map(&db);
let scope = &def_map[module_id.local_id].scope;
let function_id = scope
.declarations()
.find_map(|x| match x {
hir_def::ModuleDefId::FunctionId(x) => {
let name =
db.function_signature(x).name.display_no_db(file_id.edition(&db)).to_smolstr();
(name == "main").then_some(x)
}
_ => None,
})
.unwrap();
let hir_body = db.body(function_id.into());
let b = hir_body
.bindings()
.find(|x| x.1.name.display_no_db(file_id.edition(&db)).to_smolstr() == "goal")
.unwrap()
.0;
let infer = db.infer(function_id.into());
let goal_ty = infer.type_of_binding[b].clone();
salsa::attach(&db, || {
let interner = DbInterner::new_with(&db, None, None);
db.layout_of_ty(goal_ty.to_nextsolver(interner), db.trait_environment(function_id.into()))
crate::attach_db(&db, || {
let module_id = db.module_for_file(file_id.file_id(&db));
let def_map = module_id.def_map(&db);
let scope = &def_map[module_id.local_id].scope;
let function_id = scope
.declarations()
.find_map(|x| match x {
hir_def::ModuleDefId::FunctionId(x) => {
let name = db
.function_signature(x)
.name
.display_no_db(file_id.edition(&db))
.to_smolstr();
(name == "main").then_some(x)
}
_ => None,
})
.unwrap();
let hir_body = db.body(function_id.into());
let b = hir_body
.bindings()
.find(|x| x.1.name.display_no_db(file_id.edition(&db)).to_smolstr() == "goal")
.unwrap()
.0;
let infer = db.infer(function_id.into());
let goal_ty = infer.type_of_binding[b];
db.layout_of_ty(goal_ty, db.trait_environment(function_id.into()))
})
}

View file

@ -21,10 +21,11 @@ extern crate ra_ap_rustc_type_ir as rustc_type_ir;
extern crate ra_ap_rustc_next_trait_solver as rustc_next_trait_solver;
extern crate self as hir_ty;
mod builder;
mod chalk_db;
mod chalk_ext;
mod drop;
mod infer;
mod inhabitedness;
mod interner;
@ -38,10 +39,11 @@ mod utils;
pub mod autoderef;
pub mod consteval;
pub mod consteval_nextsolver;
mod consteval_chalk;
pub mod db;
pub mod diagnostics;
pub mod display;
pub mod drop;
pub mod dyn_compatibility;
pub mod generics;
pub mod lang_items;
@ -60,11 +62,10 @@ mod variance;
use std::hash::Hash;
use chalk_ir::{
NoSolution, VariableKinds,
VariableKinds,
fold::{Shift, TypeFoldable},
interner::HasInterner,
};
use either::Either;
use hir_def::{CallableDefId, GeneralConstId, TypeOrConstParamId, hir::ExprId, type_ref::Rawness};
use hir_expand::name::Name;
use indexmap::{IndexMap, map::Entry};
@ -73,15 +74,16 @@ use la_arena::{Arena, Idx};
use mir::{MirEvalError, VTableMap};
use rustc_hash::{FxBuildHasher, FxHashMap, FxHashSet};
use rustc_type_ir::{
UpcastFrom,
inherent::{SliceLike, Ty as _},
TypeSuperVisitable, TypeVisitableExt, UpcastFrom,
inherent::{IntoKind, SliceLike, Ty as _},
};
use syntax::ast::{ConstArg, make};
use traits::FnTrait;
use triomphe::Arc;
#[cfg(not(debug_assertions))]
use crate::next_solver::ErrorGuaranteed;
use crate::{
consteval::unknown_const,
db::HirDatabase,
display::{DisplayTarget, HirDisplay},
generics::Generics,
@ -95,7 +97,6 @@ use crate::{
pub use autoderef::autoderef;
pub use builder::{ParamKind, TyBuilder};
pub use chalk_ext::*;
pub use drop::DropGlue;
pub use infer::{
Adjust, Adjustment, AutoBorrow, BindingMode, InferenceDiagnostic, InferenceResult,
InferenceTyDiagnosticSource, OverloadedDeref, PointerCast,
@ -104,17 +105,17 @@ pub use infer::{
could_coerce, could_unify, could_unify_deeply,
};
pub use interner::Interner;
pub use lower::{
ImplTraitLoweringMode, LifetimeElisionKind, ParamLoweringMode, TyDefId, TyLoweringContext,
ValueTyDefId, diagnostics::*,
pub use lower::{ImplTraitLoweringMode, ParamLoweringMode, TyDefId, ValueTyDefId, diagnostics::*};
pub use lower_nextsolver::{
LifetimeElisionKind, TyLoweringContext, associated_type_shorthand_candidates,
};
pub use lower_nextsolver::associated_type_shorthand_candidates;
pub use mapping::{
ToChalk, from_assoc_type_id, from_chalk_trait_id, from_foreign_def_id, from_placeholder_idx,
lt_from_placeholder_idx, lt_to_placeholder_idx, to_assoc_type_id, to_chalk_trait_id,
to_foreign_def_id, to_placeholder_idx, to_placeholder_idx_no_index,
};
pub use method_resolution::check_orphan_rules;
pub use next_solver::interner::{attach_db, attach_db_allow_change, with_attached_db};
pub use target_feature::TargetFeatures;
pub use traits::TraitEnvironment;
pub use utils::{
@ -123,20 +124,16 @@ pub use utils::{
};
pub use variance::Variance;
pub use chalk_ir::{
AdtId, BoundVar, DebruijnIndex, Mutability, Safety, Scalar, TyVariableKind,
cast::Cast,
visit::{TypeSuperVisitable, TypeVisitable, TypeVisitor},
};
use chalk_ir::{AdtId, BoundVar, DebruijnIndex, Safety, Scalar};
pub type ForeignDefId = chalk_ir::ForeignDefId<Interner>;
pub type AssocTypeId = chalk_ir::AssocTypeId<Interner>;
pub type FnDefId = chalk_ir::FnDefId<Interner>;
pub type ClosureId = chalk_ir::ClosureId<Interner>;
pub type OpaqueTyId = chalk_ir::OpaqueTyId<Interner>;
pub type PlaceholderIndex = chalk_ir::PlaceholderIndex;
pub(crate) type ForeignDefId = chalk_ir::ForeignDefId<Interner>;
pub(crate) type AssocTypeId = chalk_ir::AssocTypeId<Interner>;
pub(crate) type FnDefId = chalk_ir::FnDefId<Interner>;
pub(crate) type ClosureId = chalk_ir::ClosureId<Interner>;
pub(crate) type OpaqueTyId = chalk_ir::OpaqueTyId<Interner>;
pub(crate) type PlaceholderIndex = chalk_ir::PlaceholderIndex;
pub type CanonicalVarKinds = chalk_ir::CanonicalVarKinds<Interner>;
pub(crate) type CanonicalVarKinds = chalk_ir::CanonicalVarKinds<Interner>;
pub(crate) type VariableKind = chalk_ir::VariableKind<Interner>;
/// Represents generic parameters and an item bound by them. When the item has parent, the binders
@ -147,49 +144,48 @@ pub(crate) type VariableKind = chalk_ir::VariableKind<Interner>;
/// parameters/arguments for an item MUST come before those for its parent. This is to facilitate
/// the integration with chalk-solve, which mildly puts constraints as such. See #13335 for its
/// motivation in detail.
pub type Binders<T> = chalk_ir::Binders<T>;
pub(crate) type Binders<T> = chalk_ir::Binders<T>;
/// Interned list of generic arguments for an item. When an item has parent, the `Substitution` for
/// it contains generic arguments for both its parent and itself. See chalk's documentation for
/// details.
///
/// See `Binders` for the constraint on the ordering.
pub type Substitution = chalk_ir::Substitution<Interner>;
pub type GenericArg = chalk_ir::GenericArg<Interner>;
pub type GenericArgData = chalk_ir::GenericArgData<Interner>;
pub(crate) type Substitution = chalk_ir::Substitution<Interner>;
pub(crate) type GenericArg = chalk_ir::GenericArg<Interner>;
pub(crate) type GenericArgData = chalk_ir::GenericArgData<Interner>;
pub type Ty = chalk_ir::Ty<Interner>;
pub(crate) type Ty = chalk_ir::Ty<Interner>;
pub type TyKind = chalk_ir::TyKind<Interner>;
pub type TypeFlags = chalk_ir::TypeFlags;
pub(crate) type TypeFlags = chalk_ir::TypeFlags;
pub(crate) type DynTy = chalk_ir::DynTy<Interner>;
pub type FnPointer = chalk_ir::FnPointer<Interner>;
pub(crate) type FnPointer = chalk_ir::FnPointer<Interner>;
pub(crate) use chalk_ir::FnSubst; // a re-export so we don't lose the tuple constructor
pub type AliasTy = chalk_ir::AliasTy<Interner>;
pub type ProjectionTy = chalk_ir::ProjectionTy<Interner>;
pub(crate) type ProjectionTy = chalk_ir::ProjectionTy<Interner>;
pub(crate) type OpaqueTy = chalk_ir::OpaqueTy<Interner>;
pub(crate) type InferenceVar = chalk_ir::InferenceVar;
pub(crate) type Lifetime = chalk_ir::Lifetime<Interner>;
pub(crate) type LifetimeData = chalk_ir::LifetimeData<Interner>;
pub(crate) type LifetimeOutlives = chalk_ir::LifetimeOutlives<Interner>;
pub type ConstValue = chalk_ir::ConstValue<Interner>;
pub(crate) type ConstValue = chalk_ir::ConstValue<Interner>;
pub type Const = chalk_ir::Const<Interner>;
pub(crate) type Const = chalk_ir::Const<Interner>;
pub(crate) type ConstData = chalk_ir::ConstData<Interner>;
pub(crate) type ConcreteConst = chalk_ir::ConcreteConst<Interner>;
pub type TraitRef = chalk_ir::TraitRef<Interner>;
pub type QuantifiedWhereClause = Binders<WhereClause>;
pub type Canonical<T> = chalk_ir::Canonical<T>;
pub(crate) type TraitRef = chalk_ir::TraitRef<Interner>;
pub(crate) type QuantifiedWhereClause = Binders<WhereClause>;
pub(crate) type Canonical<T> = chalk_ir::Canonical<T>;
pub(crate) type ChalkTraitId = chalk_ir::TraitId<Interner>;
pub(crate) type QuantifiedWhereClauses = chalk_ir::QuantifiedWhereClauses<Interner>;
pub(crate) type FnSig = chalk_ir::FnSig<Interner>;
pub type InEnvironment<T> = chalk_ir::InEnvironment<T>;
pub(crate) type InEnvironment<T> = chalk_ir::InEnvironment<T>;
pub type AliasEq = chalk_ir::AliasEq<Interner>;
pub type WhereClause = chalk_ir::WhereClause<Interner>;
@ -233,7 +229,7 @@ impl ComplexMemoryMap<'_> {
}
impl<'db> MemoryMap<'db> {
pub fn vtable_ty(&self, id: usize) -> Result<crate::next_solver::Ty<'db>, MirEvalError> {
pub fn vtable_ty(&self, id: usize) -> Result<crate::next_solver::Ty<'db>, MirEvalError<'db>> {
match self {
MemoryMap::Empty | MemoryMap::Simple(_) => Err(MirEvalError::InvalidVTableId(id)),
MemoryMap::Complex(cm) => cm.vtable.ty(id),
@ -249,8 +245,8 @@ impl<'db> MemoryMap<'db> {
/// allocator function as `f` and it will return a mapping of old addresses to new addresses.
fn transform_addresses(
&self,
mut f: impl FnMut(&[u8], usize) -> Result<usize, MirEvalError>,
) -> Result<FxHashMap<usize, usize>, MirEvalError> {
mut f: impl FnMut(&[u8], usize) -> Result<usize, MirEvalError<'db>>,
) -> Result<FxHashMap<usize, usize>, MirEvalError<'db>> {
let mut transform = |(addr, val): (&usize, &[u8])| {
let addr = *addr;
let align = if addr == 0 { 64 } else { (addr - (addr & (addr - 1))).min(64) };
@ -646,7 +642,7 @@ impl TypeFoldable<Interner> for CallableSig {
#[derive(Copy, Clone, PartialEq, Eq, Debug, Hash)]
pub enum ImplTraitId {
ReturnTypeImplTrait(hir_def::FunctionId, ImplTraitIdx),
ReturnTypeImplTrait(hir_def::FunctionId, ImplTraitIdx), // FIXME(next-solver): Should be crate::nextsolver::ImplTraitIdx.
TypeAliasImplTrait(hir_def::TypeAliasId, ImplTraitIdx),
AsyncBlockTypeImplTrait(hir_def::DefWithBodyId, ExprId),
}
@ -713,219 +709,170 @@ pub(crate) fn fold_free_vars<T: HasInterner<Interner = Interner> + TypeFoldable<
t.fold_with(&mut FreeVarFolder(for_ty, for_const), DebruijnIndex::INNERMOST)
}
pub(crate) fn fold_tys<T: HasInterner<Interner = Interner> + TypeFoldable<Interner>>(
t: T,
mut for_ty: impl FnMut(Ty, DebruijnIndex) -> Ty,
binders: DebruijnIndex,
) -> T {
fold_tys_and_consts(
t,
|x, d| match x {
Either::Left(x) => Either::Left(for_ty(x, d)),
Either::Right(x) => Either::Right(x),
},
binders,
)
}
pub(crate) fn fold_tys_and_consts<T: HasInterner<Interner = Interner> + TypeFoldable<Interner>>(
t: T,
f: impl FnMut(Either<Ty, Const>, DebruijnIndex) -> Either<Ty, Const>,
binders: DebruijnIndex,
) -> T {
use chalk_ir::fold::{TypeFolder, TypeSuperFoldable};
#[derive(chalk_derive::FallibleTypeFolder)]
#[has_interner(Interner)]
struct TyFolder<F: FnMut(Either<Ty, Const>, DebruijnIndex) -> Either<Ty, Const>>(F);
impl<F: FnMut(Either<Ty, Const>, DebruijnIndex) -> Either<Ty, Const>> TypeFolder<Interner>
for TyFolder<F>
{
fn as_dyn(&mut self) -> &mut dyn TypeFolder<Interner> {
self
}
fn interner(&self) -> Interner {
Interner
}
fn fold_ty(&mut self, ty: Ty, outer_binder: DebruijnIndex) -> Ty {
let ty = ty.super_fold_with(self.as_dyn(), outer_binder);
self.0(Either::Left(ty), outer_binder).left().unwrap()
}
fn fold_const(&mut self, c: Const, outer_binder: DebruijnIndex) -> Const {
self.0(Either::Right(c), outer_binder).right().unwrap()
}
}
t.fold_with(&mut TyFolder(f), binders)
}
pub(crate) fn fold_generic_args<T: HasInterner<Interner = Interner> + TypeFoldable<Interner>>(
t: T,
f: impl FnMut(GenericArgData, DebruijnIndex) -> GenericArgData,
binders: DebruijnIndex,
) -> T {
use chalk_ir::fold::{TypeFolder, TypeSuperFoldable};
#[derive(chalk_derive::FallibleTypeFolder)]
#[has_interner(Interner)]
struct TyFolder<F: FnMut(GenericArgData, DebruijnIndex) -> GenericArgData>(F);
impl<F: FnMut(GenericArgData, DebruijnIndex) -> GenericArgData> TypeFolder<Interner>
for TyFolder<F>
{
fn as_dyn(&mut self) -> &mut dyn TypeFolder<Interner> {
self
}
fn interner(&self) -> Interner {
Interner
}
fn fold_ty(&mut self, ty: Ty, outer_binder: DebruijnIndex) -> Ty {
let ty = ty.super_fold_with(self.as_dyn(), outer_binder);
self.0(GenericArgData::Ty(ty), outer_binder)
.intern(Interner)
.ty(Interner)
.unwrap()
.clone()
}
fn fold_const(&mut self, c: Const, outer_binder: DebruijnIndex) -> Const {
self.0(GenericArgData::Const(c), outer_binder)
.intern(Interner)
.constant(Interner)
.unwrap()
.clone()
}
fn fold_lifetime(&mut self, lt: Lifetime, outer_binder: DebruijnIndex) -> Lifetime {
let lt = lt.super_fold_with(self.as_dyn(), outer_binder);
self.0(GenericArgData::Lifetime(lt), outer_binder)
.intern(Interner)
.lifetime(Interner)
.unwrap()
.clone()
}
}
t.fold_with(&mut TyFolder(f), binders)
}
/// 'Canonicalizes' the `t` by replacing any errors with new variables. Also
/// ensures there are no unbound variables or inference variables anywhere in
/// the `t`.
pub fn replace_errors_with_variables<T>(t: &T) -> Canonical<T>
pub fn replace_errors_with_variables<'db, T>(
interner: DbInterner<'db>,
t: &T,
) -> crate::next_solver::Canonical<'db, T>
where
T: HasInterner<Interner = Interner> + TypeFoldable<Interner> + Clone,
T: rustc_type_ir::TypeFoldable<DbInterner<'db>> + Clone,
{
use chalk_ir::{
Fallible,
fold::{FallibleTypeFolder, TypeSuperFoldable},
};
struct ErrorReplacer {
vars: usize,
use rustc_type_ir::{FallibleTypeFolder, TypeSuperFoldable};
struct ErrorReplacer<'db> {
interner: DbInterner<'db>,
vars: Vec<crate::next_solver::CanonicalVarKind<'db>>,
binder: rustc_type_ir::DebruijnIndex,
}
impl FallibleTypeFolder<Interner> for ErrorReplacer {
type Error = NoSolution;
impl<'db> FallibleTypeFolder<DbInterner<'db>> for ErrorReplacer<'db> {
#[cfg(debug_assertions)]
type Error = ();
#[cfg(not(debug_assertions))]
type Error = std::convert::Infallible;
fn as_dyn(&mut self) -> &mut dyn FallibleTypeFolder<Interner, Error = Self::Error> {
self
fn cx(&self) -> DbInterner<'db> {
self.interner
}
fn interner(&self) -> Interner {
Interner
fn try_fold_binder<T>(
&mut self,
t: crate::next_solver::Binder<'db, T>,
) -> Result<crate::next_solver::Binder<'db, T>, Self::Error>
where
T: rustc_type_ir::TypeFoldable<DbInterner<'db>>,
{
self.binder.shift_in(1);
let result = t.try_super_fold_with(self);
self.binder.shift_out(1);
result
}
fn try_fold_ty(&mut self, ty: Ty, outer_binder: DebruijnIndex) -> Fallible<Ty> {
if let TyKind::Error = ty.kind(Interner) {
let index = self.vars;
self.vars += 1;
Ok(TyKind::BoundVar(BoundVar::new(outer_binder, index)).intern(Interner))
} else {
ty.try_super_fold_with(self.as_dyn(), outer_binder)
fn try_fold_ty(
&mut self,
t: crate::next_solver::Ty<'db>,
) -> Result<crate::next_solver::Ty<'db>, Self::Error> {
if !t.has_type_flags(
rustc_type_ir::TypeFlags::HAS_ERROR
| rustc_type_ir::TypeFlags::HAS_TY_INFER
| rustc_type_ir::TypeFlags::HAS_CT_INFER
| rustc_type_ir::TypeFlags::HAS_RE_INFER,
) {
return Ok(t);
}
#[cfg(debug_assertions)]
let error = || Err(());
#[cfg(not(debug_assertions))]
let error = || Ok(crate::next_solver::Ty::new_error(self.interner, ErrorGuaranteed));
match t.kind() {
crate::next_solver::TyKind::Error(_) => {
let var = rustc_type_ir::BoundVar::from_usize(self.vars.len());
self.vars.push(crate::next_solver::CanonicalVarKind::Ty {
ui: rustc_type_ir::UniverseIndex::ZERO,
sub_root: var,
});
Ok(crate::next_solver::Ty::new_bound(
self.interner,
self.binder,
crate::next_solver::BoundTy {
var,
kind: crate::next_solver::BoundTyKind::Anon,
},
))
}
crate::next_solver::TyKind::Infer(_) => error(),
crate::next_solver::TyKind::Bound(index, _) if index > self.binder => error(),
_ => t.try_super_fold_with(self),
}
}
fn try_fold_inference_ty(
fn try_fold_const(
&mut self,
_var: InferenceVar,
_kind: TyVariableKind,
_outer_binder: DebruijnIndex,
) -> Fallible<Ty> {
if cfg!(debug_assertions) {
// we don't want to just panic here, because then the error message
// won't contain the whole thing, which would not be very helpful
Err(NoSolution)
} else {
Ok(TyKind::Error.intern(Interner))
ct: crate::next_solver::Const<'db>,
) -> Result<crate::next_solver::Const<'db>, Self::Error> {
if !ct.has_type_flags(
rustc_type_ir::TypeFlags::HAS_ERROR
| rustc_type_ir::TypeFlags::HAS_TY_INFER
| rustc_type_ir::TypeFlags::HAS_CT_INFER
| rustc_type_ir::TypeFlags::HAS_RE_INFER,
) {
return Ok(ct);
}
#[cfg(debug_assertions)]
let error = || Err(());
#[cfg(not(debug_assertions))]
let error = || Ok(crate::next_solver::Const::error(self.interner));
match ct.kind() {
crate::next_solver::ConstKind::Error(_) => {
let var = rustc_type_ir::BoundVar::from_usize(self.vars.len());
self.vars.push(crate::next_solver::CanonicalVarKind::Const(
rustc_type_ir::UniverseIndex::ZERO,
));
Ok(crate::next_solver::Const::new_bound(
self.interner,
self.binder,
crate::next_solver::BoundConst { var },
))
}
crate::next_solver::ConstKind::Infer(_) => error(),
crate::next_solver::ConstKind::Bound(index, _) if index > self.binder => error(),
_ => ct.try_super_fold_with(self),
}
}
fn try_fold_free_var_ty(
fn try_fold_region(
&mut self,
_bound_var: BoundVar,
_outer_binder: DebruijnIndex,
) -> Fallible<Ty> {
if cfg!(debug_assertions) {
// we don't want to just panic here, because then the error message
// won't contain the whole thing, which would not be very helpful
Err(NoSolution)
} else {
Ok(TyKind::Error.intern(Interner))
region: crate::next_solver::Region<'db>,
) -> Result<crate::next_solver::Region<'db>, Self::Error> {
#[cfg(debug_assertions)]
let error = || Err(());
#[cfg(not(debug_assertions))]
let error = || Ok(crate::next_solver::Region::error(self.interner));
match region.kind() {
crate::next_solver::RegionKind::ReError(_) => {
let var = rustc_type_ir::BoundVar::from_usize(self.vars.len());
self.vars.push(crate::next_solver::CanonicalVarKind::Region(
rustc_type_ir::UniverseIndex::ZERO,
));
Ok(crate::next_solver::Region::new_bound(
self.interner,
self.binder,
crate::next_solver::BoundRegion {
var,
kind: crate::next_solver::BoundRegionKind::Anon,
},
))
}
crate::next_solver::RegionKind::ReVar(_) => error(),
crate::next_solver::RegionKind::ReBound(index, _) if index > self.binder => error(),
_ => Ok(region),
}
}
fn try_fold_inference_const(
&mut self,
ty: Ty,
_var: InferenceVar,
_outer_binder: DebruijnIndex,
) -> Fallible<Const> {
if cfg!(debug_assertions) { Err(NoSolution) } else { Ok(unknown_const(ty)) }
}
fn try_fold_free_var_const(
&mut self,
ty: Ty,
_bound_var: BoundVar,
_outer_binder: DebruijnIndex,
) -> Fallible<Const> {
if cfg!(debug_assertions) { Err(NoSolution) } else { Ok(unknown_const(ty)) }
}
fn try_fold_inference_lifetime(
&mut self,
_var: InferenceVar,
_outer_binder: DebruijnIndex,
) -> Fallible<Lifetime> {
if cfg!(debug_assertions) { Err(NoSolution) } else { Ok(error_lifetime()) }
}
fn try_fold_free_var_lifetime(
&mut self,
_bound_var: BoundVar,
_outer_binder: DebruijnIndex,
) -> Fallible<Lifetime> {
if cfg!(debug_assertions) { Err(NoSolution) } else { Ok(error_lifetime()) }
}
}
let mut error_replacer = ErrorReplacer { vars: 0 };
let value = match t.clone().try_fold_with(&mut error_replacer, DebruijnIndex::INNERMOST) {
let mut error_replacer =
ErrorReplacer { vars: Vec::new(), binder: rustc_type_ir::DebruijnIndex::ZERO, interner };
let value = match t.clone().try_fold_with(&mut error_replacer) {
Ok(t) => t,
Err(_) => panic!("Encountered unbound or inference vars in {t:?}"),
};
let kinds = (0..error_replacer.vars).map(|_| {
chalk_ir::CanonicalVarKind::new(
chalk_ir::VariableKind::Ty(TyVariableKind::General),
chalk_ir::UniverseIndex::ROOT,
)
});
Canonical { value, binders: chalk_ir::CanonicalVarKinds::from_iter(Interner, kinds) }
crate::next_solver::Canonical {
value,
max_universe: rustc_type_ir::UniverseIndex::ZERO,
variables: crate::next_solver::CanonicalVars::new_from_iter(interner, error_replacer.vars),
}
}
pub fn callable_sig_from_fn_trait<'db>(
self_ty: &Ty,
self_ty: crate::next_solver::Ty<'db>,
trait_env: Arc<TraitEnvironment<'db>>,
db: &'db dyn HirDatabase,
) -> Option<(FnTrait, CallableSig)> {
) -> Option<(FnTrait, crate::next_solver::PolyFnSig<'db>)> {
let krate = trait_env.krate;
let fn_once_trait = FnTrait::FnOnce.get_id(db, krate)?;
let output_assoc_type = fn_once_trait
@ -942,46 +889,47 @@ pub fn callable_sig_from_fn_trait<'db>(
// - Self: FnOnce<?args_ty>
// - <Self as FnOnce<?args_ty>>::Output == ?ret_ty
let args_ty = table.next_ty_var();
let args = [self_ty.to_nextsolver(table.interner), args_ty];
let trait_ref = crate::next_solver::TraitRef::new(table.interner, fn_once_trait.into(), args);
let args = [self_ty, args_ty];
let trait_ref = crate::next_solver::TraitRef::new(table.interner(), fn_once_trait.into(), args);
let projection = crate::next_solver::Ty::new_alias(
table.interner,
table.interner(),
rustc_type_ir::AliasTyKind::Projection,
crate::next_solver::AliasTy::new(table.interner, output_assoc_type.into(), args),
crate::next_solver::AliasTy::new(table.interner(), output_assoc_type.into(), args),
);
let pred = crate::next_solver::Predicate::upcast_from(trait_ref, table.interner);
let pred = crate::next_solver::Predicate::upcast_from(trait_ref, table.interner());
if !table.try_obligation(pred).no_solution() {
table.register_obligation(pred);
let return_ty = table.normalize_alias_ty(projection);
for fn_x in [FnTrait::Fn, FnTrait::FnMut, FnTrait::FnOnce] {
let fn_x_trait = fn_x.get_id(db, krate)?;
let trait_ref =
crate::next_solver::TraitRef::new(table.interner, fn_x_trait.into(), args);
crate::next_solver::TraitRef::new(table.interner(), fn_x_trait.into(), args);
if !table
.try_obligation(crate::next_solver::Predicate::upcast_from(
trait_ref,
table.interner,
table.interner(),
))
.no_solution()
{
let ret_ty = table.resolve_completely(return_ty.to_chalk(table.interner));
let args_ty = table.resolve_completely(args_ty.to_chalk(table.interner));
let params = args_ty
.as_tuple()?
.iter(Interner)
.map(|it| it.assert_ty_ref(Interner))
.cloned();
let ret_ty = table.resolve_completely(return_ty);
let args_ty = table.resolve_completely(args_ty);
let crate::next_solver::TyKind::Tuple(params) = args_ty.kind() else {
return None;
};
let inputs_and_output = crate::next_solver::Tys::new_from_iter(
table.interner(),
params.iter().chain(std::iter::once(ret_ty)),
);
return Some((
fn_x,
CallableSig::from_params_and_return(
params,
ret_ty,
false,
Safety::Safe,
FnAbi::RustCall,
),
crate::next_solver::Binder::dummy(crate::next_solver::FnSig {
inputs_and_output,
c_variadic: false,
safety: crate::next_solver::abi::Safety::Safe,
abi: FnAbi::RustCall,
}),
));
}
}
@ -991,74 +939,43 @@ pub fn callable_sig_from_fn_trait<'db>(
}
}
struct PlaceholderCollector<'db> {
db: &'db dyn HirDatabase,
placeholders: FxHashSet<TypeOrConstParamId>,
struct ParamCollector {
params: FxHashSet<TypeOrConstParamId>,
}
impl PlaceholderCollector<'_> {
fn collect(&mut self, idx: PlaceholderIndex) {
let id = from_placeholder_idx(self.db, idx).0;
self.placeholders.insert(id);
}
}
impl<'db> rustc_type_ir::TypeVisitor<DbInterner<'db>> for ParamCollector {
type Result = ();
impl TypeVisitor<Interner> for PlaceholderCollector<'_> {
type BreakTy = ();
fn as_dyn(&mut self) -> &mut dyn TypeVisitor<Interner, BreakTy = Self::BreakTy> {
self
}
fn interner(&self) -> Interner {
Interner
}
fn visit_ty(
&mut self,
ty: &Ty,
outer_binder: DebruijnIndex,
) -> std::ops::ControlFlow<Self::BreakTy> {
let has_placeholder_bits = TypeFlags::HAS_TY_PLACEHOLDER | TypeFlags::HAS_CT_PLACEHOLDER;
let chalk_ir::TyData { kind, flags } = ty.data(Interner);
if let TyKind::Placeholder(idx) = kind {
self.collect(*idx);
} else if flags.intersects(has_placeholder_bits) {
return ty.super_visit_with(self, outer_binder);
} else {
// Fast path: don't visit inner types (e.g. generic arguments) when `flags` indicate
// that there are no placeholders.
fn visit_ty(&mut self, ty: crate::next_solver::Ty<'db>) -> Self::Result {
if let crate::next_solver::TyKind::Param(param) = ty.kind() {
self.params.insert(param.id.into());
}
std::ops::ControlFlow::Continue(())
ty.super_visit_with(self);
}
fn visit_const(
&mut self,
constant: &chalk_ir::Const<Interner>,
_outer_binder: DebruijnIndex,
) -> std::ops::ControlFlow<Self::BreakTy> {
if let chalk_ir::ConstValue::Placeholder(idx) = constant.data(Interner).value {
self.collect(idx);
fn visit_const(&mut self, konst: crate::next_solver::Const<'db>) -> Self::Result {
if let crate::next_solver::ConstKind::Param(param) = konst.kind() {
self.params.insert(param.id.into());
}
std::ops::ControlFlow::Continue(())
konst.super_visit_with(self);
}
}
/// Returns unique placeholders for types and consts contained in `value`.
pub fn collect_placeholders<T>(value: &T, db: &dyn HirDatabase) -> Vec<TypeOrConstParamId>
/// Returns unique params for types and consts contained in `value`.
pub fn collect_params<'db, T>(value: &T) -> Vec<TypeOrConstParamId>
where
T: ?Sized + TypeVisitable<Interner>,
T: ?Sized + rustc_type_ir::TypeVisitable<DbInterner<'db>>,
{
let mut collector = PlaceholderCollector { db, placeholders: FxHashSet::default() };
_ = value.visit_with(&mut collector, DebruijnIndex::INNERMOST);
collector.placeholders.into_iter().collect()
let mut collector = ParamCollector { params: FxHashSet::default() };
value.visit_with(&mut collector);
Vec::from_iter(collector.params)
}
pub fn known_const_to_ast(
konst: &Const,
db: &dyn HirDatabase,
pub fn known_const_to_ast<'db>(
konst: crate::next_solver::Const<'db>,
db: &'db dyn HirDatabase,
display_target: DisplayTarget,
) -> Option<ConstArg> {
Some(make::expr_const_value(konst.display(db, display_target).to_string().as_str()))

View file

@ -25,14 +25,14 @@ use chalk_ir::{
use either::Either;
use hir_def::{
AdtId, AssocItemId, ConstId, ConstParamId, EnumId, EnumVariantId, FunctionId, GenericDefId,
GenericParamId, ItemContainerId, LocalFieldId, Lookup, StaticId, StructId, TypeAliasId,
TypeOrConstParamId, UnionId, VariantId,
GenericParamId, LocalFieldId, Lookup, StaticId, StructId, TypeAliasId, TypeOrConstParamId,
UnionId, VariantId,
builtin_type::BuiltinType,
expr_store::{ExpressionStore, path::Path},
hir::generics::{GenericParamDataRef, TypeOrConstParamData, WherePredicate},
lang_item::LangItem,
resolver::{HasResolver, LifetimeNs, Resolver, TypeNs},
signatures::{FunctionSignature, TraitFlags},
signatures::TraitFlags,
type_ref::{
ConstRef, LifetimeRefId, LiteralConstRef, PathId, TraitBoundModifier, TypeBound, TypeRef,
TypeRefId,
@ -49,7 +49,7 @@ use crate::{
ImplTrait, ImplTraitId, ImplTraits, Interner, Lifetime, LifetimeData, LifetimeOutlives,
QuantifiedWhereClause, QuantifiedWhereClauses, Substitution, TraitRef, TraitRefExt, Ty,
TyBuilder, TyKind, WhereClause, all_super_traits,
consteval::{intern_const_ref, path_to_const, unknown_const, unknown_const_as_generic},
consteval_chalk::{intern_const_ref, path_to_const, unknown_const, unknown_const_as_generic},
db::HirDatabase,
error_lifetime,
generics::{Generics, generics, trait_self_param_idx},
@ -86,7 +86,7 @@ impl ImplTraitLoweringState {
pub(crate) struct PathDiagnosticCallbackData(pub(crate) TypeRefId);
#[derive(Debug, Clone)]
pub enum LifetimeElisionKind {
pub(crate) enum LifetimeElisionKind {
/// Create a new anonymous lifetime parameter and reference it.
///
/// If `report_in_path`, report an error when encountering lifetime elision in a path:
@ -111,39 +111,11 @@ pub enum LifetimeElisionKind {
/// error on default object bounds (e.g., `Box<dyn Foo>`).
AnonymousReportError,
/// Resolves elided lifetimes to `'static` if there are no other lifetimes in scope,
/// otherwise give a warning that the previous behavior of introducing a new early-bound
/// lifetime is a bug and will be removed (if `only_lint` is enabled).
StaticIfNoLifetimeInScope { only_lint: bool },
/// Signal we cannot find which should be the anonymous lifetime.
ElisionFailure,
/// Infer all elided lifetimes.
Infer,
}
impl LifetimeElisionKind {
#[inline]
pub(crate) fn for_const(const_parent: ItemContainerId) -> LifetimeElisionKind {
match const_parent {
ItemContainerId::ExternBlockId(_) | ItemContainerId::ModuleId(_) => {
LifetimeElisionKind::Elided(static_lifetime())
}
ItemContainerId::ImplId(_) => {
LifetimeElisionKind::StaticIfNoLifetimeInScope { only_lint: true }
}
ItemContainerId::TraitId(_) => {
LifetimeElisionKind::StaticIfNoLifetimeInScope { only_lint: false }
}
}
}
#[inline]
pub(crate) fn for_fn_params(data: &FunctionSignature) -> LifetimeElisionKind {
LifetimeElisionKind::AnonymousCreateParameter { report_in_path: data.is_async() }
}
#[inline]
pub(crate) fn for_fn_ret() -> LifetimeElisionKind {
// FIXME: We should use the elided lifetime here, or `ElisionFailure`.
@ -152,7 +124,7 @@ impl LifetimeElisionKind {
}
#[derive(Debug)]
pub struct TyLoweringContext<'db> {
pub(crate) struct TyLoweringContext<'db> {
pub db: &'db dyn HirDatabase,
resolver: &'db Resolver<'db>,
store: &'db ExpressionStore,
@ -172,7 +144,7 @@ pub struct TyLoweringContext<'db> {
}
impl<'db> TyLoweringContext<'db> {
pub fn new(
pub(crate) fn new(
db: &'db dyn HirDatabase,
resolver: &'db Resolver<'db>,
store: &'db ExpressionStore,
@ -197,7 +169,7 @@ impl<'db> TyLoweringContext<'db> {
}
}
pub fn with_debruijn<T>(
pub(crate) fn with_debruijn<T>(
&mut self,
debruijn: DebruijnIndex,
f: impl FnOnce(&mut TyLoweringContext<'_>) -> T,
@ -208,7 +180,7 @@ impl<'db> TyLoweringContext<'db> {
result
}
pub fn with_shifted_in<T>(
pub(crate) fn with_shifted_in<T>(
&mut self,
debruijn: DebruijnIndex,
f: impl FnOnce(&mut TyLoweringContext<'_>) -> T,
@ -227,25 +199,15 @@ impl<'db> TyLoweringContext<'db> {
result
}
pub fn with_impl_trait_mode(self, impl_trait_mode: ImplTraitLoweringMode) -> Self {
pub(crate) fn with_impl_trait_mode(self, impl_trait_mode: ImplTraitLoweringMode) -> Self {
Self { impl_trait_mode: ImplTraitLoweringState::new(impl_trait_mode), ..self }
}
pub fn with_type_param_mode(self, type_param_mode: ParamLoweringMode) -> Self {
pub(crate) fn with_type_param_mode(self, type_param_mode: ParamLoweringMode) -> Self {
Self { type_param_mode, ..self }
}
pub fn impl_trait_mode(&mut self, impl_trait_mode: ImplTraitLoweringMode) -> &mut Self {
self.impl_trait_mode = ImplTraitLoweringState::new(impl_trait_mode);
self
}
pub fn type_param_mode(&mut self, type_param_mode: ParamLoweringMode) -> &mut Self {
self.type_param_mode = type_param_mode;
self
}
pub fn push_diagnostic(&mut self, type_ref: TypeRefId, kind: TyLoweringDiagnosticKind) {
pub(crate) fn push_diagnostic(&mut self, type_ref: TypeRefId, kind: TyLoweringDiagnosticKind) {
self.diagnostics.push(TyLoweringDiagnostic { source: type_ref, kind });
}
}
@ -268,12 +230,12 @@ pub enum ParamLoweringMode {
Variable,
}
impl<'a> TyLoweringContext<'a> {
pub fn lower_ty(&mut self, type_ref: TypeRefId) -> Ty {
impl<'db> TyLoweringContext<'db> {
pub(crate) fn lower_ty(&mut self, type_ref: TypeRefId) -> Ty {
self.lower_ty_ext(type_ref).0
}
pub fn lower_const(&mut self, const_ref: &ConstRef, const_type: Ty) -> Const {
pub(crate) fn lower_const(&mut self, const_ref: &ConstRef, const_type: Ty) -> Const {
let const_ref = &self.store[const_ref.expr];
match const_ref {
hir_def::hir::Expr::Path(path) => path_to_const(
@ -328,7 +290,7 @@ impl<'a> TyLoweringContext<'a> {
}
}
pub fn lower_path_as_const(&mut self, path: &Path, const_type: Ty) -> Const {
pub(crate) fn lower_path_as_const(&mut self, path: &Path, const_type: Ty) -> Const {
path_to_const(
self.db,
self.resolver,
@ -345,7 +307,7 @@ impl<'a> TyLoweringContext<'a> {
self.generics.get_or_init(|| generics(self.db, self.def))
}
pub fn lower_ty_ext(&mut self, type_ref_id: TypeRefId) -> (Ty, Option<TypeNs>) {
pub(crate) fn lower_ty_ext(&mut self, type_ref_id: TypeRefId) -> (Ty, Option<TypeNs>) {
let mut res = None;
let type_ref = &self.store[type_ref_id];
let ty = match type_ref {
@ -512,7 +474,7 @@ impl<'a> TyLoweringContext<'a> {
}
#[inline]
fn on_path_diagnostic_callback(type_ref: TypeRefId) -> PathDiagnosticCallback<'static> {
fn on_path_diagnostic_callback<'a>(type_ref: TypeRefId) -> PathDiagnosticCallback<'a, 'db> {
PathDiagnosticCallback {
data: Either::Left(PathDiagnosticCallbackData(type_ref)),
callback: |data, this, diag| {
@ -523,7 +485,7 @@ impl<'a> TyLoweringContext<'a> {
}
#[inline]
fn at_path(&mut self, path_id: PathId) -> PathLoweringContext<'_, 'a> {
fn at_path(&mut self, path_id: PathId) -> PathLoweringContext<'_, 'db> {
PathLoweringContext::new(
self,
Self::on_path_diagnostic_callback(path_id.type_ref()),
@ -559,7 +521,7 @@ impl<'a> TyLoweringContext<'a> {
&mut self,
path_id: PathId,
explicit_self_ty: Ty,
) -> Option<(TraitRef, PathLoweringContext<'_, 'a>)> {
) -> Option<(TraitRef, PathLoweringContext<'_, 'db>)> {
let mut ctx = self.at_path(path_id);
let resolved = match ctx.resolve_path_in_type_ns_fully()? {
// FIXME(trait_alias): We need to handle trait alias here.
@ -576,7 +538,7 @@ impl<'a> TyLoweringContext<'a> {
&'b mut self,
where_predicate: &'b WherePredicate,
ignore_bindings: bool,
) -> impl Iterator<Item = QuantifiedWhereClause> + use<'a, 'b> {
) -> impl Iterator<Item = QuantifiedWhereClause> + use<'db, 'b> {
match where_predicate {
WherePredicate::ForLifetime { target, bound, .. }
| WherePredicate::TypeBound { target, bound } => {
@ -598,7 +560,7 @@ impl<'a> TyLoweringContext<'a> {
bound: &'b TypeBound,
self_ty: Ty,
ignore_bindings: bool,
) -> impl Iterator<Item = QuantifiedWhereClause> + use<'b, 'a> {
) -> impl Iterator<Item = QuantifiedWhereClause> + use<'b, 'db> {
let mut assoc_bounds = None;
let mut clause = None;
match bound {
@ -794,7 +756,7 @@ impl<'a> TyLoweringContext<'a> {
ImplTrait { bounds: crate::make_single_type_binders(predicates) }
}
pub fn lower_lifetime(&self, lifetime: LifetimeRefId) -> Lifetime {
pub(crate) fn lower_lifetime(&self, lifetime: LifetimeRefId) -> Lifetime {
match self.resolver.resolve_lifetime(&self.store[lifetime]) {
Some(resolution) => match resolution {
LifetimeNs::Static => static_lifetime(),

View file

@ -3,15 +3,15 @@
use chalk_ir::{BoundVar, cast::Cast, fold::Shift};
use either::Either;
use hir_def::{
GenericDefId, GenericParamId, Lookup, TraitId,
GenericDefId, GenericParamId, TraitId,
expr_store::{
ExpressionStore, HygieneId,
ExpressionStore,
path::{GenericArg, GenericArgs, GenericArgsParentheses, Path, PathSegment, PathSegments},
},
hir::generics::{
GenericParamDataRef, TypeOrConstParamData, TypeParamData, TypeParamProvenance,
},
resolver::{ResolveValueResult, TypeNs, ValueNs},
resolver::TypeNs,
signatures::TraitFlags,
type_ref::{TypeRef, TypeRefId},
};
@ -21,36 +21,36 @@ use stdx::never;
use crate::{
AliasEq, AliasTy, GenericArgsProhibitedReason, ImplTraitLoweringMode, IncorrectGenericsLenKind,
Interner, ParamLoweringMode, PathGenericsSource, PathLoweringDiagnostic, ProjectionTy,
QuantifiedWhereClause, Substitution, TraitRef, Ty, TyBuilder, TyDefId, TyKind,
TyLoweringContext, ValueTyDefId, WhereClause,
consteval::{unknown_const, unknown_const_as_generic},
QuantifiedWhereClause, Substitution, TraitRef, Ty, TyBuilder, TyDefId, TyKind, WhereClause,
consteval_chalk::{unknown_const, unknown_const_as_generic},
db::HirDatabase,
error_lifetime,
generics::{Generics, generics},
lower::{LifetimeElisionKind, named_associated_type_shorthand_candidates},
lower::{LifetimeElisionKind, TyLoweringContext, named_associated_type_shorthand_candidates},
next_solver::{
DbInterner,
mapping::{ChalkToNextSolver, NextSolverToChalk},
},
static_lifetime, to_assoc_type_id, to_chalk_trait_id, to_placeholder_idx,
to_assoc_type_id, to_chalk_trait_id, to_placeholder_idx,
utils::associated_type_by_name_including_super_traits,
};
type CallbackData<'a> = Either<
type CallbackData<'a, 'db> = Either<
super::PathDiagnosticCallbackData,
crate::infer::diagnostics::PathDiagnosticCallbackData<'a>,
crate::infer::diagnostics::PathDiagnosticCallbackData<'a, 'db>,
>;
// We cannot use `&mut dyn FnMut()` because of lifetime issues, and we don't want to use `Box<dyn FnMut()>`
// because of the allocation, so we create a lifetime-less callback, tailored for our needs.
pub(crate) struct PathDiagnosticCallback<'a> {
pub(crate) data: CallbackData<'a>,
pub(crate) callback: fn(&CallbackData<'_>, &mut TyLoweringContext<'_>, PathLoweringDiagnostic),
pub(crate) struct PathDiagnosticCallback<'a, 'db> {
pub(crate) data: CallbackData<'a, 'db>,
pub(crate) callback:
fn(&CallbackData<'_, 'db>, &mut TyLoweringContext<'_>, PathLoweringDiagnostic),
}
pub(crate) struct PathLoweringContext<'a, 'b> {
ctx: &'a mut TyLoweringContext<'b>,
on_diagnostic: PathDiagnosticCallback<'a>,
on_diagnostic: PathDiagnosticCallback<'a, 'b>,
path: &'a Path,
segments: PathSegments<'a>,
current_segment_idx: usize,
@ -62,7 +62,7 @@ impl<'a, 'b> PathLoweringContext<'a, 'b> {
#[inline]
pub(crate) fn new(
ctx: &'a mut TyLoweringContext<'b>,
on_diagnostic: PathDiagnosticCallback<'a>,
on_diagnostic: PathDiagnosticCallback<'a, 'b>,
path: &'a Path,
) -> Self {
let segments = path.segments();
@ -109,20 +109,6 @@ impl<'a, 'b> PathLoweringContext<'a, 'b> {
self.segments.get(self.current_segment_idx).unwrap_or(self.current_or_prev_segment);
}
#[inline]
pub(crate) fn ignore_last_segment(&mut self) {
self.segments = self.segments.strip_last();
}
#[inline]
pub(crate) fn set_current_segment(&mut self, segment: usize) {
self.current_segment_idx = segment;
self.current_or_prev_segment = self
.segments
.get(segment)
.expect("invalid segment passed to PathLoweringContext::set_current_segment()");
}
#[inline]
fn with_lifetime_elision<T>(
&mut self,
@ -390,103 +376,6 @@ impl<'a, 'b> PathLoweringContext<'a, 'b> {
Some((resolution, remaining_index))
}
pub(crate) fn resolve_path_in_value_ns(
&mut self,
hygiene_id: HygieneId,
) -> Option<ResolveValueResult> {
let (res, prefix_info) = self.ctx.resolver.resolve_path_in_value_ns_with_prefix_info(
self.ctx.db,
self.path,
hygiene_id,
)?;
let segments = self.segments;
if segments.is_empty() || matches!(self.path, Path::LangItem(..)) {
// `segments.is_empty()` can occur with `self`.
return Some(res);
}
let (mod_segments, enum_segment, resolved_segment_idx) = match res {
ResolveValueResult::Partial(_, unresolved_segment, _) => {
(segments.take(unresolved_segment - 1), None, unresolved_segment - 1)
}
ResolveValueResult::ValueNs(ValueNs::EnumVariantId(_), _)
if prefix_info.enum_variant =>
{
(segments.strip_last_two(), segments.len().checked_sub(2), segments.len() - 1)
}
ResolveValueResult::ValueNs(..) => (segments.strip_last(), None, segments.len() - 1),
};
self.current_segment_idx = resolved_segment_idx;
self.current_or_prev_segment =
segments.get(resolved_segment_idx).expect("should have resolved segment");
for (i, mod_segment) in mod_segments.iter().enumerate() {
if mod_segment.args_and_bindings.is_some() {
self.on_diagnostic(PathLoweringDiagnostic::GenericArgsProhibited {
segment: i as u32,
reason: GenericArgsProhibitedReason::Module,
});
}
}
if let Some(enum_segment) = enum_segment
&& segments.get(enum_segment).is_some_and(|it| it.args_and_bindings.is_some())
&& segments.get(enum_segment + 1).is_some_and(|it| it.args_and_bindings.is_some())
{
self.on_diagnostic(PathLoweringDiagnostic::GenericArgsProhibited {
segment: (enum_segment + 1) as u32,
reason: GenericArgsProhibitedReason::EnumVariant,
});
}
match &res {
ResolveValueResult::ValueNs(resolution, _) => {
let resolved_segment_idx = self.current_segment_u32();
let resolved_segment = self.current_or_prev_segment;
let mut prohibit_generics_on_resolved = |reason| {
if resolved_segment.args_and_bindings.is_some() {
self.on_diagnostic(PathLoweringDiagnostic::GenericArgsProhibited {
segment: resolved_segment_idx,
reason,
});
}
};
match resolution {
ValueNs::ImplSelf(_) => {
prohibit_generics_on_resolved(GenericArgsProhibitedReason::SelfTy)
}
// FIXME: rustc generates E0107 (incorrect number of generic arguments) and not
// E0109 (generic arguments provided for a type that doesn't accept them) for
// consts and statics, presumably as a defense against future in which consts
// and statics can be generic, or just because it was easier for rustc implementors.
// That means we'll show the wrong error code. Because of us it's easier to do it
// this way :)
ValueNs::GenericParam(_) => {
prohibit_generics_on_resolved(GenericArgsProhibitedReason::Const)
}
ValueNs::StaticId(_) => {
prohibit_generics_on_resolved(GenericArgsProhibitedReason::Static)
}
ValueNs::LocalBinding(_) => {
prohibit_generics_on_resolved(GenericArgsProhibitedReason::LocalVariable)
}
ValueNs::FunctionId(_)
| ValueNs::StructId(_)
| ValueNs::EnumVariantId(_)
| ValueNs::ConstId(_) => {}
}
}
ResolveValueResult::Partial(resolution, _, _) => {
self.handle_type_ns_resolution(resolution);
}
};
Some(res)
}
fn select_associated_type(&mut self, res: Option<TypeNs>, infer_args: bool) -> Ty {
let Some(res) = res else {
return TyKind::Error.intern(Interner);
@ -556,62 +445,6 @@ impl<'a, 'b> PathLoweringContext<'a, 'b> {
self.ctx.db.ty(typeable).instantiate(interner, args).to_chalk(interner)
}
/// Collect generic arguments from a path into a `Substs`. See also
/// `create_substs_for_ast_path` and `def_to_ty` in rustc.
pub(crate) fn substs_from_path(
&mut self,
// Note that we don't call `db.value_type(resolved)` here,
// `ValueTyDefId` is just a convenient way to pass generics and
// special-case enum variants
resolved: ValueTyDefId,
infer_args: bool,
lowering_assoc_type_generics: bool,
) -> Substitution {
let prev_current_segment_idx = self.current_segment_idx;
let prev_current_segment = self.current_or_prev_segment;
let generic_def = match resolved {
ValueTyDefId::FunctionId(it) => it.into(),
ValueTyDefId::StructId(it) => it.into(),
ValueTyDefId::UnionId(it) => it.into(),
ValueTyDefId::ConstId(it) => it.into(),
ValueTyDefId::StaticId(_) => return Substitution::empty(Interner),
ValueTyDefId::EnumVariantId(var) => {
// the generic args for an enum variant may be either specified
// on the segment referring to the enum, or on the segment
// referring to the variant. So `Option::<T>::None` and
// `Option::None::<T>` are both allowed (though the former is
// FIXME: This isn't strictly correct, enum variants may be used not through the enum
// (via `use Enum::Variant`). The resolver returns whether they were, but we don't have its result
// available here. The worst that can happen is that we will show some confusing diagnostics to the user,
// if generics exist on the module and they don't match with the variant.
// preferred). See also `def_ids_for_path_segments` in rustc.
//
// `wrapping_sub(1)` will return a number which `get` will return None for if current_segment_idx<2.
// This simplifies the code a bit.
let penultimate_idx = self.current_segment_idx.wrapping_sub(1);
let penultimate = self.segments.get(penultimate_idx);
if let Some(penultimate) = penultimate
&& self.current_or_prev_segment.args_and_bindings.is_none()
&& penultimate.args_and_bindings.is_some()
{
self.current_segment_idx = penultimate_idx;
self.current_or_prev_segment = penultimate;
}
var.lookup(self.ctx.db).parent.into()
}
};
let result = self.substs_from_path_segment(
generic_def,
infer_args,
None,
lowering_assoc_type_generics,
);
self.current_segment_idx = prev_current_segment_idx;
self.current_or_prev_segment = prev_current_segment;
result
}
pub(crate) fn substs_from_path_segment(
&mut self,
def: GenericDefId,
@ -816,14 +649,6 @@ impl<'a, 'b> PathLoweringContext<'a, 'b> {
});
}
fn report_elision_failure(&mut self, def: GenericDefId, expected_count: u32) {
self.ctx.on_diagnostic(PathLoweringDiagnostic::ElisionFailure {
generics_source: self.generics_source,
def,
expected_count,
});
}
fn report_missing_lifetime(&mut self, def: GenericDefId, expected_count: u32) {
self.ctx.on_diagnostic(PathLoweringDiagnostic::MissingLifetime {
generics_source: self.generics_source,
@ -976,8 +801,6 @@ pub(crate) trait GenericArgsLowerer {
hard_error: bool,
);
fn report_elision_failure(&mut self, def: GenericDefId, expected_count: u32);
fn report_missing_lifetime(&mut self, def: GenericDefId, expected_count: u32);
fn report_len_mismatch(
@ -1050,13 +873,6 @@ fn check_generic_args_len(
ctx.report_missing_lifetime(def, lifetime_args_len as u32);
had_error = true
}
LifetimeElisionKind::ElisionFailure => {
ctx.report_elision_failure(def, lifetime_args_len as u32);
had_error = true;
}
LifetimeElisionKind::StaticIfNoLifetimeInScope { only_lint: _ } => {
// FIXME: Check there are other lifetimes in scope, and error/lint.
}
LifetimeElisionKind::Elided(_) => {
ctx.report_elided_lifetimes_in_path(def, lifetime_args_len as u32, false);
}
@ -1265,15 +1081,11 @@ pub(crate) fn substs_from_args_and_bindings(
// If there are fewer arguments than parameters, it means we're inferring the remaining arguments.
let param = if let GenericParamId::LifetimeParamId(_) = param_id {
match &lifetime_elision {
LifetimeElisionKind::ElisionFailure
| LifetimeElisionKind::AnonymousCreateParameter { report_in_path: true }
LifetimeElisionKind::AnonymousCreateParameter { report_in_path: true }
| LifetimeElisionKind::AnonymousReportError => {
assert!(had_count_error);
ctx.inferred_kind(def, param_id, param, infer_args, &substs)
}
LifetimeElisionKind::StaticIfNoLifetimeInScope { only_lint: _ } => {
static_lifetime().cast(Interner)
}
LifetimeElisionKind::Elided(lifetime) => lifetime.clone().cast(Interner),
LifetimeElisionKind::AnonymousCreateParameter { report_in_path: false }
| LifetimeElisionKind::Infer => {

View file

@ -17,6 +17,7 @@ use std::{
use base_db::Crate;
use either::Either;
use hir_def::hir::generics::GenericParamDataRef;
use hir_def::item_tree::FieldsShape;
use hir_def::{
AdtId, AssocItemId, CallableDefId, ConstParamId, DefWithBodyId, EnumVariantId, FunctionId,
@ -35,11 +36,11 @@ use hir_def::{
TraitRef as HirTraitRef, TypeBound, TypeRef, TypeRefId,
},
};
use hir_def::{ConstId, StaticId};
use hir_def::{ConstId, LifetimeParamId, StaticId, TypeParamId};
use hir_expand::name::Name;
use intern::sym;
use intern::{Symbol, sym};
use la_arena::{Arena, ArenaMap, Idx};
use path::{PathDiagnosticCallback, PathLoweringContext, builtin};
use path::{PathDiagnosticCallback, PathLoweringContext};
use rustc_ast_ir::Mutability;
use rustc_hash::FxHashSet;
use rustc_pattern_analysis::Captures;
@ -50,16 +51,18 @@ use rustc_type_ir::{
TypeVisitableExt,
inherent::{GenericArg as _, GenericArgs as _, IntoKind as _, Region as _, SliceLike, Ty as _},
};
use rustc_type_ir::{TypeFoldable, TypeFolder, Upcast};
use salsa::plumbing::AsId;
use smallvec::{SmallVec, smallvec};
use stdx::never;
use triomphe::Arc;
use crate::ValueTyDefId;
use crate::next_solver::ParamConst;
use crate::{
FnAbi, ImplTraitId, Interner, ParamKind, TraitEnvironment, TyDefId, TyLoweringDiagnostic,
TyLoweringDiagnosticKind,
consteval_nextsolver::{intern_const_ref, path_to_const, unknown_const_as_generic},
consteval::{intern_const_ref, path_to_const, unknown_const_as_generic},
db::HirDatabase,
generics::{Generics, generics, trait_self_param_idx},
lower::{Diagnostics, PathDiagnosticCallbackData, create_diagnostics},
@ -79,11 +82,11 @@ pub struct ImplTraits<'db> {
}
#[derive(PartialEq, Eq, Debug, Hash)]
pub(crate) struct ImplTrait<'db> {
pub struct ImplTrait<'db> {
pub(crate) predicates: Vec<Clause<'db>>,
}
pub(crate) type ImplTraitIdx<'db> = Idx<ImplTrait<'db>>;
pub type ImplTraitIdx<'db> = Idx<ImplTrait<'db>>;
#[derive(Debug, Default)]
struct ImplTraitLoweringState<'db> {
@ -102,7 +105,7 @@ impl<'db> ImplTraitLoweringState<'db> {
}
#[derive(Debug, Clone)]
pub(crate) enum LifetimeElisionKind<'db> {
pub enum LifetimeElisionKind<'db> {
/// Create a new anonymous lifetime parameter and reference it.
///
/// If `report_in_path`, report an error when encountering lifetime elision in a path:
@ -171,7 +174,7 @@ impl<'db> LifetimeElisionKind<'db> {
}
#[derive(Debug)]
pub(crate) struct TyLoweringContext<'db, 'a> {
pub struct TyLoweringContext<'db, 'a> {
pub db: &'db dyn HirDatabase,
interner: DbInterner<'db>,
resolver: &'a Resolver<'db>,
@ -184,10 +187,12 @@ pub(crate) struct TyLoweringContext<'db, 'a> {
pub(crate) unsized_types: FxHashSet<Ty<'db>>,
pub(crate) diagnostics: Vec<TyLoweringDiagnostic>,
lifetime_elision: LifetimeElisionKind<'db>,
/// We disallow referencing generic parameters that have an index greater than or equal to this number.
disallow_params_after: u32,
}
impl<'db, 'a> TyLoweringContext<'db, 'a> {
pub(crate) fn new(
pub fn new(
db: &'db dyn HirDatabase,
resolver: &'a Resolver<'db>,
store: &'a ExpressionStore,
@ -208,6 +213,7 @@ impl<'db, 'a> TyLoweringContext<'db, 'a> {
unsized_types: FxHashSet::default(),
diagnostics: Vec::new(),
lifetime_elision,
disallow_params_after: u32::MAX,
}
}
@ -243,6 +249,10 @@ impl<'db, 'a> TyLoweringContext<'db, 'a> {
self
}
pub(crate) fn disallow_params_after(&mut self, after: u32) {
self.disallow_params_after = after;
}
pub(crate) fn push_diagnostic(&mut self, type_ref: TypeRefId, kind: TyLoweringDiagnosticKind) {
self.diagnostics.push(TyLoweringDiagnostic { source: type_ref, kind });
}
@ -261,11 +271,11 @@ pub(crate) enum ImplTraitLoweringMode {
}
impl<'db, 'a> TyLoweringContext<'db, 'a> {
pub(crate) fn lower_ty(&mut self, type_ref: TypeRefId) -> Ty<'db> {
pub fn lower_ty(&mut self, type_ref: TypeRefId) -> Ty<'db> {
self.lower_ty_ext(type_ref).0
}
pub(crate) fn lower_const(&mut self, const_ref: &ConstRef, const_type: Ty<'db>) -> Const<'db> {
pub(crate) fn lower_const(&mut self, const_ref: ConstRef, const_type: Ty<'db>) -> Const<'db> {
let const_ref = &self.store[const_ref.expr];
match const_ref {
hir_def::hir::Expr::Path(path) => {
@ -323,8 +333,35 @@ impl<'db, 'a> TyLoweringContext<'db, 'a> {
self.generics.get_or_init(|| generics(self.db, self.def))
}
/// Lowers a reference to the type parameter `id`/`index`.
///
/// References to parameters at or past `disallow_params_after` (used when
/// lowering generic parameter defaults, which may only mention *earlier*
/// parameters) are replaced by an error type.
fn type_param(&mut self, id: TypeParamId, index: u32, name: Symbol) -> Ty<'db> {
    if index < self.disallow_params_after {
        return Ty::new_param(self.interner, id, index, name);
    }
    // Forward reference to a later parameter.
    // FIXME: Report an error.
    Ty::new_error(self.interner, ErrorGuaranteed)
}
/// Lowers a reference to the const parameter `id`/`index`.
///
/// References to parameters at or past `disallow_params_after` (used when
/// lowering generic parameter defaults, which may only mention *earlier*
/// parameters) are replaced by an error const.
fn const_param(&mut self, id: ConstParamId, index: u32) -> Const<'db> {
    if index < self.disallow_params_after {
        return Const::new_param(self.interner, ParamConst { id, index });
    }
    // Forward reference to a later parameter.
    // FIXME: Report an error.
    Const::error(self.interner)
}
/// Lowers a reference to the early-bound lifetime parameter `id`/`index`.
///
/// References to parameters at or past `disallow_params_after` (used when
/// lowering generic parameter defaults, which may only mention *earlier*
/// parameters) are replaced by an error region.
fn region_param(&mut self, id: LifetimeParamId, index: u32) -> Region<'db> {
    if index < self.disallow_params_after {
        return Region::new_early_param(self.interner, EarlyParamRegion { id, index });
    }
    // Forward reference to a later parameter.
    // FIXME: Report an error.
    Region::error(self.interner)
}
#[tracing::instrument(skip(self), ret)]
pub(crate) fn lower_ty_ext(&mut self, type_ref_id: TypeRefId) -> (Ty<'db>, Option<TypeNs>) {
pub fn lower_ty_ext(&mut self, type_ref_id: TypeRefId) -> (Ty<'db>, Option<TypeNs>) {
let interner = self.interner;
let mut res = None;
let type_ref = &self.store[type_ref_id];
@ -351,8 +388,7 @@ impl<'db, 'a> TyLoweringContext<'db, 'a> {
TypeOrConstParamData::TypeParamData(ty) => ty,
_ => unreachable!(),
};
Ty::new_param(
self.interner,
self.type_param(
type_param_id,
idx as u32,
type_data
@ -367,7 +403,7 @@ impl<'db, 'a> TyLoweringContext<'db, 'a> {
}
TypeRef::Array(array) => {
let inner_ty = self.lower_ty(array.ty);
let const_len = self.lower_const(&array.len, Ty::new_usize(interner));
let const_len = self.lower_const(array.len, Ty::new_usize(interner));
Ty::new_array_with_const_len(interner, inner_ty, const_len)
}
&TypeRef::Slice(inner) => {
@ -491,7 +527,7 @@ impl<'db, 'a> TyLoweringContext<'db, 'a> {
}
#[inline]
fn on_path_diagnostic_callback(type_ref: TypeRefId) -> PathDiagnosticCallback<'static, 'db> {
fn on_path_diagnostic_callback<'b>(type_ref: TypeRefId) -> PathDiagnosticCallback<'b, 'db> {
PathDiagnosticCallback {
data: Either::Left(PathDiagnosticCallbackData(type_ref)),
callback: |data, this, diag| {
@ -515,7 +551,7 @@ impl<'db, 'a> TyLoweringContext<'db, 'a> {
if let Some(type_ref) = path.type_anchor() {
let (ty, res) = self.lower_ty_ext(type_ref);
let mut ctx = self.at_path(path_id);
return ctx.lower_ty_relative_path(ty, res);
return ctx.lower_ty_relative_path(ty, res, false);
}
let mut ctx = self.at_path(path_id);
@ -545,7 +581,7 @@ impl<'db, 'a> TyLoweringContext<'db, 'a> {
TypeNs::TraitId(tr) => tr,
_ => return None,
};
Some((ctx.lower_trait_ref_from_resolved_path(resolved, explicit_self_ty), ctx))
Some((ctx.lower_trait_ref_from_resolved_path(resolved, explicit_self_ty, false), ctx))
}
fn lower_trait_ref(
@ -869,7 +905,7 @@ impl<'db, 'a> TyLoweringContext<'db, 'a> {
ImplTrait { predicates }
}
pub(crate) fn lower_lifetime(&self, lifetime: LifetimeRefId) -> Region<'db> {
pub(crate) fn lower_lifetime(&mut self, lifetime: LifetimeRefId) -> Region<'db> {
match self.resolver.resolve_lifetime(&self.store[lifetime]) {
Some(resolution) => match resolution {
LifetimeNs::Static => Region::new_static(self.interner),
@ -878,10 +914,7 @@ impl<'db, 'a> TyLoweringContext<'db, 'a> {
None => return Region::error(self.interner),
Some(idx) => idx,
};
Region::new_early_param(
self.interner,
EarlyParamRegion { index: idx as u32, id },
)
self.region_param(id, idx as u32)
}
},
None => Region::error(self.interner),
@ -980,10 +1013,10 @@ pub(crate) fn type_alias_impl_traits<'db>(
pub(crate) fn ty_query<'db>(db: &'db dyn HirDatabase, def: TyDefId) -> EarlyBinder<'db, Ty<'db>> {
let interner = DbInterner::new_with(db, None, None);
match def {
TyDefId::BuiltinType(it) => EarlyBinder::bind(builtin(interner, it)),
TyDefId::BuiltinType(it) => EarlyBinder::bind(Ty::from_builtin_type(interner, it)),
TyDefId::AdtId(it) => EarlyBinder::bind(Ty::new_adt(
interner,
AdtDef::new(it, interner),
it,
GenericArgs::identity_for_item(interner, it.into()),
)),
TyDefId::TypeAliasId(it) => db.type_for_type_alias_with_diagnostics(it).0,
@ -1368,6 +1401,7 @@ pub(crate) fn generic_predicates_for_param_cycle_result(
pub struct GenericPredicates<'db>(Option<Arc<[Clause<'db>]>>);
impl<'db> GenericPredicates<'db> {
#[inline]
pub fn instantiate(
&self,
interner: DbInterner<'db>,
@ -1377,6 +1411,11 @@ impl<'db> GenericPredicates<'db> {
.as_ref()
.map(|it| EarlyBinder::bind(it.iter().copied()).iter_instantiated(interner, args))
}
#[inline]
pub fn instantiate_identity(&self) -> Option<impl Iterator<Item = Clause<'db>>> {
self.0.as_ref().map(|it| it.iter().copied())
}
}
impl<'db> ops::Deref for GenericPredicates<'db> {
@ -1425,8 +1464,7 @@ pub(crate) fn trait_environment_query<'db>(
for pred in maybe_parent_generics.where_predicates() {
for pred in ctx.lower_where_predicate(pred, false, &generics, PredicateFilter::All) {
if let rustc_type_ir::ClauseKind::Trait(tr) = pred.kind().skip_binder() {
traits_in_scope
.push((convert_ty_for_result(interner, tr.self_ty()), tr.def_id().0));
traits_in_scope.push((tr.self_ty(), tr.def_id().0));
}
clauses.push(pred);
}
@ -1748,6 +1786,113 @@ pub(crate) fn lower_generic_arg<'a, 'db, T>(
}
}
/// The lowered defaults of a generic def's parameters (including its parents' parameters).
///
/// The outer `Option` is `None` when *no* parameter has a default, which lets the
/// common no-defaults case avoid allocating the slice entirely. Inside the slice,
/// each slot is `None` for a parameter without a default (lifetimes never have one).
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub struct GenericDefaults<'db>(
    Option<Arc<[Option<EarlyBinder<'db, crate::next_solver::GenericArg<'db>>>]>>,
);
impl<'db> GenericDefaults<'db> {
    /// Returns the lowered default for the parameter at `idx`, or `None` if this
    /// def has no defaults at all or this particular parameter has none.
    ///
    /// Panics if defaults are present and `idx` is out of bounds.
    #[inline]
    pub fn get(&self, idx: usize) -> Option<EarlyBinder<'db, crate::next_solver::GenericArg<'db>>> {
        match self.0.as_ref() {
            Some(defaults) => defaults[idx],
            None => None,
        }
    }
}
/// Convenience wrapper around [`generic_defaults_with_diagnostics_query`] that
/// discards the diagnostics.
pub(crate) fn generic_defaults_query(
    db: &dyn HirDatabase,
    def: GenericDefId,
) -> GenericDefaults<'_> {
    let (defaults, _diagnostics) = db.generic_defaults_ns_with_diagnostics(def);
    defaults
}
/// Resolve the default type params from generics.
///
/// Diagnostics are only returned for this `GenericDefId` (returned defaults include parents).
pub(crate) fn generic_defaults_with_diagnostics_query(
    db: &dyn HirDatabase,
    def: GenericDefId,
) -> (GenericDefaults<'_>, Diagnostics) {
    let generic_params = generics(db, def);
    if generic_params.is_empty() {
        // No parameters at all — nothing to lower, nothing to diagnose.
        return (GenericDefaults(None), None);
    }
    let resolver = def.resolver(db);
    let mut ctx = TyLoweringContext::new(
        db,
        &resolver,
        generic_params.store(),
        def,
        LifetimeElisionKind::AnonymousReportError,
    )
    .with_impl_trait_mode(ImplTraitLoweringMode::Disallowed);

    let mut idx = 0;
    let mut has_any_default = false;
    // Parent parameters come first so indices line up with the `Generics` ordering.
    let mut defaults = generic_params
        .iter_parents_with_store()
        .map(|((id, p), store)| {
            ctx.store = store;
            let (result, has_default) = handle_generic_param(&mut ctx, idx, id, p, &generic_params);
            has_any_default |= has_default;
            idx += 1;
            result
        })
        .collect::<Vec<_>>();
    ctx.diagnostics.clear(); // Don't include diagnostics from the parent.
    defaults.extend(generic_params.iter_self().map(|(id, p)| {
        let (result, has_default) = handle_generic_param(&mut ctx, idx, id, p, &generic_params);
        has_any_default |= has_default;
        idx += 1;
        result
    }));
    let diagnostics = create_diagnostics(mem::take(&mut ctx.diagnostics));
    let defaults = if has_any_default {
        GenericDefaults(Some(Arc::from_iter(defaults)))
    } else {
        // Compress the all-`None` case into a single `None` to avoid the allocation.
        GenericDefaults(None)
    };
    return (defaults, diagnostics);

    /// Lowers the default of a single generic parameter, returning the lowered
    /// default (if any) and whether the parameter declares a default at all.
    fn handle_generic_param<'db>(
        ctx: &mut TyLoweringContext<'db, '_>,
        idx: usize,
        id: GenericParamId,
        p: GenericParamDataRef<'_>,
        generic_params: &Generics,
    ) -> (Option<EarlyBinder<'db, crate::next_solver::GenericArg<'db>>>, bool) {
        // Each default can only refer to previous parameters.
        // Type variable default referring to parameter coming
        // after it is forbidden.
        ctx.disallow_params_after(idx as u32);
        match p {
            GenericParamDataRef::TypeParamData(p) => {
                let ty = p.default.map(|ty| ctx.lower_ty(ty));
                (ty.map(|ty| EarlyBinder::bind(ty.into())), p.default.is_some())
            }
            GenericParamDataRef::ConstParamData(p) => {
                // NOTE(review): `id` serves only as a variant assertion here — the
                // binding itself is not read afterwards.
                let GenericParamId::ConstParamId(id) = id else {
                    unreachable!("Unexpected lifetime or type argument")
                };
                // `val` is never mutated after construction, so no `mut` is needed
                // (the crate builds with `-D warnings`, which denies `unused_mut`).
                let val = p.default.map(|c| {
                    // The const default is typed by the parameter's declared type.
                    let param_ty = ctx.lower_ty(p.ty);
                    let c = ctx.lower_const(c, param_ty);
                    c.into()
                });
                (val.map(EarlyBinder::bind), p.default.is_some())
            }
            // Lifetimes can never have defaults.
            GenericParamDataRef::LifetimeParamData(_) => (None, false),
        }
    }
}
/// Cycle recovery for [`generic_defaults_with_diagnostics_query`]: when lowering
/// the defaults ends up depending on itself, act as if there were no defaults
/// and report no diagnostics.
pub(crate) fn generic_defaults_with_diagnostics_cycle_result(
    _db: &dyn HirDatabase,
    _def: GenericDefId,
) -> (GenericDefaults<'_>, Diagnostics) {
    let no_defaults = GenericDefaults(None);
    (no_defaults, None)
}
/// Build the signature of a callable item (function, struct or enum variant).
pub(crate) fn callable_item_signature_query<'db>(
db: &'db dyn HirDatabase,
@ -1804,7 +1949,7 @@ fn fn_sig_for_fn<'db>(
fn type_for_adt<'db>(db: &'db dyn HirDatabase, adt: AdtId) -> EarlyBinder<'db, Ty<'db>> {
let interner = DbInterner::new_with(db, None, None);
let args = GenericArgs::identity_for_item(interner, adt.into());
let ty = Ty::new_adt(interner, AdtDef::new(adt, interner), args);
let ty = Ty::new_adt(interner, adt, args);
EarlyBinder::bind(ty)
}

View file

@ -30,7 +30,7 @@ use stdx::never;
use crate::{
GenericArgsProhibitedReason, IncorrectGenericsLenKind, PathGenericsSource,
PathLoweringDiagnostic, TyDefId, ValueTyDefId,
consteval_nextsolver::{unknown_const, unknown_const_as_generic},
consteval::{unknown_const, unknown_const_as_generic},
db::HirDatabase,
generics::{Generics, generics},
lower::PathDiagnosticCallbackData,
@ -51,15 +51,17 @@ use super::{
const_param_ty_query, ty_query,
};
type CallbackData<'a> =
Either<PathDiagnosticCallbackData, crate::infer::diagnostics::PathDiagnosticCallbackData<'a>>;
type CallbackData<'a, 'db> = Either<
PathDiagnosticCallbackData,
crate::infer::diagnostics::PathDiagnosticCallbackData<'a, 'db>,
>;
// We cannot use `&mut dyn FnMut()` because of lifetime issues, and we don't want to use `Box<dyn FnMut()>`
// because of the allocation, so we create a lifetime-less callback, tailored for our needs.
pub(crate) struct PathDiagnosticCallback<'a, 'db> {
pub(crate) data: CallbackData<'a>,
pub(crate) data: CallbackData<'a, 'db>,
pub(crate) callback:
fn(&CallbackData<'_>, &mut TyLoweringContext<'db, '_>, PathLoweringDiagnostic),
fn(&CallbackData<'_, 'db>, &mut TyLoweringContext<'db, '_>, PathLoweringDiagnostic),
}
pub(crate) struct PathLoweringContext<'a, 'b, 'db> {
@ -155,13 +157,14 @@ impl<'a, 'b, 'db> PathLoweringContext<'a, 'b, 'db> {
ty: Ty<'db>,
// We need the original resolution to lower `Self::AssocTy` correctly
res: Option<TypeNs>,
infer_args: bool,
) -> (Ty<'db>, Option<TypeNs>) {
let remaining_segments = self.segments.len() - self.current_segment_idx;
match remaining_segments {
0 => (ty, res),
1 => {
// resolve unselected assoc types
(self.select_associated_type(res), None)
(self.select_associated_type(res, infer_args), None)
}
_ => {
// FIXME report error (ambiguous associated type)
@ -204,6 +207,7 @@ impl<'a, 'b, 'db> PathLoweringContext<'a, 'b, 'db> {
let trait_ref = self.lower_trait_ref_from_resolved_path(
trait_,
Ty::new_error(self.ctx.interner, ErrorGuaranteed),
false,
);
tracing::debug!(?trait_ref);
self.skip_resolved_segment();
@ -276,8 +280,7 @@ impl<'a, 'b, 'db> PathLoweringContext<'a, 'b, 'db> {
GenericParamDataRef::TypeParamData(p) => p,
_ => unreachable!(),
};
Ty::new_param(
self.ctx.interner,
self.ctx.type_param(
param_id,
idx as u32,
p.name
@ -293,7 +296,7 @@ impl<'a, 'b, 'db> PathLoweringContext<'a, 'b, 'db> {
self.ctx.interner,
adt.into(),
);
Ty::new_adt(self.ctx.interner, AdtDef::new(adt, self.ctx.interner), args)
Ty::new_adt(self.ctx.interner, adt, args)
}
TypeNs::AdtId(it) => self.lower_path_inner(it.into(), infer_args),
@ -308,7 +311,7 @@ impl<'a, 'b, 'db> PathLoweringContext<'a, 'b, 'db> {
tracing::debug!(?ty);
self.skip_resolved_segment();
self.lower_ty_relative_path(ty, Some(resolution))
self.lower_ty_relative_path(ty, Some(resolution), infer_args)
}
fn handle_type_ns_resolution(&mut self, resolution: &TypeNs) {
@ -480,14 +483,19 @@ impl<'a, 'b, 'db> PathLoweringContext<'a, 'b, 'db> {
// and statics can be generic, or just because it was easier for rustc implementors.
// That means we'll show the wrong error code. Because of us it's easier to do it
// this way :)
ValueNs::GenericParam(_) | ValueNs::ConstId(_) => {
ValueNs::GenericParam(_) => {
prohibit_generics_on_resolved(GenericArgsProhibitedReason::Const)
}
ValueNs::StaticId(_) => {
prohibit_generics_on_resolved(GenericArgsProhibitedReason::Static)
}
ValueNs::FunctionId(_) | ValueNs::StructId(_) | ValueNs::EnumVariantId(_) => {}
ValueNs::LocalBinding(_) => {}
ValueNs::LocalBinding(_) => {
prohibit_generics_on_resolved(GenericArgsProhibitedReason::LocalVariable)
}
ValueNs::FunctionId(_)
| ValueNs::StructId(_)
| ValueNs::EnumVariantId(_)
| ValueNs::ConstId(_) => {}
}
}
ResolveValueResult::Partial(resolution, _, _) => {
@ -498,7 +506,7 @@ impl<'a, 'b, 'db> PathLoweringContext<'a, 'b, 'db> {
}
#[tracing::instrument(skip(self), ret)]
fn select_associated_type(&mut self, res: Option<TypeNs>) -> Ty<'db> {
fn select_associated_type(&mut self, res: Option<TypeNs>, infer_args: bool) -> Ty<'db> {
let interner = self.ctx.interner;
let Some(res) = res else {
return Ty::new_error(self.ctx.interner, ErrorGuaranteed);
@ -516,7 +524,8 @@ impl<'a, 'b, 'db> PathLoweringContext<'a, 'b, 'db> {
// generic params. It's inefficient to splice the `Substitution`s, so we may want
// that method to optionally take parent `Substitution` as we already know them at
// this point (`t.substitution`).
let substs = self.substs_from_path_segment(associated_ty.into(), false, None, true);
let substs =
self.substs_from_path_segment(associated_ty.into(), infer_args, None, true);
let substs = crate::next_solver::GenericArgs::new_from_iter(
interner,
@ -541,7 +550,9 @@ impl<'a, 'b, 'db> PathLoweringContext<'a, 'b, 'db> {
fn lower_path_inner(&mut self, typeable: TyDefId, infer_args: bool) -> Ty<'db> {
let generic_def = match typeable {
TyDefId::BuiltinType(builtinty) => return builtin(self.ctx.interner, builtinty),
TyDefId::BuiltinType(builtinty) => {
return Ty::from_builtin_type(self.ctx.interner, builtinty);
}
TyDefId::AdtId(it) => it.into(),
TyDefId::TypeAliasId(it) => it.into(),
};
@ -715,12 +726,12 @@ impl<'a, 'b, 'db> PathLoweringContext<'a, 'b, 'db> {
param: GenericParamDataRef<'_>,
arg: &GenericArg,
) -> crate::next_solver::GenericArg<'db> {
match (param, arg) {
match (param, *arg) {
(GenericParamDataRef::LifetimeParamData(_), GenericArg::Lifetime(lifetime)) => {
self.ctx.ctx.lower_lifetime(*lifetime).into()
self.ctx.ctx.lower_lifetime(lifetime).into()
}
(GenericParamDataRef::TypeParamData(_), GenericArg::Type(type_ref)) => {
self.ctx.ctx.lower_ty(*type_ref).into()
self.ctx.ctx.lower_ty(type_ref).into()
}
(GenericParamDataRef::ConstParamData(_), GenericArg::Const(konst)) => {
let GenericParamId::ConstParamId(const_id) = param_id else {
@ -859,8 +870,9 @@ impl<'a, 'b, 'db> PathLoweringContext<'a, 'b, 'db> {
&mut self,
resolved: TraitId,
explicit_self_ty: Ty<'db>,
infer_args: bool,
) -> TraitRef<'db> {
let args = self.trait_ref_substs_from_path(resolved, explicit_self_ty);
let args = self.trait_ref_substs_from_path(resolved, explicit_self_ty, infer_args);
TraitRef::new_from_args(self.ctx.interner, resolved.into(), args)
}
@ -868,8 +880,9 @@ impl<'a, 'b, 'db> PathLoweringContext<'a, 'b, 'db> {
&mut self,
resolved: TraitId,
explicit_self_ty: Ty<'db>,
infer_args: bool,
) -> crate::next_solver::GenericArgs<'db> {
self.substs_from_path_segment(resolved.into(), false, Some(explicit_self_ty), false)
self.substs_from_path_segment(resolved.into(), infer_args, Some(explicit_self_ty), false)
}
pub(super) fn assoc_type_bindings_from_type_bound<'c>(
@ -1039,8 +1052,12 @@ fn check_generic_args_len<'db>(
}
let lifetime_args_len = def_generics.len_lifetimes_self();
if provided_lifetimes_count == 0 && lifetime_args_len > 0 && !lowering_assoc_type_generics {
// In generic associated types, we never allow inferring the lifetimes.
if provided_lifetimes_count == 0
&& lifetime_args_len > 0
&& (!lowering_assoc_type_generics || infer_args)
{
// In generic associated types, we never allow inferring the lifetimes, but only in type context, that is
// when `infer_args == false`. In expression/pattern context we always allow inferring them, even for GATs.
match lifetime_elision {
&LifetimeElisionKind::AnonymousCreateParameter { report_in_path } => {
ctx.report_elided_lifetimes_in_path(def, lifetime_args_len as u32, report_in_path);
@ -1335,42 +1352,3 @@ fn unknown_subst<'db>(
}),
)
}
pub(crate) fn builtin<'db>(interner: DbInterner<'db>, builtin: BuiltinType) -> Ty<'db> {
match builtin {
BuiltinType::Char => Ty::new(interner, rustc_type_ir::TyKind::Char),
BuiltinType::Bool => Ty::new_bool(interner),
BuiltinType::Str => Ty::new(interner, rustc_type_ir::TyKind::Str),
BuiltinType::Int(t) => {
let int_ty = match primitive::int_ty_from_builtin(t) {
chalk_ir::IntTy::Isize => rustc_type_ir::IntTy::Isize,
chalk_ir::IntTy::I8 => rustc_type_ir::IntTy::I8,
chalk_ir::IntTy::I16 => rustc_type_ir::IntTy::I16,
chalk_ir::IntTy::I32 => rustc_type_ir::IntTy::I32,
chalk_ir::IntTy::I64 => rustc_type_ir::IntTy::I64,
chalk_ir::IntTy::I128 => rustc_type_ir::IntTy::I128,
};
Ty::new_int(interner, int_ty)
}
BuiltinType::Uint(t) => {
let uint_ty = match primitive::uint_ty_from_builtin(t) {
chalk_ir::UintTy::Usize => rustc_type_ir::UintTy::Usize,
chalk_ir::UintTy::U8 => rustc_type_ir::UintTy::U8,
chalk_ir::UintTy::U16 => rustc_type_ir::UintTy::U16,
chalk_ir::UintTy::U32 => rustc_type_ir::UintTy::U32,
chalk_ir::UintTy::U64 => rustc_type_ir::UintTy::U64,
chalk_ir::UintTy::U128 => rustc_type_ir::UintTy::U128,
};
Ty::new_uint(interner, uint_ty)
}
BuiltinType::Float(t) => {
let float_ty = match primitive::float_ty_from_builtin(t) {
chalk_ir::FloatTy::F16 => rustc_type_ir::FloatTy::F16,
chalk_ir::FloatTy::F32 => rustc_type_ir::FloatTy::F32,
chalk_ir::FloatTy::F64 => rustc_type_ir::FloatTy::F64,
chalk_ir::FloatTy::F128 => rustc_type_ir::FloatTy::F128,
};
Ty::new_float(interner, float_ty)
}
}
}

View file

@ -2,18 +2,7 @@
use std::{collections::hash_map::Entry, fmt::Display, iter};
use crate::{
CallableDefId, ClosureId, Const, ConstScalar, InferenceResult, Interner, MemoryMap,
Substitution, TraitEnvironment, Ty, TyExt, TyKind,
consteval::usize_const,
db::HirDatabase,
display::{DisplayTarget, HirDisplay},
infer::{PointerCast, normalize},
lang_items::is_box,
mapping::ToChalk,
};
use base_db::Crate;
use chalk_ir::Mutability;
use either::Either;
use hir_def::{
DefWithBodyId, FieldId, StaticId, TupleFieldId, UnionId, VariantId,
@ -21,6 +10,25 @@ use hir_def::{
hir::{BindingAnnotation, BindingId, Expr, ExprId, Ordering, PatId},
};
use la_arena::{Arena, ArenaMap, Idx, RawIdx};
use rustc_ast_ir::Mutability;
use rustc_hash::FxHashMap;
use rustc_type_ir::inherent::{AdtDef, GenericArgs as _, IntoKind, SliceLike, Ty as _};
use smallvec::{SmallVec, smallvec};
use stdx::{impl_from, never};
use crate::{
CallableDefId, InferenceResult, MemoryMap,
consteval::usize_const,
db::{HirDatabase, InternedClosureId},
display::{DisplayTarget, HirDisplay},
infer::PointerCast,
lang_items::is_box,
next_solver::{
Const, DbInterner, ErrorGuaranteed, GenericArgs, ParamEnv, Ty, TyKind,
infer::{InferCtxt, traits::ObligationCause},
obligation_ctxt::ObligationCtxt,
},
};
mod borrowck;
mod eval;
@ -36,25 +44,22 @@ pub use lower::{MirLowerError, lower_to_mir, mir_body_for_closure_query, mir_bod
pub use monomorphization::{
monomorphized_mir_body_for_closure_query, monomorphized_mir_body_query,
};
use rustc_hash::FxHashMap;
use smallvec::{SmallVec, smallvec};
use stdx::{impl_from, never};
pub(crate) use lower::mir_body_cycle_result;
pub(crate) use monomorphization::monomorphized_mir_body_cycle_result;
use super::consteval::{intern_const_scalar, try_const_usize};
use super::consteval::try_const_usize;
pub type BasicBlockId = Idx<BasicBlock>;
pub type LocalId = Idx<Local>;
pub type BasicBlockId<'db> = Idx<BasicBlock<'db>>;
pub type LocalId<'db> = Idx<Local<'db>>;
fn return_slot() -> LocalId {
fn return_slot<'db>() -> LocalId<'db> {
LocalId::from_raw(RawIdx::from(0))
}
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct Local {
pub ty: Ty,
pub struct Local<'db> {
pub ty: Ty<'db>,
}
/// An operand in MIR represents a "value" in Rust, the definition of which is undecided and part of
@ -76,19 +81,19 @@ pub struct Local {
/// currently implements it, but it seems like this may be something to check against in the
/// validator.
#[derive(Debug, PartialEq, Eq, Clone)]
pub struct Operand {
kind: OperandKind,
pub struct Operand<'db> {
kind: OperandKind<'db>,
// FIXME : This should actually just be of type `MirSpan`.
span: Option<MirSpan>,
}
#[derive(Debug, PartialEq, Eq, Clone)]
pub enum OperandKind {
pub enum OperandKind<'db> {
/// Creates a value by loading the given place.
///
/// Before drop elaboration, the type of the place must be `Copy`. After drop elaboration there
/// is no such requirement.
Copy(Place),
Copy(Place<'db>),
/// Creates a value by performing loading the place, just like the `Copy` operand.
///
@ -97,41 +102,41 @@ pub enum OperandKind {
/// place without first re-initializing it.
///
/// [UCG#188]: https://github.com/rust-lang/unsafe-code-guidelines/issues/188
Move(Place),
Move(Place<'db>),
/// Constants are already semantically values, and remain unchanged.
Constant(Const),
Constant { konst: Const<'db>, ty: Ty<'db> },
/// NON STANDARD: This kind of operand returns an immutable reference to that static memory. Rustc
/// handles it with the `Constant` variant somehow.
Static(StaticId),
}
impl Operand {
fn from_concrete_const(data: Box<[u8]>, memory_map: MemoryMap<'static>, ty: Ty) -> Self {
impl<'db> Operand<'db> {
fn from_concrete_const(data: Box<[u8]>, memory_map: MemoryMap<'db>, ty: Ty<'db>) -> Self {
let interner = DbInterner::conjure();
Operand {
kind: OperandKind::Constant(intern_const_scalar(
ConstScalar::Bytes(data, memory_map),
kind: OperandKind::Constant {
konst: Const::new_valtree(interner, ty, data, memory_map),
ty,
)),
},
span: None,
}
}
fn from_bytes(data: Box<[u8]>, ty: Ty) -> Self {
fn from_bytes(data: Box<[u8]>, ty: Ty<'db>) -> Self {
Operand::from_concrete_const(data, MemoryMap::default(), ty)
}
fn const_zst(ty: Ty) -> Operand {
fn const_zst(ty: Ty<'db>) -> Operand<'db> {
Self::from_bytes(Box::default(), ty)
}
fn from_fn(
db: &dyn HirDatabase,
db: &'db dyn HirDatabase,
func_id: hir_def::FunctionId,
generic_args: Substitution,
) -> Operand {
let ty =
chalk_ir::TyKind::FnDef(CallableDefId::FunctionId(func_id).to_chalk(db), generic_args)
.intern(Interner);
generic_args: GenericArgs<'db>,
) -> Operand<'db> {
let interner = DbInterner::new_with(db, None, None);
let ty = Ty::new_fn_def(interner, CallableDefId::FunctionId(func_id).into(), generic_args);
Operand::from_bytes(Box::default(), ty)
}
}
@ -150,83 +155,81 @@ pub enum ProjectionElem<V, T> {
}
impl<V, T> ProjectionElem<V, T> {
pub fn projected_ty(
pub fn projected_ty<'db>(
&self,
mut base: Ty,
db: &dyn HirDatabase,
closure_field: impl FnOnce(ClosureId, &Substitution, usize) -> Ty,
infcx: &InferCtxt<'db>,
mut base: Ty<'db>,
closure_field: impl FnOnce(InternedClosureId, GenericArgs<'db>, usize) -> Ty<'db>,
krate: Crate,
) -> Ty {
) -> Ty<'db> {
let interner = infcx.interner;
let db = interner.db;
// we only bail on mir building when there are type mismatches
// but error types may pop up resulting in us still attempting to build the mir
// so just propagate the error type
if base.is_unknown() {
return TyKind::Error.intern(Interner);
if base.is_ty_error() {
return Ty::new_error(interner, ErrorGuaranteed);
}
if matches!(base.kind(Interner), TyKind::Alias(_) | TyKind::AssociatedType(..)) {
base = normalize(
db,
// FIXME: we should get this from caller
TraitEnvironment::empty(krate),
base,
);
if matches!(base.kind(), TyKind::Alias(..)) {
let mut ocx = ObligationCtxt::new(infcx);
// FIXME: we should get this from caller
let env = ParamEnv::empty();
match ocx.structurally_normalize_ty(&ObligationCause::dummy(), env, base) {
Ok(it) => base = it,
Err(_) => return Ty::new_error(interner, ErrorGuaranteed),
}
}
match self {
ProjectionElem::Deref => match &base.kind(Interner) {
TyKind::Raw(_, inner) | TyKind::Ref(_, _, inner) => inner.clone(),
TyKind::Adt(adt, subst) if is_box(db, adt.0) => {
subst.at(Interner, 0).assert_ty_ref(Interner).clone()
}
ProjectionElem::Deref => match base.kind() {
TyKind::RawPtr(inner, _) | TyKind::Ref(_, inner, _) => inner,
TyKind::Adt(adt_def, subst) if is_box(db, adt_def.def_id().0) => subst.type_at(0),
_ => {
never!(
"Overloaded deref on type {} is not a projection",
base.display(db, DisplayTarget::from_crate(db, krate))
);
TyKind::Error.intern(Interner)
Ty::new_error(interner, ErrorGuaranteed)
}
},
ProjectionElem::Field(Either::Left(f)) => match base.kind(Interner) {
ProjectionElem::Field(Either::Left(f)) => match base.kind() {
TyKind::Adt(_, subst) => {
db.field_types(f.parent)[f.local_id].clone().substitute(Interner, subst)
db.field_types_ns(f.parent)[f.local_id].instantiate(interner, subst)
}
ty => {
never!("Only adt has field, found {:?}", ty);
TyKind::Error.intern(Interner)
Ty::new_error(interner, ErrorGuaranteed)
}
},
ProjectionElem::Field(Either::Right(f)) => match &base.kind(Interner) {
TyKind::Tuple(_, subst) => subst
.as_slice(Interner)
.get(f.index as usize)
.map(|x| x.assert_ty_ref(Interner))
.cloned()
.unwrap_or_else(|| {
ProjectionElem::Field(Either::Right(f)) => match base.kind() {
TyKind::Tuple(subst) => {
subst.as_slice().get(f.index as usize).copied().unwrap_or_else(|| {
never!("Out of bound tuple field");
TyKind::Error.intern(Interner)
}),
Ty::new_error(interner, ErrorGuaranteed)
})
}
ty => {
never!("Only tuple has tuple field: {:?}", ty);
TyKind::Error.intern(Interner)
Ty::new_error(interner, ErrorGuaranteed)
}
},
ProjectionElem::ClosureField(f) => match &base.kind(Interner) {
TyKind::Closure(id, subst) => closure_field(*id, subst, *f),
ProjectionElem::ClosureField(f) => match base.kind() {
TyKind::Closure(id, subst) => closure_field(id.0, subst, *f),
_ => {
never!("Only closure has closure field");
TyKind::Error.intern(Interner)
Ty::new_error(interner, ErrorGuaranteed)
}
},
ProjectionElem::ConstantIndex { .. } | ProjectionElem::Index(_) => {
match &base.kind(Interner) {
TyKind::Array(inner, _) | TyKind::Slice(inner) => inner.clone(),
_ => {
never!("Overloaded index is not a projection");
TyKind::Error.intern(Interner)
}
ProjectionElem::ConstantIndex { .. } | ProjectionElem::Index(_) => match base.kind() {
TyKind::Array(inner, _) | TyKind::Slice(inner) => inner,
_ => {
never!("Overloaded index is not a projection");
Ty::new_error(interner, ErrorGuaranteed)
}
}
&ProjectionElem::Subslice { from, to } => match &base.kind(Interner) {
},
&ProjectionElem::Subslice { from, to } => match base.kind() {
TyKind::Array(inner, c) => {
let next_c = usize_const(
db,
@ -236,34 +239,34 @@ impl<V, T> ProjectionElem<V, T> {
},
krate,
);
TyKind::Array(inner.clone(), next_c).intern(Interner)
Ty::new_array_with_const_len(interner, inner, next_c)
}
TyKind::Slice(_) => base.clone(),
TyKind::Slice(_) => base,
_ => {
never!("Subslice projection should only happen on slice and array");
TyKind::Error.intern(Interner)
Ty::new_error(interner, ErrorGuaranteed)
}
},
ProjectionElem::OpaqueCast(_) => {
never!("We don't emit these yet");
TyKind::Error.intern(Interner)
Ty::new_error(interner, ErrorGuaranteed)
}
}
}
}
type PlaceElem = ProjectionElem<LocalId, Ty>;
type PlaceElem<'db> = ProjectionElem<LocalId<'db>, Ty<'db>>;
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub struct ProjectionId(u32);
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct ProjectionStore {
id_to_proj: FxHashMap<ProjectionId, Box<[PlaceElem]>>,
proj_to_id: FxHashMap<Box<[PlaceElem]>, ProjectionId>,
pub struct ProjectionStore<'db> {
id_to_proj: FxHashMap<ProjectionId, Box<[PlaceElem<'db>]>>,
proj_to_id: FxHashMap<Box<[PlaceElem<'db>]>, ProjectionId>,
}
impl Default for ProjectionStore {
impl Default for ProjectionStore<'_> {
fn default() -> Self {
let mut this = Self { id_to_proj: Default::default(), proj_to_id: Default::default() };
// Ensure that [] will get the id 0 which is used in `ProjectionId::Empty`
@ -272,17 +275,17 @@ impl Default for ProjectionStore {
}
}
impl ProjectionStore {
impl<'db> ProjectionStore<'db> {
pub fn shrink_to_fit(&mut self) {
self.id_to_proj.shrink_to_fit();
self.proj_to_id.shrink_to_fit();
}
pub fn intern_if_exist(&self, projection: &[PlaceElem]) -> Option<ProjectionId> {
pub fn intern_if_exist(&self, projection: &[PlaceElem<'db>]) -> Option<ProjectionId> {
self.proj_to_id.get(projection).copied()
}
pub fn intern(&mut self, projection: Box<[PlaceElem]>) -> ProjectionId {
pub fn intern(&mut self, projection: Box<[PlaceElem<'db>]>) -> ProjectionId {
let new_id = ProjectionId(self.proj_to_id.len() as u32);
match self.proj_to_id.entry(projection) {
Entry::Occupied(id) => *id.get(),
@ -303,11 +306,15 @@ impl ProjectionId {
self == ProjectionId::EMPTY
}
pub fn lookup(self, store: &ProjectionStore) -> &[PlaceElem] {
pub fn lookup<'a, 'db>(self, store: &'a ProjectionStore<'db>) -> &'a [PlaceElem<'db>] {
store.id_to_proj.get(&self).unwrap()
}
pub fn project(self, projection: PlaceElem, store: &mut ProjectionStore) -> ProjectionId {
pub fn project<'db>(
self,
projection: PlaceElem<'db>,
store: &mut ProjectionStore<'db>,
) -> ProjectionId {
let mut current = self.lookup(store).to_vec();
current.push(projection);
store.intern(current.into())
@ -315,13 +322,13 @@ impl ProjectionId {
}
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub struct Place {
pub local: LocalId,
pub struct Place<'db> {
pub local: LocalId<'db>,
pub projection: ProjectionId,
}
impl Place {
fn is_parent(&self, child: &Place, store: &ProjectionStore) -> bool {
impl<'db> Place<'db> {
fn is_parent(&self, child: &Place<'db>, store: &ProjectionStore<'db>) -> bool {
self.local == child.local
&& child.projection.lookup(store).starts_with(self.projection.lookup(store))
}
@ -329,39 +336,39 @@ impl Place {
/// The place itself is not included
fn iterate_over_parents<'a>(
&'a self,
store: &'a ProjectionStore,
) -> impl Iterator<Item = Place> + 'a {
store: &'a ProjectionStore<'db>,
) -> impl Iterator<Item = Place<'db>> + 'a {
let projection = self.projection.lookup(store);
(0..projection.len()).map(|x| &projection[0..x]).filter_map(move |x| {
Some(Place { local: self.local, projection: store.intern_if_exist(x)? })
})
}
fn project(&self, projection: PlaceElem, store: &mut ProjectionStore) -> Place {
fn project(&self, projection: PlaceElem<'db>, store: &mut ProjectionStore<'db>) -> Place<'db> {
Place { local: self.local, projection: self.projection.project(projection, store) }
}
}
impl From<LocalId> for Place {
fn from(local: LocalId) -> Self {
impl<'db> From<LocalId<'db>> for Place<'db> {
fn from(local: LocalId<'db>) -> Self {
Self { local, projection: ProjectionId::EMPTY }
}
}
#[derive(Debug, PartialEq, Eq, Clone)]
pub enum AggregateKind {
pub enum AggregateKind<'db> {
/// The type is of the element
Array(Ty),
Array(Ty<'db>),
/// The type is of the tuple
Tuple(Ty),
Adt(VariantId, Substitution),
Tuple(Ty<'db>),
Adt(VariantId, GenericArgs<'db>),
Union(UnionId, FieldId),
Closure(Ty),
Closure(Ty<'db>),
//Coroutine(LocalDefId, SubstsRef, Movability),
}
#[derive(Debug, Clone, Hash, PartialEq, Eq)]
pub struct SwitchTargets {
pub struct SwitchTargets<'db> {
/// Possible values. The locations to branch to in each case
/// are found in the corresponding indices from the `targets` vector.
values: SmallVec<[u128; 1]>,
@ -378,17 +385,17 @@ pub struct SwitchTargets {
//
// However weve decided to keep this as-is until we figure a case
// where some other approach seems to be strictly better than other.
targets: SmallVec<[BasicBlockId; 2]>,
targets: SmallVec<[BasicBlockId<'db>; 2]>,
}
impl SwitchTargets {
impl<'db> SwitchTargets<'db> {
/// Creates switch targets from an iterator of values and target blocks.
///
/// The iterator may be empty, in which case the `SwitchInt` instruction is equivalent to
/// `goto otherwise;`.
pub fn new(
targets: impl Iterator<Item = (u128, BasicBlockId)>,
otherwise: BasicBlockId,
targets: impl Iterator<Item = (u128, BasicBlockId<'db>)>,
otherwise: BasicBlockId<'db>,
) -> Self {
let (values, mut targets): (SmallVec<_>, SmallVec<_>) = targets.unzip();
targets.push(otherwise);
@ -397,12 +404,12 @@ impl SwitchTargets {
/// Builds a switch targets definition that jumps to `then` if the tested value equals `value`,
/// and to `else_` if not.
pub fn static_if(value: u128, then: BasicBlockId, else_: BasicBlockId) -> Self {
pub fn static_if(value: u128, then: BasicBlockId<'db>, else_: BasicBlockId<'db>) -> Self {
Self { values: smallvec![value], targets: smallvec![then, else_] }
}
/// Returns the fallback target that is jumped to when none of the values match the operand.
pub fn otherwise(&self) -> BasicBlockId {
pub fn otherwise(&self) -> BasicBlockId<'db> {
*self.targets.last().unwrap()
}
@ -412,33 +419,33 @@ impl SwitchTargets {
/// including the `otherwise` fallback target.
///
/// Note that this may yield 0 elements. Only the `otherwise` branch is mandatory.
pub fn iter(&self) -> impl Iterator<Item = (u128, BasicBlockId)> + '_ {
pub fn iter(&self) -> impl Iterator<Item = (u128, BasicBlockId<'db>)> + '_ {
iter::zip(&self.values, &self.targets).map(|(x, y)| (*x, *y))
}
/// Returns a slice with all possible jump targets (including the fallback target).
pub fn all_targets(&self) -> &[BasicBlockId] {
pub fn all_targets(&self) -> &[BasicBlockId<'db>] {
&self.targets
}
/// Finds the `BasicBlock` to which this `SwitchInt` will branch given the
/// specific value. This cannot fail, as it'll return the `otherwise`
/// branch if there's not a specific match for the value.
pub fn target_for_value(&self, value: u128) -> BasicBlockId {
pub fn target_for_value(&self, value: u128) -> BasicBlockId<'db> {
self.iter().find_map(|(v, t)| (v == value).then_some(t)).unwrap_or_else(|| self.otherwise())
}
}
#[derive(Debug, PartialEq, Eq, Clone)]
pub struct Terminator {
pub struct Terminator<'db> {
pub span: MirSpan,
pub kind: TerminatorKind,
pub kind: TerminatorKind<'db>,
}
#[derive(Debug, PartialEq, Eq, Clone)]
pub enum TerminatorKind {
pub enum TerminatorKind<'db> {
/// Block has one successor; we continue execution there.
Goto { target: BasicBlockId },
Goto { target: BasicBlockId<'db> },
/// Switches based on the computed value.
///
@ -450,9 +457,9 @@ pub enum TerminatorKind {
/// Target values may not appear more than once.
SwitchInt {
/// The discriminant value being tested.
discr: Operand,
discr: Operand<'db>,
targets: SwitchTargets,
targets: SwitchTargets<'db>,
},
/// Indicates that the landing pad is finished and that the process should continue unwinding.
@ -503,7 +510,7 @@ pub enum TerminatorKind {
/// > The drop glue is executed if, among all statements executed within this `Body`, an assignment to
/// > the place or one of its "parents" occurred more recently than a move out of it. This does not
/// > consider indirect assignments.
Drop { place: Place, target: BasicBlockId, unwind: Option<BasicBlockId> },
Drop { place: Place<'db>, target: BasicBlockId<'db>, unwind: Option<BasicBlockId<'db>> },
/// Drops the place and assigns a new value to it.
///
@ -536,10 +543,10 @@ pub enum TerminatorKind {
///
/// Disallowed after drop elaboration.
DropAndReplace {
place: Place,
value: Operand,
target: BasicBlockId,
unwind: Option<BasicBlockId>,
place: Place<'db>,
value: Operand<'db>,
target: BasicBlockId<'db>,
unwind: Option<BasicBlockId<'db>>,
},
/// Roughly speaking, evaluates the `func` operand and the arguments, and starts execution of
@ -554,18 +561,18 @@ pub enum TerminatorKind {
/// [#71117]: https://github.com/rust-lang/rust/issues/71117
Call {
/// The function thats being called.
func: Operand,
func: Operand<'db>,
/// Arguments the function is called with.
/// These are owned by the callee, which is free to modify them.
/// This allows the memory occupied by "by-value" arguments to be
/// reused across function calls without duplicating the contents.
args: Box<[Operand]>,
args: Box<[Operand<'db>]>,
/// Where the returned value will be written
destination: Place,
destination: Place<'db>,
/// Where to go after this call returns. If none, the call necessarily diverges.
target: Option<BasicBlockId>,
target: Option<BasicBlockId<'db>>,
/// Cleanups to be done if the call unwinds.
cleanup: Option<BasicBlockId>,
cleanup: Option<BasicBlockId<'db>>,
/// `true` if this is from a call in HIR rather than from an overloaded
/// operator. True for overloaded function call.
from_hir_call: bool,
@ -581,11 +588,11 @@ pub enum TerminatorKind {
/// necessarily executed even in the case of a panic, for example in `-C panic=abort`. If the
/// assertion does not fail, execution continues at the specified basic block.
Assert {
cond: Operand,
cond: Operand<'db>,
expected: bool,
//msg: AssertMessage,
target: BasicBlockId,
cleanup: Option<BasicBlockId>,
target: BasicBlockId<'db>,
cleanup: Option<BasicBlockId<'db>>,
},
/// Marks a suspend point.
@ -602,13 +609,13 @@ pub enum TerminatorKind {
/// **Needs clarification**: What about the evaluation order of the `resume_arg` and `value`?
Yield {
/// The value to return.
value: Operand,
value: Operand<'db>,
/// Where to resume to.
resume: BasicBlockId,
resume: BasicBlockId<'db>,
/// The place to store the resume argument in.
resume_arg: Place,
resume_arg: Place<'db>,
/// Cleanup to be done if the coroutine is dropped at this suspend point.
drop: Option<BasicBlockId>,
drop: Option<BasicBlockId<'db>>,
},
/// Indicates the end of dropping a coroutine.
@ -631,10 +638,10 @@ pub enum TerminatorKind {
/// Disallowed after drop elaboration.
FalseEdge {
/// The target normal control flow will take.
real_target: BasicBlockId,
real_target: BasicBlockId<'db>,
/// A block control flow could conceptually jump to, but won't in
/// practice.
imaginary_target: BasicBlockId,
imaginary_target: BasicBlockId<'db>,
},
/// A terminator for blocks that only take one path in reality, but where we reserve the right
@ -646,14 +653,14 @@ pub enum TerminatorKind {
/// Disallowed after drop elaboration.
FalseUnwind {
/// The target normal control flow will take.
real_target: BasicBlockId,
real_target: BasicBlockId<'db>,
/// The imaginary cleanup block link. This particular path will never be taken
/// in practice, but in order to avoid fragility we want to always
/// consider it in borrowck. We don't want to accept programs which
/// pass borrowck only when `panic=abort` or some assertions are disabled
/// due to release vs. debug mode builds. This needs to be an `Option` because
/// of the `remove_noop_landing_pads` and `abort_unwinding_calls` passes.
unwind: Option<BasicBlockId>,
unwind: Option<BasicBlockId<'db>>,
},
}
@ -708,10 +715,10 @@ impl BorrowKind {
}
}
fn from_chalk(m: Mutability) -> Self {
fn from_rustc(m: rustc_ast_ir::Mutability) -> Self {
match m {
Mutability::Not => BorrowKind::Shared,
Mutability::Mut => BorrowKind::Mut { kind: MutBorrowKind::Default },
rustc_ast_ir::Mutability::Not => BorrowKind::Shared,
rustc_ast_ir::Mutability::Mut => BorrowKind::Mut { kind: MutBorrowKind::Default },
}
}
}
@ -840,8 +847,8 @@ impl From<hir_def::hir::CmpOp> for BinOp {
}
}
impl From<Operand> for Rvalue {
fn from(x: Operand) -> Self {
impl<'db> From<Operand<'db>> for Rvalue<'db> {
fn from(x: Operand<'db>) -> Self {
Self::Use(x)
}
}
@ -870,14 +877,14 @@ pub enum CastKind {
}
#[derive(Debug, PartialEq, Eq, Clone)]
pub enum Rvalue {
pub enum Rvalue<'db> {
/// Yields the operand unchanged
Use(Operand),
Use(Operand<'db>),
/// Creates an array where each element is the value of the operand.
///
/// Corresponds to source code like `[x; 32]`.
Repeat(Operand, Const),
Repeat(Operand<'db>, Const<'db>),
/// Creates a reference of the indicated kind to the place.
///
@ -886,7 +893,7 @@ pub enum Rvalue {
/// exactly what the behavior of this operation should be.
///
/// `Shallow` borrows are disallowed after drop lowering.
Ref(BorrowKind, Place),
Ref(BorrowKind, Place<'db>),
/// Creates a pointer/reference to the given thread local.
///
@ -917,7 +924,7 @@ pub enum Rvalue {
/// If the type of the place is an array, this is the array length. For slices (`[T]`, not
/// `&[T]`) this accesses the place's metadata to determine the length. This rvalue is
/// ill-formed for places of other types.
Len(Place),
Len(Place<'db>),
/// Performs essentially all of the casts that can be performed via `as`.
///
@ -925,7 +932,7 @@ pub enum Rvalue {
///
/// **FIXME**: Document exactly which `CastKind`s allow which types of casts. Figure out why
/// `ArrayToPointer` and `MutToConstPointer` are special.
Cast(CastKind, Operand, Ty),
Cast(CastKind, Operand<'db>, Ty<'db>),
// FIXME link to `pointer::offset` when it hits stable.
/// * `Offset` has the same semantics as `pointer::offset`, except that the second
@ -957,7 +964,7 @@ pub enum Rvalue {
/// when the value of right-hand side is negative.
///
/// Other combinations of types and operators are unsupported.
CheckedBinaryOp(BinOp, Operand, Operand),
CheckedBinaryOp(BinOp, Operand<'db>, Operand<'db>),
/// Computes a value as described by the operation.
//NullaryOp(NullOp, Ty),
@ -968,7 +975,7 @@ pub enum Rvalue {
/// Also does two's-complement arithmetic. Negation requires a signed integer or a float;
/// bitwise not requires a signed integer, unsigned integer, or bool. Both operation kinds
/// return a value with the same type as their operand.
UnaryOp(UnOp, Operand),
UnaryOp(UnOp, Operand<'db>),
/// Computes the discriminant of the place, returning it as an integer of type
/// [`discriminant_ty`]. Returns zero for types without discriminant.
@ -980,7 +987,7 @@ pub enum Rvalue {
/// [`discriminant_ty`]: crate::ty::Ty::discriminant_ty
/// [#91095]: https://github.com/rust-lang/rust/issues/91095
/// [`discriminant_for_variant`]: crate::ty::Ty::discriminant_for_variant
Discriminant(Place),
Discriminant(Place<'db>),
/// Creates an aggregate value, like a tuple or struct.
///
@ -990,17 +997,17 @@ pub enum Rvalue {
///
/// Disallowed after deaggregation for all aggregate kinds except `Array` and `Coroutine`. After
/// coroutine lowering, `Coroutine` aggregate kinds are disallowed too.
Aggregate(AggregateKind, Box<[Operand]>),
Aggregate(AggregateKind<'db>, Box<[Operand<'db>]>),
/// Transmutes a `*mut u8` into shallow-initialized `Box<T>`.
///
/// This is different from a normal transmute because dataflow analysis will treat the box as
/// initialized but its content as uninitialized. Like other pointer casts, this in general
/// affects alias analysis.
ShallowInitBox(Operand, Ty),
ShallowInitBox(Operand<'db>, Ty<'db>),
/// NON STANDARD: allocates memory with the type's layout, and shallow init the box with the resulting pointer.
ShallowInitBoxWithAlloc(Ty),
ShallowInitBoxWithAlloc(Ty<'db>),
/// A CopyForDeref is equivalent to a read from a place at the
/// codegen level, but is treated specially by drop elaboration. When such a read happens, it
@ -1010,41 +1017,41 @@ pub enum Rvalue {
/// read never happened and just projects further. This allows simplifying various MIR
/// optimizations and codegen backends that previously had to handle deref operations anywhere
/// in a place.
CopyForDeref(Place),
CopyForDeref(Place<'db>),
}
#[derive(Debug, PartialEq, Eq, Clone)]
pub enum StatementKind {
Assign(Place, Rvalue),
FakeRead(Place),
pub enum StatementKind<'db> {
Assign(Place<'db>, Rvalue<'db>),
FakeRead(Place<'db>),
//SetDiscriminant {
// place: Box<Place>,
// variant_index: VariantIdx,
//},
Deinit(Place),
StorageLive(LocalId),
StorageDead(LocalId),
Deinit(Place<'db>),
StorageLive(LocalId<'db>),
StorageDead(LocalId<'db>),
//Retag(RetagKind, Box<Place>),
//AscribeUserType(Place, UserTypeProjection, Variance),
//Intrinsic(Box<NonDivergingIntrinsic>),
Nop,
}
impl StatementKind {
fn with_span(self, span: MirSpan) -> Statement {
impl<'db> StatementKind<'db> {
fn with_span(self, span: MirSpan) -> Statement<'db> {
Statement { kind: self, span }
}
}
#[derive(Debug, PartialEq, Eq, Clone)]
pub struct Statement {
pub kind: StatementKind,
pub struct Statement<'db> {
pub kind: StatementKind<'db>,
pub span: MirSpan,
}
#[derive(Debug, Default, Clone, PartialEq, Eq)]
pub struct BasicBlock {
pub struct BasicBlock<'db> {
/// List of statements in this block.
pub statements: Vec<Statement>,
pub statements: Vec<Statement<'db>>,
/// Terminator for this block.
///
@ -1054,7 +1061,7 @@ pub struct BasicBlock {
/// exception is that certain passes, such as `simplify_cfg`, swap
/// out the terminator temporarily with `None` while they continue
/// to recurse over the set of basic blocks.
pub terminator: Option<Terminator>,
pub terminator: Option<Terminator<'db>>,
/// If true, this block lies on an unwind path. This is used
/// during codegen where distinct kinds of basic blocks may be
@ -1064,35 +1071,35 @@ pub struct BasicBlock {
}
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct MirBody {
pub projection_store: ProjectionStore,
pub basic_blocks: Arena<BasicBlock>,
pub locals: Arena<Local>,
pub start_block: BasicBlockId,
pub struct MirBody<'db> {
pub projection_store: ProjectionStore<'db>,
pub basic_blocks: Arena<BasicBlock<'db>>,
pub locals: Arena<Local<'db>>,
pub start_block: BasicBlockId<'db>,
pub owner: DefWithBodyId,
pub binding_locals: ArenaMap<BindingId, LocalId>,
pub param_locals: Vec<LocalId>,
pub binding_locals: ArenaMap<BindingId, LocalId<'db>>,
pub param_locals: Vec<LocalId<'db>>,
/// This field stores the closures directly owned by this body. It is used
/// in traversing every mir body.
pub closures: Vec<ClosureId>,
pub closures: Vec<InternedClosureId>,
}
impl MirBody {
pub fn local_to_binding_map(&self) -> ArenaMap<LocalId, BindingId> {
impl<'db> MirBody<'db> {
pub fn local_to_binding_map(&self) -> ArenaMap<LocalId<'db>, BindingId> {
self.binding_locals.iter().map(|(it, y)| (*y, it)).collect()
}
fn walk_places(&mut self, mut f: impl FnMut(&mut Place, &mut ProjectionStore)) {
fn for_operand(
op: &mut Operand,
f: &mut impl FnMut(&mut Place, &mut ProjectionStore),
store: &mut ProjectionStore,
fn walk_places(&mut self, mut f: impl FnMut(&mut Place<'db>, &mut ProjectionStore<'db>)) {
fn for_operand<'db>(
op: &mut Operand<'db>,
f: &mut impl FnMut(&mut Place<'db>, &mut ProjectionStore<'db>),
store: &mut ProjectionStore<'db>,
) {
match &mut op.kind {
OperandKind::Copy(p) | OperandKind::Move(p) => {
f(p, store);
}
OperandKind::Constant(_) | OperandKind::Static(_) => (),
OperandKind::Constant { .. } | OperandKind::Static(_) => (),
}
}
for (_, block) in self.basic_blocks.iter_mut() {

View file

@ -12,11 +12,14 @@ use stdx::never;
use triomphe::Arc;
use crate::{
ClosureId, Interner, Substitution, Ty, TyExt, TypeFlags,
db::{HirDatabase, InternedClosure},
TraitEnvironment,
db::{HirDatabase, InternedClosure, InternedClosureId},
display::DisplayTarget,
mir::OperandKind,
utils::ClosureSubst,
next_solver::{
DbInterner, GenericArgs, SolverDefIds, Ty, TypingMode,
infer::{DbInternerInferExt, InferCtxt},
},
};
use super::{
@ -33,45 +36,45 @@ pub enum MutabilityReason {
}
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct MovedOutOfRef {
pub ty: Ty,
pub struct MovedOutOfRef<'db> {
pub ty: Ty<'db>,
pub span: MirSpan,
}
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct PartiallyMoved {
pub ty: Ty,
pub struct PartiallyMoved<'db> {
pub ty: Ty<'db>,
pub span: MirSpan,
pub local: LocalId,
pub local: LocalId<'db>,
}
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct BorrowRegion {
pub local: LocalId,
pub struct BorrowRegion<'db> {
pub local: LocalId<'db>,
pub kind: BorrowKind,
pub places: Vec<MirSpan>,
}
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct BorrowckResult {
pub mir_body: Arc<MirBody>,
pub mutability_of_locals: ArenaMap<LocalId, MutabilityReason>,
pub moved_out_of_ref: Vec<MovedOutOfRef>,
pub partially_moved: Vec<PartiallyMoved>,
pub borrow_regions: Vec<BorrowRegion>,
pub struct BorrowckResult<'db> {
pub mir_body: Arc<MirBody<'db>>,
pub mutability_of_locals: ArenaMap<LocalId<'db>, MutabilityReason>,
pub moved_out_of_ref: Vec<MovedOutOfRef<'db>>,
pub partially_moved: Vec<PartiallyMoved<'db>>,
pub borrow_regions: Vec<BorrowRegion<'db>>,
}
fn all_mir_bodies(
db: &dyn HirDatabase,
fn all_mir_bodies<'db>(
db: &'db dyn HirDatabase,
def: DefWithBodyId,
mut cb: impl FnMut(Arc<MirBody>),
) -> Result<(), MirLowerError> {
fn for_closure(
db: &dyn HirDatabase,
c: ClosureId,
cb: &mut impl FnMut(Arc<MirBody>),
) -> Result<(), MirLowerError> {
match db.mir_body_for_closure(c.into()) {
mut cb: impl FnMut(Arc<MirBody<'db>>),
) -> Result<(), MirLowerError<'db>> {
fn for_closure<'db>(
db: &'db dyn HirDatabase,
c: InternedClosureId,
cb: &mut impl FnMut(Arc<MirBody<'db>>),
) -> Result<(), MirLowerError<'db>> {
match db.mir_body_for_closure(c) {
Ok(body) => {
cb(body.clone());
body.closures.iter().try_for_each(|&it| for_closure(db, it, cb))
@ -88,17 +91,24 @@ fn all_mir_bodies(
}
}
pub fn borrowck_query(
db: &dyn HirDatabase,
pub fn borrowck_query<'db>(
db: &'db dyn HirDatabase,
def: DefWithBodyId,
) -> Result<Arc<[BorrowckResult]>, MirLowerError> {
) -> Result<Arc<[BorrowckResult<'db>]>, MirLowerError<'db>> {
let _p = tracing::info_span!("borrowck_query").entered();
let module = def.module(db);
let interner = DbInterner::new_with(db, Some(module.krate()), module.containing_block());
let env = db.trait_environment_for_body(def);
let mut res = vec![];
all_mir_bodies(db, def, |body| {
// FIXME(next-solver): Opaques.
let infcx = interner.infer_ctxt().build(TypingMode::Borrowck {
defining_opaque_types: SolverDefIds::new_from_iter(interner, []),
});
res.push(BorrowckResult {
mutability_of_locals: mutability_of_locals(db, &body),
moved_out_of_ref: moved_out_of_ref(db, &body),
partially_moved: partially_moved(db, &body),
mutability_of_locals: mutability_of_locals(&infcx, &body),
moved_out_of_ref: moved_out_of_ref(&infcx, &env, &body),
partially_moved: partially_moved(&infcx, &env, &body),
borrow_regions: borrow_regions(db, &body),
mir_body: body,
});
@ -106,48 +116,49 @@ pub fn borrowck_query(
Ok(res.into())
}
fn make_fetch_closure_field(
db: &dyn HirDatabase,
) -> impl FnOnce(ClosureId, &Substitution, usize) -> Ty + '_ {
|c: ClosureId, subst: &Substitution, f: usize| {
let InternedClosure(def, _) = db.lookup_intern_closure(c.into());
fn make_fetch_closure_field<'db>(
db: &'db dyn HirDatabase,
) -> impl FnOnce(InternedClosureId, GenericArgs<'db>, usize) -> Ty<'db> + use<'db> {
|c: InternedClosureId, subst: GenericArgs<'db>, f: usize| {
let InternedClosure(def, _) = db.lookup_intern_closure(c);
let infer = db.infer(def);
let (captures, _) = infer.closure_info(&c);
let parent_subst = ClosureSubst(subst).parent_subst(db);
captures
.get(f)
.expect("broken closure field")
.ty
.clone()
.substitute(Interner, &parent_subst)
let (captures, _) = infer.closure_info(c);
let parent_subst = subst.split_closure_args_untupled().parent_args;
let interner = DbInterner::new_with(db, None, None);
captures.get(f).expect("broken closure field").ty.instantiate(interner, parent_subst)
}
}
fn moved_out_of_ref(db: &dyn HirDatabase, body: &MirBody) -> Vec<MovedOutOfRef> {
fn moved_out_of_ref<'db>(
infcx: &InferCtxt<'db>,
env: &TraitEnvironment<'db>,
body: &MirBody<'db>,
) -> Vec<MovedOutOfRef<'db>> {
let db = infcx.interner.db;
let mut result = vec![];
let mut for_operand = |op: &Operand, span: MirSpan| match op.kind {
let mut for_operand = |op: &Operand<'db>, span: MirSpan| match op.kind {
OperandKind::Copy(p) | OperandKind::Move(p) => {
let mut ty: Ty = body.locals[p.local].ty.clone();
let mut ty: Ty<'db> = body.locals[p.local].ty;
let mut is_dereference_of_ref = false;
for proj in p.projection.lookup(&body.projection_store) {
if *proj == ProjectionElem::Deref && ty.as_reference().is_some() {
is_dereference_of_ref = true;
}
ty = proj.projected_ty(
infcx,
ty,
db,
make_fetch_closure_field(db),
body.owner.module(db).krate(),
);
}
if is_dereference_of_ref
&& !ty.clone().is_copy(db, body.owner)
&& !ty.data(Interner).flags.intersects(TypeFlags::HAS_ERROR)
&& !infcx.type_is_copy_modulo_regions(env.env, ty)
&& !ty.references_non_lt_error()
{
result.push(MovedOutOfRef { span: op.span.unwrap_or(span), ty });
}
}
OperandKind::Constant(_) | OperandKind::Static(_) => (),
OperandKind::Constant { .. } | OperandKind::Static(_) => (),
};
for (_, block) in body.basic_blocks.iter() {
db.unwind_if_revision_cancelled();
@ -218,26 +229,29 @@ fn moved_out_of_ref(db: &dyn HirDatabase, body: &MirBody) -> Vec<MovedOutOfRef>
result
}
fn partially_moved(db: &dyn HirDatabase, body: &MirBody) -> Vec<PartiallyMoved> {
fn partially_moved<'db>(
infcx: &InferCtxt<'db>,
env: &TraitEnvironment<'db>,
body: &MirBody<'db>,
) -> Vec<PartiallyMoved<'db>> {
let db = infcx.interner.db;
let mut result = vec![];
let mut for_operand = |op: &Operand, span: MirSpan| match op.kind {
let mut for_operand = |op: &Operand<'db>, span: MirSpan| match op.kind {
OperandKind::Copy(p) | OperandKind::Move(p) => {
let mut ty: Ty = body.locals[p.local].ty.clone();
let mut ty: Ty<'db> = body.locals[p.local].ty;
for proj in p.projection.lookup(&body.projection_store) {
ty = proj.projected_ty(
infcx,
ty,
db,
make_fetch_closure_field(db),
body.owner.module(db).krate(),
);
}
if !ty.clone().is_copy(db, body.owner)
&& !ty.data(Interner).flags.intersects(TypeFlags::HAS_ERROR)
{
if !infcx.type_is_copy_modulo_regions(env.env, ty) && !ty.references_non_lt_error() {
result.push(PartiallyMoved { span, ty, local: p.local });
}
}
OperandKind::Constant(_) | OperandKind::Static(_) => (),
OperandKind::Constant { .. } | OperandKind::Static(_) => (),
};
for (_, block) in body.basic_blocks.iter() {
db.unwind_if_revision_cancelled();
@ -308,7 +322,7 @@ fn partially_moved(db: &dyn HirDatabase, body: &MirBody) -> Vec<PartiallyMoved>
result
}
fn borrow_regions(db: &dyn HirDatabase, body: &MirBody) -> Vec<BorrowRegion> {
fn borrow_regions<'db>(db: &'db dyn HirDatabase, body: &MirBody<'db>) -> Vec<BorrowRegion<'db>> {
let mut borrows = FxHashMap::default();
for (_, block) in body.basic_blocks.iter() {
db.unwind_if_revision_cancelled();
@ -316,7 +330,7 @@ fn borrow_regions(db: &dyn HirDatabase, body: &MirBody) -> Vec<BorrowRegion> {
if let StatementKind::Assign(_, Rvalue::Ref(kind, p)) = &statement.kind {
borrows
.entry(p.local)
.and_modify(|it: &mut BorrowRegion| {
.and_modify(|it: &mut BorrowRegion<'db>| {
it.places.push(statement.span);
})
.or_insert_with(|| BorrowRegion {
@ -358,9 +372,14 @@ enum ProjectionCase {
Indirect,
}
fn place_case(db: &dyn HirDatabase, body: &MirBody, lvalue: &Place) -> ProjectionCase {
fn place_case<'db>(
infcx: &InferCtxt<'db>,
body: &MirBody<'db>,
lvalue: &Place<'db>,
) -> ProjectionCase {
let db = infcx.interner.db;
let mut is_part_of = false;
let mut ty = body.locals[lvalue.local].ty.clone();
let mut ty = body.locals[lvalue.local].ty;
for proj in lvalue.projection.lookup(&body.projection_store).iter() {
match proj {
ProjectionElem::Deref if ty.as_adt().is_none() => return ProjectionCase::Indirect, // It's indirect in case of reference and raw
@ -374,7 +393,12 @@ fn place_case(db: &dyn HirDatabase, body: &MirBody, lvalue: &Place) -> Projectio
}
ProjectionElem::OpaqueCast(_) => (),
}
ty = proj.projected_ty(ty, db, make_fetch_closure_field(db), body.owner.module(db).krate());
ty = proj.projected_ty(
infcx,
ty,
make_fetch_closure_field(db),
body.owner.module(db).krate(),
);
}
if is_part_of { ProjectionCase::DirectPart } else { ProjectionCase::Direct }
}
@ -382,18 +406,18 @@ fn place_case(db: &dyn HirDatabase, body: &MirBody, lvalue: &Place) -> Projectio
/// Returns a map from basic blocks to the set of locals that might be ever initialized before
/// the start of the block. Only `StorageDead` can remove something from this map, and we ignore
/// `Uninit` and `drop` and similar after initialization.
fn ever_initialized_map(
db: &dyn HirDatabase,
body: &MirBody,
) -> ArenaMap<BasicBlockId, ArenaMap<LocalId, bool>> {
let mut result: ArenaMap<BasicBlockId, ArenaMap<LocalId, bool>> =
fn ever_initialized_map<'db>(
db: &'db dyn HirDatabase,
body: &MirBody<'db>,
) -> ArenaMap<BasicBlockId<'db>, ArenaMap<LocalId<'db>, bool>> {
let mut result: ArenaMap<BasicBlockId<'db>, ArenaMap<LocalId<'db>, bool>> =
body.basic_blocks.iter().map(|it| (it.0, ArenaMap::default())).collect();
fn dfs(
db: &dyn HirDatabase,
body: &MirBody,
l: LocalId,
stack: &mut Vec<BasicBlockId>,
result: &mut ArenaMap<BasicBlockId, ArenaMap<LocalId, bool>>,
fn dfs<'db>(
db: &'db dyn HirDatabase,
body: &MirBody<'db>,
l: LocalId<'db>,
stack: &mut Vec<BasicBlockId<'db>>,
result: &mut ArenaMap<BasicBlockId<'db>, ArenaMap<LocalId<'db>, bool>>,
) {
while let Some(b) = stack.pop() {
let mut is_ever_initialized = result[b][l]; // It must be filled, as we use it as mark for dfs
@ -481,7 +505,11 @@ fn ever_initialized_map(
result
}
fn push_mut_span(local: LocalId, span: MirSpan, result: &mut ArenaMap<LocalId, MutabilityReason>) {
fn push_mut_span<'db>(
local: LocalId<'db>,
span: MirSpan,
result: &mut ArenaMap<LocalId<'db>, MutabilityReason>,
) {
match &mut result[local] {
MutabilityReason::Mut { spans } => spans.push(span),
it @ (MutabilityReason::Not | MutabilityReason::Unused) => {
@ -490,23 +518,27 @@ fn push_mut_span(local: LocalId, span: MirSpan, result: &mut ArenaMap<LocalId, M
};
}
fn record_usage(local: LocalId, result: &mut ArenaMap<LocalId, MutabilityReason>) {
fn record_usage<'db>(local: LocalId<'db>, result: &mut ArenaMap<LocalId<'db>, MutabilityReason>) {
if let it @ MutabilityReason::Unused = &mut result[local] {
*it = MutabilityReason::Not;
};
}
fn record_usage_for_operand(arg: &Operand, result: &mut ArenaMap<LocalId, MutabilityReason>) {
fn record_usage_for_operand<'db>(
arg: &Operand<'db>,
result: &mut ArenaMap<LocalId<'db>, MutabilityReason>,
) {
if let OperandKind::Copy(p) | OperandKind::Move(p) = arg.kind {
record_usage(p.local, result);
}
}
fn mutability_of_locals(
db: &dyn HirDatabase,
body: &MirBody,
) -> ArenaMap<LocalId, MutabilityReason> {
let mut result: ArenaMap<LocalId, MutabilityReason> =
fn mutability_of_locals<'db>(
infcx: &InferCtxt<'db>,
body: &MirBody<'db>,
) -> ArenaMap<LocalId<'db>, MutabilityReason> {
let db = infcx.interner.db;
let mut result: ArenaMap<LocalId<'db>, MutabilityReason> =
body.locals.iter().map(|it| (it.0, MutabilityReason::Unused)).collect();
let ever_init_maps = ever_initialized_map(db, body);
@ -515,7 +547,7 @@ fn mutability_of_locals(
for statement in &block.statements {
match &statement.kind {
StatementKind::Assign(place, value) => {
match place_case(db, body, place) {
match place_case(infcx, body, place) {
ProjectionCase::Direct => {
if ever_init_map.get(place.local).copied().unwrap_or_default() {
push_mut_span(place.local, statement.span, &mut result);
@ -564,7 +596,7 @@ fn mutability_of_locals(
},
p,
) = value
&& place_case(db, body, p) != ProjectionCase::Indirect
&& place_case(infcx, body, p) != ProjectionCase::Indirect
{
push_mut_span(p.local, statement.span, &mut result);
}

File diff suppressed because it is too large Load diff

View file

@ -3,32 +3,21 @@
//!
use std::cmp::{self, Ordering};
use chalk_ir::TyKind;
use hir_def::signatures::FunctionSignature;
use hir_def::{
CrateRootModuleId,
builtin_type::{BuiltinInt, BuiltinUint},
resolver::HasResolver,
};
use hir_def::{CrateRootModuleId, resolver::HasResolver, signatures::FunctionSignature};
use hir_expand::name::Name;
use intern::{Symbol, sym};
use rustc_type_ir::inherent::{AdtDef, IntoKind, SliceLike, Ty as _};
use stdx::never;
use crate::next_solver::mapping::NextSolverToChalk;
use crate::{
DropGlue,
display::DisplayTarget,
error_lifetime,
drop::{DropGlue, has_drop_glue},
mir::eval::{
Address, AdtId, Arc, BuiltinType, Evaluator, FunctionId, HasModule, HirDisplay,
InternedClosure, Interner, Interval, IntervalAndTy, IntervalOrOwned, ItemContainerId,
LangItem, Layout, Locals, Lookup, MirEvalError, MirSpan, Mutability, Result, Substitution,
Ty, TyBuilder, TyExt, pad16,
},
next_solver::{
DbInterner,
mapping::{ChalkToNextSolver, convert_ty_for_result},
Address, AdtId, Arc, Evaluator, FunctionId, GenericArgs, HasModule, HirDisplay,
InternedClosure, Interval, IntervalAndTy, IntervalOrOwned, ItemContainerId, LangItem,
Layout, Locals, Lookup, MirEvalError, MirSpan, Mutability, Result, Ty, TyKind, pad16,
},
next_solver::Region,
};
mod simd;
@ -49,16 +38,16 @@ macro_rules! not_supported {
};
}
impl Evaluator<'_> {
impl<'db> Evaluator<'db> {
pub(super) fn detect_and_exec_special_function(
&mut self,
def: FunctionId,
args: &[IntervalAndTy],
generic_args: &Substitution,
locals: &Locals,
args: &[IntervalAndTy<'db>],
generic_args: GenericArgs<'db>,
locals: &Locals<'db>,
destination: Interval,
span: MirSpan,
) -> Result<bool> {
) -> Result<'db, bool> {
if self.not_special_fn_cache.borrow().contains(&def) {
return Ok(false);
}
@ -118,18 +107,16 @@ impl Evaluator<'_> {
if let ItemContainerId::TraitId(t) = def.lookup(self.db).container
&& self.db.lang_attr(t.into()) == Some(LangItem::Clone)
{
let [self_ty] = generic_args.as_slice(Interner) else {
let [self_ty] = generic_args.as_slice() else {
not_supported!("wrong generic arg count for clone");
};
let Some(self_ty) = self_ty.ty(Interner) else {
let Some(self_ty) = self_ty.ty() else {
not_supported!("wrong generic arg kind for clone");
};
// Clone has special impls for tuples and function pointers
if matches!(
self_ty.kind(Interner),
TyKind::Function(_) | TyKind::Tuple(..) | TyKind::Closure(..)
) {
self.exec_clone(def, args, self_ty.clone(), locals, destination, span)?;
if matches!(self_ty.kind(), TyKind::FnPtr(..) | TyKind::Tuple(..) | TyKind::Closure(..))
{
self.exec_clone(def, args, self_ty, locals, destination, span)?;
return Ok(true);
}
// Return early to prevent caching clone as non special fn.
@ -142,7 +129,7 @@ impl Evaluator<'_> {
pub(super) fn detect_and_redirect_special_function(
&mut self,
def: FunctionId,
) -> Result<Option<FunctionId>> {
) -> Result<'db, Option<FunctionId>> {
// `PanicFmt` is redirected to `ConstPanicFmt`
if let Some(LangItem::PanicFmt) = self.db.lang_attr(def.into()) {
let resolver = CrateRootModuleId::from(self.crate_id).resolver(self.db);
@ -161,15 +148,14 @@ impl Evaluator<'_> {
fn exec_clone(
&mut self,
def: FunctionId,
args: &[IntervalAndTy],
self_ty: Ty,
locals: &Locals,
args: &[IntervalAndTy<'db>],
self_ty: Ty<'db>,
locals: &Locals<'db>,
destination: Interval,
span: MirSpan,
) -> Result<()> {
let interner = DbInterner::new_with(self.db, None, None);
match self_ty.kind(Interner) {
TyKind::Function(_) => {
) -> Result<'db, ()> {
match self_ty.kind() {
TyKind::FnPtr(..) => {
let [arg] = args else {
not_supported!("wrong arg count for clone");
};
@ -182,27 +168,35 @@ impl Evaluator<'_> {
not_supported!("wrong arg count for clone");
};
let addr = Address::from_bytes(arg.get(self)?)?;
let InternedClosure(closure_owner, _) = self.db.lookup_intern_closure((*id).into());
let InternedClosure(closure_owner, _) = self.db.lookup_intern_closure(id.0);
let infer = self.db.infer(closure_owner);
let (captures, _) = infer.closure_info(id);
let layout = self.layout(self_ty.to_nextsolver(interner))?;
let ty_iter = captures.iter().map(|c| c.ty(self.db, subst));
let (captures, _) = infer.closure_info(id.0);
let layout = self.layout(self_ty)?;
let db = self.db;
let ty_iter = captures.iter().map(|c| c.ty(db, subst));
self.exec_clone_for_fields(ty_iter, layout, addr, def, locals, destination, span)?;
}
TyKind::Tuple(_, subst) => {
TyKind::Tuple(subst) => {
let [arg] = args else {
not_supported!("wrong arg count for clone");
};
let addr = Address::from_bytes(arg.get(self)?)?;
let layout = self.layout(self_ty.to_nextsolver(interner))?;
let ty_iter = subst.iter(Interner).map(|ga| ga.assert_ty_ref(Interner).clone());
self.exec_clone_for_fields(ty_iter, layout, addr, def, locals, destination, span)?;
let layout = self.layout(self_ty)?;
self.exec_clone_for_fields(
subst.iter(),
layout,
addr,
def,
locals,
destination,
span,
)?;
}
_ => {
self.exec_fn_with_args(
def,
args,
Substitution::from1(Interner, self_ty),
GenericArgs::new_from_iter(self.interner(), [self_ty.into()]),
locals,
destination,
None,
@ -215,21 +209,25 @@ impl Evaluator<'_> {
fn exec_clone_for_fields(
&mut self,
ty_iter: impl Iterator<Item = Ty>,
ty_iter: impl Iterator<Item = Ty<'db>>,
layout: Arc<Layout>,
addr: Address,
def: FunctionId,
locals: &Locals,
locals: &Locals<'db>,
destination: Interval,
span: MirSpan,
) -> Result<()> {
let interner = DbInterner::new_with(self.db, None, None);
) -> Result<'db, ()> {
for (i, ty) in ty_iter.enumerate() {
let size = self.layout(ty.to_nextsolver(interner))?.size.bytes_usize();
let size = self.layout(ty)?.size.bytes_usize();
let tmp = self.heap_allocate(self.ptr_size(), self.ptr_size())?;
let arg = IntervalAndTy {
interval: Interval { addr: tmp, size: self.ptr_size() },
ty: TyKind::Ref(Mutability::Not, error_lifetime(), ty.clone()).intern(Interner),
ty: Ty::new_ref(
self.interner(),
Region::error(self.interner()),
ty,
Mutability::Not,
),
};
let offset = layout.fields.offset(i).bytes_usize();
self.write_memory(tmp, &addr.offset(offset).to_bytes())?;
@ -248,9 +246,9 @@ impl Evaluator<'_> {
fn exec_alloc_fn(
&mut self,
alloc_fn: &Symbol,
args: &[IntervalAndTy],
args: &[IntervalAndTy<'db>],
destination: Interval,
) -> Result<()> {
) -> Result<'db, ()> {
match alloc_fn {
_ if *alloc_fn == sym::rustc_allocator_zeroed || *alloc_fn == sym::rustc_allocator => {
let [size, align] = args else {
@ -310,11 +308,11 @@ impl Evaluator<'_> {
fn exec_lang_item(
&mut self,
it: LangItem,
generic_args: &Substitution,
args: &[IntervalAndTy],
locals: &Locals,
generic_args: GenericArgs<'db>,
args: &[IntervalAndTy<'db>],
locals: &Locals<'db>,
span: MirSpan,
) -> Result<Vec<u8>> {
) -> Result<'db, Vec<u8>> {
use LangItem::*;
let mut args = args.iter();
match it {
@ -325,7 +323,7 @@ impl Evaluator<'_> {
"argument of BeginPanic is not provided".into(),
))?
.clone();
while let TyKind::Ref(_, _, ty) = arg.ty.kind(Interner) {
while let TyKind::Ref(_, ty, _) = arg.ty.kind() {
if ty.is_str() {
let (pointee, metadata) = arg.interval.get(self)?.split_at(self.ptr_size());
let len = from_bytes!(usize, metadata);
@ -344,13 +342,10 @@ impl Evaluator<'_> {
let pointee = arg.interval.get(self)?;
arg = IntervalAndTy {
interval: Interval::new(Address::from_bytes(pointee)?, size),
ty: ty.clone(),
ty,
};
}
Err(MirEvalError::Panic(format!(
"unknown-panic-payload: {:?}",
arg.ty.kind(Interner)
)))
Err(MirEvalError::Panic(format!("unknown-panic-payload: {:?}", arg.ty.kind())))
}
SliceLen => {
let arg = args.next().ok_or(MirEvalError::InternalError(
@ -361,18 +356,17 @@ impl Evaluator<'_> {
Ok(arg[ptr_size..].into())
}
DropInPlace => {
let ty =
generic_args.as_slice(Interner).first().and_then(|it| it.ty(Interner)).ok_or(
MirEvalError::InternalError(
"generic argument of drop_in_place is not provided".into(),
),
)?;
let ty = generic_args.as_slice().first().and_then(|it| it.ty()).ok_or(
MirEvalError::InternalError(
"generic argument of drop_in_place is not provided".into(),
),
)?;
let arg = args.next().ok_or(MirEvalError::InternalError(
"argument of drop_in_place is not provided".into(),
))?;
let arg = arg.interval.get(self)?.to_owned();
self.run_drop_glue_deep(
ty.clone(),
ty,
locals,
Address::from_bytes(&arg[0..self.ptr_size()])?,
&arg[self.ptr_size()..],
@ -387,11 +381,11 @@ impl Evaluator<'_> {
fn exec_syscall(
&mut self,
id: i64,
args: &[IntervalAndTy],
args: &[IntervalAndTy<'db>],
destination: Interval,
_locals: &Locals,
_locals: &Locals<'db>,
_span: MirSpan,
) -> Result<()> {
) -> Result<'db, ()> {
match id {
318 => {
// SYS_getrandom
@ -417,12 +411,12 @@ impl Evaluator<'_> {
fn exec_extern_c(
&mut self,
as_str: &str,
args: &[IntervalAndTy],
_generic_args: &Substitution,
args: &[IntervalAndTy<'db>],
_generic_args: GenericArgs<'db>,
destination: Interval,
locals: &Locals,
locals: &Locals<'db>,
span: MirSpan,
) -> Result<()> {
) -> Result<'db, ()> {
match as_str {
"memcmp" => {
let [ptr1, ptr2, size] = args else {
@ -583,14 +577,13 @@ impl Evaluator<'_> {
fn exec_intrinsic(
&mut self,
name: &str,
args: &[IntervalAndTy],
generic_args: &Substitution,
args: &[IntervalAndTy<'db>],
generic_args: GenericArgs<'db>,
destination: Interval,
locals: &Locals,
locals: &Locals<'db>,
span: MirSpan,
needs_override: bool,
) -> Result<bool> {
let interner = DbInterner::new_with(self.db, None, None);
) -> Result<'db, bool> {
if let Some(name) = name.strip_prefix("atomic_") {
return self
.exec_atomic_intrinsic(name, args, generic_args, destination, locals, span)
@ -748,9 +741,7 @@ impl Evaluator<'_> {
}
match name {
"size_of" => {
let Some(ty) =
generic_args.as_slice(Interner).first().and_then(|it| it.ty(Interner))
else {
let Some(ty) = generic_args.as_slice().first().and_then(|it| it.ty()) else {
return Err(MirEvalError::InternalError(
"size_of generic arg is not provided".into(),
));
@ -761,20 +752,16 @@ impl Evaluator<'_> {
// FIXME: `min_align_of` was renamed to `align_of` in Rust 1.89
// (https://github.com/rust-lang/rust/pull/142410)
"min_align_of" | "align_of" => {
let Some(ty) =
generic_args.as_slice(Interner).first().and_then(|it| it.ty(Interner))
else {
let Some(ty) = generic_args.as_slice().first().and_then(|it| it.ty()) else {
return Err(MirEvalError::InternalError(
"align_of generic arg is not provided".into(),
));
};
let align = self.layout(ty.to_nextsolver(interner))?.align.bytes();
let align = self.layout(ty)?.align.bytes();
destination.write_from_bytes(self, &align.to_le_bytes()[0..destination.size])
}
"size_of_val" => {
let Some(ty) =
generic_args.as_slice(Interner).first().and_then(|it| it.ty(Interner))
else {
let Some(ty) = generic_args.as_slice().first().and_then(|it| it.ty()) else {
return Err(MirEvalError::InternalError(
"size_of_val generic arg is not provided".into(),
));
@ -795,9 +782,7 @@ impl Evaluator<'_> {
// FIXME: `min_align_of_val` was renamed to `align_of_val` in Rust 1.89
// (https://github.com/rust-lang/rust/pull/142410)
"min_align_of_val" | "align_of_val" => {
let Some(ty) =
generic_args.as_slice(Interner).first().and_then(|it| it.ty(Interner))
else {
let Some(ty) = generic_args.as_slice().first().and_then(|it| it.ty()) else {
return Err(MirEvalError::InternalError(
"align_of_val generic arg is not provided".into(),
));
@ -816,9 +801,7 @@ impl Evaluator<'_> {
}
}
"type_name" => {
let Some(ty) =
generic_args.as_slice(Interner).first().and_then(|it| it.ty(Interner))
else {
let Some(ty) = generic_args.as_slice().first().and_then(|it| it.ty()) else {
return Err(MirEvalError::InternalError(
"type_name generic arg is not provided".into(),
));
@ -845,14 +828,12 @@ impl Evaluator<'_> {
.write_from_bytes(self, &len.to_le_bytes())
}
"needs_drop" => {
let Some(ty) =
generic_args.as_slice(Interner).first().and_then(|it| it.ty(Interner))
else {
let Some(ty) = generic_args.as_slice().first().and_then(|it| it.ty()) else {
return Err(MirEvalError::InternalError(
"size_of generic arg is not provided".into(),
));
};
let result = match self.db.has_drop_glue(ty.clone(), self.trait_env.clone()) {
let result = match has_drop_glue(&self.infcx, ty, self.trait_env.clone()) {
DropGlue::HasDropGlue => true,
DropGlue::None => false,
DropGlue::DependOnParams => {
@ -915,9 +896,7 @@ impl Evaluator<'_> {
let lhs = i128::from_le_bytes(pad16(lhs.get(self)?, false));
let rhs = i128::from_le_bytes(pad16(rhs.get(self)?, false));
let ans = lhs.wrapping_sub(rhs);
let Some(ty) =
generic_args.as_slice(Interner).first().and_then(|it| it.ty(Interner))
else {
let Some(ty) = generic_args.as_slice().first().and_then(|it| it.ty()) else {
return Err(MirEvalError::InternalError(
"ptr_offset_from generic arg is not provided".into(),
));
@ -1006,13 +985,11 @@ impl Evaluator<'_> {
"const_eval_select args are not provided".into(),
));
};
let result_ty = TyKind::Tuple(
2,
Substitution::from_iter(Interner, [lhs.ty.clone(), TyBuilder::bool()]),
)
.intern(Interner);
let op_size =
self.size_of_sized(&lhs.ty, locals, "operand of add_with_overflow")?;
let result_ty = Ty::new_tup_from_iter(
self.interner(),
[lhs.ty, Ty::new_bool(self.interner())].into_iter(),
);
let op_size = self.size_of_sized(lhs.ty, locals, "operand of add_with_overflow")?;
let lhs = u128::from_le_bytes(pad16(lhs.get(self)?, false));
let rhs = u128::from_le_bytes(pad16(rhs.get(self)?, false));
let (ans, u128overflow) = match name {
@ -1024,7 +1001,7 @@ impl Evaluator<'_> {
let is_overflow = u128overflow
|| ans.to_le_bytes()[op_size..].iter().any(|&it| it != 0 && it != 255);
let is_overflow = vec![u8::from(is_overflow)];
let layout = self.layout(result_ty.to_nextsolver(interner))?;
let layout = self.layout(result_ty)?;
let result = self.construct_with_layout(
layout.size.bytes_usize(),
&layout,
@ -1041,9 +1018,7 @@ impl Evaluator<'_> {
"copy_nonoverlapping args are not provided".into(),
));
};
let Some(ty) =
generic_args.as_slice(Interner).first().and_then(|it| it.ty(Interner))
else {
let Some(ty) = generic_args.as_slice().first().and_then(|it| it.ty()) else {
return Err(MirEvalError::InternalError(
"copy_nonoverlapping generic arg is not provided".into(),
));
@ -1062,43 +1037,35 @@ impl Evaluator<'_> {
return Err(MirEvalError::InternalError("offset args are not provided".into()));
};
let ty = if name == "offset" {
let Some(ty0) =
generic_args.as_slice(Interner).first().and_then(|it| it.ty(Interner))
else {
let Some(ty0) = generic_args.as_slice().first().and_then(|it| it.ty()) else {
return Err(MirEvalError::InternalError(
"offset generic arg is not provided".into(),
));
};
let Some(ty1) =
generic_args.as_slice(Interner).get(1).and_then(|it| it.ty(Interner))
else {
let Some(ty1) = generic_args.as_slice().get(1).and_then(|it| it.ty()) else {
return Err(MirEvalError::InternalError(
"offset generic arg is not provided".into(),
));
};
if !matches!(
ty1.as_builtin(),
Some(
BuiltinType::Int(BuiltinInt::Isize)
| BuiltinType::Uint(BuiltinUint::Usize)
)
ty1.kind(),
TyKind::Int(rustc_type_ir::IntTy::Isize)
| TyKind::Uint(rustc_type_ir::UintTy::Usize)
) {
return Err(MirEvalError::InternalError(
"offset generic arg is not usize or isize".into(),
));
}
match ty0.as_raw_ptr() {
Some((ty, _)) => ty,
None => {
match ty0.kind() {
TyKind::RawPtr(ty, _) => ty,
_ => {
return Err(MirEvalError::InternalError(
"offset generic arg is not a raw pointer".into(),
));
}
}
} else {
let Some(ty) =
generic_args.as_slice(Interner).first().and_then(|it| it.ty(Interner))
else {
let Some(ty) = generic_args.as_slice().first().and_then(|it| it.ty()) else {
return Err(MirEvalError::InternalError(
"arith_offset generic arg is not provided".into(),
));
@ -1223,9 +1190,7 @@ impl Evaluator<'_> {
"discriminant_value arg is not provided".into(),
));
};
let Some(ty) =
generic_args.as_slice(Interner).first().and_then(|it| it.ty(Interner))
else {
let Some(ty) = generic_args.as_slice().first().and_then(|it| it.ty()) else {
return Err(MirEvalError::InternalError(
"discriminant_value generic arg is not provided".into(),
));
@ -1233,7 +1198,7 @@ impl Evaluator<'_> {
let addr = Address::from_bytes(arg.get(self)?)?;
let size = self.size_of_sized(ty, locals, "discriminant_value ptr type")?;
let interval = Interval { addr, size };
let r = self.compute_discriminant(ty.clone(), interval.get(self)?)?;
let r = self.compute_discriminant(ty, interval.get(self)?)?;
destination.write_from_bytes(self, &r.to_le_bytes()[0..destination.size])
}
"const_eval_select" => {
@ -1243,14 +1208,13 @@ impl Evaluator<'_> {
));
};
let mut args = vec![const_fn.clone()];
let TyKind::Tuple(_, fields) = tuple.ty.kind(Interner) else {
let TyKind::Tuple(fields) = tuple.ty.kind() else {
return Err(MirEvalError::InternalError(
"const_eval_select arg[0] is not a tuple".into(),
));
};
let layout = self.layout(tuple.ty.to_nextsolver(interner))?;
for (i, field) in fields.iter(Interner).enumerate() {
let field = field.assert_ty_ref(Interner).clone();
let layout = self.layout(tuple.ty)?;
for (i, field) in fields.iter().enumerate() {
let offset = layout.fields.offset(i).bytes_usize();
let addr = tuple.interval.addr.offset(offset);
args.push(IntervalAndTy::new(addr, field, self, locals)?);
@ -1264,7 +1228,7 @@ impl Evaluator<'_> {
def,
&args,
// FIXME: wrong for manual impls of `FnOnce`
Substitution::empty(Interner),
GenericArgs::new_from_iter(self.interner(), []),
locals,
destination,
None,
@ -1290,9 +1254,7 @@ impl Evaluator<'_> {
));
};
let dst = Address::from_bytes(ptr.get(self)?)?;
let Some(ty) =
generic_args.as_slice(Interner).first().and_then(|it| it.ty(Interner))
else {
let Some(ty) = generic_args.as_slice().first().and_then(|it| it.ty()) else {
return Err(MirEvalError::InternalError(
"write_via_copy generic arg is not provided".into(),
));
@ -1309,9 +1271,7 @@ impl Evaluator<'_> {
};
let count = from_bytes!(usize, count.get(self)?);
let val = from_bytes!(u8, val.get(self)?);
let Some(ty) =
generic_args.as_slice(Interner).first().and_then(|it| it.ty(Interner))
else {
let Some(ty) = generic_args.as_slice().first().and_then(|it| it.ty()) else {
return Err(MirEvalError::InternalError(
"write_bytes generic arg is not provided".into(),
));
@ -1339,16 +1299,14 @@ impl Evaluator<'_> {
"three_way_compare args are not provided".into(),
));
};
let Some(ty) =
generic_args.as_slice(Interner).first().and_then(|it| it.ty(Interner))
else {
let Some(ty) = generic_args.as_slice().first().and_then(|it| it.ty()) else {
return Err(MirEvalError::InternalError(
"three_way_compare generic arg is not provided".into(),
));
};
let signed = match ty.as_builtin().unwrap() {
BuiltinType::Int(_) => true,
BuiltinType::Uint(_) => false,
let signed = match ty.kind() {
TyKind::Int(_) => true,
TyKind::Uint(_) => false,
_ => {
return Err(MirEvalError::InternalError(
"three_way_compare expects an integral type".into(),
@ -1372,8 +1330,8 @@ impl Evaluator<'_> {
result = (l as i8).cmp(&(r as i8));
}
if let Some(e) = LangItem::Ordering.resolve_enum(self.db, self.crate_id) {
let ty = self.db.ty(e.into()).skip_binder().to_chalk(interner);
let r = self.compute_discriminant(ty.clone(), &[result as i8 as u8])?;
let ty = self.db.ty(e.into()).skip_binder();
let r = self.compute_discriminant(ty, &[result as i8 as u8])?;
destination.write_from_bytes(self, &r.to_le_bytes()[0..destination.size])?;
Ok(())
} else {
@ -1402,38 +1360,37 @@ impl Evaluator<'_> {
fn size_align_of_unsized(
&mut self,
ty: &Ty,
ty: Ty<'db>,
metadata: Interval,
locals: &Locals,
) -> Result<(usize, usize)> {
let interner = DbInterner::new_with(self.db, None, None);
Ok(match ty.kind(Interner) {
locals: &Locals<'db>,
) -> Result<'db, (usize, usize)> {
Ok(match ty.kind() {
TyKind::Str => (from_bytes!(usize, metadata.get(self)?), 1),
TyKind::Slice(inner) => {
let len = from_bytes!(usize, metadata.get(self)?);
let (size, align) = self.size_align_of_sized(inner, locals, "slice inner type")?;
(size * len, align)
}
TyKind::Dyn(_) => self.size_align_of_sized(
&convert_ty_for_result(interner, self.vtable_map.ty_of_bytes(metadata.get(self)?)?),
TyKind::Dynamic(..) => self.size_align_of_sized(
self.vtable_map.ty_of_bytes(metadata.get(self)?)?,
locals,
"dyn concrete type",
)?,
TyKind::Adt(id, subst) => {
let id = id.0;
let layout = self.layout_adt(id, subst.clone())?;
TyKind::Adt(adt_def, subst) => {
let id = adt_def.def_id().0;
let layout = self.layout_adt(id, subst)?;
let id = match id {
AdtId::StructId(s) => s,
_ => not_supported!("unsized enum or union"),
};
let field_types = &self.db.field_types(id.into());
let field_types = self.db.field_types_ns(id.into());
let last_field_ty =
field_types.iter().next_back().unwrap().1.clone().substitute(Interner, subst);
field_types.iter().next_back().unwrap().1.instantiate(self.interner(), subst);
let sized_part_size =
layout.fields.offset(field_types.iter().count() - 1).bytes_usize();
let sized_part_align = layout.align.bytes() as usize;
let (unsized_part_size, unsized_part_align) =
self.size_align_of_unsized(&last_field_ty, metadata, locals)?;
self.size_align_of_unsized(last_field_ty, metadata, locals)?;
let align = sized_part_align.max(unsized_part_align) as isize;
let size = (sized_part_size + unsized_part_size) as isize;
// Must add any necessary padding to `size`
@ -1456,13 +1413,12 @@ impl Evaluator<'_> {
fn exec_atomic_intrinsic(
&mut self,
name: &str,
args: &[IntervalAndTy],
generic_args: &Substitution,
args: &[IntervalAndTy<'db>],
generic_args: GenericArgs<'db>,
destination: Interval,
locals: &Locals,
locals: &Locals<'db>,
_span: MirSpan,
) -> Result<()> {
let interner = DbInterner::new_with(self.db, None, None);
) -> Result<'db, ()> {
// We are a single threaded runtime with no UB checking and no optimization, so
// we can implement atomic intrinsics as normal functions.
@ -1472,8 +1428,7 @@ impl Evaluator<'_> {
// The rest of atomic intrinsics have exactly one generic arg
let Some(ty) = generic_args.as_slice(Interner).first().and_then(|it| it.ty(Interner))
else {
let Some(ty) = generic_args.as_slice().first().and_then(|it| it.ty()) else {
return Err(MirEvalError::InternalError(
"atomic intrinsic generic arg is not provided".into(),
));
@ -1555,12 +1510,11 @@ impl Evaluator<'_> {
} else {
(arg0_interval, false)
};
let result_ty = TyKind::Tuple(
2,
Substitution::from_iter(Interner, [ty.clone(), TyBuilder::bool()]),
)
.intern(Interner);
let layout = self.layout(result_ty.to_nextsolver(interner))?;
let result_ty = Ty::new_tup_from_iter(
self.interner(),
[ty, Ty::new_bool(self.interner())].into_iter(),
);
let layout = self.layout(result_ty)?;
let result = self.construct_with_layout(
layout.size.bytes_usize(),
&layout,

View file

@ -2,7 +2,6 @@
use std::cmp::Ordering;
use crate::TyKind;
use crate::consteval::try_const_usize;
use super::*;
@ -22,23 +21,21 @@ macro_rules! not_supported {
};
}
impl Evaluator<'_> {
fn detect_simd_ty(&self, ty: &Ty) -> Result<(usize, Ty)> {
match ty.kind(Interner) {
TyKind::Adt(id, subst) => {
let len = match subst.as_slice(Interner).get(1).and_then(|it| it.constant(Interner))
{
impl<'db> Evaluator<'db> {
fn detect_simd_ty(&self, ty: Ty<'db>) -> Result<'db, (usize, Ty<'db>)> {
match ty.kind() {
TyKind::Adt(adt_def, subst) => {
let len = match subst.as_slice().get(1).and_then(|it| it.konst()) {
Some(len) => len,
_ => {
if let AdtId::StructId(id) = id.0 {
if let AdtId::StructId(id) = adt_def.def_id().0 {
let struct_data = id.fields(self.db);
let fields = struct_data.fields();
let Some((first_field, _)) = fields.iter().next() else {
not_supported!("simd type with no field");
};
let field_ty = self.db.field_types(id.into())[first_field]
.clone()
.substitute(Interner, subst);
let field_ty = self.db.field_types_ns(id.into())[first_field]
.instantiate(self.interner(), subst);
return Ok((fields.len(), field_ty));
}
return Err(MirEvalError::InternalError(
@ -48,14 +45,12 @@ impl Evaluator<'_> {
};
match try_const_usize(self.db, len) {
Some(len) => {
let Some(ty) =
subst.as_slice(Interner).first().and_then(|it| it.ty(Interner))
else {
let Some(ty) = subst.as_slice().first().and_then(|it| it.ty()) else {
return Err(MirEvalError::InternalError(
"simd type with no ty param".into(),
));
};
Ok((len as usize, ty.clone()))
Ok((len as usize, ty))
}
None => Err(MirEvalError::InternalError(
"simd type with unevaluatable len param".into(),
@ -69,12 +64,12 @@ impl Evaluator<'_> {
pub(super) fn exec_simd_intrinsic(
&mut self,
name: &str,
args: &[IntervalAndTy],
_generic_args: &Substitution,
args: &[IntervalAndTy<'db>],
_generic_args: GenericArgs<'db>,
destination: Interval,
_locals: &Locals,
_locals: &Locals<'db>,
_span: MirSpan,
) -> Result<()> {
) -> Result<'db, ()> {
match name {
"and" | "or" | "xor" => {
let [left, right] = args else {
@ -99,8 +94,8 @@ impl Evaluator<'_> {
let [left, right] = args else {
return Err(MirEvalError::InternalError("simd args are not provided".into()));
};
let (len, ty) = self.detect_simd_ty(&left.ty)?;
let is_signed = matches!(ty.as_builtin(), Some(BuiltinType::Int(_)));
let (len, ty) = self.detect_simd_ty(left.ty)?;
let is_signed = matches!(ty.kind(), TyKind::Int(_));
let size = left.interval.size / len;
let dest_size = destination.size / len;
let mut destination_bytes = vec![];
@ -137,7 +132,7 @@ impl Evaluator<'_> {
"simd_bitmask args are not provided".into(),
));
};
let (op_len, _) = self.detect_simd_ty(&op.ty)?;
let (op_len, _) = self.detect_simd_ty(op.ty)?;
let op_count = op.interval.size / op_len;
let mut result: u64 = 0;
for (i, val) in op.get(self)?.chunks(op_count).enumerate() {
@ -153,7 +148,7 @@ impl Evaluator<'_> {
"simd_shuffle args are not provided".into(),
));
};
let TyKind::Array(_, index_len) = index.ty.kind(Interner) else {
let TyKind::Array(_, index_len) = index.ty.kind() else {
return Err(MirEvalError::InternalError(
"simd_shuffle index argument has non-array type".into(),
));
@ -166,7 +161,7 @@ impl Evaluator<'_> {
));
}
};
let (left_len, _) = self.detect_simd_ty(&left.ty)?;
let (left_len, _) = self.detect_simd_ty(left.ty)?;
let left_size = left.interval.size / left_len;
let vector =
left.get(self)?.chunks(left_size).chain(right.get(self)?.chunks(left_size));

View file

@ -4,15 +4,20 @@ use span::Edition;
use syntax::{TextRange, TextSize};
use test_fixture::WithFixture;
use crate::display::DisplayTarget;
use crate::{
Interner, Substitution, db::HirDatabase, mir::MirLowerError, setup_tracing, test_db::TestDB,
db::HirDatabase,
display::DisplayTarget,
mir::MirLowerError,
next_solver::{DbInterner, GenericArgs},
setup_tracing,
test_db::TestDB,
};
use super::{MirEvalError, interpret_mir};
fn eval_main(db: &TestDB, file_id: EditionedFileId) -> Result<(String, String), MirEvalError> {
salsa::attach(db, || {
fn eval_main(db: &TestDB, file_id: EditionedFileId) -> Result<(String, String), MirEvalError<'_>> {
crate::attach_db(db, || {
let interner = DbInterner::new_with(db, None, None);
let module_id = db.module_for_file(file_id.file_id(db));
let def_map = module_id.def_map(db);
let scope = &def_map[module_id.local_id].scope;
@ -34,7 +39,7 @@ fn eval_main(db: &TestDB, file_id: EditionedFileId) -> Result<(String, String),
let body = db
.monomorphized_mir_body(
func_id.into(),
Substitution::empty(Interner),
GenericArgs::new_from_iter(interner, []),
db.trait_environment(func_id.into()),
)
.map_err(|e| MirEvalError::MirLowerError(func_id, e))?;
@ -56,7 +61,7 @@ fn check_pass_and_stdio(
) {
let _tracing = setup_tracing();
let (db, file_ids) = TestDB::with_many_files(ra_fixture);
salsa::attach(&db, || {
crate::attach_db(&db, || {
let file_id = *file_ids.last().unwrap();
let x = eval_main(&db, file_id);
match x {
@ -102,7 +107,7 @@ fn check_pass_and_stdio(
fn check_panic(#[rust_analyzer::rust_fixture] ra_fixture: &str, expected_panic: &str) {
let (db, file_ids) = TestDB::with_many_files(ra_fixture);
salsa::attach(&db, || {
crate::attach_db(&db, || {
let file_id = *file_ids.last().unwrap();
let e = eval_main(&db, file_id).unwrap_err();
assert_eq!(
@ -114,10 +119,10 @@ fn check_panic(#[rust_analyzer::rust_fixture] ra_fixture: &str, expected_panic:
fn check_error_with(
#[rust_analyzer::rust_fixture] ra_fixture: &str,
expect_err: impl FnOnce(MirEvalError) -> bool,
expect_err: impl FnOnce(MirEvalError<'_>) -> bool,
) {
let (db, file_ids) = TestDB::with_many_files(ra_fixture);
salsa::attach(&db, || {
crate::attach_db(&db, || {
let file_id = *file_ids.last().unwrap();
let e = eval_main(&db, file_id).unwrap_err();
assert!(expect_err(e));
@ -631,11 +636,16 @@ fn main() {
);
}
#[ignore = "
FIXME(next-solver):
This does not work currently because I replaced homemade selection with selection by the trait solver;
This will work once we implement `Interner::impl_specializes()` properly.
"]
#[test]
fn specialization_array_clone() {
check_pass(
r#"
//- minicore: copy, derive, slice, index, coerce_unsized
//- minicore: copy, derive, slice, index, coerce_unsized, panic
impl<T: Clone, const N: usize> Clone for [T; N] {
#[inline]
fn clone(&self) -> Self {
@ -650,8 +660,7 @@ trait SpecArrayClone: Clone {
impl<T: Clone> SpecArrayClone for T {
#[inline]
default fn clone<const N: usize>(array: &[T; N]) -> [T; N] {
// FIXME: panic here when we actually implement specialization.
from_slice(array)
panic!("should go to the specialized impl")
}
}

File diff suppressed because it is too large Load diff

View file

@ -1,10 +1,14 @@
//! MIR lowering for places
use crate::mir::{MutBorrowKind, Operand, OperandKind};
use super::*;
use hir_def::FunctionId;
use intern::sym;
use rustc_type_ir::inherent::{AdtDef, Region as _, Ty as _};
use super::*;
use crate::{
mir::{MutBorrowKind, Operand, OperandKind},
next_solver::Region,
};
macro_rules! not_supported {
($it: expr) => {
@ -12,12 +16,12 @@ macro_rules! not_supported {
};
}
impl MirLowerCtx<'_> {
impl<'db> MirLowerCtx<'_, 'db> {
fn lower_expr_to_some_place_without_adjust(
&mut self,
expr_id: ExprId,
prev_block: BasicBlockId,
) -> Result<Option<(Place, BasicBlockId)>> {
prev_block: BasicBlockId<'db>,
) -> Result<'db, Option<(Place<'db>, BasicBlockId<'db>)>> {
let ty = self.expr_ty_without_adjust(expr_id);
let place = self.temp(ty, prev_block, expr_id.into())?;
let Some(current) =
@ -31,12 +35,12 @@ impl MirLowerCtx<'_> {
fn lower_expr_to_some_place_with_adjust(
&mut self,
expr_id: ExprId,
prev_block: BasicBlockId,
adjustments: &[Adjustment],
) -> Result<Option<(Place, BasicBlockId)>> {
prev_block: BasicBlockId<'db>,
adjustments: &[Adjustment<'db>],
) -> Result<'db, Option<(Place<'db>, BasicBlockId<'db>)>> {
let ty = adjustments
.last()
.map(|it| it.target.clone())
.map(|it| it.target)
.unwrap_or_else(|| self.expr_ty_without_adjust(expr_id));
let place = self.temp(ty, prev_block, expr_id.into())?;
let Some(current) =
@ -49,12 +53,12 @@ impl MirLowerCtx<'_> {
pub(super) fn lower_expr_as_place_with_adjust(
&mut self,
current: BasicBlockId,
current: BasicBlockId<'db>,
expr_id: ExprId,
upgrade_rvalue: bool,
adjustments: &[Adjustment],
) -> Result<Option<(Place, BasicBlockId)>> {
let try_rvalue = |this: &mut MirLowerCtx<'_>| {
adjustments: &[Adjustment<'db>],
) -> Result<'db, Option<(Place<'db>, BasicBlockId<'db>)>> {
let try_rvalue = |this: &mut MirLowerCtx<'_, 'db>| {
if !upgrade_rvalue {
return Err(MirLowerError::MutatingRvalue);
}
@ -89,9 +93,9 @@ impl MirLowerCtx<'_> {
current,
r,
rest.last()
.map(|it| it.target.clone())
.map(|it| it.target)
.unwrap_or_else(|| self.expr_ty_without_adjust(expr_id)),
last.target.clone(),
last.target,
expr_id.into(),
match od.0 {
Some(Mutability::Mut) => true,
@ -111,10 +115,10 @@ impl MirLowerCtx<'_> {
pub(super) fn lower_expr_as_place(
&mut self,
current: BasicBlockId,
current: BasicBlockId<'db>,
expr_id: ExprId,
upgrade_rvalue: bool,
) -> Result<Option<(Place, BasicBlockId)>> {
) -> Result<'db, Option<(Place<'db>, BasicBlockId<'db>)>> {
match self.infer.expr_adjustments.get(&expr_id) {
Some(a) => self.lower_expr_as_place_with_adjust(current, expr_id, upgrade_rvalue, a),
None => self.lower_expr_as_place_without_adjust(current, expr_id, upgrade_rvalue),
@ -123,11 +127,11 @@ impl MirLowerCtx<'_> {
pub(super) fn lower_expr_as_place_without_adjust(
&mut self,
current: BasicBlockId,
current: BasicBlockId<'db>,
expr_id: ExprId,
upgrade_rvalue: bool,
) -> Result<Option<(Place, BasicBlockId)>> {
let try_rvalue = |this: &mut MirLowerCtx<'_>| {
) -> Result<'db, Option<(Place<'db>, BasicBlockId<'db>)>> {
let try_rvalue = |this: &mut MirLowerCtx<'_, 'db>| {
if !upgrade_rvalue {
return Err(MirLowerError::MutatingRvalue);
}
@ -149,9 +153,13 @@ impl MirLowerCtx<'_> {
}
ValueNs::StaticId(s) => {
let ty = self.expr_ty_without_adjust(expr_id);
let ref_ty =
TyKind::Ref(Mutability::Not, static_lifetime(), ty).intern(Interner);
let temp: Place = self.temp(ref_ty, current, expr_id.into())?.into();
let ref_ty = Ty::new_ref(
self.interner(),
Region::new_static(self.interner()),
ty,
Mutability::Not,
);
let temp: Place<'db> = self.temp(ref_ty, current, expr_id.into())?.into();
self.push_assignment(
current,
temp,
@ -167,10 +175,10 @@ impl MirLowerCtx<'_> {
}
}
Expr::UnaryOp { expr, op: hir_def::hir::UnaryOp::Deref } => {
let is_builtin = match self.expr_ty_without_adjust(*expr).kind(Interner) {
TyKind::Ref(..) | TyKind::Raw(..) => true,
let is_builtin = match self.expr_ty_without_adjust(*expr).kind() {
TyKind::Ref(..) | TyKind::RawPtr(..) => true,
TyKind::Adt(id, _) => {
if let Some(lang_item) = self.db.lang_attr(id.0.into()) {
if let Some(lang_item) = self.db.lang_attr(id.def_id().0.into()) {
lang_item == LangItem::OwnedBox
} else {
false
@ -219,9 +227,9 @@ impl MirLowerCtx<'_> {
Expr::Index { base, index } => {
let base_ty = self.expr_ty_after_adjustments(*base);
let index_ty = self.expr_ty_after_adjustments(*index);
if index_ty != TyBuilder::usize()
if !matches!(index_ty.kind(), TyKind::Uint(rustc_ast_ir::UintTy::Usize))
|| !matches!(
base_ty.strip_reference().kind(Interner),
base_ty.strip_reference().kind(),
TyKind::Array(..) | TyKind::Slice(..)
)
{
@ -278,24 +286,26 @@ impl MirLowerCtx<'_> {
fn lower_overloaded_index(
&mut self,
current: BasicBlockId,
place: Place,
base_ty: Ty,
result_ty: Ty,
index_operand: Operand,
current: BasicBlockId<'db>,
place: Place<'db>,
base_ty: Ty<'db>,
result_ty: Ty<'db>,
index_operand: Operand<'db>,
span: MirSpan,
index_fn: (FunctionId, Substitution),
) -> Result<Option<(Place, BasicBlockId)>> {
index_fn: (FunctionId, GenericArgs<'db>),
) -> Result<'db, Option<(Place<'db>, BasicBlockId<'db>)>> {
let mutability = match base_ty.as_reference() {
Some((_, _, mutability)) => mutability,
None => Mutability::Not,
};
let result_ref = TyKind::Ref(mutability, error_lifetime(), result_ty).intern(Interner);
let mut result: Place = self.temp(result_ref, current, span)?.into();
let index_fn_op = Operand::const_zst(
TyKind::FnDef(CallableDefId::FunctionId(index_fn.0).to_chalk(self.db), index_fn.1)
.intern(Interner),
);
let result_ref =
Ty::new_ref(self.interner(), Region::error(self.interner()), result_ty, mutability);
let mut result: Place<'db> = self.temp(result_ref, current, span)?.into();
let index_fn_op = Operand::const_zst(Ty::new_fn_def(
self.interner(),
CallableDefId::FunctionId(index_fn.0).into(),
index_fn.1,
));
let Some(current) = self.lower_call(
index_fn_op,
Box::new([Operand { kind: OperandKind::Copy(place), span: None }, index_operand]),
@ -313,14 +323,14 @@ impl MirLowerCtx<'_> {
fn lower_overloaded_deref(
&mut self,
current: BasicBlockId,
place: Place,
source_ty: Ty,
target_ty: Ty,
current: BasicBlockId<'db>,
place: Place<'db>,
source_ty: Ty<'db>,
target_ty: Ty<'db>,
span: MirSpan,
mutability: bool,
) -> Result<Option<(Place, BasicBlockId)>> {
let (chalk_mut, trait_lang_item, trait_method_name, borrow_kind) = if !mutability {
) -> Result<'db, Option<(Place<'db>, BasicBlockId<'db>)>> {
let (mutability, trait_lang_item, trait_method_name, borrow_kind) = if !mutability {
(
Mutability::Not,
LangItem::Deref,
@ -335,9 +345,10 @@ impl MirLowerCtx<'_> {
BorrowKind::Mut { kind: MutBorrowKind::Default },
)
};
let ty_ref = TyKind::Ref(chalk_mut, error_lifetime(), source_ty.clone()).intern(Interner);
let target_ty_ref = TyKind::Ref(chalk_mut, error_lifetime(), target_ty).intern(Interner);
let ref_place: Place = self.temp(ty_ref, current, span)?.into();
let error_region = Region::error(self.interner());
let ty_ref = Ty::new_ref(self.interner(), error_region, source_ty, mutability);
let target_ty_ref = Ty::new_ref(self.interner(), error_region, target_ty, mutability);
let ref_place: Place<'db> = self.temp(ty_ref, current, span)?.into();
self.push_assignment(current, ref_place, Rvalue::Ref(borrow_kind, place), span);
let deref_trait = self
.resolve_lang_item(trait_lang_item)?
@ -347,14 +358,12 @@ impl MirLowerCtx<'_> {
.trait_items(self.db)
.method_by_name(&trait_method_name)
.ok_or(MirLowerError::LangItemNotFound(trait_lang_item))?;
let deref_fn_op = Operand::const_zst(
TyKind::FnDef(
CallableDefId::FunctionId(deref_fn).to_chalk(self.db),
Substitution::from1(Interner, source_ty),
)
.intern(Interner),
);
let mut result: Place = self.temp(target_ty_ref, current, span)?.into();
let deref_fn_op = Operand::const_zst(Ty::new_fn_def(
self.interner(),
CallableDefId::FunctionId(deref_fn).into(),
GenericArgs::new_from_iter(self.interner(), [source_ty.into()]),
));
let mut result: Place<'db> = self.temp(target_ty_ref, current, span)?.into();
let Some(current) = self.lower_call(
deref_fn_op,
Box::new([Operand { kind: OperandKind::Copy(ref_place), span: None }]),

View file

@ -1,17 +1,18 @@
//! MIR lowering for patterns
use hir_def::{AssocItemId, hir::ExprId, signatures::VariantFields};
use rustc_type_ir::inherent::{IntoKind, SliceLike, Ty as _};
use crate::next_solver::GenericArgs;
use crate::{
BindingMode,
mir::{
LocalId, MutBorrowKind, Operand, OperandKind,
lower::{
BasicBlockId, BinOp, BindingId, BorrowKind, Either, Expr, FieldId, Idx, Interner,
MemoryMap, MirLowerCtx, MirLowerError, MirSpan, Mutability, Pat, PatId, Place,
PlaceElem, ProjectionElem, RecordFieldPat, ResolveValueResult, Result, Rvalue,
Substitution, SwitchTargets, TerminatorKind, TupleFieldId, TupleId, TyBuilder, TyKind,
ValueNs, VariantId,
BasicBlockId, BinOp, BindingId, BorrowKind, Either, Expr, FieldId, Idx, MemoryMap,
MirLowerCtx, MirLowerError, MirSpan, Pat, PatId, Place, PlaceElem, ProjectionElem,
RecordFieldPat, ResolveValueResult, Result, Rvalue, SwitchTargets, TerminatorKind,
TupleFieldId, TupleId, Ty, TyKind, ValueNs, VariantId,
},
},
};
@ -50,7 +51,7 @@ enum MatchingMode {
Assign,
}
impl MirLowerCtx<'_> {
impl<'db> MirLowerCtx<'_, 'db> {
/// It gets a `current` unterminated block, appends some statements and possibly a terminator to it to check if
/// the pattern matches and write bindings, and returns two unterminated blocks, one for the matched path (which
/// can be the `current` block) and one for the mismatched path. If the input pattern is irrefutable, the
@ -62,11 +63,11 @@ impl MirLowerCtx<'_> {
/// so it should be an empty block.
pub(super) fn pattern_match(
&mut self,
current: BasicBlockId,
current_else: Option<BasicBlockId>,
cond_place: Place,
current: BasicBlockId<'db>,
current_else: Option<BasicBlockId<'db>>,
cond_place: Place<'db>,
pattern: PatId,
) -> Result<(BasicBlockId, Option<BasicBlockId>)> {
) -> Result<'db, (BasicBlockId<'db>, Option<BasicBlockId<'db>>)> {
let (current, current_else) = self.pattern_match_inner(
current,
current_else,
@ -86,10 +87,10 @@ impl MirLowerCtx<'_> {
pub(super) fn pattern_match_assignment(
&mut self,
current: BasicBlockId,
value: Place,
current: BasicBlockId<'db>,
value: Place<'db>,
pattern: PatId,
) -> Result<BasicBlockId> {
) -> Result<'db, BasicBlockId<'db>> {
let (current, _) =
self.pattern_match_inner(current, None, value, pattern, MatchingMode::Assign)?;
Ok(current)
@ -98,9 +99,9 @@ impl MirLowerCtx<'_> {
pub(super) fn match_self_param(
&mut self,
id: BindingId,
current: BasicBlockId,
local: LocalId,
) -> Result<(BasicBlockId, Option<BasicBlockId>)> {
current: BasicBlockId<'db>,
local: LocalId<'db>,
) -> Result<'db, (BasicBlockId<'db>, Option<BasicBlockId<'db>>)> {
self.pattern_match_binding(
id,
BindingMode::Move,
@ -113,12 +114,12 @@ impl MirLowerCtx<'_> {
fn pattern_match_inner(
&mut self,
mut current: BasicBlockId,
mut current_else: Option<BasicBlockId>,
mut cond_place: Place,
mut current: BasicBlockId<'db>,
mut current_else: Option<BasicBlockId<'db>>,
mut cond_place: Place<'db>,
pattern: PatId,
mode: MatchingMode,
) -> Result<(BasicBlockId, Option<BasicBlockId>)> {
) -> Result<'db, (BasicBlockId<'db>, Option<BasicBlockId<'db>>)> {
let cnt = self.infer.pat_adjustments.get(&pattern).map(|x| x.len()).unwrap_or_default();
cond_place.projection = self.result.projection_store.intern(
cond_place
@ -134,8 +135,8 @@ impl MirLowerCtx<'_> {
Pat::Missing => return Err(MirLowerError::IncompletePattern),
Pat::Wild => (current, current_else),
Pat::Tuple { args, ellipsis } => {
let subst = match self.infer[pattern].kind(Interner) {
TyKind::Tuple(_, s) => s,
let subst = match self.infer[pattern].kind() {
TyKind::Tuple(s) => s,
_ => {
return Err(MirLowerError::TypeError(
"non tuple type matched with tuple pattern",
@ -147,7 +148,7 @@ impl MirLowerCtx<'_> {
current_else,
args,
*ellipsis,
(0..subst.len(Interner)).map(|i| {
(0..subst.len()).map(|i| {
PlaceElem::Field(Either::Right(TupleFieldId {
tuple: TupleId(!0), // Dummy as it is unused
index: i as u32,
@ -207,13 +208,12 @@ impl MirLowerCtx<'_> {
)?
}
Pat::Range { start, end } => {
let mut add_check = |l: &ExprId, binop| -> Result<()> {
let lv =
self.lower_literal_or_const_to_operand(self.infer[pattern].clone(), l)?;
let mut add_check = |l: &ExprId, binop| -> Result<'db, ()> {
let lv = self.lower_literal_or_const_to_operand(self.infer[pattern], l)?;
let else_target = *current_else.get_or_insert_with(|| self.new_basic_block());
let next = self.new_basic_block();
let discr: Place =
self.temp(TyBuilder::bool(), current, pattern.into())?.into();
let discr: Place<'db> =
self.temp(Ty::new_bool(self.interner()), current, pattern.into())?.into();
self.push_assignment(
current,
discr,
@ -249,10 +249,11 @@ impl MirLowerCtx<'_> {
Pat::Slice { prefix, slice, suffix } => {
if mode == MatchingMode::Check {
// emit runtime length check for slice
if let TyKind::Slice(_) = self.infer[pattern].kind(Interner) {
if let TyKind::Slice(_) = self.infer[pattern].kind() {
let pattern_len = prefix.len() + suffix.len();
let place_len: Place =
self.temp(TyBuilder::usize(), current, pattern.into())?.into();
let place_len: Place<'db> = self
.temp(Ty::new_usize(self.interner()), current, pattern.into())?
.into();
self.push_assignment(
current,
place_len,
@ -282,10 +283,11 @@ impl MirLowerCtx<'_> {
let c = Operand::from_concrete_const(
pattern_len.to_le_bytes().into(),
MemoryMap::default(),
TyBuilder::usize(),
Ty::new_usize(self.interner()),
);
let discr: Place =
self.temp(TyBuilder::bool(), current, pattern.into())?.into();
let discr: Place<'db> = self
.temp(Ty::new_bool(self.interner()), current, pattern.into())?
.into();
self.push_assignment(
current,
discr,
@ -396,22 +398,16 @@ impl MirLowerCtx<'_> {
break 'b (c, x.1);
}
if let ResolveValueResult::ValueNs(ValueNs::ConstId(c), _) = pr {
break 'b (c, Substitution::empty(Interner));
break 'b (c, GenericArgs::new_from_iter(self.interner(), []));
}
not_supported!("path in pattern position that is not const or variant")
};
let tmp: Place =
self.temp(self.infer[pattern].clone(), current, pattern.into())?.into();
let tmp: Place<'db> =
self.temp(self.infer[pattern], current, pattern.into())?.into();
let span = pattern.into();
self.lower_const(
c.into(),
current,
tmp,
subst,
span,
self.infer[pattern].clone(),
)?;
let tmp2: Place = self.temp(TyBuilder::bool(), current, pattern.into())?.into();
self.lower_const(c.into(), current, tmp, subst, span)?;
let tmp2: Place<'db> =
self.temp(Ty::new_bool(self.interner()), current, pattern.into())?.into();
self.push_assignment(
current,
tmp2,
@ -438,7 +434,7 @@ impl MirLowerCtx<'_> {
Pat::Lit(l) => match &self.body[*l] {
Expr::Literal(l) => {
if mode == MatchingMode::Check {
let c = self.lower_literal_to_operand(self.infer[pattern].clone(), l)?;
let c = self.lower_literal_to_operand(self.infer[pattern], l)?;
self.pattern_match_const(current_else, current, c, cond_place, pattern)?
} else {
(current, current_else)
@ -510,11 +506,11 @@ impl MirLowerCtx<'_> {
&mut self,
id: BindingId,
mode: BindingMode,
cond_place: Place,
cond_place: Place<'db>,
span: MirSpan,
current: BasicBlockId,
current_else: Option<BasicBlockId>,
) -> Result<(BasicBlockId, Option<BasicBlockId>)> {
current: BasicBlockId<'db>,
current_else: Option<BasicBlockId<'db>>,
) -> Result<'db, (BasicBlockId<'db>, Option<BasicBlockId<'db>>)> {
let target_place = self.binding_local(id)?;
self.push_storage_live(id, current)?;
self.push_match_assignment(current, target_place, mode, cond_place, span);
@ -523,10 +519,10 @@ impl MirLowerCtx<'_> {
fn push_match_assignment(
&mut self,
current: BasicBlockId,
target_place: LocalId,
current: BasicBlockId<'db>,
target_place: LocalId<'db>,
mode: BindingMode,
cond_place: Place,
cond_place: Place<'db>,
span: MirSpan,
) {
self.push_assignment(
@ -536,8 +532,10 @@ impl MirLowerCtx<'_> {
BindingMode::Move => {
Operand { kind: OperandKind::Copy(cond_place), span: None }.into()
}
BindingMode::Ref(Mutability::Not) => Rvalue::Ref(BorrowKind::Shared, cond_place),
BindingMode::Ref(Mutability::Mut) => {
BindingMode::Ref(rustc_ast_ir::Mutability::Not) => {
Rvalue::Ref(BorrowKind::Shared, cond_place)
}
BindingMode::Ref(rustc_ast_ir::Mutability::Mut) => {
Rvalue::Ref(BorrowKind::Mut { kind: MutBorrowKind::Default }, cond_place)
}
},
@ -547,15 +545,16 @@ impl MirLowerCtx<'_> {
fn pattern_match_const(
&mut self,
current_else: Option<BasicBlockId>,
current: BasicBlockId,
c: Operand,
cond_place: Place,
current_else: Option<BasicBlockId<'db>>,
current: BasicBlockId<'db>,
c: Operand<'db>,
cond_place: Place<'db>,
pattern: Idx<Pat>,
) -> Result<(BasicBlockId, Option<BasicBlockId>)> {
) -> Result<'db, (BasicBlockId<'db>, Option<BasicBlockId<'db>>)> {
let then_target = self.new_basic_block();
let else_target = current_else.unwrap_or_else(|| self.new_basic_block());
let discr: Place = self.temp(TyBuilder::bool(), current, pattern.into())?.into();
let discr: Place<'db> =
self.temp(Ty::new_bool(self.interner()), current, pattern.into())?.into();
self.push_assignment(
current,
discr,
@ -580,14 +579,14 @@ impl MirLowerCtx<'_> {
fn pattern_matching_variant(
&mut self,
cond_place: Place,
cond_place: Place<'db>,
variant: VariantId,
mut current: BasicBlockId,
mut current: BasicBlockId<'db>,
span: MirSpan,
mut current_else: Option<BasicBlockId>,
mut current_else: Option<BasicBlockId<'db>>,
shape: AdtPatternShape<'_>,
mode: MatchingMode,
) -> Result<(BasicBlockId, Option<BasicBlockId>)> {
) -> Result<'db, (BasicBlockId<'db>, Option<BasicBlockId<'db>>)> {
Ok(match variant {
VariantId::EnumVariantId(v) => {
if mode == MatchingMode::Check {
@ -636,11 +635,11 @@ impl MirLowerCtx<'_> {
shape: AdtPatternShape<'_>,
variant_data: &VariantFields,
v: VariantId,
current: BasicBlockId,
current_else: Option<BasicBlockId>,
cond_place: &Place,
current: BasicBlockId<'db>,
current_else: Option<BasicBlockId<'db>>,
cond_place: &Place<'db>,
mode: MatchingMode,
) -> Result<(BasicBlockId, Option<BasicBlockId>)> {
) -> Result<'db, (BasicBlockId<'db>, Option<BasicBlockId<'db>>)> {
Ok(match shape {
AdtPatternShape::Record { args } => {
let it = args
@ -656,7 +655,7 @@ impl MirLowerCtx<'_> {
x.pat,
))
})
.collect::<Result<Vec<_>>>()?;
.collect::<Result<'db, Vec<_>>>()?;
self.pattern_match_adt(current, current_else, it.into_iter(), cond_place, mode)?
}
AdtPatternShape::Tuple { args, ellipsis } => {
@ -679,12 +678,12 @@ impl MirLowerCtx<'_> {
fn pattern_match_adt(
&mut self,
mut current: BasicBlockId,
mut current_else: Option<BasicBlockId>,
args: impl Iterator<Item = (PlaceElem, PatId)>,
cond_place: &Place,
mut current: BasicBlockId<'db>,
mut current_else: Option<BasicBlockId<'db>>,
args: impl Iterator<Item = (PlaceElem<'db>, PatId)>,
cond_place: &Place<'db>,
mode: MatchingMode,
) -> Result<(BasicBlockId, Option<BasicBlockId>)> {
) -> Result<'db, (BasicBlockId<'db>, Option<BasicBlockId<'db>>)> {
for (proj, arg) in args {
let cond_place = cond_place.project(proj, &mut self.result.projection_store);
(current, current_else) =
@ -695,14 +694,14 @@ impl MirLowerCtx<'_> {
fn pattern_match_tuple_like(
&mut self,
current: BasicBlockId,
current_else: Option<BasicBlockId>,
current: BasicBlockId<'db>,
current_else: Option<BasicBlockId<'db>>,
args: &[PatId],
ellipsis: Option<u32>,
fields: impl DoubleEndedIterator<Item = PlaceElem> + Clone,
cond_place: &Place,
fields: impl DoubleEndedIterator<Item = PlaceElem<'db>> + Clone,
cond_place: &Place<'db>,
mode: MatchingMode,
) -> Result<(BasicBlockId, Option<BasicBlockId>)> {
) -> Result<'db, (BasicBlockId<'db>, Option<BasicBlockId<'db>>)> {
let (al, ar) = args.split_at(ellipsis.map_or(args.len(), |it| it as usize));
let it = al
.iter()

View file

@ -1,36 +1,23 @@
use hir_def::db::DefDatabase;
use rustc_hash::FxHashMap;
use span::Edition;
use test_fixture::WithFixture;
use triomphe::Arc;
use crate::{
db::HirDatabase,
mir::{MirBody, MirLowerError},
setup_tracing,
test_db::TestDB,
};
use crate::{db::HirDatabase, setup_tracing, test_db::TestDB};
fn lower_mir(
#[rust_analyzer::rust_fixture] ra_fixture: &str,
) -> FxHashMap<String, Result<Arc<MirBody>, MirLowerError>> {
fn lower_mir(#[rust_analyzer::rust_fixture] ra_fixture: &str) {
let _tracing = setup_tracing();
let (db, file_ids) = TestDB::with_many_files(ra_fixture);
let file_id = *file_ids.last().unwrap();
let module_id = db.module_for_file(file_id.file_id(&db));
let def_map = module_id.def_map(&db);
let scope = &def_map[module_id.local_id].scope;
let funcs = scope.declarations().filter_map(|x| match x {
hir_def::ModuleDefId::FunctionId(it) => Some(it),
_ => None,
});
funcs
.map(|func| {
let name = db.function_signature(func).name.display(&db, Edition::CURRENT).to_string();
let mir = db.mir_body(func.into());
(name, mir)
})
.collect()
crate::attach_db(&db, || {
let file_id = *file_ids.last().unwrap();
let module_id = db.module_for_file(file_id.file_id(&db));
let def_map = module_id.def_map(&db);
let scope = &def_map[module_id.local_id].scope;
let funcs = scope.declarations().filter_map(|x| match x {
hir_def::ModuleDefId::FunctionId(it) => Some(it),
_ => None,
});
for func in funcs {
_ = db.mir_body(func.into());
}
})
}
#[test]

View file

@ -7,224 +7,129 @@
//!
//! So the monomorphization should be called even if the substitution is empty.
use std::mem;
use chalk_ir::{
ConstData, DebruijnIndex,
fold::{FallibleTypeFolder, TypeFoldable, TypeSuperFoldable},
};
use hir_def::DefWithBodyId;
use rustc_type_ir::inherent::{IntoKind, SliceLike};
use rustc_type_ir::{
FallibleTypeFolder, TypeFlags, TypeFoldable, TypeSuperFoldable, TypeVisitableExt,
};
use triomphe::Arc;
use crate::next_solver::{Const, ConstKind, Region, RegionKind};
use crate::{
Const, Interner, ProjectionTy, Substitution, TraitEnvironment, Ty, TyKind,
consteval::{intern_const_scalar, unknown_const},
db::{HirDatabase, InternedClosure, InternedClosureId},
from_placeholder_idx,
generics::{Generics, generics},
infer::normalize,
TraitEnvironment,
db::{HirDatabase, InternedClosureId},
next_solver::{
DbInterner, GenericArgs, Ty, TyKind, TypingMode,
infer::{DbInternerInferExt, InferCtxt, traits::ObligationCause},
obligation_ctxt::ObligationCtxt,
references_non_lt_error,
},
};
use super::{MirBody, MirLowerError, Operand, OperandKind, Rvalue, StatementKind, TerminatorKind};
macro_rules! not_supported {
($it: expr) => {
return Err(MirLowerError::NotSupported(format!($it)))
};
struct Filler<'db> {
infcx: InferCtxt<'db>,
trait_env: Arc<TraitEnvironment<'db>>,
subst: GenericArgs<'db>,
}
struct Filler<'a> {
db: &'a dyn HirDatabase,
trait_env: Arc<TraitEnvironment<'a>>,
subst: &'a Substitution,
generics: Option<Generics>,
}
impl FallibleTypeFolder<Interner> for Filler<'_> {
type Error = MirLowerError;
impl<'db> FallibleTypeFolder<DbInterner<'db>> for Filler<'db> {
type Error = MirLowerError<'db>;
fn as_dyn(&mut self) -> &mut dyn FallibleTypeFolder<Interner, Error = Self::Error> {
self
fn cx(&self) -> DbInterner<'db> {
self.infcx.interner
}
fn interner(&self) -> Interner {
Interner
}
fn try_fold_ty(&mut self, ty: Ty<'db>) -> Result<Ty<'db>, Self::Error> {
if !ty.has_type_flags(TypeFlags::HAS_ALIAS | TypeFlags::HAS_PARAM) {
return Ok(ty);
}
fn try_fold_ty(
&mut self,
ty: Ty,
outer_binder: DebruijnIndex,
) -> std::result::Result<Ty, Self::Error> {
match ty.kind(Interner) {
TyKind::AssociatedType(id, subst) => {
// I don't know exactly if and why this is needed, but it looks like `normalize_ty` likes
// this kind of associated types.
Ok(TyKind::Alias(chalk_ir::AliasTy::Projection(ProjectionTy {
associated_ty_id: *id,
substitution: subst.clone().try_fold_with(self, outer_binder)?,
}))
.intern(Interner))
match ty.kind() {
TyKind::Alias(..) => {
// First instantiate params.
let ty = ty.try_super_fold_with(self)?;
let mut ocx = ObligationCtxt::new(&self.infcx);
let ty = ocx
.structurally_normalize_ty(&ObligationCause::dummy(), self.trait_env.env, ty)
.map_err(|_| MirLowerError::NotSupported("can't normalize alias".to_owned()))?;
ty.try_super_fold_with(self)
}
TyKind::Alias(chalk_ir::AliasTy::Opaque(chalk_ir::OpaqueTy {
opaque_ty_id: id,
substitution: subst,
}))
| TyKind::OpaqueType(id, subst) => {
let impl_trait_id = self.db.lookup_intern_impl_trait_id((*id).into());
let subst = subst.clone().try_fold_with(self.as_dyn(), outer_binder)?;
match impl_trait_id {
crate::ImplTraitId::ReturnTypeImplTrait(func, idx) => {
let infer = self.db.infer(func.into());
let filler = &mut Filler {
db: self.db,
trait_env: self.trait_env.clone(),
subst: &subst,
generics: Some(generics(self.db, func.into())),
};
filler.try_fold_ty(infer.type_of_rpit[idx].clone(), outer_binder)
}
crate::ImplTraitId::TypeAliasImplTrait(..) => {
not_supported!("type alias impl trait");
}
crate::ImplTraitId::AsyncBlockTypeImplTrait(_, _) => {
not_supported!("async block impl trait");
}
}
}
_ => ty.try_super_fold_with(self.as_dyn(), outer_binder),
TyKind::Param(param) => Ok(self
.subst
.as_slice()
.get(param.index as usize)
.and_then(|arg| arg.ty())
.ok_or_else(|| {
MirLowerError::GenericArgNotProvided(param.id.into(), self.subst)
})?),
_ => ty.try_super_fold_with(self),
}
}
fn try_fold_free_placeholder_const(
&mut self,
_ty: chalk_ir::Ty<Interner>,
idx: chalk_ir::PlaceholderIndex,
_outer_binder: DebruijnIndex,
) -> std::result::Result<chalk_ir::Const<Interner>, Self::Error> {
let it = from_placeholder_idx(self.db, idx).0;
let Some(idx) = self.generics.as_ref().and_then(|g| g.type_or_const_param_idx(it)) else {
not_supported!("missing idx in generics");
fn try_fold_const(&mut self, ct: Const<'db>) -> Result<Const<'db>, Self::Error> {
let ConstKind::Param(param) = ct.kind() else {
return ct.try_super_fold_with(self);
};
Ok(self
.subst
.as_slice(Interner)
.get(idx)
.and_then(|it| it.constant(Interner))
.ok_or_else(|| MirLowerError::GenericArgNotProvided(it, self.subst.clone()))?
.clone())
self.subst
.as_slice()
.get(param.index as usize)
.and_then(|arg| arg.konst())
.ok_or_else(|| MirLowerError::GenericArgNotProvided(param.id.into(), self.subst))
}
fn try_fold_free_placeholder_ty(
&mut self,
idx: chalk_ir::PlaceholderIndex,
_outer_binder: DebruijnIndex,
) -> std::result::Result<Ty, Self::Error> {
let it = from_placeholder_idx(self.db, idx).0;
let Some(idx) = self.generics.as_ref().and_then(|g| g.type_or_const_param_idx(it)) else {
not_supported!("missing idx in generics");
fn try_fold_region(&mut self, region: Region<'db>) -> Result<Region<'db>, Self::Error> {
let RegionKind::ReEarlyParam(param) = region.kind() else {
return Ok(region);
};
Ok(self
.subst
.as_slice(Interner)
.get(idx)
.and_then(|it| it.ty(Interner))
.ok_or_else(|| MirLowerError::GenericArgNotProvided(it, self.subst.clone()))?
.clone())
}
fn try_fold_const(
&mut self,
constant: chalk_ir::Const<Interner>,
outer_binder: DebruijnIndex,
) -> Result<chalk_ir::Const<Interner>, Self::Error> {
let next_ty = normalize(
self.db,
self.trait_env.clone(),
constant.data(Interner).ty.clone().try_fold_with(self, outer_binder)?,
);
ConstData { ty: next_ty, value: constant.data(Interner).value.clone() }
.intern(Interner)
.try_super_fold_with(self, outer_binder)
self.subst
.as_slice()
.get(param.index as usize)
.and_then(|arg| arg.region())
.ok_or_else(|| MirLowerError::GenericArgNotProvided(param.id.into(), self.subst))
}
}
impl Filler<'_> {
fn fill_ty(&mut self, ty: &mut Ty) -> Result<(), MirLowerError> {
let tmp = mem::replace(ty, TyKind::Error.intern(Interner));
*ty = normalize(
self.db,
self.trait_env.clone(),
tmp.try_fold_with(self, DebruijnIndex::INNERMOST)?,
);
Ok(())
impl<'db> Filler<'db> {
fn new(
db: &'db dyn HirDatabase,
env: Arc<TraitEnvironment<'db>>,
subst: GenericArgs<'db>,
) -> Self {
let interner = DbInterner::new_with(db, Some(env.krate), env.block);
let infcx = interner.infer_ctxt().build(TypingMode::PostAnalysis);
Self { infcx, trait_env: env, subst }
}
fn fill_const(&mut self, c: &mut Const) -> Result<(), MirLowerError> {
let tmp = mem::replace(c, unknown_const(c.data(Interner).ty.clone()));
*c = tmp.try_fold_with(self, DebruijnIndex::INNERMOST)?;
Ok(())
fn fill<T: TypeFoldable<DbInterner<'db>> + Copy>(
&mut self,
t: &mut T,
) -> Result<(), MirLowerError<'db>> {
        // Can't deep-normalize, as that'll try to normalize consts and fail.
*t = t.try_fold_with(self)?;
if references_non_lt_error(t) {
Err(MirLowerError::NotSupported("monomorphization resulted in errors".to_owned()))
} else {
Ok(())
}
}
fn fill_subst(&mut self, ty: &mut Substitution) -> Result<(), MirLowerError> {
let tmp = mem::replace(ty, Substitution::empty(Interner));
*ty = tmp.try_fold_with(self, DebruijnIndex::INNERMOST)?;
Ok(())
}
fn fill_operand(&mut self, op: &mut Operand) -> Result<(), MirLowerError> {
fn fill_operand(&mut self, op: &mut Operand<'db>) -> Result<(), MirLowerError<'db>> {
match &mut op.kind {
OperandKind::Constant(c) => {
match &c.data(Interner).value {
chalk_ir::ConstValue::BoundVar(b) => {
let resolved = self
.subst
.as_slice(Interner)
.get(b.index)
.ok_or_else(|| {
MirLowerError::GenericArgNotProvided(
self.generics
.as_ref()
.and_then(|it| it.iter().nth(b.index))
.and_then(|(id, _)| match id {
hir_def::GenericParamId::ConstParamId(id) => {
Some(hir_def::TypeOrConstParamId::from(id))
}
hir_def::GenericParamId::TypeParamId(id) => {
Some(hir_def::TypeOrConstParamId::from(id))
}
_ => None,
})
.unwrap(),
self.subst.clone(),
)
})?
.assert_const_ref(Interner);
*c = resolved.clone();
}
chalk_ir::ConstValue::InferenceVar(_)
| chalk_ir::ConstValue::Placeholder(_) => {}
chalk_ir::ConstValue::Concrete(cc) => match &cc.interned {
crate::ConstScalar::UnevaluatedConst(const_id, subst) => {
let mut subst = subst.clone();
self.fill_subst(&mut subst)?;
*c = intern_const_scalar(
crate::ConstScalar::UnevaluatedConst(*const_id, subst),
c.data(Interner).ty.clone(),
);
}
crate::ConstScalar::Bytes(_, _) | crate::ConstScalar::Unknown => (),
},
}
self.fill_const(c)?;
OperandKind::Constant { konst, ty } => {
self.fill(konst)?;
self.fill(ty)?;
}
OperandKind::Copy(_) | OperandKind::Move(_) | OperandKind::Static(_) => (),
}
Ok(())
}
fn fill_body(&mut self, body: &mut MirBody) -> Result<(), MirLowerError> {
fn fill_body(&mut self, body: &mut MirBody<'db>) -> Result<(), MirLowerError<'db>> {
for (_, l) in body.locals.iter_mut() {
self.fill_ty(&mut l.ty)?;
self.fill(&mut l.ty)?;
}
for (_, bb) in body.basic_blocks.iter_mut() {
for statement in &mut bb.statements {
@ -237,20 +142,20 @@ impl Filler<'_> {
match ak {
super::AggregateKind::Array(ty)
| super::AggregateKind::Tuple(ty)
| super::AggregateKind::Closure(ty) => self.fill_ty(ty)?,
super::AggregateKind::Adt(_, subst) => self.fill_subst(subst)?,
| super::AggregateKind::Closure(ty) => self.fill(ty)?,
super::AggregateKind::Adt(_, subst) => self.fill(subst)?,
super::AggregateKind::Union(_, _) => (),
}
}
Rvalue::ShallowInitBox(_, ty) | Rvalue::ShallowInitBoxWithAlloc(ty) => {
self.fill_ty(ty)?;
self.fill(ty)?;
}
Rvalue::Use(op) => {
self.fill_operand(op)?;
}
Rvalue::Repeat(op, len) => {
self.fill_operand(op)?;
self.fill_const(len)?;
self.fill(len)?;
}
Rvalue::Ref(_, _)
| Rvalue::Len(_)
@ -304,11 +209,10 @@ impl Filler<'_> {
pub fn monomorphized_mir_body_query<'db>(
db: &'db dyn HirDatabase,
owner: DefWithBodyId,
subst: Substitution,
subst: GenericArgs<'db>,
trait_env: Arc<crate::TraitEnvironment<'db>>,
) -> Result<Arc<MirBody>, MirLowerError> {
let generics = owner.as_generic_def_id(db).map(|g_def| generics(db, g_def));
let filler = &mut Filler { db, subst: &subst, trait_env, generics };
) -> Result<Arc<MirBody<'db>>, MirLowerError<'db>> {
let mut filler = Filler::new(db, trait_env, subst);
let body = db.mir_body(owner)?;
let mut body = (*body).clone();
filler.fill_body(&mut body)?;
@ -318,21 +222,19 @@ pub fn monomorphized_mir_body_query<'db>(
pub(crate) fn monomorphized_mir_body_cycle_result<'db>(
_db: &'db dyn HirDatabase,
_: DefWithBodyId,
_: Substitution,
_: GenericArgs<'db>,
_: Arc<crate::TraitEnvironment<'db>>,
) -> Result<Arc<MirBody>, MirLowerError> {
) -> Result<Arc<MirBody<'db>>, MirLowerError<'db>> {
Err(MirLowerError::Loop)
}
pub fn monomorphized_mir_body_for_closure_query<'db>(
db: &'db dyn HirDatabase,
closure: InternedClosureId,
subst: Substitution,
subst: GenericArgs<'db>,
trait_env: Arc<crate::TraitEnvironment<'db>>,
) -> Result<Arc<MirBody>, MirLowerError> {
let InternedClosure(owner, _) = db.lookup_intern_closure(closure);
let generics = owner.as_generic_def_id(db).map(|g_def| generics(db, g_def));
let filler = &mut Filler { db, subst: &subst, trait_env, generics };
) -> Result<Arc<MirBody<'db>>, MirLowerError<'db>> {
let mut filler = Filler::new(db, trait_env, subst);
let body = db.mir_body_for_closure(closure)?;
let mut body = (*body).clone();
filler.fill_body(&mut body)?;

View file

@ -11,8 +11,7 @@ use hir_expand::{Lookup, name::Name};
use la_arena::ArenaMap;
use crate::{
ClosureId,
db::HirDatabase,
db::{HirDatabase, InternedClosureId},
display::{ClosureStyle, DisplayTarget, HirDisplay},
mir::{PlaceElem, ProjectionElem, StatementKind, TerminatorKind},
};
@ -37,8 +36,8 @@ macro_rules! wln {
};
}
impl MirBody {
pub fn pretty_print(&self, db: &dyn HirDatabase, display_target: DisplayTarget) -> String {
impl<'db> MirBody<'db> {
pub fn pretty_print(&self, db: &'db dyn HirDatabase, display_target: DisplayTarget) -> String {
let hir_body = db.body(self.owner);
let mut ctx = MirPrettyCtx::new(self, &hir_body, db, display_target);
ctx.for_body(|this| match ctx.body.owner {
@ -81,7 +80,7 @@ impl MirBody {
// String with lines is rendered poorly in `dbg` macros, which I use very much, so this
// function exists to solve that.
pub fn dbg(&self, db: &dyn HirDatabase, display_target: DisplayTarget) -> impl Debug {
pub fn dbg(&self, db: &'db dyn HirDatabase, display_target: DisplayTarget) -> impl Debug {
struct StringDbg(String);
impl Debug for StringDbg {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
@ -92,17 +91,17 @@ impl MirBody {
}
}
struct MirPrettyCtx<'a> {
body: &'a MirBody,
struct MirPrettyCtx<'a, 'db> {
body: &'a MirBody<'db>,
hir_body: &'a Body,
db: &'a dyn HirDatabase,
db: &'db dyn HirDatabase,
result: String,
indent: String,
local_to_binding: ArenaMap<LocalId, BindingId>,
local_to_binding: ArenaMap<LocalId<'db>, BindingId>,
display_target: DisplayTarget,
}
impl Write for MirPrettyCtx<'_> {
impl Write for MirPrettyCtx<'_, '_> {
fn write_str(&mut self, s: &str) -> std::fmt::Result {
let mut it = s.split('\n'); // note: `.lines()` is wrong here
self.write(it.next().unwrap_or_default());
@ -114,12 +113,12 @@ impl Write for MirPrettyCtx<'_> {
}
}
enum LocalName {
Unknown(LocalId),
Binding(Name, LocalId),
enum LocalName<'db> {
Unknown(LocalId<'db>),
Binding(Name, LocalId<'db>),
}
impl HirDisplay for LocalName {
impl<'db> HirDisplay for LocalName<'db> {
fn hir_fmt(
&self,
f: &mut crate::display::HirFormatter<'_>,
@ -133,8 +132,8 @@ impl HirDisplay for LocalName {
}
}
impl<'a> MirPrettyCtx<'a> {
fn for_body(&mut self, name: impl FnOnce(&mut MirPrettyCtx<'_>)) {
impl<'a, 'db> MirPrettyCtx<'a, 'db> {
fn for_body(&mut self, name: impl FnOnce(&mut MirPrettyCtx<'_, 'db>)) {
name(self);
self.with_block(|this| {
this.locals();
@ -146,8 +145,8 @@ impl<'a> MirPrettyCtx<'a> {
}
}
fn for_closure(&mut self, closure: ClosureId) {
let body = match self.db.mir_body_for_closure(closure.into()) {
fn for_closure(&mut self, closure: InternedClosureId) {
let body = match self.db.mir_body_for_closure(closure) {
Ok(it) => it,
Err(e) => {
wln!(self, "// error in {closure:?}: {e:?}");
@ -168,7 +167,7 @@ impl<'a> MirPrettyCtx<'a> {
self.indent = ctx.indent;
}
fn with_block(&mut self, f: impl FnOnce(&mut MirPrettyCtx<'_>)) {
fn with_block(&mut self, f: impl FnOnce(&mut MirPrettyCtx<'_, 'db>)) {
self.indent += " ";
wln!(self, "{{");
f(self);
@ -180,9 +179,9 @@ impl<'a> MirPrettyCtx<'a> {
}
fn new(
body: &'a MirBody,
body: &'a MirBody<'db>,
hir_body: &'a Body,
db: &'a dyn HirDatabase,
db: &'db dyn HirDatabase,
display_target: DisplayTarget,
) -> Self {
let local_to_binding = body.local_to_binding_map();
@ -217,14 +216,14 @@ impl<'a> MirPrettyCtx<'a> {
}
}
fn local_name(&self, local: LocalId) -> LocalName {
fn local_name(&self, local: LocalId<'db>) -> LocalName<'db> {
match self.local_to_binding.get(local) {
Some(b) => LocalName::Binding(self.hir_body[*b].name.clone(), local),
None => LocalName::Unknown(local),
}
}
fn basic_block_id(&self, basic_block_id: BasicBlockId) -> String {
fn basic_block_id(&self, basic_block_id: BasicBlockId<'db>) -> String {
format!("'bb{}", u32::from(basic_block_id.into_raw()))
}
@ -312,8 +311,12 @@ impl<'a> MirPrettyCtx<'a> {
}
}
fn place(&mut self, p: &Place) {
fn f(this: &mut MirPrettyCtx<'_>, local: LocalId, projections: &[PlaceElem]) {
fn place(&mut self, p: &Place<'db>) {
fn f<'db>(
this: &mut MirPrettyCtx<'_, 'db>,
local: LocalId<'db>,
projections: &[PlaceElem<'db>],
) {
let Some((last, head)) = projections.split_last() else {
// no projection
w!(this, "{}", this.local_name(local).display_test(this.db, this.display_target));
@ -373,19 +376,19 @@ impl<'a> MirPrettyCtx<'a> {
f(self, p.local, p.projection.lookup(&self.body.projection_store));
}
fn operand(&mut self, r: &Operand) {
fn operand(&mut self, r: &Operand<'db>) {
match &r.kind {
OperandKind::Copy(p) | OperandKind::Move(p) => {
// MIR at the time of writing doesn't have difference between move and copy, so we show them
// equally. Feel free to change it.
self.place(p);
}
OperandKind::Constant(c) => w!(self, "Const({})", self.hir_display(c)),
OperandKind::Constant { konst, .. } => w!(self, "Const({})", self.hir_display(konst)),
OperandKind::Static(s) => w!(self, "Static({:?})", s),
}
}
fn rvalue(&mut self, r: &Rvalue) {
fn rvalue(&mut self, r: &Rvalue<'db>) {
match r {
Rvalue::Use(op) => self.operand(op),
Rvalue::Ref(r, p) => {
@ -475,7 +478,7 @@ impl<'a> MirPrettyCtx<'a> {
}
}
fn operand_list(&mut self, it: &[Operand]) {
fn operand_list(&mut self, it: &[Operand<'db>]) {
let mut it = it.iter();
if let Some(first) = it.next() {
self.operand(first);
@ -486,7 +489,10 @@ impl<'a> MirPrettyCtx<'a> {
}
}
fn hir_display<T: HirDisplay>(&self, ty: &'a T) -> impl Display + 'a {
fn hir_display<'b, T: HirDisplay>(&self, ty: &'b T) -> impl Display + use<'a, 'b, 'db, T>
where
'db: 'b,
{
ty.display_test(self.db, self.display_target)
.with_closure_style(ClosureStyle::ClosureWithSubst)
}

View file

@ -33,6 +33,9 @@ pub use region::*;
pub use solver::*;
pub use ty::*;
pub use crate::lower_nextsolver::ImplTraitIdx;
pub use rustc_ast_ir::Mutability;
pub type Binder<'db, T> = rustc_type_ir::Binder<DbInterner<'db>, T>;
pub type EarlyBinder<'db, T> = rustc_type_ir::EarlyBinder<DbInterner<'db>, T>;
pub type Canonical<'db, T> = rustc_type_ir::Canonical<DbInterner<'db>, T>;

View file

@ -4,10 +4,11 @@ use std::hash::Hash;
use hir_def::{ConstParamId, TypeOrConstParamId};
use intern::{Interned, Symbol};
use macros::{TypeFoldable, TypeVisitable};
use rustc_ast_ir::{try_visit, visit::VisitorResult};
use rustc_type_ir::{
BoundVar, FlagComputation, Flags, TypeFoldable, TypeSuperFoldable, TypeSuperVisitable,
TypeVisitable, TypeVisitableExt, WithCachedTypeInfo,
BoundVar, DebruijnIndex, FlagComputation, Flags, TypeFoldable, TypeSuperFoldable,
TypeSuperVisitable, TypeVisitable, TypeVisitableExt, WithCachedTypeInfo,
inherent::{IntoKind, ParamEnv as _, PlaceholderLike, SliceLike},
relate::Relate,
};
@ -23,7 +24,7 @@ use super::{BoundVarKind, DbInterner, ErrorGuaranteed, GenericArgs, Placeholder,
pub type ConstKind<'db> = rustc_type_ir::ConstKind<DbInterner<'db>>;
pub type UnevaluatedConst<'db> = rustc_type_ir::UnevaluatedConst<DbInterner<'db>>;
#[salsa::interned(constructor = new_, debug)]
#[salsa::interned(constructor = new_)]
pub struct Const<'db> {
#[returns(ref)]
kind_: InternedWrapperNoDebug<WithCachedTypeInfo<ConstKind<'db>>>,
@ -41,13 +42,12 @@ impl<'db> Const<'db> {
}
pub fn inner(&self) -> &WithCachedTypeInfo<ConstKind<'db>> {
salsa::with_attached_database(|db| {
crate::with_attached_db(|db| {
let inner = &self.kind_(db).0;
// SAFETY: The caller already has access to a `Const<'db>`, so borrowchecking will
// make sure that our returned value is valid for the lifetime `'db`.
unsafe { std::mem::transmute(inner) }
})
.unwrap()
}
pub fn error(interner: DbInterner<'db>) -> Self {
@ -62,6 +62,25 @@ impl<'db> Const<'db> {
Const::new(interner, ConstKind::Placeholder(placeholder))
}
pub fn new_bound(interner: DbInterner<'db>, index: DebruijnIndex, bound: BoundConst) -> Self {
Const::new(interner, ConstKind::Bound(index, bound))
}
pub fn new_valtree(
interner: DbInterner<'db>,
ty: Ty<'db>,
memory: Box<[u8]>,
memory_map: MemoryMap<'db>,
) -> Self {
Const::new(
interner,
ConstKind::Value(ValueConst {
ty,
value: Valtree::new(ConstBytes { memory, memory_map }),
}),
)
}
pub fn is_ct_infer(&self) -> bool {
matches!(&self.inner().internee, ConstKind::Infer(_))
}
@ -78,6 +97,12 @@ impl<'db> Const<'db> {
}
}
impl<'db> std::fmt::Debug for Const<'db> {
    /// Delegates to the `Debug` impl of the interned `ConstKind`, so a
    /// `Const` prints as its kind rather than as an opaque salsa id.
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        let kind = &self.inner().internee;
        std::fmt::Debug::fmt(kind, f)
    }
}
impl<'db> std::fmt::Debug for InternedWrapperNoDebug<WithCachedTypeInfo<ConstKind<'db>>> {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
self.0.internee.fmt(f)
@ -136,10 +161,13 @@ impl ParamConst {
/// A type-level constant value.
///
/// Represents a typed, fully evaluated constant.
#[derive(Debug, Copy, Clone, Eq, PartialEq, Hash)]
#[derive(Debug, Copy, Clone, Eq, PartialEq, Hash, TypeFoldable, TypeVisitable)]
pub struct ValueConst<'db> {
pub(crate) ty: Ty<'db>,
pub(crate) value: Valtree<'db>,
pub ty: Ty<'db>,
// FIXME: Should we ignore this for TypeVisitable, TypeFoldable?
#[type_visitable(ignore)]
#[type_foldable(identity)]
pub value: Valtree<'db>,
}
impl<'db> ValueConst<'db> {
@ -159,33 +187,15 @@ impl<'db> rustc_type_ir::inherent::ValueConst<DbInterner<'db>> for ValueConst<'d
}
}
impl<'db> rustc_type_ir::TypeVisitable<DbInterner<'db>> for ValueConst<'db> {
fn visit_with<V: rustc_type_ir::TypeVisitor<DbInterner<'db>>>(
&self,
visitor: &mut V,
) -> V::Result {
self.ty.visit_with(visitor)
}
}
impl<'db> rustc_type_ir::TypeFoldable<DbInterner<'db>> for ValueConst<'db> {
fn fold_with<F: rustc_type_ir::TypeFolder<DbInterner<'db>>>(self, folder: &mut F) -> Self {
ValueConst { ty: self.ty.fold_with(folder), value: self.value }
}
fn try_fold_with<F: rustc_type_ir::FallibleTypeFolder<DbInterner<'db>>>(
self,
folder: &mut F,
) -> Result<Self, F::Error> {
Ok(ValueConst { ty: self.ty.try_fold_with(folder)?, value: self.value })
}
}
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct ConstBytes<'db>(pub Box<[u8]>, pub MemoryMap<'db>);
pub struct ConstBytes<'db> {
pub memory: Box<[u8]>,
pub memory_map: MemoryMap<'db>,
}
impl Hash for ConstBytes<'_> {
fn hash<H: std::hash::Hasher>(&self, state: &mut H) {
self.0.hash(state)
self.memory.hash(state)
}
}
@ -197,25 +207,23 @@ pub struct Valtree<'db> {
impl<'db> Valtree<'db> {
pub fn new(bytes: ConstBytes<'db>) -> Self {
salsa::with_attached_database(|db| unsafe {
crate::with_attached_db(|db| unsafe {
            // SAFETY: TODO(review): no justification given — presumably the same
            // lifetime-attachment argument as in `inner()` below (the caller already
            // holds a value valid for `'db`); confirm before relying on this.
std::mem::transmute(Valtree::new_(db, bytes))
})
.unwrap()
}
pub fn inner(&self) -> &ConstBytes<'db> {
salsa::with_attached_database(|db| {
crate::with_attached_db(|db| {
let inner = self.bytes_(db);
// SAFETY: The caller already has access to a `Valtree<'db>`, so borrowchecking will
// make sure that our returned value is valid for the lifetime `'db`.
unsafe { std::mem::transmute(inner) }
})
.unwrap()
}
}
#[derive(Copy, Clone, Debug, Hash, PartialEq, Eq)]
#[derive(Copy, Clone, Debug, Hash, PartialEq, Eq, TypeVisitable, TypeFoldable)]
pub struct ExprConst;
impl rustc_type_ir::inherent::ParamLike for ParamConst {
@ -415,29 +423,6 @@ impl<'db> PlaceholderLike<DbInterner<'db>> for PlaceholderConst {
}
}
impl<'db> TypeVisitable<DbInterner<'db>> for ExprConst {
fn visit_with<V: rustc_type_ir::TypeVisitor<DbInterner<'db>>>(
&self,
visitor: &mut V,
) -> V::Result {
// Ensure we get back to this when we fill in the fields
let ExprConst = &self;
V::Result::output()
}
}
impl<'db> TypeFoldable<DbInterner<'db>> for ExprConst {
fn try_fold_with<F: rustc_type_ir::FallibleTypeFolder<DbInterner<'db>>>(
self,
folder: &mut F,
) -> Result<Self, F::Error> {
Ok(ExprConst)
}
fn fold_with<F: rustc_type_ir::TypeFolder<DbInterner<'db>>>(self, folder: &mut F) -> Self {
ExprConst
}
}
impl<'db> Relate<DbInterner<'db>> for ExprConst {
fn relate<R: rustc_type_ir::relate::TypeRelation<DbInterner<'db>>>(
relation: &mut R,

View file

@ -1,8 +1,8 @@
//! Definition of `SolverDefId`
use hir_def::{
AdtId, CallableDefId, ConstId, EnumId, EnumVariantId, FunctionId, GenericDefId, ImplId,
StaticId, StructId, TraitId, TypeAliasId, UnionId,
AdtId, CallableDefId, ConstId, EnumId, EnumVariantId, FunctionId, GeneralConstId, GenericDefId,
ImplId, StaticId, StructId, TraitId, TypeAliasId, UnionId,
};
use rustc_type_ir::inherent;
use stdx::impl_from;
@ -119,6 +119,16 @@ impl From<GenericDefId> for SolverDefId {
}
}
impl From<GeneralConstId> for SolverDefId {
    /// Each `GeneralConstId` variant maps 1:1 onto the corresponding
    /// `SolverDefId` variant.
    #[inline]
    fn from(value: GeneralConstId) -> Self {
        match value {
            GeneralConstId::ConstId(id) => Self::ConstId(id),
            GeneralConstId::StaticId(id) => Self::StaticId(id),
        }
    }
}
impl TryFrom<SolverDefId> for GenericDefId {
type Error = SolverDefId;
@ -139,6 +149,26 @@ impl TryFrom<SolverDefId> for GenericDefId {
}
}
impl SolverDefId {
    /// Unwraps the opaque-type id; panics (at the caller's location) on any
    /// other variant.
    #[inline]
    #[track_caller]
    pub fn expect_opaque_ty(self) -> InternedOpaqueTyId {
        if let SolverDefId::InternedOpaqueTyId(it) = self {
            it
        } else {
            panic!("expected opaque type, found {self:?}")
        }
    }

    /// Unwraps the type-alias id; panics (at the caller's location) on any
    /// other variant.
    #[inline]
    #[track_caller]
    pub fn expect_type_alias(self) -> TypeAliasId {
        if let SolverDefId::TypeAliasId(it) = self {
            it
        } else {
            panic!("expected type alias, found {self:?}")
        }
    }
}
impl<'db> inherent::DefId<DbInterner<'db>> for SolverDefId {
fn as_local(self) -> Option<SolverDefId> {
Some(self)

View file

@ -129,3 +129,26 @@ where
if p.has_vars_bound_at_or_above(self.current_index) { p.super_fold_with(self) } else { p }
}
}
/// Folds `t`, applying `callback` to every type it contains.
///
/// Types are visited bottom-up: a type's children are folded first, so the
/// callback always sees a type whose components have already been rewritten.
pub fn fold_tys<'db, T: TypeFoldable<DbInterner<'db>>>(
    interner: DbInterner<'db>,
    t: T,
    callback: impl FnMut(Ty<'db>) -> Ty<'db>,
) -> T {
    // Adapter turning the plain closure into a `TypeFolder`.
    struct TyMapper<'db, F> {
        interner: DbInterner<'db>,
        f: F,
    }
    impl<'db, F: FnMut(Ty<'db>) -> Ty<'db>> TypeFolder<DbInterner<'db>> for TyMapper<'db, F> {
        fn cx(&self) -> DbInterner<'db> {
            self.interner
        }
        fn fold_ty(&mut self, ty: Ty<'db>) -> Ty<'db> {
            // Recurse into children before handing the type to the callback.
            let folded = ty.super_fold_with(self);
            (self.f)(folded)
        }
    }
    t.fold_with(&mut TyMapper { interner, f: callback })
}

View file

@ -1,7 +1,9 @@
//! Things related to generic args in the next-trait-solver.
use hir_def::GenericParamId;
use hir_def::{GenericDefId, GenericParamId};
use intern::{Interned, Symbol};
use macros::{TypeFoldable, TypeVisitable};
use rustc_type_ir::inherent::Const as _;
use rustc_type_ir::{
ClosureArgs, CollectAndApply, ConstVid, CoroutineArgs, CoroutineClosureArgs, FnSig, FnSigTys,
GenericArgKind, IntTy, Interner, TermKind, TyKind, TyVid, TypeFoldable, TypeVisitable,
@ -22,7 +24,7 @@ use super::{
interned_vec_db,
};
#[derive(Copy, Clone, PartialEq, Eq, Hash)]
#[derive(Copy, Clone, PartialEq, Eq, Hash, TypeVisitable, TypeFoldable)]
pub enum GenericArg<'db> {
Ty(Ty<'db>),
Lifetime(Region<'db>),
@ -54,12 +56,27 @@ impl<'db> GenericArg<'db> {
}
}
pub fn konst(self) -> Option<Const<'db>> {
match self.kind() {
GenericArgKind::Const(konst) => Some(konst),
_ => None,
}
}
pub fn region(self) -> Option<Region<'db>> {
match self.kind() {
GenericArgKind::Lifetime(r) => Some(r),
_ => None,
}
}
pub fn error_from_id(interner: DbInterner<'db>, id: GenericParamId) -> GenericArg<'db> {
match id {
GenericParamId::TypeParamId(_) => Ty::new_error(interner, ErrorGuaranteed).into(),
GenericParamId::ConstParamId(_) => Const::error(interner).into(),
GenericParamId::LifetimeParamId(_) => Region::error(interner).into(),
}
}
}
impl<'db> From<Term<'db>> for GenericArg<'db> {
@ -71,7 +88,7 @@ impl<'db> From<Term<'db>> for GenericArg<'db> {
}
}
#[derive(Copy, Clone, PartialEq, Eq, Hash)]
#[derive(Copy, Clone, PartialEq, Eq, Hash, TypeVisitable, TypeFoldable)]
pub enum Term<'db> {
Ty(Ty<'db>),
Const(Const<'db>),
@ -129,39 +146,6 @@ impl<'db> IntoKind for GenericArg<'db> {
}
}
impl<'db> TypeVisitable<DbInterner<'db>> for GenericArg<'db> {
fn visit_with<V: rustc_type_ir::TypeVisitor<DbInterner<'db>>>(
&self,
visitor: &mut V,
) -> V::Result {
match self {
GenericArg::Lifetime(lt) => lt.visit_with(visitor),
GenericArg::Ty(ty) => ty.visit_with(visitor),
GenericArg::Const(ct) => ct.visit_with(visitor),
}
}
}
impl<'db> TypeFoldable<DbInterner<'db>> for GenericArg<'db> {
fn try_fold_with<F: rustc_type_ir::FallibleTypeFolder<DbInterner<'db>>>(
self,
folder: &mut F,
) -> Result<Self, F::Error> {
match self.kind() {
GenericArgKind::Lifetime(lt) => lt.try_fold_with(folder).map(Into::into),
GenericArgKind::Type(ty) => ty.try_fold_with(folder).map(Into::into),
GenericArgKind::Const(ct) => ct.try_fold_with(folder).map(Into::into),
}
}
fn fold_with<F: rustc_type_ir::TypeFolder<DbInterner<'db>>>(self, folder: &mut F) -> Self {
match self.kind() {
GenericArgKind::Lifetime(lt) => lt.fold_with(folder).into(),
GenericArgKind::Type(ty) => ty.fold_with(folder).into(),
GenericArgKind::Const(ct) => ct.fold_with(folder).into(),
}
}
}
impl<'db> Relate<DbInterner<'db>> for GenericArg<'db> {
fn relate<R: rustc_type_ir::relate::TypeRelation<DbInterner<'db>>>(
relation: &mut R,
@ -216,6 +200,66 @@ impl<'db> GenericArgs<'db> {
interner.mk_args(&args)
}
/// Creates an all-error `GenericArgs`.
pub fn error_for_item(interner: DbInterner<'db>, def_id: SolverDefId) -> GenericArgs<'db> {
GenericArgs::for_item(interner, def_id, |_, _, id, _| {
GenericArg::error_from_id(interner, id)
})
}
/// Like `for_item`, but prefers the default of a parameter if it has any.
pub fn for_item_with_defaults<F>(
interner: DbInterner<'db>,
def_id: GenericDefId,
mut fallback: F,
) -> GenericArgs<'db>
where
F: FnMut(&Symbol, u32, GenericParamId, &[GenericArg<'db>]) -> GenericArg<'db>,
{
let defaults = interner.db.generic_defaults_ns(def_id);
Self::for_item(interner, def_id.into(), |name, idx, id, prev| {
match defaults.get(idx as usize) {
Some(default) => default.instantiate(interner, prev),
None => fallback(name, idx, id, prev),
}
})
}
/// Like `for_item()`, but calls first uses the args from `first`.
pub fn fill_rest<F>(
interner: DbInterner<'db>,
def_id: SolverDefId,
first: impl IntoIterator<Item = GenericArg<'db>>,
mut fallback: F,
) -> GenericArgs<'db>
where
F: FnMut(&Symbol, u32, GenericParamId, &[GenericArg<'db>]) -> GenericArg<'db>,
{
let mut iter = first.into_iter();
Self::for_item(interner, def_id, |name, idx, id, prev| {
iter.next().unwrap_or_else(|| fallback(name, idx, id, prev))
})
}
/// Appends default param values to `first` if needed. Params without default will call `fallback()`.
pub fn fill_with_defaults<F>(
interner: DbInterner<'db>,
def_id: GenericDefId,
first: impl IntoIterator<Item = GenericArg<'db>>,
mut fallback: F,
) -> GenericArgs<'db>
where
F: FnMut(&Symbol, u32, GenericParamId, &[GenericArg<'db>]) -> GenericArg<'db>,
{
let defaults = interner.db.generic_defaults_ns(def_id);
Self::fill_rest(interner, def_id.into(), first, |name, idx, id, prev| {
defaults
.get(idx as usize)
.map(|default| default.instantiate(interner, prev))
.unwrap_or_else(|| fallback(name, idx, id, prev))
})
}
fn fill_item<F>(
args: &mut SmallVec<[GenericArg<'db>; 8]>,
interner: DbInterner<'_>,
@ -271,6 +315,18 @@ impl<'db> GenericArgs<'db> {
}
}
}
pub fn types(self) -> impl Iterator<Item = Ty<'db>> {
self.iter().filter_map(|it| it.as_type())
}
pub fn consts(self) -> impl Iterator<Item = Const<'db>> {
self.iter().filter_map(|it| it.as_const())
}
pub fn regions(self) -> impl Iterator<Item = Region<'db>> {
self.iter().filter_map(|it| it.as_region())
}
}
impl<'db> rustc_type_ir::relate::Relate<DbInterner<'db>> for GenericArgs<'db> {
@ -487,36 +543,6 @@ impl<'db> From<Const<'db>> for Term<'db> {
}
}
impl<'db> TypeVisitable<DbInterner<'db>> for Term<'db> {
fn visit_with<V: rustc_type_ir::TypeVisitor<DbInterner<'db>>>(
&self,
visitor: &mut V,
) -> V::Result {
match self {
Term::Ty(ty) => ty.visit_with(visitor),
Term::Const(ct) => ct.visit_with(visitor),
}
}
}
impl<'db> TypeFoldable<DbInterner<'db>> for Term<'db> {
fn try_fold_with<F: rustc_type_ir::FallibleTypeFolder<DbInterner<'db>>>(
self,
folder: &mut F,
) -> Result<Self, F::Error> {
match self.kind() {
TermKind::Ty(ty) => ty.try_fold_with(folder).map(Into::into),
TermKind::Const(ct) => ct.try_fold_with(folder).map(Into::into),
}
}
fn fold_with<F: rustc_type_ir::TypeFolder<DbInterner<'db>>>(self, folder: &mut F) -> Self {
match self.kind() {
TermKind::Ty(ty) => ty.fold_with(folder).into(),
TermKind::Const(ct) => ct.fold_with(folder).into(),
}
}
}
impl<'db> Relate<DbInterner<'db>> for Term<'db> {
fn relate<R: rustc_type_ir::relate::TypeRelation<DbInterner<'db>>>(
relation: &mut R,

View file

@ -10,6 +10,7 @@ pub use at::DefineOpaqueTypes;
use ena::undo_log::UndoLogs;
use ena::unify as ut;
use hir_def::GenericParamId;
use hir_def::lang_item::LangItem;
use intern::Symbol;
use opaque_types::{OpaqueHiddenType, OpaqueTypeStorage};
use region_constraints::{
@ -18,6 +19,7 @@ use region_constraints::{
pub use relate::StructurallyRelateAliases;
pub use relate::combine::PredicateEmittingRelation;
use rustc_hash::{FxHashMap, FxHashSet};
use rustc_next_trait_solver::solve::SolverDelegateEvalExt;
use rustc_pattern_analysis::Captures;
use rustc_type_ir::error::{ExpectedFound, TypeError};
use rustc_type_ir::inherent::{
@ -38,7 +40,10 @@ use unify_key::{ConstVariableOrigin, ConstVariableValue, ConstVidKey};
use crate::next_solver::fold::BoundVarReplacerDelegate;
use crate::next_solver::infer::opaque_types::table::OpaqueTypeStorageEntries;
use crate::next_solver::{BoundConst, BoundRegion, BoundTy, BoundVarKind};
use crate::next_solver::infer::select::EvaluationResult;
use crate::next_solver::infer::traits::PredicateObligation;
use crate::next_solver::obligation_ctxt::ObligationCtxt;
use crate::next_solver::{BoundConst, BoundRegion, BoundTy, BoundVarKind, Goal, SolverContext};
use super::generics::GenericParamDef;
use super::{
@ -62,7 +67,7 @@ pub(crate) mod traits;
mod type_variable;
mod unify_key;
/// `InferOk<'tcx, ()>` is used a lot. It may seem like a useless wrapper
/// `InferOk<'db, ()>` is used a lot. It may seem like a useless wrapper
/// around `PredicateObligations`, but it has one important property:
/// because `InferOk` is marked with `#[must_use]`, if you have a method
/// `InferCtxt::f` that returns `InferResult<()>` and you call it with
@ -395,6 +400,102 @@ impl<'db> InferCtxt<'db> {
self.typing_mode
}
/// See the comment on [OpaqueTypesJank](crate::solve::OpaqueTypesJank)
/// for more details.
pub fn predicate_may_hold_opaque_types_jank(
    &self,
    obligation: &PredicateObligation<'db>,
) -> bool {
    let goal = Goal::new(self.interner, obligation.param_env, obligation.predicate);
    <&SolverContext<'db>>::from(self).root_goal_may_hold_opaque_types_jank(goal)
}
/// Evaluates whether the predicate can be satisfied in the given
/// `ParamEnv`, and returns `false` if not certain. However, this is
/// not entirely accurate if inference variables are involved.
///
/// This version may conservatively fail when outlives obligations
/// are required. Therefore, this version should only be used for
/// optimizations or diagnostics and be treated as if it can always
/// return `false`.
///
/// # Example
///
/// ```
/// # #![allow(dead_code)]
/// trait Trait {}
///
/// fn check<T: Trait>() {}
///
/// fn foo<T: 'static>()
/// where
///     &'static T: Trait,
/// {
///     // Evaluating `&'?0 T: Trait` adds a `'?0: 'static` outlives obligation,
///     // which means that `predicate_must_hold_considering_regions` will return
///     // `false`.
///     check::<&'_ T>();
/// }
/// ```
fn predicate_must_hold_considering_regions(
    &self,
    obligation: &PredicateObligation<'db>,
) -> bool {
    let evaluation = self.evaluate_obligation(obligation);
    evaluation.must_apply_considering_regions()
}
/// Evaluates whether the predicate can be satisfied in the given
/// `ParamEnv`, and returns `false` if not certain. However, this is
/// not entirely accurate if inference variables are involved.
///
/// This version ignores all outlives constraints.
fn predicate_must_hold_modulo_regions(&self, obligation: &PredicateObligation<'db>) -> bool {
    let evaluation = self.evaluate_obligation(obligation);
    evaluation.must_apply_modulo_regions()
}
/// Evaluate a given predicate, capturing overflow and propagating it back.
///
/// Runs the evaluation inside `probe` so any inference side effects
/// (unifications, opaque-type entries, region constraints) are rolled back
/// afterwards; the snapshot is then inspected to weaken the result to
/// "modulo opaque types" / "modulo regions" where appropriate.
fn evaluate_obligation(&self, obligation: &PredicateObligation<'db>) -> EvaluationResult {
    // NOTE: the previous `let param_env = obligation.param_env;` binding was
    // unused and has been removed.
    self.probe(|snapshot| {
        let mut ocx = ObligationCtxt::new(self);
        ocx.register_obligation(obligation.clone());
        let mut result = EvaluationResult::EvaluatedToOk;
        // A true error short-circuits to `EvaluatedToErr`; every other
        // reported error here is ambiguity.
        for error in ocx.evaluate_obligations_error_on_ambiguity() {
            if error.is_true_error() {
                return EvaluationResult::EvaluatedToErr;
            } else {
                result = result.max(EvaluationResult::EvaluatedToAmbig);
            }
        }
        // Opaque-type registrations take precedence over region constraints
        // when weakening a successful result.
        if self.opaque_types_added_in_snapshot(snapshot) {
            result = result.max(EvaluationResult::EvaluatedToOkModuloOpaqueTypes);
        } else if self.region_constraints_added_in_snapshot(snapshot) {
            result = result.max(EvaluationResult::EvaluatedToOkModuloRegions);
        }
        result
    })
}
/// Returns whether `ty` is known to implement `Copy` in `param_env`,
/// ignoring outlives constraints.
pub fn type_is_copy_modulo_regions(&self, param_env: ParamEnv<'db>, ty: Ty<'db>) -> bool {
    let ty = self.resolve_vars_if_possible(ty);
    let copy_trait = LangItem::Copy.resolve_trait(self.interner.db, self.interner.krate.unwrap());
    match copy_trait {
        // This can get called from typeck (by euv), and `moves_by_default`
        // rightly refuses to work with inference variables, but
        // moves_by_default has a cache, which we want to use in other
        // cases.
        Some(copy_def_id) => {
            traits::type_known_to_meet_bound_modulo_regions(self, param_env, ty, copy_def_id)
        }
        // Without a `Copy` lang item nothing can be known to be `Copy`.
        None => false,
    }
}
pub fn unresolved_variables(&self) -> Vec<Ty<'db>> {
let mut inner = self.inner.borrow_mut();
let mut vars: Vec<Ty<'db>> = inner
@ -682,6 +783,17 @@ impl<'db> InferCtxt<'db> {
})
}
/// Like `fresh_args_for_item()`, but first uses the args from `first`.
///
/// Arguments already supplied by `first` are kept; every remaining generic
/// parameter of `def_id` is filled with a fresh inference variable.
pub fn fill_rest_fresh_args(
    &self,
    def_id: SolverDefId,
    first: impl IntoIterator<Item = GenericArg<'db>>,
) -> GenericArgs<'db> {
    // `_index` and the trailing default are unused here; only the param's
    // name and kind matter for creating a fresh variable.
    GenericArgs::fill_rest(self.interner, def_id, first, |name, _index, kind, _| {
        self.var_for_def(kind, name)
    })
}
/// Returns `true` if errors have been reported since this infcx was
/// created. This is sometimes used as a heuristic to skip
/// reporting errors that often occur as a result of earlier

View file

@ -2,11 +2,12 @@
use rustc_type_ir::{
ConstKind, FallibleTypeFolder, InferConst, InferTy, RegionKind, TyKind, TypeFoldable,
TypeFolder, TypeSuperFoldable, TypeVisitableExt, data_structures::DelayedMap,
inherent::IntoKind,
TypeFolder, TypeSuperFoldable, TypeVisitableExt,
data_structures::DelayedMap,
inherent::{Const as _, IntoKind, Ty as _},
};
use crate::next_solver::{Const, DbInterner, Region, Ty};
use crate::next_solver::{Const, DbInterner, ErrorGuaranteed, Region, Ty};
use super::{FixupError, FixupResult, InferCtxt};
@ -60,3 +61,48 @@ impl<'a, 'db> TypeFolder<DbInterner<'db>> for OpportunisticVarResolver<'a, 'db>
}
}
}
pub struct ReplaceInferWithError<'db> {
interner: DbInterner<'db>,
}
impl<'db> ReplaceInferWithError<'db> {
#[inline]
pub fn new(interner: DbInterner<'db>) -> Self {
Self { interner }
}
}
impl<'db> TypeFolder<DbInterner<'db>> for ReplaceInferWithError<'db> {
fn cx(&self) -> DbInterner<'db> {
self.interner
}
fn fold_ty(&mut self, t: Ty<'db>) -> Ty<'db> {
if !t.has_infer() {
return t;
}
if t.is_infer() {
Ty::new_error(self.interner, ErrorGuaranteed)
} else {
t.super_fold_with(self)
}
}
fn fold_const(&mut self, c: Const<'db>) -> Const<'db> {
if !c.has_infer() {
return c;
}
if c.is_ct_infer() {
Const::new_error(self.interner, ErrorGuaranteed)
} else {
c.super_fold_with(self)
}
}
fn fold_region(&mut self, r: Region<'db>) -> Region<'db> {
if r.is_var() { Region::error(self.interner) } else { r }
}
}

View file

@ -1,6 +1,7 @@
use std::ops::ControlFlow;
use hir_def::{ImplId, TraitId};
use macros::{TypeFoldable, TypeVisitable};
use rustc_type_ir::{
Interner,
solve::{BuiltinImplSource, CandidateSource, Certainty, inspect::ProbeKind},
@ -12,6 +13,7 @@ use crate::{
Const, ErrorGuaranteed, GenericArgs, Goal, TraitRef, Ty, TypeError,
infer::{
InferCtxt,
select::EvaluationResult::*,
traits::{Obligation, ObligationCause, PredicateObligation, TraitObligation},
},
inspect::{InspectCandidate, InspectGoal, ProofTreeVisitor},
@ -47,6 +49,83 @@ pub enum NotConstEvaluatable {
MentionsParam,
}
/// The result of trait evaluation. The order is important
/// here as the evaluation of a list is the maximum of the
/// evaluations.
///
/// The evaluation results are ordered:
/// - `EvaluatedToOk` implies `EvaluatedToOkModuloRegions`
///   implies `EvaluatedToAmbig` implies `EvaluatedToAmbigStackDependent`
/// - the "union" of evaluation results is equal to their maximum -
///   all the "potential success" candidates can potentially succeed,
///   so they are noops when unioned with a definite error, and within
///   the categories it's easy to see that the unions are correct.
#[derive(Copy, Clone, Debug, PartialOrd, Ord, PartialEq, Eq)]
pub enum EvaluationResult {
    /// Evaluation successful.
    EvaluatedToOk,
    /// Evaluation successful, but there were unevaluated region obligations.
    EvaluatedToOkModuloRegions,
    /// Evaluation successful, but need to rerun because opaque types got
    /// hidden types assigned without it being known whether the opaque types
    /// are within their defining scope
    EvaluatedToOkModuloOpaqueTypes,
    /// Evaluation is known to be ambiguous -- it *might* hold for some
    /// assignment of inference variables, but it might not.
    ///
    /// While this has the same meaning as `EvaluatedToAmbigStackDependent` -- we can't
    /// know whether this obligation holds or not -- it is the result we
    /// would get with an empty stack, and therefore is cacheable.
    EvaluatedToAmbig,
    /// Evaluation failed because of recursion involving inference
    /// variables. We are somewhat imprecise there, so we don't actually
    /// know the real result.
    ///
    /// This can't be trivially cached because the result depends on the
    /// stack results.
    EvaluatedToAmbigStackDependent,
    /// Evaluation failed.
    EvaluatedToErr,
}

impl EvaluationResult {
    /// Returns `true` if this evaluation result is known to apply, even
    /// considering outlives constraints.
    pub fn must_apply_considering_regions(self) -> bool {
        // Relies on variant qualification via `Self::` so this impl does not
        // depend on a glob import of its own variants.
        self == Self::EvaluatedToOk
    }

    /// Returns `true` if this evaluation result is known to apply, ignoring
    /// outlives constraints.
    pub fn must_apply_modulo_regions(self) -> bool {
        // `Ord` is derived, so "at most OkModuloRegions" means Ok or
        // OkModuloRegions.
        self <= Self::EvaluatedToOkModuloRegions
    }

    /// Returns `true` unless evaluation definitely failed.
    pub fn may_apply(self) -> bool {
        match self {
            Self::EvaluatedToOkModuloOpaqueTypes
            | Self::EvaluatedToOk
            | Self::EvaluatedToOkModuloRegions
            | Self::EvaluatedToAmbig
            | Self::EvaluatedToAmbigStackDependent => true,
            Self::EvaluatedToErr => false,
        }
    }

    /// Returns `true` if the result depends on the evaluation stack and
    /// therefore must not be cached.
    pub fn is_stack_dependent(self) -> bool {
        match self {
            Self::EvaluatedToAmbigStackDependent => true,
            Self::EvaluatedToOkModuloOpaqueTypes
            | Self::EvaluatedToOk
            | Self::EvaluatedToOkModuloRegions
            | Self::EvaluatedToAmbig
            | Self::EvaluatedToErr => false,
        }
    }
}
/// Indicates that trait evaluation caused overflow and in which pass.
#[derive(Copy, Clone, Debug, PartialEq, Eq, Hash)]
pub enum OverflowError {
@ -99,7 +178,7 @@ pub type SelectionResult<'db, T> = Result<Option<T>, SelectionError<'db>>;
/// ### The type parameter `N`
///
/// See explanation on `ImplSourceUserDefinedData`.
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
#[derive(Debug, Clone, PartialEq, Eq, Hash, TypeVisitable, TypeFoldable)]
pub enum ImplSource<'db, N> {
/// ImplSource identifying a particular impl.
UserDefined(ImplSourceUserDefinedData<'db, N>),
@ -164,8 +243,10 @@ impl<'db, N> ImplSource<'db, N> {
/// is `Obligation`, as one might expect. During codegen, however, this
/// is `()`, because codegen only requires a shallow resolution of an
/// impl, and nested obligations are satisfied later.
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
#[derive(Debug, Clone, PartialEq, Eq, Hash, TypeVisitable, TypeFoldable)]
pub struct ImplSourceUserDefinedData<'db, N> {
#[type_visitable(ignore)]
#[type_foldable(identity)]
pub impl_def_id: ImplId,
pub args: GenericArgs<'db>,
pub nested: Vec<N>,

View file

@ -109,4 +109,17 @@ impl<'db> InferCtxt<'db> {
self.rollback_to(snapshot);
r
}
/// Scan the constraints produced since `snapshot` and check whether
/// we added any region constraints.
pub fn region_constraints_added_in_snapshot(&self, snapshot: &CombinedSnapshot) -> bool {
    let mut inner = self.inner.borrow_mut();
    let region_constraints = inner.unwrap_region_constraints();
    region_constraints.region_constraints_added_in_snapshot(&snapshot.undo_snapshot)
}
/// Returns whether any opaque types were recorded in the undo log since
/// `snapshot` was taken.
pub fn opaque_types_added_in_snapshot(&self, snapshot: &CombinedSnapshot) -> bool {
    self.inner.borrow().undo_log.opaque_types_in_snapshot(&snapshot.undo_snapshot)
}
}

View file

@ -7,16 +7,19 @@ use std::{
hash::{Hash, Hasher},
};
use hir_def::TraitId;
use macros::{TypeFoldable, TypeVisitable};
use rustc_type_ir::elaborate::Elaboratable;
use rustc_type_ir::{
PredicatePolarity, Upcast,
solve::{Certainty, NoSolution},
};
use rustc_type_ir::{TypeFoldable, TypeVisitable};
use tracing::debug;
use crate::next_solver::{
Binder, Clause, DbInterner, Goal, ParamEnv, PolyTraitPredicate, Predicate, SolverDefId, Span,
TraitPredicate, Ty,
TraitPredicate, TraitRef, Ty,
};
use super::InferCtxt;
@ -63,8 +66,10 @@ impl ObligationCause {
/// either identifying an `impl` (e.g., `impl Eq for i32`) that
/// satisfies the obligation, or else finding a bound that is in
/// scope. The eventual result is usually a `Selection` (defined below).
#[derive(Clone, Debug)]
#[derive(Clone, Debug, TypeVisitable, TypeFoldable)]
pub struct Obligation<'db, T> {
#[type_foldable(identity)]
#[type_visitable(ignore)]
/// The reason we have to prove this thing.
pub cause: ObligationCause,
@ -115,39 +120,6 @@ impl<'db> Elaboratable<DbInterner<'db>> for PredicateObligation<'db> {
}
}
impl<'db, T: TypeVisitable<DbInterner<'db>>> TypeVisitable<DbInterner<'db>> for Obligation<'db, T> {
fn visit_with<V: rustc_type_ir::TypeVisitor<DbInterner<'db>>>(
&self,
visitor: &mut V,
) -> V::Result {
rustc_ast_ir::try_visit!(self.param_env.visit_with(visitor));
self.predicate.visit_with(visitor)
}
}
impl<'db, T: TypeFoldable<DbInterner<'db>>> TypeFoldable<DbInterner<'db>> for Obligation<'db, T> {
fn try_fold_with<F: rustc_type_ir::FallibleTypeFolder<DbInterner<'db>>>(
self,
folder: &mut F,
) -> Result<Self, F::Error> {
Ok(Obligation {
cause: self.cause.clone(),
param_env: self.param_env.try_fold_with(folder)?,
predicate: self.predicate.try_fold_with(folder)?,
recursion_depth: self.recursion_depth,
})
}
fn fold_with<F: rustc_type_ir::TypeFolder<DbInterner<'db>>>(self, folder: &mut F) -> Self {
Obligation {
cause: self.cause.clone(),
param_env: self.param_env.fold_with(folder),
predicate: self.predicate.fold_with(folder),
recursion_depth: self.recursion_depth,
}
}
}
impl<'db, T: Copy> Obligation<'db, T> {
pub fn as_goal(&self) -> Goal<'db, T> {
Goal { param_env: self.param_env, predicate: self.predicate }
@ -237,3 +209,35 @@ impl<'db, O> Obligation<'db, O> {
Obligation::with_depth(tcx, self.cause.clone(), self.recursion_depth, self.param_env, value)
}
}
/// Determines whether the type `ty` is known to meet `bound` and
/// returns true if so. Returns false if `ty` either does not meet
/// `bound` or is not known to meet bound (note that this is
/// conservative towards *no impl*, which is the opposite of the
/// `evaluate` methods).
//
// Lifetime renamed from rustc's `'tcx` to `'db` for consistency with the
// rest of this file.
pub fn type_known_to_meet_bound_modulo_regions<'db>(
    infcx: &InferCtxt<'db>,
    param_env: ParamEnv<'db>,
    ty: Ty<'db>,
    def_id: TraitId,
) -> bool {
    // Build the trait ref `ty: Trait` and evaluate it, ignoring regions.
    let trait_ref = TraitRef::new(infcx.interner, def_id.into(), [ty]);
    pred_known_to_hold_modulo_regions(infcx, param_env, trait_ref)
}
/// FIXME(@lcnr): this function doesn't seem right and shouldn't exist?
///
/// Ping me on zulip if you want to use this method and need help with finding
/// an appropriate replacement.
fn pred_known_to_hold_modulo_regions<'db>(
    infcx: &InferCtxt<'db>,
    param_env: ParamEnv<'db>,
    pred: impl Upcast<DbInterner<'db>, Predicate<'db>>,
) -> bool {
    let cause = ObligationCause::dummy();
    let obligation = Obligation::new(infcx.interner, cause, param_env, pred);
    let result = infcx.evaluate_obligation(&obligation);
    debug!(?result);
    result.must_apply_modulo_regions()
}

View file

@ -1,75 +1,81 @@
//! Things related to the Interner in the next-trait-solver.
#![allow(unused)]
#![allow(unused)] // FIXME(next-solver): Remove this.
use std::{fmt, ops::ControlFlow};
pub use tls_db::{attach_db, attach_db_allow_change, with_attached_db};
use base_db::Crate;
use chalk_ir::{ProgramClauseImplication, SeparatorTraitRef, Variances};
use hir_def::lang_item::LangItem;
use hir_def::signatures::{FieldData, FnFlags, ImplFlags, StructFlags, TraitFlags};
use hir_def::{AdtId, BlockId, GenericDefId, TypeAliasId, VariantId};
use hir_def::{AttrDefId, Lookup};
use hir_def::{CallableDefId, EnumVariantId, ItemContainerId, StructId, UnionId};
use hir_def::{
AdtId, AttrDefId, BlockId, CallableDefId, EnumVariantId, GenericDefId, ItemContainerId, Lookup,
StructId, TypeAliasId, UnionId, VariantId,
lang_item::LangItem,
signatures::{FieldData, FnFlags, ImplFlags, StructFlags, TraitFlags},
};
use intern::sym::non_exhaustive;
use intern::{Interned, impl_internable, sym};
use la_arena::Idx;
use rustc_abi::{Align, ReprFlags, ReprOptions};
use rustc_ast_ir::visit::VisitorResult;
use rustc_hash::FxHashSet;
use rustc_index::bit_set::DenseBitSet;
use rustc_type_ir::elaborate::elaborate;
use rustc_type_ir::error::TypeError;
use rustc_type_ir::inherent::{
AdtDef as _, GenericArgs as _, GenericsOf, IntoKind, SliceLike as _, Span as _,
};
use rustc_type_ir::lang_items::{SolverAdtLangItem, SolverLangItem, SolverTraitLangItem};
use rustc_type_ir::solve::SizedTraitKind;
use rustc_index::{IndexVec, bit_set::DenseBitSet};
use rustc_type_ir::{
AliasTerm, AliasTermKind, AliasTy, AliasTyKind, EarlyBinder, FlagComputation, Flags,
ImplPolarity, InferTy, ProjectionPredicate, TraitPredicate, TraitRef, Upcast,
AliasTerm, AliasTermKind, AliasTy, AliasTyKind, BoundVar, CollectAndApply, DebruijnIndex,
EarlyBinder, FlagComputation, Flags, GenericArgKind, ImplPolarity, InferTy,
ProjectionPredicate, RegionKind, TermKind, TraitPredicate, TraitRef, TypeVisitableExt,
UniverseIndex, Upcast, Variance, WithCachedTypeInfo,
elaborate::{self, elaborate},
error::TypeError,
inherent::{
self, AdtDef as _, Const as _, GenericArgs as _, GenericsOf, IntoKind, ParamEnv as _,
Region as _, SliceLike as _, Span as _, Ty as _,
},
ir_print,
lang_items::{SolverAdtLangItem, SolverLangItem, SolverTraitLangItem},
relate,
solve::SizedTraitKind,
};
use salsa::plumbing::AsId;
use smallvec::{SmallVec, smallvec};
use std::fmt;
use std::ops::ControlFlow;
use syntax::ast::SelfParamKind;
use tracing::debug;
use triomphe::Arc;
use rustc_ast_ir::visit::VisitorResult;
use rustc_index::IndexVec;
use rustc_type_ir::TypeVisitableExt;
use rustc_type_ir::{
BoundVar, CollectAndApply, DebruijnIndex, GenericArgKind, RegionKind, TermKind, UniverseIndex,
Variance, WithCachedTypeInfo, elaborate,
inherent::{self, Const as _, Region as _, Ty as _},
ir_print, relate,
use crate::{
ConstScalar, FnAbi, Interner,
db::HirDatabase,
lower_nextsolver::{self, TyLoweringContext},
method_resolution::{ALL_FLOAT_FPS, ALL_INT_FPS, TyFingerprint},
next_solver::{
AdtIdWrapper, BoundConst, CallableIdWrapper, CanonicalVarKind, ClosureIdWrapper,
CoroutineIdWrapper, Ctor, FnSig, FxIndexMap, ImplIdWrapper, InternedWrapperNoDebug,
RegionAssumptions, SolverContext, SolverDefIds, TraitIdWrapper, TypeAliasIdWrapper,
TypingMode,
infer::{
DbInternerInferExt, InferCtxt,
traits::{Obligation, ObligationCause},
},
obligation_ctxt::ObligationCtxt,
util::{ContainsTypeErrors, explicit_item_bounds, for_trait_impls},
},
};
use crate::lower_nextsolver::{self, TyLoweringContext};
use crate::method_resolution::{ALL_FLOAT_FPS, ALL_INT_FPS, TyFingerprint};
use crate::next_solver::infer::InferCtxt;
use crate::next_solver::util::{ContainsTypeErrors, explicit_item_bounds, for_trait_impls};
use crate::next_solver::{
AdtIdWrapper, BoundConst, CallableIdWrapper, CanonicalVarKind, ClosureIdWrapper,
CoroutineIdWrapper, Ctor, FnSig, FxIndexMap, ImplIdWrapper, InternedWrapperNoDebug,
RegionAssumptions, SolverContext, SolverDefIds, TraitIdWrapper, TypeAliasIdWrapper,
};
use crate::{ConstScalar, FnAbi, Interner, db::HirDatabase};
use super::generics::generics;
use super::util::sizedness_constraint_for_ty;
use super::{
Binder, BoundExistentialPredicate, BoundExistentialPredicates, BoundTy, BoundTyKind, Clause,
Clauses, Const, ConstKind, ErrorGuaranteed, ExprConst, ExternalConstraints,
ClauseKind, Clauses, Const, ConstKind, ErrorGuaranteed, ExprConst, ExternalConstraints,
ExternalConstraintsData, GenericArg, GenericArgs, InternedClausesWrapper, ParamConst, ParamEnv,
ParamTy, PlaceholderConst, PlaceholderTy, PredefinedOpaques, PredefinedOpaquesData, Predicate,
PredicateKind, Term, Ty, TyKind, Tys, ValueConst,
PredicateKind, SolverDefId, Term, Ty, TyKind, Tys, Valtree, ValueConst,
abi::Safety,
fold::{BoundVarReplacer, BoundVarReplacerDelegate, FnMutDelegate},
generics::Generics,
generics::{Generics, generics},
mapping::ChalkToNextSolver,
region::{
BoundRegion, BoundRegionKind, EarlyParamRegion, LateParamRegion, PlaceholderRegion, Region,
},
util::sizedness_constraint_for_ty,
};
use super::{ClauseKind, SolverDefId, Valtree};
#[macro_export]
#[doc(hidden)]
@ -127,11 +133,10 @@ macro_rules! _interned_vec_nolifetime_salsa {
pub fn inner(&self) -> &smallvec::SmallVec<[$ty; 2]> {
// SAFETY: ¯\_(ツ)_/¯
salsa::with_attached_database(|db| {
$crate::with_attached_db(|db| {
let inner = self.inner_(db);
unsafe { std::mem::transmute(inner) }
})
.unwrap()
}
}
@ -230,11 +235,10 @@ macro_rules! _interned_vec_db {
pub fn inner(&self) -> &smallvec::SmallVec<[$ty<'db>; 2]> {
// SAFETY: ¯\_(ツ)_/¯
salsa::with_attached_database(|db| {
$crate::with_attached_db(|db| {
let inner = self.inner_(db);
unsafe { std::mem::transmute(inner) }
})
.unwrap()
}
}
@ -285,14 +289,11 @@ unsafe impl Sync for DbInterner<'_> {}
impl<'db> DbInterner<'db> {
// FIXME(next-solver): remove this method
pub fn conjure() -> DbInterner<'db> {
salsa::with_attached_database(|db| DbInterner {
db: unsafe {
std::mem::transmute::<&dyn HirDatabase, &'db dyn HirDatabase>(db.as_view())
},
crate::with_attached_db(|db| DbInterner {
db: unsafe { std::mem::transmute::<&dyn HirDatabase, &'db dyn HirDatabase>(db) },
krate: None,
block: None,
})
.expect("db is expected to be attached")
}
pub fn new_with(
@ -303,6 +304,7 @@ impl<'db> DbInterner<'db> {
DbInterner { db, krate, block }
}
#[inline]
pub fn db(&self) -> &'db dyn HirDatabase {
self.db
}
@ -585,12 +587,11 @@ impl AdtDef {
}
pub fn inner(&self) -> &AdtDefInner {
salsa::with_attached_database(|db| {
crate::with_attached_db(|db| {
let inner = self.data_(db);
// SAFETY: ¯\_(ツ)_/¯
unsafe { std::mem::transmute(inner) }
})
.unwrap()
}
pub fn is_enum(&self) -> bool {
@ -708,21 +709,20 @@ impl<'db> inherent::AdtDef<DbInterner<'db>> for AdtDef {
impl fmt::Debug for AdtDef {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
salsa::with_attached_database(|db| match self.inner().id {
crate::with_attached_db(|db| match self.inner().id {
AdtId::StructId(struct_id) => {
let data = db.as_view::<dyn HirDatabase>().struct_signature(struct_id);
let data = db.struct_signature(struct_id);
f.write_str(data.name.as_str())
}
AdtId::UnionId(union_id) => {
let data = db.as_view::<dyn HirDatabase>().union_signature(union_id);
let data = db.union_signature(union_id);
f.write_str(data.name.as_str())
}
AdtId::EnumId(enum_id) => {
let data = db.as_view::<dyn HirDatabase>().enum_signature(enum_id);
let data = db.enum_signature(enum_id);
f.write_str(data.name.as_str())
}
})
.unwrap_or_else(|| f.write_str(&format!("AdtDef({:?})", self.inner().id)))
}
}
@ -778,13 +778,12 @@ impl<'db> Pattern<'db> {
}
pub fn inner(&self) -> &PatternKind<'db> {
salsa::with_attached_database(|db| {
crate::with_attached_db(|db| {
let inner = &self.kind_(db).0;
// SAFETY: The caller already has access to a `Ty<'db>`, so borrowchecking will
// make sure that our returned value is valid for the lifetime `'db`.
unsafe { std::mem::transmute(inner) }
})
.unwrap()
}
}
@ -1020,17 +1019,7 @@ impl<'db> rustc_type_ir::Interner for DbInterner<'db> {
self,
f: impl FnOnce(&mut rustc_type_ir::search_graph::GlobalCache<Self>) -> R,
) -> R {
salsa::with_attached_database(|db| {
tls_cache::with_cache(
unsafe {
std::mem::transmute::<&dyn HirDatabase, &'db dyn HirDatabase>(
db.as_view::<dyn HirDatabase>(),
)
},
f,
)
})
.unwrap()
tls_cache::with_cache(self.db, f)
}
fn canonical_param_env_cache_get_or_insert<R>(
@ -1118,7 +1107,15 @@ impl<'db> rustc_type_ir::Interner for DbInterner<'db> {
fn alias_ty_kind(self, alias: rustc_type_ir::AliasTy<Self>) -> AliasTyKind {
match alias.def_id {
SolverDefId::InternedOpaqueTyId(_) => AliasTyKind::Opaque,
SolverDefId::TypeAliasId(_) => AliasTyKind::Projection,
SolverDefId::TypeAliasId(type_alias) => match type_alias.loc(self.db).container {
ItemContainerId::ImplId(impl_)
if self.db.impl_signature(impl_).target_trait.is_none() =>
{
AliasTyKind::Inherent
}
ItemContainerId::TraitId(_) | ItemContainerId::ImplId(_) => AliasTyKind::Projection,
_ => AliasTyKind::Free,
},
_ => unimplemented!("Unexpected alias: {:?}", alias.def_id),
}
}
@ -1129,7 +1126,19 @@ impl<'db> rustc_type_ir::Interner for DbInterner<'db> {
) -> rustc_type_ir::AliasTermKind {
match alias.def_id {
SolverDefId::InternedOpaqueTyId(_) => AliasTermKind::OpaqueTy,
SolverDefId::TypeAliasId(_) => AliasTermKind::ProjectionTy,
SolverDefId::TypeAliasId(type_alias) => match type_alias.loc(self.db).container {
ItemContainerId::ImplId(impl_)
if self.db.impl_signature(impl_).target_trait.is_none() =>
{
AliasTermKind::InherentTy
}
ItemContainerId::TraitId(_) | ItemContainerId::ImplId(_) => {
AliasTermKind::ProjectionTy
}
_ => AliasTermKind::FreeTy,
},
// rustc creates an `AnonConst` for consts, and evaluates them with CTFE (normalizing projections
// via selection, similar to ours `find_matching_impl()`, and not with the trait solver), so mimic it.
SolverDefId::ConstId(_) => AliasTermKind::UnevaluatedConst,
_ => unimplemented!("Unexpected alias: {:?}", alias.def_id),
}
@ -1616,7 +1625,7 @@ impl<'db> rustc_type_ir::Interner for DbInterner<'db> {
mut f: impl FnMut(Self::ImplId),
) {
let trait_ = trait_.0;
let self_ty_fp = TyFingerprint::for_trait_impl_ns(&self_ty);
let self_ty_fp = TyFingerprint::for_trait_impl(self_ty);
let fps: &[TyFingerprint] = match self_ty.kind() {
TyKind::Infer(InferTy::IntVar(..)) => &ALL_INT_FPS,
TyKind::Infer(InferTy::FloatVar(..)) => &ALL_FLOAT_FPS,
@ -1692,8 +1701,7 @@ impl<'db> rustc_type_ir::Interner for DbInterner<'db> {
}
fn impl_is_default(self, impl_def_id: Self::ImplId) -> bool {
// FIXME
false
self.db.impl_signature(impl_def_id.0).is_default()
}
#[tracing::instrument(skip(self), ret)]
@ -1747,7 +1755,7 @@ impl<'db> rustc_type_ir::Interner for DbInterner<'db> {
}
fn delay_bug(self, msg: impl ToString) -> Self::ErrorGuaranteed {
panic!("Bug encountered in next-trait-solver.")
panic!("Bug encountered in next-trait-solver: {}", msg.to_string())
}
fn is_general_coroutine(self, coroutine_def_id: Self::CoroutineId) -> bool {
@ -1907,7 +1915,7 @@ impl<'db> rustc_type_ir::Interner for DbInterner<'db> {
match impl_trait_id {
crate::ImplTraitId::ReturnTypeImplTrait(func, idx) => {
let infer = self.db().infer(func.into());
EarlyBinder::bind(infer.type_of_rpit[idx].to_nextsolver(self))
EarlyBinder::bind(infer.type_of_rpit[idx.to_nextsolver(self)])
}
crate::ImplTraitId::TypeAliasImplTrait(..)
| crate::ImplTraitId::AsyncBlockTypeImplTrait(_, _) => {
@ -1945,7 +1953,11 @@ impl<'db> rustc_type_ir::Interner for DbInterner<'db> {
false
}
fn impl_specializes(self, impl_def_id: Self::ImplId, victim_def_id: Self::ImplId) -> bool {
fn impl_specializes(
self,
specializing_impl_def_id: Self::ImplId,
parent_impl_def_id: Self::ImplId,
) -> bool {
false
}
@ -2106,6 +2118,117 @@ TrivialTypeTraversalImpls! {
Placeholder<BoundVar>,
}
/// Thread-local storage for the currently "attached" [`HirDatabase`].
///
/// Callers attach a database for the dynamic extent of a closure via
/// [`attach_db`] / [`attach_db_allow_change`] and retrieve it with
/// [`with_attached_db`].
mod tls_db {
    use std::{cell::Cell, ptr::NonNull};

    use crate::db::HirDatabase;

    /// Per-thread holder of the attached database pointer.
    struct Attached {
        // `None` while no database is attached on this thread.
        database: Cell<Option<NonNull<dyn HirDatabase>>>,
    }

    impl Attached {
        /// Runs `op` with `db` attached. Panics if a *different* database is
        /// already attached on this thread; re-attaching the same database
        /// is a no-op.
        #[inline]
        fn attach<R>(&self, db: &dyn HirDatabase, op: impl FnOnce() -> R) -> R {
            // Guard that detaches the database on drop, but only when this
            // invocation was the one that attached it.
            struct DbGuard<'s> {
                state: Option<&'s Attached>,
            }

            impl<'s> DbGuard<'s> {
                #[inline]
                fn new(attached: &'s Attached, db: &dyn HirDatabase) -> Self {
                    match attached.database.get() {
                        Some(current_db) => {
                            let new_db = NonNull::from(db);
                            if !std::ptr::addr_eq(current_db.as_ptr(), new_db.as_ptr()) {
                                panic!(
                                    "Cannot change attached database. This is likely a bug.\n\
                                    If this is not a bug, you can use `attach_db_allow_change()`."
                                );
                            }
                            // Same database already attached: nothing to undo on drop.
                            Self { state: None }
                        }
                        None => {
                            // Otherwise, set the database.
                            attached.database.set(Some(NonNull::from(db)));
                            Self { state: Some(attached) }
                        }
                    }
                }
            }

            impl Drop for DbGuard<'_> {
                #[inline]
                fn drop(&mut self) {
                    // Reset database to null if we did anything in `DbGuard::new`.
                    if let Some(attached) = self.state {
                        attached.database.set(None);
                    }
                }
            }

            let _guard = DbGuard::new(self, db);
            op()
        }

        /// Runs `op` with `db` attached, restoring whatever was attached
        /// before (possibly nothing) once `op` returns.
        #[inline]
        fn attach_allow_change<R>(&self, db: &dyn HirDatabase, op: impl FnOnce() -> R) -> R {
            // Guard that restores the previously attached database on drop.
            struct DbGuard<'s> {
                state: &'s Attached,
                prev: Option<NonNull<dyn HirDatabase>>,
            }

            impl<'s> DbGuard<'s> {
                #[inline]
                fn new(attached: &'s Attached, db: &dyn HirDatabase) -> Self {
                    let prev = attached.database.replace(Some(NonNull::from(db)));
                    Self { state: attached, prev }
                }
            }

            impl Drop for DbGuard<'_> {
                #[inline]
                fn drop(&mut self) {
                    self.state.database.set(self.prev);
                }
            }

            let _guard = DbGuard::new(self, db);
            op()
        }

        /// Runs `op` with a reference to the attached database.
        ///
        /// # Panics
        ///
        /// Panics if no database is attached on this thread.
        #[inline]
        fn with<R>(&self, op: impl FnOnce(&dyn HirDatabase) -> R) -> R {
            // Message grammar fixed: "but not db is attached" -> "but no db is attached".
            let db = self.database.get().expect("tried to use attached db, but no db is attached");
            // SAFETY: The db is attached, so it must be valid.
            op(unsafe { db.as_ref() })
        }
    }

    thread_local! {
        static GLOBAL_DB: Attached = const { Attached { database: Cell::new(None) } };
    }

    /// Attaches `db` for the duration of `op`; see [`Attached::attach`].
    #[inline]
    pub fn attach_db<R>(db: &dyn HirDatabase, op: impl FnOnce() -> R) -> R {
        GLOBAL_DB.with(|global_db| global_db.attach(db, op))
    }

    /// Like [`attach_db`], but permits replacing an already-attached database.
    #[inline]
    pub fn attach_db_allow_change<R>(db: &dyn HirDatabase, op: impl FnOnce() -> R) -> R {
        GLOBAL_DB.with(|global_db| global_db.attach_allow_change(db, op))
    }

    /// Calls `op` with the currently attached database; panics if none is attached.
    #[inline]
    pub fn with_attached_db<R>(op: impl FnOnce(&dyn HirDatabase) -> R) -> R {
        GLOBAL_DB.with(
            #[inline]
            |a| a.with(op),
        )
    }
}
mod tls_cache {
use crate::db::HirDatabase;

View file

@ -16,10 +16,10 @@ impl<'db> IrPrint<ty::AliasTy<Self>> for DbInterner<'db> {
}
fn print_debug(t: &ty::AliasTy<Self>, fmt: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
salsa::with_attached_database(|db| match t.def_id {
crate::with_attached_db(|db| match t.def_id {
SolverDefId::TypeAliasId(id) => fmt.write_str(&format!(
"AliasTy({:?}[{:?}])",
db.as_view::<dyn HirDatabase>().type_alias_signature(id).name.as_str(),
db.type_alias_signature(id).name.as_str(),
t.args
)),
SolverDefId::InternedOpaqueTyId(id) => {
@ -27,7 +27,6 @@ impl<'db> IrPrint<ty::AliasTy<Self>> for DbInterner<'db> {
}
_ => panic!("Expected TypeAlias or OpaqueTy."),
})
.unwrap_or_else(|| fmt.write_str(&format!("AliasTy({:?}[{:?}])", t.def_id, t.args)))
}
}
@ -37,10 +36,10 @@ impl<'db> IrPrint<ty::AliasTerm<Self>> for DbInterner<'db> {
}
fn print_debug(t: &ty::AliasTerm<Self>, fmt: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
salsa::with_attached_database(|db| match t.def_id {
crate::with_attached_db(|db| match t.def_id {
SolverDefId::TypeAliasId(id) => fmt.write_str(&format!(
"AliasTerm({:?}[{:?}])",
db.as_view::<dyn HirDatabase>().type_alias_signature(id).name.as_str(),
db.type_alias_signature(id).name.as_str(),
t.args
)),
SolverDefId::InternedOpaqueTyId(id) => {
@ -48,7 +47,6 @@ impl<'db> IrPrint<ty::AliasTerm<Self>> for DbInterner<'db> {
}
_ => panic!("Expected TypeAlias or OpaqueTy."),
})
.unwrap_or_else(|| fmt.write_str(&format!("AliasTerm({:?}[{:?}])", t.def_id, t.args)))
}
}
impl<'db> IrPrint<ty::TraitRef<Self>> for DbInterner<'db> {
@ -57,7 +55,7 @@ impl<'db> IrPrint<ty::TraitRef<Self>> for DbInterner<'db> {
}
fn print_debug(t: &ty::TraitRef<Self>, fmt: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
salsa::with_attached_database(|db| {
crate::with_attached_db(|db| {
let trait_ = t.def_id.0;
let self_ty = &t.args.as_slice()[0];
let trait_args = &t.args.as_slice()[1..];
@ -65,18 +63,17 @@ impl<'db> IrPrint<ty::TraitRef<Self>> for DbInterner<'db> {
fmt.write_str(&format!(
"{:?}: {}",
self_ty,
db.as_view::<dyn HirDatabase>().trait_signature(trait_).name.as_str()
db.trait_signature(trait_).name.as_str()
))
} else {
fmt.write_str(&format!(
"{:?}: {}<{:?}>",
self_ty,
db.as_view::<dyn HirDatabase>().trait_signature(trait_).name.as_str(),
db.trait_signature(trait_).name.as_str(),
trait_args
))
}
})
.unwrap_or_else(|| fmt.write_str(&format!("TraitRef({:?}[{:?}])", t.def_id, t.args)))
}
}
impl<'db> IrPrint<ty::TraitPredicate<Self>> for DbInterner<'db> {
@ -118,17 +115,14 @@ impl<'db> IrPrint<ty::ExistentialTraitRef<Self>> for DbInterner<'db> {
t: &ty::ExistentialTraitRef<Self>,
fmt: &mut std::fmt::Formatter<'_>,
) -> std::fmt::Result {
salsa::with_attached_database(|db| {
crate::with_attached_db(|db| {
let trait_ = t.def_id.0;
fmt.write_str(&format!(
"ExistentialTraitRef({:?}[{:?}])",
db.as_view::<dyn HirDatabase>().trait_signature(trait_).name.as_str(),
db.trait_signature(trait_).name.as_str(),
t.args
))
})
.unwrap_or_else(|| {
fmt.write_str(&format!("ExistentialTraitRef({:?}[{:?}])", t.def_id, t.args))
})
}
}
impl<'db> IrPrint<ty::ExistentialProjection<Self>> for DbInterner<'db> {
@ -143,24 +137,18 @@ impl<'db> IrPrint<ty::ExistentialProjection<Self>> for DbInterner<'db> {
t: &ty::ExistentialProjection<Self>,
fmt: &mut std::fmt::Formatter<'_>,
) -> std::fmt::Result {
salsa::with_attached_database(|db| {
crate::with_attached_db(|db| {
let id = match t.def_id {
SolverDefId::TypeAliasId(id) => id,
_ => panic!("Expected trait."),
};
fmt.write_str(&format!(
"ExistentialProjection(({:?}[{:?}]) -> {:?})",
db.as_view::<dyn HirDatabase>().type_alias_signature(id).name.as_str(),
db.type_alias_signature(id).name.as_str(),
t.args,
t.term
))
})
.unwrap_or_else(|| {
fmt.write_str(&format!(
"ExistentialProjection(({:?}[{:?}]) -> {:?})",
t.def_id, t.args, t.term
))
})
}
}
impl<'db> IrPrint<ty::ProjectionPredicate<Self>> for DbInterner<'db> {
@ -175,24 +163,18 @@ impl<'db> IrPrint<ty::ProjectionPredicate<Self>> for DbInterner<'db> {
t: &ty::ProjectionPredicate<Self>,
fmt: &mut std::fmt::Formatter<'_>,
) -> std::fmt::Result {
salsa::with_attached_database(|db| {
crate::with_attached_db(|db| {
let id = match t.projection_term.def_id {
SolverDefId::TypeAliasId(id) => id,
_ => panic!("Expected trait."),
};
fmt.write_str(&format!(
"ProjectionPredicate(({:?}[{:?}]) -> {:?})",
db.as_view::<dyn HirDatabase>().type_alias_signature(id).name.as_str(),
db.type_alias_signature(id).name.as_str(),
t.projection_term.args,
t.term
))
})
.unwrap_or_else(|| {
fmt.write_str(&format!(
"ProjectionPredicate(({:?}[{:?}]) -> {:?})",
t.projection_term.def_id, t.projection_term.args, t.term
))
})
}
}
impl<'db> IrPrint<ty::NormalizesTo<Self>> for DbInterner<'db> {

View file

@ -143,10 +143,29 @@ pub trait ChalkToNextSolver<'db, Out> {
fn to_nextsolver(&self, interner: DbInterner<'db>) -> Out;
}
impl<'db, A, OutA, B, OutB> ChalkToNextSolver<'db, (OutA, OutB)> for (A, B)
where
A: ChalkToNextSolver<'db, OutA>,
B: ChalkToNextSolver<'db, OutB>,
{
fn to_nextsolver(&self, interner: DbInterner<'db>) -> (OutA, OutB) {
(self.0.to_nextsolver(interner), self.1.to_nextsolver(interner))
}
}
pub trait NextSolverToChalk<'db, Out> {
fn to_chalk(self, interner: DbInterner<'db>) -> Out;
}
impl<'db, T, Out> NextSolverToChalk<'db, Option<Out>> for Option<T>
where
T: NextSolverToChalk<'db, Out>,
{
fn to_chalk(self, interner: DbInterner<'db>) -> Option<Out> {
self.map(|it| it.to_chalk(interner))
}
}
impl NextSolverToChalk<'_, chalk_ir::Mutability> for rustc_ast_ir::Mutability {
fn to_chalk(self, interner: DbInterner<'_>) -> chalk_ir::Mutability {
match self {
@ -520,7 +539,7 @@ impl<'db> ChalkToNextSolver<'db, Const<'db>> for chalk_ir::Const<Interner> {
ConstScalar::Bytes(bytes, memory) => {
rustc_type_ir::ConstKind::Value(ValueConst::new(
data.ty.to_nextsolver(interner),
ConstBytes(bytes.clone(), memory.clone()),
ConstBytes { memory: bytes.clone(), memory_map: memory.clone() },
))
}
ConstScalar::UnevaluatedConst(c, subst) => {
@ -633,6 +652,16 @@ impl<'db> ChalkToNextSolver<'db, GenericArg<'db>> for chalk_ir::GenericArg<Inter
}
}
impl<'db> NextSolverToChalk<'db, crate::GenericArg> for GenericArg<'db> {
fn to_chalk(self, interner: DbInterner<'db>) -> crate::GenericArg {
match self {
GenericArg::Ty(ty) => ty.to_chalk(interner).cast(Interner),
GenericArg::Lifetime(region) => region.to_chalk(interner).cast(Interner),
GenericArg::Const(konst) => konst.to_chalk(interner).cast(Interner),
}
}
}
impl<'db> ChalkToNextSolver<'db, GenericArgs<'db>> for chalk_ir::Substitution<Interner> {
fn to_nextsolver(&self, interner: DbInterner<'db>) -> GenericArgs<'db> {
GenericArgs::new_from_iter(
@ -642,6 +671,17 @@ impl<'db> ChalkToNextSolver<'db, GenericArgs<'db>> for chalk_ir::Substitution<In
}
}
impl<'db> ChalkToNextSolver<'db, crate::lower_nextsolver::ImplTraitIdx<'db>>
for crate::ImplTraitIdx
{
fn to_nextsolver(
&self,
interner: DbInterner<'db>,
) -> crate::lower_nextsolver::ImplTraitIdx<'db> {
crate::lower_nextsolver::ImplTraitIdx::from_raw(self.into_raw())
}
}
impl<'db> NextSolverToChalk<'db, chalk_ir::Substitution<Interner>> for GenericArgs<'db> {
fn to_chalk(self, interner: DbInterner<'db>) -> chalk_ir::Substitution<Interner> {
convert_args_for_result(interner, self.as_slice())
@ -1670,8 +1710,10 @@ pub fn convert_const_for_result<'db>(
let bytes = value_const.value.inner();
let value = chalk_ir::ConstValue::Concrete(chalk_ir::ConcreteConst {
// SAFETY: we will never actually use this without a database
interned: ConstScalar::Bytes(bytes.0.clone(), unsafe {
std::mem::transmute::<MemoryMap<'db>, MemoryMap<'static>>(bytes.1.clone())
interned: ConstScalar::Bytes(bytes.memory.clone(), unsafe {
std::mem::transmute::<MemoryMap<'db>, MemoryMap<'static>>(
bytes.memory_map.clone(),
)
}),
});
return chalk_ir::ConstData {

View file

@ -24,12 +24,11 @@ impl<'db> PredefinedOpaques<'db> {
}
pub fn inner(&self) -> &PredefinedOpaquesData<'db> {
salsa::with_attached_database(|db| {
crate::with_attached_db(|db| {
let inner = self.kind_(db);
// SAFETY: ¯\_(ツ)_/¯
unsafe { std::mem::transmute(inner) }
})
.unwrap()
}
}
@ -96,12 +95,11 @@ impl<'db> ExternalConstraints<'db> {
}
pub fn inner(&self) -> &ExternalConstraintsData<'db> {
salsa::with_attached_database(|db| {
crate::with_attached_db(|db| {
let inner = self.kind_(db);
// SAFETY: ¯\_(ツ)_/¯
unsafe { std::mem::transmute(inner) }
})
.unwrap()
}
}

View file

@ -3,6 +3,7 @@
use std::cmp::Ordering;
use intern::Interned;
use macros::{TypeFoldable, TypeVisitable};
use rustc_ast_ir::try_visit;
use rustc_type_ir::{
self as ty, CollectAndApply, DebruijnIndex, EarlyBinder, FlagComputation, Flags,
@ -232,13 +233,12 @@ impl<'db> Predicate<'db> {
}
pub fn inner(&self) -> &WithCachedTypeInfo<Binder<'db, PredicateKind<'db>>> {
salsa::with_attached_database(|db| {
crate::with_attached_db(|db| {
let inner = &self.kind_(db).0;
// SAFETY: The caller already has access to a `Predicate<'db>`, so borrowchecking will
// make sure that our returned value is valid for the lifetime `'db`.
unsafe { std::mem::transmute(inner) }
})
.unwrap()
}
/// Flips the polarity of a Predicate.
@ -303,13 +303,12 @@ impl<'db> Clauses<'db> {
}
pub fn inner(&self) -> &InternedClausesWrapper<'db> {
salsa::with_attached_database(|db| {
crate::with_attached_db(|db| {
let inner = self.inner_(db);
// SAFETY: The caller already has access to a `Clauses<'db>`, so borrowchecking will
// make sure that our returned value is valid for the lifetime `'db`.
unsafe { std::mem::transmute(inner) }
})
.unwrap()
}
}
@ -426,7 +425,7 @@ impl<'db> rustc_type_ir::TypeSuperVisitable<DbInterner<'db>> for Clauses<'db> {
pub struct Clause<'db>(pub(crate) Predicate<'db>);
// We could cram the reveal into the clauses like rustc does, probably
#[derive(Copy, Clone, Debug, Hash, PartialEq, Eq)]
#[derive(Copy, Clone, Debug, Hash, PartialEq, Eq, TypeVisitable, TypeFoldable)]
pub struct ParamEnv<'db> {
pub(crate) clauses: Clauses<'db>,
}
@ -437,28 +436,6 @@ impl<'db> ParamEnv<'db> {
}
}
impl<'db> TypeVisitable<DbInterner<'db>> for ParamEnv<'db> {
fn visit_with<V: rustc_type_ir::TypeVisitor<DbInterner<'db>>>(
&self,
visitor: &mut V,
) -> V::Result {
try_visit!(self.clauses.visit_with(visitor));
V::Result::output()
}
}
impl<'db> TypeFoldable<DbInterner<'db>> for ParamEnv<'db> {
fn try_fold_with<F: rustc_type_ir::FallibleTypeFolder<DbInterner<'db>>>(
self,
folder: &mut F,
) -> Result<Self, F::Error> {
Ok(ParamEnv { clauses: self.clauses.try_fold_with(folder)? })
}
fn fold_with<F: rustc_type_ir::TypeFolder<DbInterner<'db>>>(self, folder: &mut F) -> Self {
ParamEnv { clauses: self.clauses.fold_with(folder) }
}
}
impl<'db> rustc_type_ir::inherent::ParamEnv<DbInterner<'db>> for ParamEnv<'db> {
fn caller_bounds(self) -> impl rustc_type_ir::inherent::SliceLike<Item = Clause<'db>> {
self.clauses

View file

@ -3,7 +3,8 @@
use hir_def::LifetimeParamId;
use intern::{Interned, Symbol};
use rustc_type_ir::{
BoundVar, Flags, INNERMOST, RegionVid, TypeFlags, TypeFoldable, TypeVisitable, VisitorResult,
BoundVar, DebruijnIndex, Flags, INNERMOST, RegionVid, TypeFlags, TypeFoldable, TypeVisitable,
VisitorResult,
inherent::{IntoKind, PlaceholderLike, SliceLike},
relate::Relate,
};
@ -17,25 +18,30 @@ use super::{
pub type RegionKind<'db> = rustc_type_ir::RegionKind<DbInterner<'db>>;
#[salsa::interned(constructor = new_, debug)]
#[salsa::interned(constructor = new_)]
pub struct Region<'db> {
#[returns(ref)]
kind_: RegionKind<'db>,
}
impl std::fmt::Debug for Region<'_> {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
self.kind().fmt(f)
}
}
impl<'db> Region<'db> {
pub fn new(interner: DbInterner<'db>, kind: RegionKind<'db>) -> Self {
Region::new_(interner.db(), kind)
}
pub fn inner(&self) -> &RegionKind<'db> {
salsa::with_attached_database(|db| {
crate::with_attached_db(|db| {
let inner = self.kind_(db);
// SAFETY: The caller already has access to a `Region<'db>`, so borrowchecking will
// make sure that our returned value is valid for the lifetime `'db`.
unsafe { std::mem::transmute::<&RegionKind<'_>, &RegionKind<'db>>(inner) }
})
.unwrap()
}
pub fn new_early_param(
@ -57,6 +63,14 @@ impl<'db> Region<'db> {
Region::new(interner, RegionKind::ReErased)
}
pub fn new_bound(
interner: DbInterner<'db>,
index: DebruijnIndex,
bound: BoundRegion,
) -> Region<'db> {
Region::new(interner, RegionKind::ReBound(index, bound))
}
pub fn is_placeholder(&self) -> bool {
matches!(self.inner(), RegionKind::RePlaceholder(..))
}
@ -69,6 +83,10 @@ impl<'db> Region<'db> {
matches!(self.inner(), RegionKind::ReVar(_))
}
pub fn is_error(&self) -> bool {
matches!(self.inner(), RegionKind::ReError(_))
}
pub fn error(interner: DbInterner<'db>) -> Self {
Region::new(interner, RegionKind::ReError(ErrorGuaranteed))
}

View file

@ -149,13 +149,9 @@ impl<'db> SolverDelegate for SolverContext<'db> {
fn fetch_eligible_assoc_item(
&self,
goal_trait_ref: rustc_type_ir::TraitRef<Self::Interner>,
trait_assoc_def_id: <Self::Interner as rustc_type_ir::Interner>::DefId,
trait_assoc_def_id: SolverDefId,
impl_id: ImplIdWrapper,
) -> Result<Option<<Self::Interner as rustc_type_ir::Interner>::DefId>, ErrorGuaranteed> {
let trait_assoc_id = match trait_assoc_def_id {
SolverDefId::TypeAliasId(id) => id,
_ => panic!("Unexpected SolverDefId"),
};
) -> Result<Option<SolverDefId>, ErrorGuaranteed> {
let trait_ = self
.0
.interner
@ -167,18 +163,47 @@ impl<'db> SolverDelegate for SolverContext<'db> {
.def_id
.0;
let trait_data = trait_.trait_items(self.0.interner.db());
let id =
impl_id.0.impl_items(self.0.interner.db()).items.iter().find_map(|item| -> Option<_> {
match item {
(_, AssocItemId::TypeAliasId(type_alias)) => {
let name = &self.0.interner.db().type_alias_signature(*type_alias).name;
let found_trait_assoc_id = trait_data.associated_type_by_name(name)?;
(found_trait_assoc_id == trait_assoc_id).then_some(*type_alias)
}
_ => None,
}
});
Ok(id.map(SolverDefId::TypeAliasId))
let impl_items = impl_id.0.impl_items(self.0.interner.db());
let id = match trait_assoc_def_id {
SolverDefId::TypeAliasId(trait_assoc_id) => {
let trait_assoc_data = self.0.interner.db.type_alias_signature(trait_assoc_id);
impl_items
.items
.iter()
.find_map(|(impl_assoc_name, impl_assoc_id)| {
if let AssocItemId::TypeAliasId(impl_assoc_id) = *impl_assoc_id
&& *impl_assoc_name == trait_assoc_data.name
{
Some(impl_assoc_id)
} else {
None
}
})
.map(SolverDefId::TypeAliasId)
}
SolverDefId::ConstId(trait_assoc_id) => {
let trait_assoc_data = self.0.interner.db.const_signature(trait_assoc_id);
let trait_assoc_name = trait_assoc_data
.name
.as_ref()
.expect("unnamed consts should not get passed to the solver");
impl_items
.items
.iter()
.find_map(|(impl_assoc_name, impl_assoc_id)| {
if let AssocItemId::ConstId(impl_assoc_id) = *impl_assoc_id
&& impl_assoc_name == trait_assoc_name
{
Some(impl_assoc_id)
} else {
None
}
})
.map(SolverDefId::ConstId)
}
_ => panic!("Unexpected SolverDefId"),
};
Ok(id)
}
fn is_transmutable(
@ -200,9 +225,9 @@ impl<'db> SolverDelegate for SolverContext<'db> {
SolverDefId::StaticId(c) => GeneralConstId::StaticId(c),
_ => unreachable!(),
};
let subst = uv.args.to_chalk(self.interner);
let subst = uv.args;
let ec = self.cx().db.const_eval(c, subst, None).ok()?;
Some(ec.to_nextsolver(self.interner))
Some(ec)
}
fn compute_goal_fast_path(

View file

@ -3,18 +3,22 @@
use std::iter;
use std::ops::ControlFlow;
use hir_def::{GenericDefId, TypeOrConstParamId, TypeParamId};
use hir_def::{
AdtId, DefWithBodyId, GenericDefId, HasModule, TypeOrConstParamId, TypeParamId,
hir::generics::{TypeOrConstParamData, TypeParamProvenance},
lang_item::LangItem,
};
use hir_def::{TraitId, type_ref::Rawness};
use intern::{Interned, Symbol, sym};
use rustc_abi::{Float, Integer, Size};
use rustc_ast_ir::{Mutability, try_visit, visit::VisitorResult};
use rustc_type_ir::TyVid;
use rustc_type_ir::{
BoundVar, ClosureKind, CollectAndApply, FlagComputation, Flags, FloatTy, FloatVid, InferTy,
IntTy, IntVid, Interner, TypeFoldable, TypeSuperFoldable, TypeSuperVisitable, TypeVisitable,
TypeVisitableExt, TypeVisitor, UintTy, WithCachedTypeInfo,
AliasTyKind, BoundVar, ClosureKind, CollectAndApply, FlagComputation, Flags, FloatTy, FloatVid,
InferTy, IntTy, IntVid, Interner, TyVid, TypeFoldable, TypeSuperFoldable, TypeSuperVisitable,
TypeVisitable, TypeVisitableExt, TypeVisitor, UintTy, Upcast, WithCachedTypeInfo,
inherent::{
Abi, AdtDef, BoundVarLike, Const as _, GenericArgs as _, IntoKind, ParamLike,
PlaceholderLike, Safety as _, SliceLike, Ty as _,
Abi, AdtDef as _, BoundExistentialPredicates, BoundVarLike, Const as _, GenericArgs as _,
IntoKind, ParamLike, PlaceholderLike, Safety as _, SliceLike, Ty as _,
},
relate::Relate,
solve::SizedTraitKind,
@ -24,13 +28,14 @@ use salsa::plumbing::{AsId, FromId};
use smallvec::SmallVec;
use crate::{
FnAbi,
FnAbi, ImplTraitId,
db::HirDatabase,
interner::InternedWrapperNoDebug,
next_solver::{
CallableIdWrapper, ClosureIdWrapper, Const, CoroutineIdWrapper, FnSig, GenericArg,
PolyFnSig, TypeAliasIdWrapper,
AdtDef, Binder, CallableIdWrapper, Clause, ClauseKind, ClosureIdWrapper, Const,
CoroutineIdWrapper, FnSig, GenericArg, PolyFnSig, Region, TraitRef, TypeAliasIdWrapper,
abi::Safety,
mapping::ChalkToNextSolver,
util::{CoroutineArgsExt, IntegerTypeExt},
},
};
@ -66,13 +71,16 @@ impl<'db> Ty<'db> {
}
pub fn inner(&self) -> &WithCachedTypeInfo<TyKind<'db>> {
salsa::with_attached_database(|db| {
crate::with_attached_db(|db| {
let inner = &self.kind_(db).0;
// SAFETY: The caller already has access to a `Ty<'db>`, so borrowchecking will
// make sure that our returned value is valid for the lifetime `'db`.
unsafe { std::mem::transmute(inner) }
})
.unwrap()
}
pub fn new_adt(interner: DbInterner<'db>, adt_id: AdtId, args: GenericArgs<'db>) -> Self {
Ty::new(interner, TyKind::Adt(AdtDef::new(adt_id, interner), args))
}
pub fn new_param(interner: DbInterner<'db>, id: TypeParamId, index: u32, name: Symbol) -> Self {
@ -337,6 +345,23 @@ impl<'db> Ty<'db> {
matches!(self.kind(), TyKind::Tuple(tys) if tys.inner().is_empty())
}
#[inline]
pub fn is_raw_ptr(self) -> bool {
matches!(self.kind(), TyKind::RawPtr(..))
}
pub fn is_union(self) -> bool {
self.as_adt().is_some_and(|(adt, _)| matches!(adt, AdtId::UnionId(_)))
}
#[inline]
pub fn as_adt(self) -> Option<(AdtId, GenericArgs<'db>)> {
match self.kind() {
TyKind::Adt(adt_def, args) => Some((adt_def.def_id().0, args)),
_ => None,
}
}
#[inline]
pub fn ty_vid(self) -> Option<TyVid> {
match self.kind() {
@ -370,8 +395,245 @@ impl<'db> Ty<'db> {
/// Whether the type contains some non-lifetime, aka. type or const, error type.
pub fn references_non_lt_error(self) -> bool {
self.references_error() && self.visit_with(&mut ReferencesNonLifetimeError).is_break()
references_non_lt_error(&self)
}
pub fn callable_sig(self, interner: DbInterner<'db>) -> Option<Binder<'db, FnSig<'db>>> {
match self.kind() {
TyKind::FnDef(callable, args) => {
Some(interner.fn_sig(callable).instantiate(interner, args))
}
TyKind::FnPtr(sig, hdr) => Some(sig.with(hdr)),
TyKind::Closure(closure_id, closure_args) => closure_args
.split_closure_args_untupled()
.closure_sig_as_fn_ptr_ty
.callable_sig(interner),
_ => None,
}
}
pub fn as_reference(self) -> Option<(Ty<'db>, Region<'db>, Mutability)> {
match self.kind() {
TyKind::Ref(region, ty, mutability) => Some((ty, region, mutability)),
_ => None,
}
}
pub fn as_reference_or_ptr(self) -> Option<(Ty<'db>, Rawness, Mutability)> {
match self.kind() {
TyKind::Ref(_, ty, mutability) => Some((ty, Rawness::Ref, mutability)),
TyKind::RawPtr(ty, mutability) => Some((ty, Rawness::RawPtr, mutability)),
_ => None,
}
}
pub fn as_tuple(self) -> Option<Tys<'db>> {
match self.kind() {
TyKind::Tuple(tys) => Some(tys),
_ => None,
}
}
pub fn dyn_trait(self) -> Option<TraitId> {
let TyKind::Dynamic(bounds, _) = self.kind() else { return None };
Some(bounds.principal_def_id()?.0)
}
pub fn strip_references(self) -> Ty<'db> {
let mut t = self;
while let TyKind::Ref(_lifetime, ty, _mutability) = t.kind() {
t = ty;
}
t
}
pub fn strip_reference(self) -> Ty<'db> {
self.as_reference().map_or(self, |(ty, _, _)| ty)
}
/// Replace infer vars with errors.
///
/// This needs to be called for every type that may contain infer vars and is yielded to outside inference,
/// as things other than inference do not expect to see infer vars.
pub fn replace_infer_with_error(self, interner: DbInterner<'db>) -> Ty<'db> {
self.fold_with(&mut crate::next_solver::infer::resolve::ReplaceInferWithError::new(
interner,
))
}
pub fn from_builtin_type(
interner: DbInterner<'db>,
ty: hir_def::builtin_type::BuiltinType,
) -> Ty<'db> {
let kind = match ty {
hir_def::builtin_type::BuiltinType::Char => TyKind::Char,
hir_def::builtin_type::BuiltinType::Bool => TyKind::Bool,
hir_def::builtin_type::BuiltinType::Str => TyKind::Str,
hir_def::builtin_type::BuiltinType::Int(int) => TyKind::Int(match int {
hir_def::builtin_type::BuiltinInt::Isize => rustc_type_ir::IntTy::Isize,
hir_def::builtin_type::BuiltinInt::I8 => rustc_type_ir::IntTy::I8,
hir_def::builtin_type::BuiltinInt::I16 => rustc_type_ir::IntTy::I16,
hir_def::builtin_type::BuiltinInt::I32 => rustc_type_ir::IntTy::I32,
hir_def::builtin_type::BuiltinInt::I64 => rustc_type_ir::IntTy::I64,
hir_def::builtin_type::BuiltinInt::I128 => rustc_type_ir::IntTy::I128,
}),
hir_def::builtin_type::BuiltinType::Uint(uint) => TyKind::Uint(match uint {
hir_def::builtin_type::BuiltinUint::Usize => rustc_type_ir::UintTy::Usize,
hir_def::builtin_type::BuiltinUint::U8 => rustc_type_ir::UintTy::U8,
hir_def::builtin_type::BuiltinUint::U16 => rustc_type_ir::UintTy::U16,
hir_def::builtin_type::BuiltinUint::U32 => rustc_type_ir::UintTy::U32,
hir_def::builtin_type::BuiltinUint::U64 => rustc_type_ir::UintTy::U64,
hir_def::builtin_type::BuiltinUint::U128 => rustc_type_ir::UintTy::U128,
}),
hir_def::builtin_type::BuiltinType::Float(float) => TyKind::Float(match float {
hir_def::builtin_type::BuiltinFloat::F16 => rustc_type_ir::FloatTy::F16,
hir_def::builtin_type::BuiltinFloat::F32 => rustc_type_ir::FloatTy::F32,
hir_def::builtin_type::BuiltinFloat::F64 => rustc_type_ir::FloatTy::F64,
hir_def::builtin_type::BuiltinFloat::F128 => rustc_type_ir::FloatTy::F128,
}),
};
Ty::new(interner, kind)
}
pub fn as_builtin(self) -> Option<hir_def::builtin_type::BuiltinType> {
let builtin = match self.kind() {
TyKind::Char => hir_def::builtin_type::BuiltinType::Char,
TyKind::Bool => hir_def::builtin_type::BuiltinType::Bool,
TyKind::Str => hir_def::builtin_type::BuiltinType::Str,
TyKind::Int(int) => hir_def::builtin_type::BuiltinType::Int(match int {
rustc_type_ir::IntTy::Isize => hir_def::builtin_type::BuiltinInt::Isize,
rustc_type_ir::IntTy::I8 => hir_def::builtin_type::BuiltinInt::I8,
rustc_type_ir::IntTy::I16 => hir_def::builtin_type::BuiltinInt::I16,
rustc_type_ir::IntTy::I32 => hir_def::builtin_type::BuiltinInt::I32,
rustc_type_ir::IntTy::I64 => hir_def::builtin_type::BuiltinInt::I64,
rustc_type_ir::IntTy::I128 => hir_def::builtin_type::BuiltinInt::I128,
}),
TyKind::Uint(uint) => hir_def::builtin_type::BuiltinType::Uint(match uint {
rustc_type_ir::UintTy::Usize => hir_def::builtin_type::BuiltinUint::Usize,
rustc_type_ir::UintTy::U8 => hir_def::builtin_type::BuiltinUint::U8,
rustc_type_ir::UintTy::U16 => hir_def::builtin_type::BuiltinUint::U16,
rustc_type_ir::UintTy::U32 => hir_def::builtin_type::BuiltinUint::U32,
rustc_type_ir::UintTy::U64 => hir_def::builtin_type::BuiltinUint::U64,
rustc_type_ir::UintTy::U128 => hir_def::builtin_type::BuiltinUint::U128,
}),
TyKind::Float(float) => hir_def::builtin_type::BuiltinType::Float(match float {
rustc_type_ir::FloatTy::F16 => hir_def::builtin_type::BuiltinFloat::F16,
rustc_type_ir::FloatTy::F32 => hir_def::builtin_type::BuiltinFloat::F32,
rustc_type_ir::FloatTy::F64 => hir_def::builtin_type::BuiltinFloat::F64,
rustc_type_ir::FloatTy::F128 => hir_def::builtin_type::BuiltinFloat::F128,
}),
_ => return None,
};
Some(builtin)
}
// FIXME: Should this be here?
pub fn impl_trait_bounds(self, db: &'db dyn HirDatabase) -> Option<Vec<Clause<'db>>> {
let interner = DbInterner::new_with(db, None, None);
match self.kind() {
TyKind::Alias(AliasTyKind::Opaque, opaque_ty) => {
match db.lookup_intern_impl_trait_id(opaque_ty.def_id.expect_opaque_ty()) {
ImplTraitId::ReturnTypeImplTrait(func, idx) => {
db.return_type_impl_traits_ns(func).map(|it| {
let data = (*it).as_ref().map_bound(|rpit| {
&rpit.impl_traits[idx.to_nextsolver(interner)].predicates
});
data.iter_instantiated_copied(interner, opaque_ty.args.as_slice())
.collect()
})
}
ImplTraitId::TypeAliasImplTrait(alias, idx) => {
db.type_alias_impl_traits_ns(alias).map(|it| {
let data = (*it).as_ref().map_bound(|rpit| {
&rpit.impl_traits[idx.to_nextsolver(interner)].predicates
});
data.iter_instantiated_copied(interner, opaque_ty.args.as_slice())
.collect()
})
}
ImplTraitId::AsyncBlockTypeImplTrait(def, _) => {
let krate = def.module(db).krate();
if let Some(future_trait) = LangItem::Future.resolve_trait(db, krate) {
// This is only used by type walking.
// Parameters will be walked outside, and projection predicate is not used.
// So just provide the Future trait.
let impl_bound = TraitRef::new(
interner,
future_trait.into(),
GenericArgs::new_from_iter(interner, []),
)
.upcast(interner);
Some(vec![impl_bound])
} else {
None
}
}
}
}
TyKind::Param(param) => {
// FIXME: We shouldn't use `param.id` here.
let generic_params = db.generic_params(param.id.parent());
let param_data = &generic_params[param.id.local_id()];
match param_data {
TypeOrConstParamData::TypeParamData(p) => match p.provenance {
TypeParamProvenance::ArgumentImplTrait => {
let predicates = db
.generic_predicates_ns(param.id.parent())
.instantiate_identity()
.into_iter()
.flatten()
.filter(|wc| match wc.kind().skip_binder() {
ClauseKind::Trait(tr) => tr.self_ty() == self,
ClauseKind::Projection(pred) => pred.self_ty() == self,
ClauseKind::TypeOutlives(pred) => pred.0 == self,
_ => false,
})
.collect::<Vec<_>>();
Some(predicates)
}
_ => None,
},
_ => None,
}
}
_ => None,
}
}
/// FIXME: Get rid of this, it's not a good abstraction
pub fn equals_ctor(self, other: Ty<'db>) -> bool {
match (self.kind(), other.kind()) {
(TyKind::Adt(adt, ..), TyKind::Adt(adt2, ..)) => adt.def_id() == adt2.def_id(),
(TyKind::Slice(_), TyKind::Slice(_)) | (TyKind::Array(_, _), TyKind::Array(_, _)) => {
true
}
(TyKind::FnDef(def_id, ..), TyKind::FnDef(def_id2, ..)) => def_id == def_id2,
(TyKind::Alias(_, alias, ..), TyKind::Alias(_, alias2)) => {
alias.def_id == alias2.def_id
}
(TyKind::Foreign(ty_id, ..), TyKind::Foreign(ty_id2, ..)) => ty_id == ty_id2,
(TyKind::Closure(id1, _), TyKind::Closure(id2, _)) => id1 == id2,
(TyKind::Ref(.., mutability), TyKind::Ref(.., mutability2))
| (TyKind::RawPtr(.., mutability), TyKind::RawPtr(.., mutability2)) => {
mutability == mutability2
}
(TyKind::FnPtr(sig, hdr), TyKind::FnPtr(sig2, hdr2)) => sig == sig2 && hdr == hdr2,
(TyKind::Tuple(tys), TyKind::Tuple(tys2)) => tys.len() == tys2.len(),
(TyKind::Str, TyKind::Str)
| (TyKind::Never, TyKind::Never)
| (TyKind::Char, TyKind::Char)
| (TyKind::Bool, TyKind::Bool) => true,
(TyKind::Int(int), TyKind::Int(int2)) => int == int2,
(TyKind::Float(float), TyKind::Float(float2)) => float == float2,
_ => false,
}
}
}
pub fn references_non_lt_error<'db, T: TypeVisitableExt<DbInterner<'db>>>(t: &T) -> bool {
t.references_error() && t.visit_with(&mut ReferencesNonLifetimeError).is_break()
}
struct ReferencesNonLifetimeError;
@ -928,11 +1190,17 @@ impl<'db> rustc_type_ir::inherent::Ty<DbInterner<'db>> for Ty<'db> {
interned_vec_db!(Tys, Ty);
impl<'db> Tys<'db> {
pub fn inputs(&self) -> &[Ty<'db>] {
self.as_slice().split_last().unwrap().1
}
}
impl<'db> rustc_type_ir::inherent::Tys<DbInterner<'db>> for Tys<'db> {
fn inputs(self) -> <DbInterner<'db> as rustc_type_ir::Interner>::FnInputTys {
Tys::new_from_iter(
DbInterner::conjure(),
self.as_slice().split_last().unwrap().1.iter().cloned(),
self.as_slice().split_last().unwrap().1.iter().copied(),
)
}

View file

@ -413,7 +413,7 @@ pub(crate) fn for_trait_impls(
let trait_module = trait_id.module(db);
let type_module = match self_ty_fp {
Some(TyFingerprint::Adt(adt_id)) => Some(adt_id.module(db)),
Some(TyFingerprint::ForeignType(type_id)) => Some(from_foreign_def_id(type_id).module(db)),
Some(TyFingerprint::ForeignType(type_id)) => Some(type_id.module(db)),
Some(TyFingerprint::Dyn(trait_id)) => Some(trait_id.module(db)),
_ => None,
};
@ -486,7 +486,7 @@ pub fn sizedness_constraint_for_ty<'db>(
Tuple(tys) => tys
.into_iter()
.last()
.next_back()
.and_then(|ty| sizedness_constraint_for_ty(interner, sizedness, ty)),
Adt(adt, args) => {

View file

@ -10,7 +10,7 @@ use base_db::{
use hir_def::{ModuleId, db::DefDatabase, nameres::crate_def_map};
use hir_expand::EditionedFileId;
use rustc_hash::FxHashMap;
use salsa::{AsDynDatabase, Durability};
use salsa::Durability;
use span::FileId;
use syntax::TextRange;
use test_utils::extract_annotations;
@ -191,8 +191,7 @@ impl TestDB {
// This is pretty horrible, but `Debug` is the only way to inspect
// QueryDescriptor at the moment.
salsa::EventKind::WillExecute { database_key } => {
let ingredient = self
.as_dyn_database()
let ingredient = (self as &dyn salsa::Database)
.ingredient_debug_name(database_key.ingredient_index());
Some(ingredient.to_string())
}

View file

@ -36,10 +36,11 @@ use test_fixture::WithFixture;
use triomphe::Arc;
use crate::{
InferenceResult, Ty,
InferenceResult,
db::HirDatabase,
display::{DisplayTarget, HirDisplay},
infer::{Adjustment, TypeMismatch},
next_solver::Ty,
setup_tracing,
test_db::TestDB,
};
@ -78,172 +79,172 @@ fn check_impl(
let _tracing = setup_tracing();
let (db, files) = TestDB::with_many_files(ra_fixture);
let mut had_annotations = false;
let mut mismatches = FxHashMap::default();
let mut types = FxHashMap::default();
let mut adjustments = FxHashMap::default();
for (file_id, annotations) in db.extract_annotations() {
for (range, expected) in annotations {
let file_range = FileRange { file_id, range };
if only_types {
types.insert(file_range, expected);
} else if expected.starts_with("type: ") {
types.insert(file_range, expected.trim_start_matches("type: ").to_owned());
} else if expected.starts_with("expected") {
mismatches.insert(file_range, expected);
} else if expected.starts_with("adjustments:") {
adjustments.insert(
file_range,
expected.trim_start_matches("adjustments:").trim().to_owned(),
);
} else {
panic!("unexpected annotation: {expected} @ {range:?}");
crate::attach_db(&db, || {
let mut had_annotations = false;
let mut mismatches = FxHashMap::default();
let mut types = FxHashMap::default();
let mut adjustments = FxHashMap::default();
for (file_id, annotations) in db.extract_annotations() {
for (range, expected) in annotations {
let file_range = FileRange { file_id, range };
if only_types {
types.insert(file_range, expected);
} else if expected.starts_with("type: ") {
types.insert(file_range, expected.trim_start_matches("type: ").to_owned());
} else if expected.starts_with("expected") {
mismatches.insert(file_range, expected);
} else if expected.starts_with("adjustments:") {
adjustments.insert(
file_range,
expected.trim_start_matches("adjustments:").trim().to_owned(),
);
} else {
panic!("unexpected annotation: {expected} @ {range:?}");
}
had_annotations = true;
}
had_annotations = true;
}
}
assert!(had_annotations || allow_none, "no `//^` annotations found");
assert!(had_annotations || allow_none, "no `//^` annotations found");
let mut defs: Vec<(DefWithBodyId, Crate)> = Vec::new();
for file_id in files {
let module = db.module_for_file_opt(file_id.file_id(&db));
let module = match module {
Some(m) => m,
None => continue,
};
let def_map = module.def_map(&db);
visit_module(&db, def_map, module.local_id, &mut |it| {
let def = match it {
ModuleDefId::FunctionId(it) => it.into(),
ModuleDefId::EnumVariantId(it) => it.into(),
ModuleDefId::ConstId(it) => it.into(),
ModuleDefId::StaticId(it) => it.into(),
_ => return,
let mut defs: Vec<(DefWithBodyId, Crate)> = Vec::new();
for file_id in files {
let module = db.module_for_file_opt(file_id.file_id(&db));
let module = match module {
Some(m) => m,
None => continue,
};
defs.push((def, module.krate()))
let def_map = module.def_map(&db);
visit_module(&db, def_map, module.local_id, &mut |it| {
let def = match it {
ModuleDefId::FunctionId(it) => it.into(),
ModuleDefId::EnumVariantId(it) => it.into(),
ModuleDefId::ConstId(it) => it.into(),
ModuleDefId::StaticId(it) => it.into(),
_ => return,
};
defs.push((def, module.krate()))
});
}
defs.sort_by_key(|(def, _)| match def {
DefWithBodyId::FunctionId(it) => {
let loc = it.lookup(&db);
loc.source(&db).value.syntax().text_range().start()
}
DefWithBodyId::ConstId(it) => {
let loc = it.lookup(&db);
loc.source(&db).value.syntax().text_range().start()
}
DefWithBodyId::StaticId(it) => {
let loc = it.lookup(&db);
loc.source(&db).value.syntax().text_range().start()
}
DefWithBodyId::VariantId(it) => {
let loc = it.lookup(&db);
loc.source(&db).value.syntax().text_range().start()
}
});
}
defs.sort_by_key(|(def, _)| match def {
DefWithBodyId::FunctionId(it) => {
let loc = it.lookup(&db);
loc.source(&db).value.syntax().text_range().start()
}
DefWithBodyId::ConstId(it) => {
let loc = it.lookup(&db);
loc.source(&db).value.syntax().text_range().start()
}
DefWithBodyId::StaticId(it) => {
let loc = it.lookup(&db);
loc.source(&db).value.syntax().text_range().start()
}
DefWithBodyId::VariantId(it) => {
let loc = it.lookup(&db);
loc.source(&db).value.syntax().text_range().start()
}
});
let mut unexpected_type_mismatches = String::new();
for (def, krate) in defs {
let display_target = DisplayTarget::from_crate(&db, krate);
let (body, body_source_map) = db.body_with_source_map(def);
let inference_result = db.infer(def);
let mut unexpected_type_mismatches = String::new();
for (def, krate) in defs {
let display_target = DisplayTarget::from_crate(&db, krate);
let (body, body_source_map) = db.body_with_source_map(def);
let inference_result = db.infer(def);
for (pat, mut ty) in inference_result.type_of_pat.iter() {
if let Pat::Bind { id, .. } = body[pat] {
ty = &inference_result.type_of_binding[id];
}
let node = match pat_node(&body_source_map, pat, &db) {
Some(value) => value,
None => continue,
};
let range = node.as_ref().original_file_range_rooted(&db);
if let Some(expected) = types.remove(&range) {
let actual = salsa::attach(&db, || {
if display_source {
for (pat, mut ty) in inference_result.type_of_pat.iter() {
if let Pat::Bind { id, .. } = body[pat] {
ty = &inference_result.type_of_binding[id];
}
let node = match pat_node(&body_source_map, pat, &db) {
Some(value) => value,
None => continue,
};
let range = node.as_ref().original_file_range_rooted(&db);
if let Some(expected) = types.remove(&range) {
let actual = if display_source {
ty.display_source_code(&db, def.module(&db), true).unwrap()
} else {
ty.display_test(&db, display_target).to_string()
}
});
assert_eq!(actual, expected, "type annotation differs at {:#?}", range.range);
};
assert_eq!(actual, expected, "type annotation differs at {:#?}", range.range);
}
}
}
for (expr, ty) in inference_result.type_of_expr.iter() {
let node = match expr_node(&body_source_map, expr, &db) {
Some(value) => value,
None => continue,
};
let range = node.as_ref().original_file_range_rooted(&db);
if let Some(expected) = types.remove(&range) {
let actual = salsa::attach(&db, || {
if display_source {
for (expr, ty) in inference_result.type_of_expr.iter() {
let node = match expr_node(&body_source_map, expr, &db) {
Some(value) => value,
None => continue,
};
let range = node.as_ref().original_file_range_rooted(&db);
if let Some(expected) = types.remove(&range) {
let actual = if display_source {
ty.display_source_code(&db, def.module(&db), true).unwrap()
} else {
ty.display_test(&db, display_target).to_string()
}
});
assert_eq!(actual, expected, "type annotation differs at {:#?}", range.range);
};
assert_eq!(actual, expected, "type annotation differs at {:#?}", range.range);
}
if let Some(expected) = adjustments.remove(&range) {
let adjustments = inference_result
.expr_adjustments
.get(&expr)
.map_or_else(Default::default, |it| &**it);
assert_eq!(
expected,
adjustments
.iter()
.map(|Adjustment { kind, .. }| format!("{kind:?}"))
.join(", ")
);
}
}
if let Some(expected) = adjustments.remove(&range) {
let adjustments = inference_result
.expr_adjustments
.get(&expr)
.map_or_else(Default::default, |it| &**it);
assert_eq!(
expected,
adjustments
.iter()
.map(|Adjustment { kind, .. }| format!("{kind:?}"))
.join(", ")
);
}
}
for (expr_or_pat, mismatch) in inference_result.type_mismatches() {
let Some(node) = (match expr_or_pat {
hir_def::hir::ExprOrPatId::ExprId(expr) => expr_node(&body_source_map, expr, &db),
hir_def::hir::ExprOrPatId::PatId(pat) => pat_node(&body_source_map, pat, &db),
}) else {
continue;
};
let range = node.as_ref().original_file_range_rooted(&db);
let actual = salsa::attach(&db, || {
format!(
for (expr_or_pat, mismatch) in inference_result.type_mismatches() {
let Some(node) = (match expr_or_pat {
hir_def::hir::ExprOrPatId::ExprId(expr) => {
expr_node(&body_source_map, expr, &db)
}
hir_def::hir::ExprOrPatId::PatId(pat) => pat_node(&body_source_map, pat, &db),
}) else {
continue;
};
let range = node.as_ref().original_file_range_rooted(&db);
let actual = format!(
"expected {}, got {}",
mismatch.expected.display_test(&db, display_target),
mismatch.actual.display_test(&db, display_target)
)
});
match mismatches.remove(&range) {
Some(annotation) => assert_eq!(actual, annotation),
None => format_to!(unexpected_type_mismatches, "{:?}: {}\n", range.range, actual),
);
match mismatches.remove(&range) {
Some(annotation) => assert_eq!(actual, annotation),
None => {
format_to!(unexpected_type_mismatches, "{:?}: {}\n", range.range, actual)
}
}
}
}
}
let mut buf = String::new();
if !unexpected_type_mismatches.is_empty() {
format_to!(buf, "Unexpected type mismatches:\n{}", unexpected_type_mismatches);
}
if !mismatches.is_empty() {
format_to!(buf, "Unchecked mismatch annotations:\n");
for m in mismatches {
format_to!(buf, "{:?}: {}\n", m.0.range, m.1);
let mut buf = String::new();
if !unexpected_type_mismatches.is_empty() {
format_to!(buf, "Unexpected type mismatches:\n{}", unexpected_type_mismatches);
}
}
if !types.is_empty() {
format_to!(buf, "Unchecked type annotations:\n");
for t in types {
format_to!(buf, "{:?}: type {}\n", t.0.range, t.1);
if !mismatches.is_empty() {
format_to!(buf, "Unchecked mismatch annotations:\n");
for m in mismatches {
format_to!(buf, "{:?}: {}\n", m.0.range, m.1);
}
}
}
if !adjustments.is_empty() {
format_to!(buf, "Unchecked adjustments annotations:\n");
for t in adjustments {
format_to!(buf, "{:?}: type {:?}\n", t.0.range, t.1);
if !types.is_empty() {
format_to!(buf, "Unchecked type annotations:\n");
for t in types {
format_to!(buf, "{:?}: type {}\n", t.0.range, t.1);
}
}
}
assert!(buf.is_empty(), "{}", buf);
if !adjustments.is_empty() {
format_to!(buf, "Unchecked adjustments annotations:\n");
for t in adjustments {
format_to!(buf, "{:?}: type {:?}\n", t.0.range, t.1);
}
}
assert!(buf.is_empty(), "{}", buf);
});
}
fn expr_node(
@ -282,139 +283,140 @@ fn infer_with_mismatches(content: &str, include_mismatches: bool) -> String {
let _tracing = setup_tracing();
let (db, file_id) = TestDB::with_single_file(content);
let mut buf = String::new();
crate::attach_db(&db, || {
let mut buf = String::new();
let mut infer_def = |inference_result: Arc<InferenceResult>,
body: Arc<Body>,
body_source_map: Arc<BodySourceMap>,
krate: Crate| {
let display_target = DisplayTarget::from_crate(&db, krate);
let mut types: Vec<(InFile<SyntaxNode>, &Ty)> = Vec::new();
let mut mismatches: Vec<(InFile<SyntaxNode>, &TypeMismatch)> = Vec::new();
let mut infer_def = |inference_result: Arc<InferenceResult<'_>>,
body: Arc<Body>,
body_source_map: Arc<BodySourceMap>,
krate: Crate| {
let display_target = DisplayTarget::from_crate(&db, krate);
let mut types: Vec<(InFile<SyntaxNode>, &Ty<'_>)> = Vec::new();
let mut mismatches: Vec<(InFile<SyntaxNode>, &TypeMismatch<'_>)> = Vec::new();
if let Some(self_param) = body.self_param {
let ty = &inference_result.type_of_binding[self_param];
if let Some(syntax_ptr) = body_source_map.self_param_syntax() {
let root = db.parse_or_expand(syntax_ptr.file_id);
let node = syntax_ptr.map(|ptr| ptr.to_node(&root).syntax().clone());
types.push((node, ty));
}
}
for (pat, mut ty) in inference_result.type_of_pat.iter() {
if let Pat::Bind { id, .. } = body[pat] {
ty = &inference_result.type_of_binding[id];
}
let node = match body_source_map.pat_syntax(pat) {
Ok(sp) => {
let root = db.parse_or_expand(sp.file_id);
sp.map(|ptr| ptr.to_node(&root).syntax().clone())
if let Some(self_param) = body.self_param {
let ty = &inference_result.type_of_binding[self_param];
if let Some(syntax_ptr) = body_source_map.self_param_syntax() {
let root = db.parse_or_expand(syntax_ptr.file_id);
let node = syntax_ptr.map(|ptr| ptr.to_node(&root).syntax().clone());
types.push((node, ty));
}
Err(SyntheticSyntax) => continue,
};
types.push((node.clone(), ty));
if let Some(mismatch) = inference_result.type_mismatch_for_pat(pat) {
mismatches.push((node, mismatch));
}
}
for (expr, ty) in inference_result.type_of_expr.iter() {
let node = match body_source_map.expr_syntax(expr) {
Ok(sp) => {
let root = db.parse_or_expand(sp.file_id);
sp.map(|ptr| ptr.to_node(&root).syntax().clone())
for (pat, mut ty) in inference_result.type_of_pat.iter() {
if let Pat::Bind { id, .. } = body[pat] {
ty = &inference_result.type_of_binding[id];
}
let node = match body_source_map.pat_syntax(pat) {
Ok(sp) => {
let root = db.parse_or_expand(sp.file_id);
sp.map(|ptr| ptr.to_node(&root).syntax().clone())
}
Err(SyntheticSyntax) => continue,
};
types.push((node.clone(), ty));
if let Some(mismatch) = inference_result.type_mismatch_for_pat(pat) {
mismatches.push((node, mismatch));
}
Err(SyntheticSyntax) => continue,
};
types.push((node.clone(), ty));
if let Some(mismatch) = inference_result.type_mismatch_for_expr(expr) {
mismatches.push((node, mismatch));
}
}
// sort ranges for consistency
types.sort_by_key(|(node, _)| {
let range = node.value.text_range();
(range.start(), range.end())
});
for (node, ty) in &types {
let (range, text) = if let Some(self_param) = ast::SelfParam::cast(node.value.clone()) {
(self_param.name().unwrap().syntax().text_range(), "self".to_owned())
} else {
(node.value.text_range(), node.value.text().to_string().replace('\n', " "))
};
let macro_prefix = if node.file_id != file_id { "!" } else { "" };
format_to!(
buf,
"{}{:?} '{}': {}\n",
macro_prefix,
range,
ellipsize(text, 15),
ty.display_test(&db, display_target)
);
}
if include_mismatches {
mismatches.sort_by_key(|(node, _)| {
for (expr, ty) in inference_result.type_of_expr.iter() {
let node = match body_source_map.expr_syntax(expr) {
Ok(sp) => {
let root = db.parse_or_expand(sp.file_id);
sp.map(|ptr| ptr.to_node(&root).syntax().clone())
}
Err(SyntheticSyntax) => continue,
};
types.push((node.clone(), ty));
if let Some(mismatch) = inference_result.type_mismatch_for_expr(expr) {
mismatches.push((node, mismatch));
}
}
// sort ranges for consistency
types.sort_by_key(|(node, _)| {
let range = node.value.text_range();
(range.start(), range.end())
});
for (src_ptr, mismatch) in &mismatches {
let range = src_ptr.value.text_range();
let macro_prefix = if src_ptr.file_id != file_id { "!" } else { "" };
for (node, ty) in &types {
let (range, text) =
if let Some(self_param) = ast::SelfParam::cast(node.value.clone()) {
(self_param.name().unwrap().syntax().text_range(), "self".to_owned())
} else {
(node.value.text_range(), node.value.text().to_string().replace('\n', " "))
};
let macro_prefix = if node.file_id != file_id { "!" } else { "" };
format_to!(
buf,
"{}{:?}: expected {}, got {}\n",
"{}{:?} '{}': {}\n",
macro_prefix,
range,
mismatch.expected.display_test(&db, display_target),
mismatch.actual.display_test(&db, display_target),
ellipsize(text, 15),
ty.display_test(&db, display_target)
);
}
}
};
let module = db.module_for_file(file_id.file_id(&db));
let def_map = module.def_map(&db);
let mut defs: Vec<(DefWithBodyId, Crate)> = Vec::new();
visit_module(&db, def_map, module.local_id, &mut |it| {
let def = match it {
ModuleDefId::FunctionId(it) => it.into(),
ModuleDefId::EnumVariantId(it) => it.into(),
ModuleDefId::ConstId(it) => it.into(),
ModuleDefId::StaticId(it) => it.into(),
_ => return,
if include_mismatches {
mismatches.sort_by_key(|(node, _)| {
let range = node.value.text_range();
(range.start(), range.end())
});
for (src_ptr, mismatch) in &mismatches {
let range = src_ptr.value.text_range();
let macro_prefix = if src_ptr.file_id != file_id { "!" } else { "" };
format_to!(
buf,
"{}{:?}: expected {}, got {}\n",
macro_prefix,
range,
mismatch.expected.display_test(&db, display_target),
mismatch.actual.display_test(&db, display_target),
);
}
}
};
defs.push((def, module.krate()))
});
defs.sort_by_key(|(def, _)| match def {
DefWithBodyId::FunctionId(it) => {
let loc = it.lookup(&db);
loc.source(&db).value.syntax().text_range().start()
}
DefWithBodyId::ConstId(it) => {
let loc = it.lookup(&db);
loc.source(&db).value.syntax().text_range().start()
}
DefWithBodyId::StaticId(it) => {
let loc = it.lookup(&db);
loc.source(&db).value.syntax().text_range().start()
}
DefWithBodyId::VariantId(it) => {
let loc = it.lookup(&db);
loc.source(&db).value.syntax().text_range().start()
}
});
for (def, krate) in defs {
let (body, source_map) = db.body_with_source_map(def);
let infer = db.infer(def);
salsa::attach(&db, || {
infer_def(infer, body, source_map, krate);
})
}
buf.truncate(buf.trim_end().len());
buf
let module = db.module_for_file(file_id.file_id(&db));
let def_map = module.def_map(&db);
let mut defs: Vec<(DefWithBodyId, Crate)> = Vec::new();
visit_module(&db, def_map, module.local_id, &mut |it| {
let def = match it {
ModuleDefId::FunctionId(it) => it.into(),
ModuleDefId::EnumVariantId(it) => it.into(),
ModuleDefId::ConstId(it) => it.into(),
ModuleDefId::StaticId(it) => it.into(),
_ => return,
};
defs.push((def, module.krate()))
});
defs.sort_by_key(|(def, _)| match def {
DefWithBodyId::FunctionId(it) => {
let loc = it.lookup(&db);
loc.source(&db).value.syntax().text_range().start()
}
DefWithBodyId::ConstId(it) => {
let loc = it.lookup(&db);
loc.source(&db).value.syntax().text_range().start()
}
DefWithBodyId::StaticId(it) => {
let loc = it.lookup(&db);
loc.source(&db).value.syntax().text_range().start()
}
DefWithBodyId::VariantId(it) => {
let loc = it.lookup(&db);
loc.source(&db).value.syntax().text_range().start()
}
});
for (def, krate) in defs {
let (body, source_map) = db.body_with_source_map(def);
let infer = db.infer(def);
infer_def(infer, body, source_map, krate);
}
buf.truncate(buf.trim_end().len());
buf
})
}
pub(crate) fn visit_module(
@ -556,15 +558,17 @@ fn salsa_bug() {
",
);
let module = db.module_for_file(pos.file_id.file_id(&db));
let crate_def_map = module.def_map(&db);
visit_module(&db, crate_def_map, module.local_id, &mut |def| {
db.infer(match def {
ModuleDefId::FunctionId(it) => it.into(),
ModuleDefId::EnumVariantId(it) => it.into(),
ModuleDefId::ConstId(it) => it.into(),
ModuleDefId::StaticId(it) => it.into(),
_ => return,
crate::attach_db(&db, || {
let module = db.module_for_file(pos.file_id.file_id(&db));
let crate_def_map = module.def_map(&db);
visit_module(&db, crate_def_map, module.local_id, &mut |def| {
db.infer(match def {
ModuleDefId::FunctionId(it) => it.into(),
ModuleDefId::EnumVariantId(it) => it.into(),
ModuleDefId::ConstId(it) => it.into(),
ModuleDefId::StaticId(it) => it.into(),
_ => return,
});
});
});
@ -595,15 +599,17 @@ fn salsa_bug() {
db.set_file_text(pos.file_id.file_id(&db), new_text);
let module = db.module_for_file(pos.file_id.file_id(&db));
let crate_def_map = module.def_map(&db);
visit_module(&db, crate_def_map, module.local_id, &mut |def| {
db.infer(match def {
ModuleDefId::FunctionId(it) => it.into(),
ModuleDefId::EnumVariantId(it) => it.into(),
ModuleDefId::ConstId(it) => it.into(),
ModuleDefId::StaticId(it) => it.into(),
_ => return,
crate::attach_db(&db, || {
let module = db.module_for_file(pos.file_id.file_id(&db));
let crate_def_map = module.def_map(&db);
visit_module(&db, crate_def_map, module.local_id, &mut |def| {
db.infer(match def {
ModuleDefId::FunctionId(it) => it.into(),
ModuleDefId::EnumVariantId(it) => it.into(),
ModuleDefId::ConstId(it) => it.into(),
ModuleDefId::StaticId(it) => it.into(),
_ => return,
});
});
});
})
}

View file

@ -2,111 +2,121 @@ use expect_test::{Expect, expect};
use hir_def::db::DefDatabase;
use hir_expand::{HirFileId, files::InFileWrapper};
use itertools::Itertools;
use salsa::plumbing::FromId;
use span::TextRange;
use syntax::{AstNode, AstPtr};
use test_fixture::WithFixture;
use crate::db::{HirDatabase, InternedClosureId};
use crate::display::{DisplayTarget, HirDisplay};
use crate::mir::MirSpan;
use crate::test_db::TestDB;
use crate::{
db::HirDatabase,
display::{DisplayTarget, HirDisplay},
mir::MirSpan,
test_db::TestDB,
};
use super::{setup_tracing, visit_module};
fn check_closure_captures(#[rust_analyzer::rust_fixture] ra_fixture: &str, expect: Expect) {
let _tracing = setup_tracing();
let (db, file_id) = TestDB::with_single_file(ra_fixture);
let module = db.module_for_file(file_id.file_id(&db));
let def_map = module.def_map(&db);
crate::attach_db(&db, || {
let module = db.module_for_file(file_id.file_id(&db));
let def_map = module.def_map(&db);
let mut defs = Vec::new();
visit_module(&db, def_map, module.local_id, &mut |it| defs.push(it));
let mut defs = Vec::new();
visit_module(&db, def_map, module.local_id, &mut |it| defs.push(it));
let mut captures_info = Vec::new();
for def in defs {
let def = match def {
hir_def::ModuleDefId::FunctionId(it) => it.into(),
hir_def::ModuleDefId::EnumVariantId(it) => it.into(),
hir_def::ModuleDefId::ConstId(it) => it.into(),
hir_def::ModuleDefId::StaticId(it) => it.into(),
_ => continue,
};
let infer = db.infer(def);
let db = &db;
captures_info.extend(infer.closure_info.iter().flat_map(|(closure_id, (captures, _))| {
let closure = db.lookup_intern_closure(InternedClosureId::from_id(closure_id.0));
let source_map = db.body_with_source_map(closure.0).1;
let closure_text_range = source_map
.expr_syntax(closure.1)
.expect("failed to map closure to SyntaxNode")
.value
.text_range();
captures.iter().map(move |capture| {
fn text_range<N: AstNode>(
db: &TestDB,
syntax: InFileWrapper<HirFileId, AstPtr<N>>,
) -> TextRange {
let root = syntax.file_syntax(db);
syntax.value.to_node(&root).syntax().text_range()
}
// FIXME: Deduplicate this with hir::Local::sources().
let (body, source_map) = db.body_with_source_map(closure.0);
let local_text_range = match body.self_param.zip(source_map.self_param_syntax()) {
Some((param, source)) if param == capture.local() => {
format!("{:?}", text_range(db, source))
}
_ => source_map
.patterns_for_binding(capture.local())
.iter()
.map(|&definition| {
text_range(db, source_map.pat_syntax(definition).unwrap())
})
.map(|it| format!("{it:?}"))
.join(", "),
};
let place = capture.display_place(closure.0, db);
let capture_ty = salsa::attach(db, || {
capture
.ty
.skip_binders()
.display_test(db, DisplayTarget::from_crate(db, module.krate()))
.to_string()
});
let spans = capture
.spans()
.iter()
.flat_map(|span| match *span {
MirSpan::ExprId(expr) => {
vec![text_range(db, source_map.expr_syntax(expr).unwrap())]
let mut captures_info = Vec::new();
for def in defs {
let def = match def {
hir_def::ModuleDefId::FunctionId(it) => it.into(),
hir_def::ModuleDefId::EnumVariantId(it) => it.into(),
hir_def::ModuleDefId::ConstId(it) => it.into(),
hir_def::ModuleDefId::StaticId(it) => it.into(),
_ => continue,
};
let infer = db.infer(def);
let db = &db;
captures_info.extend(infer.closure_info.iter().flat_map(
|(closure_id, (captures, _))| {
let closure = db.lookup_intern_closure(*closure_id);
let source_map = db.body_with_source_map(closure.0).1;
let closure_text_range = source_map
.expr_syntax(closure.1)
.expect("failed to map closure to SyntaxNode")
.value
.text_range();
captures.iter().map(move |capture| {
fn text_range<N: AstNode>(
db: &TestDB,
syntax: InFileWrapper<HirFileId, AstPtr<N>>,
) -> TextRange {
let root = syntax.file_syntax(db);
syntax.value.to_node(&root).syntax().text_range()
}
MirSpan::PatId(pat) => {
vec![text_range(db, source_map.pat_syntax(pat).unwrap())]
}
MirSpan::BindingId(binding) => source_map
.patterns_for_binding(binding)
// FIXME: Deduplicate this with hir::Local::sources().
let (body, source_map) = db.body_with_source_map(closure.0);
let local_text_range =
match body.self_param.zip(source_map.self_param_syntax()) {
Some((param, source)) if param == capture.local() => {
format!("{:?}", text_range(db, source))
}
_ => source_map
.patterns_for_binding(capture.local())
.iter()
.map(|&definition| {
text_range(db, source_map.pat_syntax(definition).unwrap())
})
.map(|it| format!("{it:?}"))
.join(", "),
};
let place = capture.display_place(closure.0, db);
let capture_ty = capture
.ty
.skip_binder()
.display_test(db, DisplayTarget::from_crate(db, module.krate()))
.to_string();
let spans = capture
.spans()
.iter()
.map(|pat| text_range(db, source_map.pat_syntax(*pat).unwrap()))
.collect(),
MirSpan::SelfParam => {
vec![text_range(db, source_map.self_param_syntax().unwrap())]
}
MirSpan::Unknown => Vec::new(),
.flat_map(|span| match *span {
MirSpan::ExprId(expr) => {
vec![text_range(db, source_map.expr_syntax(expr).unwrap())]
}
MirSpan::PatId(pat) => {
vec![text_range(db, source_map.pat_syntax(pat).unwrap())]
}
MirSpan::BindingId(binding) => source_map
.patterns_for_binding(binding)
.iter()
.map(|pat| text_range(db, source_map.pat_syntax(*pat).unwrap()))
.collect(),
MirSpan::SelfParam => {
vec![text_range(db, source_map.self_param_syntax().unwrap())]
}
MirSpan::Unknown => Vec::new(),
})
.sorted_by_key(|it| it.start())
.map(|it| format!("{it:?}"))
.join(",");
(
closure_text_range,
local_text_range,
spans,
place,
capture_ty,
capture.kind(),
)
})
.sorted_by_key(|it| it.start())
.map(|it| format!("{it:?}"))
.join(",");
},
));
}
captures_info.sort_unstable_by_key(|(closure_text_range, local_text_range, ..)| {
(closure_text_range.start(), local_text_range.clone())
});
(closure_text_range, local_text_range, spans, place, capture_ty, capture.kind())
})
}));
}
captures_info.sort_unstable_by_key(|(closure_text_range, local_text_range, ..)| {
(closure_text_range.start(), local_text_range.clone())
});
let rendered = captures_info
let rendered = captures_info
.iter()
.map(|(closure_text_range, local_text_range, spans, place, capture_ty, capture_kind)| {
format!(
@ -115,7 +125,8 @@ fn check_closure_captures(#[rust_analyzer::rust_fixture] ra_fixture: &str, expec
})
.join("\n");
expect.assert_eq(&rendered);
expect.assert_eq(&rendered);
})
}
#[test]

View file

@ -49,7 +49,7 @@ fn let_stmt_coerce() {
//- minicore: coerce_unsized
fn test() {
let x: &[isize] = &[1];
// ^^^^ adjustments: Deref(None), Borrow(Ref('?1, Not)), Pointer(Unsize)
// ^^^^ adjustments: Deref(None), Borrow(Ref('?2, Not)), Pointer(Unsize)
let x: *const [isize] = &[1];
// ^^^^ adjustments: Deref(None), Borrow(RawPtr(Not)), Pointer(Unsize)
}
@ -268,7 +268,7 @@ fn takes_ref_str(x: &str) {}
fn returns_string() -> String { loop {} }
fn test() {
takes_ref_str(&{ returns_string() });
// ^^^^^^^^^^^^^^^^^^^^^ adjustments: Deref(None), Deref(Some(OverloadedDeref(Some(Not)))), Borrow(Ref('{error}, Not))
// ^^^^^^^^^^^^^^^^^^^^^ adjustments: Deref(None), Deref(Some(OverloadedDeref(Some(Not)))), Borrow(Ref('{region error}, Not))
}
"#,
);
@ -567,7 +567,7 @@ trait Foo {}
fn test(f: impl Foo, g: &(impl Foo + ?Sized)) {
let _: &dyn Foo = &f;
let _: &dyn Foo = g;
//^ expected &'? (dyn Foo + '?), got &'? impl Foo + ?Sized
//^ expected &'? (dyn Foo + 'static), got &'? impl Foo + ?Sized
}
"#,
);
@ -833,11 +833,11 @@ struct V<T> { t: T }
fn main() {
let a: V<&dyn Tr>;
(a,) = V { t: &S };
//^^^^expected V<&'? S>, got (V<&'? (dyn Tr + '?)>,)
//^^^^expected V<&'? S>, got (V<&'? (dyn Tr + 'static)>,)
let mut a: V<&dyn Tr> = V { t: &S };
(a,) = V { t: &S };
//^^^^expected V<&'? S>, got (V<&'? (dyn Tr + '?)>,)
//^^^^expected V<&'? S>, got (V<&'? (dyn Tr + 'static)>,)
}
"#,
);
@ -854,8 +854,8 @@ impl core::cmp::PartialEq for Struct {
}
fn test() {
Struct == Struct;
// ^^^^^^ adjustments: Borrow(Ref('{error}, Not))
// ^^^^^^ adjustments: Borrow(Ref('{error}, Not))
// ^^^^^^ adjustments: Borrow(Ref('{region error}, Not))
// ^^^^^^ adjustments: Borrow(Ref('{region error}, Not))
}",
);
}
@ -871,7 +871,7 @@ impl core::ops::AddAssign for Struct {
}
fn test() {
Struct += Struct;
// ^^^^^^ adjustments: Borrow(Ref('{error}, Mut))
// ^^^^^^ adjustments: Borrow(Ref('{region error}, Mut))
// ^^^^^^ adjustments:
}",
);

View file

@ -67,11 +67,11 @@ trait B: A {}
fn test<'a>(
_: &(dyn A<Assoc = ()> + Send),
//^ &(dyn A<Assoc = ()> + Send)
//^ &(dyn A<Assoc = ()> + Send + 'static)
_: &'a (dyn Send + A<Assoc = ()>),
//^ &'a (dyn A<Assoc = ()> + Send)
//^ &'a (dyn A<Assoc = ()> + Send + 'static)
_: &dyn B<Assoc = ()>,
//^ &(dyn B<Assoc = ()>)
//^ &(dyn B<Assoc = ()> + 'static)
) {}
"#,
);
@ -85,7 +85,7 @@ fn render_dyn_for_ty() {
trait Foo<'a> {}
fn foo(foo: &dyn for<'a> Foo<'a>) {}
// ^^^ &dyn Foo<'?>
// ^^^ &(dyn Foo<'?> + 'static)
"#,
);
}

View file

@ -44,7 +44,7 @@ fn foo() -> i32 {
"body_shim",
"body_with_source_map_shim",
"trait_environment_shim",
"return_type_impl_traits_shim",
"return_type_impl_traits_ns_shim",
"expr_scopes_shim",
"lang_item",
"crate_lang_items",
@ -131,7 +131,7 @@ fn baz() -> i32 {
"body_shim",
"body_with_source_map_shim",
"trait_environment_shim",
"return_type_impl_traits_shim",
"return_type_impl_traits_ns_shim",
"expr_scopes_shim",
"lang_item",
"crate_lang_items",
@ -143,7 +143,7 @@ fn baz() -> i32 {
"body_shim",
"body_with_source_map_shim",
"trait_environment_shim",
"return_type_impl_traits_shim",
"return_type_impl_traits_ns_shim",
"expr_scopes_shim",
"infer_shim",
"function_signature_shim",
@ -151,7 +151,7 @@ fn baz() -> i32 {
"body_shim",
"body_with_source_map_shim",
"trait_environment_shim",
"return_type_impl_traits_shim",
"return_type_impl_traits_ns_shim",
"expr_scopes_shim",
]
"#]],
@ -586,7 +586,7 @@ fn main() {
"attrs_shim",
"attrs_shim",
"generic_predicates_ns_shim",
"return_type_impl_traits_shim",
"return_type_impl_traits_ns_shim",
"infer_shim",
"function_signature_shim",
"function_signature_with_source_map_shim",
@ -594,7 +594,7 @@ fn main() {
"expr_scopes_shim",
"struct_signature_shim",
"struct_signature_with_source_map_shim",
"generic_predicates_shim",
"generic_predicates_ns_shim",
"value_ty_shim",
"VariantFields::firewall_",
"VariantFields::query_",
@ -610,7 +610,7 @@ fn main() {
"impl_self_ty_with_diagnostics_shim",
"generic_predicates_ns_shim",
"value_ty_shim",
"generic_predicates_shim",
"generic_predicates_ns_shim",
]
"#]],
);
@ -683,11 +683,12 @@ fn main() {
"attrs_shim",
"attrs_shim",
"generic_predicates_ns_shim",
"return_type_impl_traits_shim",
"return_type_impl_traits_ns_shim",
"infer_shim",
"function_signature_with_source_map_shim",
"expr_scopes_shim",
"struct_signature_with_source_map_shim",
"generic_predicates_ns_shim",
"VariantFields::query_",
"inherent_impls_in_crate_shim",
"impl_signature_with_source_map_shim",
@ -697,7 +698,7 @@ fn main() {
"impl_trait_with_diagnostics_shim",
"impl_self_ty_with_diagnostics_shim",
"generic_predicates_ns_shim",
"generic_predicates_shim",
"generic_predicates_ns_shim",
]
"#]],
);
@ -709,8 +710,8 @@ fn execute_assert_events(
required: &[(&str, usize)],
expect: Expect,
) {
let (executed, events) = db.log_executed(f);
salsa::attach(db, || {
crate::attach_db(db, || {
let (executed, events) = db.log_executed(f);
for (event, count) in required {
let n = executed.iter().filter(|it| it.contains(event)).count();
assert_eq!(

View file

@ -199,8 +199,8 @@ fn expr_macro_def_expanded_in_various_places() {
100..119 'for _ ...!() {}': fn into_iter<isize>(isize) -> <isize as IntoIterator>::IntoIter
100..119 'for _ ...!() {}': <isize as IntoIterator>::IntoIter
100..119 'for _ ...!() {}': !
100..119 'for _ ...!() {}': <isize as IntoIterator>::IntoIter
100..119 'for _ ...!() {}': &'? mut <isize as IntoIterator>::IntoIter
100..119 'for _ ...!() {}': {unknown}
100..119 'for _ ...!() {}': &'? mut {unknown}
100..119 'for _ ...!() {}': fn next<{unknown}>(&'? mut {unknown}) -> Option<<{unknown} as Iterator>::Item>
100..119 'for _ ...!() {}': Option<<{unknown} as Iterator>::Item>
100..119 'for _ ...!() {}': ()
@ -293,8 +293,8 @@ fn expr_macro_rules_expanded_in_various_places() {
114..133 'for _ ...!() {}': fn into_iter<isize>(isize) -> <isize as IntoIterator>::IntoIter
114..133 'for _ ...!() {}': <isize as IntoIterator>::IntoIter
114..133 'for _ ...!() {}': !
114..133 'for _ ...!() {}': <isize as IntoIterator>::IntoIter
114..133 'for _ ...!() {}': &'? mut <isize as IntoIterator>::IntoIter
114..133 'for _ ...!() {}': {unknown}
114..133 'for _ ...!() {}': &'? mut {unknown}
114..133 'for _ ...!() {}': fn next<{unknown}>(&'? mut {unknown}) -> Option<<{unknown} as Iterator>::Item>
114..133 'for _ ...!() {}': Option<<{unknown} as Iterator>::Item>
114..133 'for _ ...!() {}': ()

View file

@ -1157,9 +1157,9 @@ fn dyn_trait_super_trait_not_in_scope() {
51..55 'self': &'? Self
64..69 '{ 0 }': u32
66..67 '0': u32
176..177 'd': &'? (dyn Trait + '?)
176..177 'd': &'? (dyn Trait + 'static)
191..207 '{ ...o(); }': ()
197..198 'd': &'? (dyn Trait + '?)
197..198 'd': &'? (dyn Trait + 'static)
197..204 'd.foo()': u32
"#]],
);
@ -2050,7 +2050,7 @@ impl dyn Error + Send {
/// Attempts to downcast the box to a concrete type.
pub fn downcast<T: Error + 'static>(self: Box<Self>) -> Result<Box<T>, Box<dyn Error + Send>> {
let err: Box<dyn Error> = self;
// ^^^^ expected Box<dyn Error + '?>, got Box<dyn Error + Send + 'static>
// ^^^^ expected Box<dyn Error + 'static>, got Box<dyn Error + Send + 'static>
// FIXME, type mismatch should not occur
<dyn Error>::downcast(err).map_err(|_| loop {})
//^^^^^^^^^^^^^^^^^^^^^ type: fn downcast<{unknown}>(Box<dyn Error + 'static>) -> Result<Box<{unknown}>, Box<dyn Error + 'static>>

View file

@ -31,6 +31,7 @@ fn test() {
}
#[test]
#[ignore = "FIXME(next-solver): This currently generates a type mismatch, need to switch opaque type handling to the solver"]
fn associated_type_impl_traits_complex() {
check_types(
r#"

View file

@ -1257,8 +1257,8 @@ fn test() {
16..66 'for _ ... }': fn into_iter<()>(()) -> <() as IntoIterator>::IntoIter
16..66 'for _ ... }': <() as IntoIterator>::IntoIter
16..66 'for _ ... }': !
16..66 'for _ ... }': <() as IntoIterator>::IntoIter
16..66 'for _ ... }': &'? mut <() as IntoIterator>::IntoIter
16..66 'for _ ... }': {unknown}
16..66 'for _ ... }': &'? mut {unknown}
16..66 'for _ ... }': fn next<{unknown}>(&'? mut {unknown}) -> Option<<{unknown} as Iterator>::Item>
16..66 'for _ ... }': Option<<{unknown} as Iterator>::Item>
16..66 'for _ ... }': ()
@ -2363,8 +2363,8 @@ fn test() {
108..125 '{ ... }': usize
118..119 'N': usize
139..157 '{ ...= N; }': ()
149..150 '_': Foo<_>
153..154 'N': Foo<_>
149..150 '_': Foo<N>
153..154 'N': Foo<N>
"#]],
);
}

View file

@ -84,7 +84,7 @@ fn test() -> i32 {
307..359 'core::...n Foo)': DynMetadata<dyn Foo + '?>
327..328 '0': usize
327..340 '0 as *const F': *const F
327..358 '0 as *...yn Foo': *const (dyn Foo + '?)
327..358 '0 as *...yn Foo': *const (dyn Foo + 'static)
370..371 'f': F
374..378 'F {}': F
388..395 'fat_ptr': *const (dyn Foo + '?)

View file

@ -2743,7 +2743,7 @@ impl B for Astruct {}
725..754 '#[rust...1i32])': Box<[i32; 1], Global>
747..753 '[1i32]': [i32; 1]
748..752 '1i32': i32
765..766 'v': Vec<Box<dyn B + '?, Global>, Global>
765..766 'v': Vec<Box<dyn B + 'static, Global>, Global>
786..803 '<[_]> ...to_vec': fn into_vec<Box<dyn B + '?, Global>, Global>(Box<[Box<dyn B + '?, Global>], Global>) -> Vec<Box<dyn B + '?, Global>, Global>
786..860 '<[_]> ...ct)]))': Vec<Box<dyn B + '?, Global>, Global>
804..859 '#[rust...uct)])': Box<[Box<dyn B + '?, Global>; 1], Global>
@ -3692,39 +3692,6 @@ fn main() {
);
}
#[test]
fn infer_bad_lang_item() {
check_infer(
r#"
#[lang="eq"]
pub trait Eq {
fn eq(&self, ) -> bool;
}
#[lang="shr"]
pub trait Shr<RHS,Result> {
fn shr(&self, rhs: &RHS) -> Result;
}
fn test() -> bool {
1 >> 1;
1 == 1;
}
"#,
expect![[r#"
39..43 'self': &'? Self
114..118 'self': &'? Self
120..123 'rhs': &'? RHS
163..190 '{ ...= 1; }': bool
169..170 '1': i32
169..175 '1 >> 1': {unknown}
181..182 '1': i32
181..187 '1 == 1': {unknown}
"#]],
);
}
#[test]
fn macro_semitransparent_hygiene() {
check_types(

View file

@ -1480,24 +1480,24 @@ fn test(x: Box<dyn Trait<u64>>, y: &dyn Trait<u64>) {
expect![[r#"
29..33 'self': &'? Self
54..58 'self': &'? Self
206..208 '{}': Box<dyn Trait<u64> + '?>
218..219 'x': Box<dyn Trait<u64> + '?>
242..243 'y': &'? (dyn Trait<u64> + '?)
206..208 '{}': Box<dyn Trait<u64> + 'static>
218..219 'x': Box<dyn Trait<u64> + 'static>
242..243 'y': &'? (dyn Trait<u64> + 'static)
262..379 '{ ...2(); }': ()
268..269 'x': Box<dyn Trait<u64> + '?>
275..276 'y': &'? (dyn Trait<u64> + '?)
268..269 'x': Box<dyn Trait<u64> + 'static>
275..276 'y': &'? (dyn Trait<u64> + 'static)
286..287 'z': Box<dyn Trait<u64> + '?>
290..293 'bar': fn bar() -> Box<dyn Trait<u64> + 'static>
290..295 'bar()': Box<dyn Trait<u64> + 'static>
301..302 'x': Box<dyn Trait<u64> + '?>
301..302 'x': Box<dyn Trait<u64> + 'static>
301..308 'x.foo()': u64
314..315 'y': &'? (dyn Trait<u64> + '?)
314..315 'y': &'? (dyn Trait<u64> + 'static)
314..321 'y.foo()': u64
327..328 'z': Box<dyn Trait<u64> + '?>
327..334 'z.foo()': u64
340..341 'x': Box<dyn Trait<u64> + '?>
340..341 'x': Box<dyn Trait<u64> + 'static>
340..348 'x.foo2()': i64
354..355 'y': &'? (dyn Trait<u64> + '?)
354..355 'y': &'? (dyn Trait<u64> + 'static)
354..362 'y.foo2()': i64
368..369 'z': Box<dyn Trait<u64> + '?>
368..376 'z.foo2()': i64
@ -1528,7 +1528,7 @@ fn test(s: S<u32, i32>) {
expect![[r#"
32..36 'self': &'? Self
102..106 'self': &'? S<T, U>
128..139 '{ loop {} }': &'? (dyn Trait<T, U> + '?)
128..139 '{ loop {} }': &'? (dyn Trait<T, U> + 'static)
130..137 'loop {}': !
135..137 '{}': ()
175..179 'self': &'? Self
@ -1561,18 +1561,18 @@ fn test(x: Trait, y: &Trait) -> u64 {
}"#,
expect![[r#"
26..30 'self': &'? Self
60..62 '{}': dyn Trait + '?
72..73 'x': dyn Trait + '?
82..83 'y': &'? (dyn Trait + '?)
60..62 '{}': dyn Trait + 'static
72..73 'x': dyn Trait + 'static
82..83 'y': &'? (dyn Trait + 'static)
100..175 '{ ...o(); }': u64
106..107 'x': dyn Trait + '?
113..114 'y': &'? (dyn Trait + '?)
106..107 'x': dyn Trait + 'static
113..114 'y': &'? (dyn Trait + 'static)
124..125 'z': dyn Trait + '?
128..131 'bar': fn bar() -> dyn Trait + 'static
128..133 'bar()': dyn Trait + 'static
139..140 'x': dyn Trait + '?
139..140 'x': dyn Trait + 'static
139..146 'x.foo()': u64
152..153 'y': &'? (dyn Trait + '?)
152..153 'y': &'? (dyn Trait + 'static)
152..159 'y.foo()': u64
165..166 'z': dyn Trait + '?
165..172 'z.foo()': u64
@ -1594,7 +1594,7 @@ fn main() {
expect![[r#"
31..35 'self': &'? S
37..39 '{}': ()
47..48 '_': &'? (dyn Fn(S) + '?)
47..48 '_': &'? (dyn Fn(S) + 'static)
58..60 '{}': ()
71..105 '{ ...()); }': ()
77..78 'f': fn f(&'? (dyn Fn(S) + 'static))
@ -2948,13 +2948,13 @@ fn test(x: &dyn Foo) {
foo(x);
}"#,
expect![[r#"
21..22 'x': &'? (dyn Foo + '?)
21..22 'x': &'? (dyn Foo + 'static)
34..36 '{}': ()
46..47 'x': &'? (dyn Foo + '?)
46..47 'x': &'? (dyn Foo + 'static)
59..74 '{ foo(x); }': ()
65..68 'foo': fn foo(&'? (dyn Foo + 'static))
65..71 'foo(x)': ()
69..70 'x': &'? (dyn Foo + '?)
69..70 'x': &'? (dyn Foo + 'static)
"#]],
);
}
@ -3230,13 +3230,13 @@ fn foo() {
218..324 '{ ...&s); }': ()
228..229 's': Option<i32>
232..236 'None': Option<i32>
246..247 'f': Box<dyn FnOnce(&'? Option<i32>) + '?>
281..310 'Box { ... {}) }': Box<dyn FnOnce(&'? Option<i32>) + '?>
246..247 'f': Box<dyn FnOnce(&'? Option<i32>) + 'static>
281..310 'Box { ... {}) }': Box<dyn FnOnce(&'? Option<i32>) + 'static>
294..308 '&mut (|ps| {})': &'? mut impl FnOnce(&'? Option<i32>)
300..307 '|ps| {}': impl FnOnce(&'? Option<i32>)
301..303 'ps': &'? Option<i32>
305..307 '{}': ()
316..317 'f': Box<dyn FnOnce(&'? Option<i32>) + '?>
316..317 'f': Box<dyn FnOnce(&'? Option<i32>) + 'static>
316..321 'f(&s)': ()
318..320 '&s': &'? Option<i32>
319..320 's': Option<i32>
@ -4272,10 +4272,10 @@ fn f<'a>(v: &dyn Trait<Assoc<i32> = &'a i32>) {
"#,
expect![[r#"
90..94 'self': &'? Self
127..128 'v': &'? (dyn Trait<Assoc<i32> = &'a i32> + '?)
127..128 'v': &'? (dyn Trait<Assoc<i32> = &'a i32> + 'static)
164..195 '{ ...f(); }': ()
170..171 'v': &'? (dyn Trait<Assoc<i32> = &'a i32> + '?)
170..184 'v.get::<i32>()': <dyn Trait<Assoc<i32> = &'a i32> + '? as Trait>::Assoc<i32>
170..171 'v': &'? (dyn Trait<Assoc<i32> = &'a i32> + 'static)
170..184 'v.get::<i32>()': <dyn Trait<Assoc<i32> = &'a i32> + 'static as Trait>::Assoc<i32>
170..192 'v.get:...eref()': {unknown}
"#]],
);

View file

@ -12,7 +12,7 @@ use intern::sym;
use rustc_next_trait_solver::solve::{HasChanged, SolverDelegateEvalExt};
use rustc_type_ir::{
InferCtxtLike, TypingMode,
inherent::{SliceLike, Span as _},
inherent::{IntoKind, SliceLike, Span as _, Ty as _},
solve::Certainty,
};
use span::Edition;
@ -23,11 +23,12 @@ use crate::{
AliasEq, AliasTy, Canonical, DomainGoal, Goal, InEnvironment, Interner, ProjectionTy,
ProjectionTyExt, TraitRefExt, Ty, TyKind, TypeFlags, WhereClause,
db::HirDatabase,
infer::unify::InferenceTable,
from_assoc_type_id,
next_solver::{
DbInterner, GenericArg, ParamEnv, Predicate, SolverContext, Span,
infer::{DbInternerInferExt, InferCtxt},
mapping::{ChalkToNextSolver, convert_canonical_args_for_result},
infer::{DbInternerInferExt, InferCtxt, traits::ObligationCause},
mapping::{ChalkToNextSolver, NextSolverToChalk, convert_canonical_args_for_result},
obligation_ctxt::ObligationCtxt,
util::mini_canonicalize,
},
utils::UnevaluatedConstEvaluatorFolder,
@ -43,7 +44,7 @@ pub struct TraitEnvironment<'db> {
pub krate: Crate,
pub block: Option<BlockId>,
// FIXME make this a BTreeMap
traits_from_clauses: Box<[(Ty, TraitId)]>,
traits_from_clauses: Box<[(crate::next_solver::Ty<'db>, TraitId)]>,
pub env: ParamEnv<'db>,
}
@ -60,7 +61,7 @@ impl<'db> TraitEnvironment<'db> {
pub fn new(
krate: Crate,
block: Option<BlockId>,
traits_from_clauses: Box<[(Ty, TraitId)]>,
traits_from_clauses: Box<[(crate::next_solver::Ty<'db>, TraitId)]>,
env: ParamEnv<'db>,
) -> Arc<Self> {
Arc::new(TraitEnvironment { krate, block, traits_from_clauses, env })
@ -71,13 +72,28 @@ impl<'db> TraitEnvironment<'db> {
Arc::make_mut(this).block = Some(block);
}
pub fn traits_in_scope_from_clauses(&self, ty: Ty) -> impl Iterator<Item = TraitId> + '_ {
pub fn traits_in_scope_from_clauses(
&self,
ty: crate::next_solver::Ty<'db>,
) -> impl Iterator<Item = TraitId> + '_ {
self.traits_from_clauses
.iter()
.filter_map(move |(self_ty, trait_id)| (*self_ty == ty).then_some(*trait_id))
}
}
/// This should be used in `hir` only.
///
/// Structurally normalizes `ty` under the param-env of `env`: if `ty` is an
/// alias type (projection, opaque, ...), it is resolved one level via the
/// trait solver's `ObligationCtxt`; any non-alias type is returned unchanged.
/// Leftover inference variables are replaced with error types so callers
/// never observe them.
pub fn structurally_normalize_ty<'db>(
    infcx: &InferCtxt<'db>,
    ty: crate::next_solver::Ty<'db>,
    env: Arc<TraitEnvironment<'db>>,
) -> crate::next_solver::Ty<'db> {
    // Fast path: only alias types need normalization.
    let crate::next_solver::TyKind::Alias(..) = ty.kind() else { return ty };
    let mut ocx = ObligationCtxt::new(infcx);
    // If normalization fails, fall back to the unnormalized type rather than erroring.
    let ty = ocx.structurally_normalize_ty(&ObligationCause::dummy(), env.env, ty).unwrap_or(ty);
    // Don't leak inference vars out of this helper; turn them into error types.
    ty.replace_infer_with_error(infcx.interner)
}
pub(crate) fn normalize_projection_query<'db>(
db: &'db dyn HirDatabase,
projection: ProjectionTy,
@ -93,9 +109,30 @@ pub(crate) fn normalize_projection_query<'db>(
return TyKind::Error.intern(Interner);
}
let mut table = InferenceTable::new(db, env);
let ty = table.normalize_projection_ty(projection);
table.resolve_completely(ty)
let interner = DbInterner::new_with(db, Some(env.krate), env.block);
// FIXME(next-solver): I believe this should use `PostAnalysis` (this is only used for IDE things),
// but this causes some bug because of our incorrect impl of `type_of_opaque_hir_typeck()` for TAIT
// and async blocks.
let infcx = interner.infer_ctxt().build(TypingMode::Analysis {
defining_opaque_types_and_generators: crate::next_solver::SolverDefIds::new_from_iter(
interner,
[],
),
});
let alias_ty = crate::next_solver::Ty::new_alias(
interner,
rustc_type_ir::AliasTyKind::Projection,
crate::next_solver::AliasTy::new(
interner,
from_assoc_type_id(projection.associated_ty_id).into(),
<crate::Substitution as ChalkToNextSolver<crate::next_solver::GenericArgs<'_>>>::to_nextsolver(&projection.substitution, interner),
),
);
let mut ctxt = crate::next_solver::obligation_ctxt::ObligationCtxt::new(&infcx);
let normalized = ctxt
.structurally_normalize_ty(&ObligationCause::dummy(), env.env, alias_ty)
.unwrap_or(alias_ty);
normalized.replace_infer_with_error(interner).to_chalk(interner)
}
fn identity_subst(
@ -419,3 +456,43 @@ impl FnTrait {
self.lang_item().resolve_trait(db, krate)
}
}
/// This should not be used in `hir-ty`, only in `hir`.
///
/// Checks whether `ty` implements `trait_`. Only the `Self` argument is fixed
/// to `ty`; all remaining generic arguments of the trait are filled with
/// fresh inference variables before solving.
pub fn implements_trait_unique<'db>(
    ty: crate::next_solver::Ty<'db>,
    db: &'db dyn HirDatabase,
    env: Arc<TraitEnvironment<'db>>,
    trait_: TraitId,
) -> bool {
    implements_trait_unique_impl(db, env, trait_, &mut |infcx| {
        // `ty` becomes `Self`; the rest of the trait's args are fresh infer vars.
        infcx.fill_rest_fresh_args(trait_.into(), [ty.into()])
    })
}
/// This should not be used in `hir-ty`, only in `hir`.
///
/// Variant of `implements_trait_unique` where the caller supplies the full
/// generic argument list for `trait_` (including `Self`) instead of having
/// the non-`Self` arguments inferred.
pub fn implements_trait_unique_with_args<'db>(
    db: &'db dyn HirDatabase,
    env: Arc<TraitEnvironment<'db>>,
    trait_: TraitId,
    args: crate::next_solver::GenericArgs<'db>,
) -> bool {
    // The closure ignores the infcx because the args are already complete.
    implements_trait_unique_impl(db, env, trait_, &mut |_| args)
}
/// Shared implementation for the `implements_trait_unique*` helpers: builds an
/// inference context, obtains the trait's generic arguments from `create_args`,
/// constructs the trait goal, and asks the next-generation trait solver whether
/// it holds.
fn implements_trait_unique_impl<'db>(
    db: &'db dyn HirDatabase,
    env: Arc<TraitEnvironment<'db>>,
    trait_: TraitId,
    create_args: &mut dyn FnMut(&InferCtxt<'db>) -> crate::next_solver::GenericArgs<'db>,
) -> bool {
    let interner = DbInterner::new_with(db, Some(env.krate), env.block);
    // FIXME(next-solver): I believe this should be `PostAnalysis`.
    let infcx = interner.infer_ctxt().build(TypingMode::non_body_analysis());
    let args = create_args(&infcx);
    let trait_ref = rustc_type_ir::TraitRef::new_from_args(interner, trait_.into(), args);
    let goal = crate::next_solver::Goal::new(interner, env.env, trait_ref);
    let result = crate::traits::next_trait_solve_in_ctxt(&infcx, goal);
    // Only an unambiguous `Certainty::Yes` counts as "implements"; ambiguity
    // and solver errors are both treated as false.
    matches!(result, Ok((_, Certainty::Yes)))
}

View file

@ -20,11 +20,10 @@ use hir_expand::name::Name;
use intern::sym;
use rustc_abi::TargetDataLayout;
use rustc_hash::FxHashSet;
use rustc_type_ir::inherent::{GenericArgs, IntoKind, SliceLike};
use rustc_type_ir::inherent::{IntoKind, SliceLike};
use smallvec::{SmallVec, smallvec};
use span::Edition;
use crate::next_solver::mapping::NextSolverToChalk;
use crate::{
ChalkTraitId, Const, ConstScalar, Interner, Substitution, TargetFeatures, TraitRef,
TraitRefExt, Ty,
@ -34,7 +33,7 @@ use crate::{
mir::pad16,
next_solver::{
DbInterner,
mapping::{ChalkToNextSolver, convert_args_for_result},
mapping::{ChalkToNextSolver, NextSolverToChalk, convert_args_for_result},
},
to_chalk_trait_id,
};
@ -196,15 +195,6 @@ pub(super) fn associated_type_by_name_including_super_traits(
pub(crate) struct ClosureSubst<'a>(pub(crate) &'a Substitution);
impl<'a> ClosureSubst<'a> {
pub(crate) fn parent_subst(&self, db: &dyn HirDatabase) -> Substitution {
let interner = DbInterner::new_with(db, None, None);
let subst =
<Substitution as ChalkToNextSolver<crate::next_solver::GenericArgs<'_>>>::to_nextsolver(
self.0, interner,
);
subst.split_closure_args().parent_args.to_chalk(interner)
}
pub(crate) fn sig_ty(&self, db: &dyn HirDatabase) -> Ty {
let interner = DbInterner::new_with(db, None, None);
let subst =
@ -310,10 +300,12 @@ impl FallibleTypeFolder<Interner> for UnevaluatedConstEvaluatorFolder<'_> {
if let chalk_ir::ConstValue::Concrete(c) = &constant.data(Interner).value
&& let ConstScalar::UnevaluatedConst(id, subst) = &c.interned
{
if let Ok(eval) = self.db.const_eval(*id, subst.clone(), None) {
return Ok(eval);
let interner = DbInterner::conjure();
if let Ok(eval) = self.db.const_eval(*id, subst.to_nextsolver(interner), None) {
return Ok(eval.to_chalk(interner));
} else {
return Ok(unknown_const(constant.data(Interner).ty.clone()));
return Ok(unknown_const(constant.data(Interner).ty.to_nextsolver(interner))
.to_chalk(interner));
}
}
Ok(constant)

View file

@ -1001,84 +1001,86 @@ struct FixedPoint<T, U, V>(&'static FixedPoint<(), T, U>, V);
// ));
let (db, file_id) = TestDB::with_single_file(ra_fixture);
let mut defs: Vec<GenericDefId> = Vec::new();
let module = db.module_for_file_opt(file_id.file_id(&db)).unwrap();
let def_map = module.def_map(&db);
crate::tests::visit_module(&db, def_map, module.local_id, &mut |it| {
defs.push(match it {
ModuleDefId::FunctionId(it) => it.into(),
ModuleDefId::AdtId(it) => it.into(),
ModuleDefId::ConstId(it) => it.into(),
ModuleDefId::TraitId(it) => it.into(),
ModuleDefId::TypeAliasId(it) => it.into(),
_ => return,
})
});
let defs = defs
.into_iter()
.filter_map(|def| {
Some((
def,
match def {
GenericDefId::FunctionId(it) => {
let loc = it.lookup(&db);
loc.source(&db).value.name().unwrap()
}
GenericDefId::AdtId(AdtId::EnumId(it)) => {
let loc = it.lookup(&db);
loc.source(&db).value.name().unwrap()
}
GenericDefId::AdtId(AdtId::StructId(it)) => {
let loc = it.lookup(&db);
loc.source(&db).value.name().unwrap()
}
GenericDefId::AdtId(AdtId::UnionId(it)) => {
let loc = it.lookup(&db);
loc.source(&db).value.name().unwrap()
}
GenericDefId::TraitId(it) => {
let loc = it.lookup(&db);
loc.source(&db).value.name().unwrap()
}
GenericDefId::TypeAliasId(it) => {
let loc = it.lookup(&db);
loc.source(&db).value.name().unwrap()
}
GenericDefId::ImplId(_) => return None,
GenericDefId::ConstId(_) => return None,
GenericDefId::StaticId(_) => return None,
},
))
})
.sorted_by_key(|(_, n)| n.syntax().text_range().start());
let mut res = String::new();
for (def, name) in defs {
let Some(variances) = db.variances_of(def) else {
continue;
};
format_to!(
res,
"{name}[{}]\n",
generics(&db, def)
.iter()
.map(|(_, param)| match param {
GenericParamDataRef::TypeParamData(type_param_data) => {
type_param_data.name.as_ref().unwrap()
}
GenericParamDataRef::ConstParamData(const_param_data) =>
&const_param_data.name,
GenericParamDataRef::LifetimeParamData(lifetime_param_data) => {
&lifetime_param_data.name
}
})
.zip_eq(&*variances)
.format_with(", ", |(name, var), f| f(&format_args!(
"{}: {var}",
name.as_str()
)))
);
}
crate::attach_db(&db, || {
let mut defs: Vec<GenericDefId> = Vec::new();
let module = db.module_for_file_opt(file_id.file_id(&db)).unwrap();
let def_map = module.def_map(&db);
crate::tests::visit_module(&db, def_map, module.local_id, &mut |it| {
defs.push(match it {
ModuleDefId::FunctionId(it) => it.into(),
ModuleDefId::AdtId(it) => it.into(),
ModuleDefId::ConstId(it) => it.into(),
ModuleDefId::TraitId(it) => it.into(),
ModuleDefId::TypeAliasId(it) => it.into(),
_ => return,
})
});
let defs = defs
.into_iter()
.filter_map(|def| {
Some((
def,
match def {
GenericDefId::FunctionId(it) => {
let loc = it.lookup(&db);
loc.source(&db).value.name().unwrap()
}
GenericDefId::AdtId(AdtId::EnumId(it)) => {
let loc = it.lookup(&db);
loc.source(&db).value.name().unwrap()
}
GenericDefId::AdtId(AdtId::StructId(it)) => {
let loc = it.lookup(&db);
loc.source(&db).value.name().unwrap()
}
GenericDefId::AdtId(AdtId::UnionId(it)) => {
let loc = it.lookup(&db);
loc.source(&db).value.name().unwrap()
}
GenericDefId::TraitId(it) => {
let loc = it.lookup(&db);
loc.source(&db).value.name().unwrap()
}
GenericDefId::TypeAliasId(it) => {
let loc = it.lookup(&db);
loc.source(&db).value.name().unwrap()
}
GenericDefId::ImplId(_) => return None,
GenericDefId::ConstId(_) => return None,
GenericDefId::StaticId(_) => return None,
},
))
})
.sorted_by_key(|(_, n)| n.syntax().text_range().start());
let mut res = String::new();
for (def, name) in defs {
let Some(variances) = db.variances_of(def) else {
continue;
};
format_to!(
res,
"{name}[{}]\n",
generics(&db, def)
.iter()
.map(|(_, param)| match param {
GenericParamDataRef::TypeParamData(type_param_data) => {
type_param_data.name.as_ref().unwrap()
}
GenericParamDataRef::ConstParamData(const_param_data) =>
&const_param_data.name,
GenericParamDataRef::LifetimeParamData(lifetime_param_data) => {
&lifetime_param_data.name
}
})
.zip_eq(&*variances)
.format_with(", ", |(name, var), f| f(&format_args!(
"{}: {var}",
name.as_str()
)))
);
}
expected.assert_eq(&res);
expected.assert_eq(&res);
})
}
}

View file

@ -14,11 +14,7 @@ use hir_expand::{
mod_path::{ModPath, PathKind},
name::Name,
};
use hir_ty::{
db::HirDatabase,
method_resolution,
next_solver::{DbInterner, mapping::ChalkToNextSolver},
};
use hir_ty::{db::HirDatabase, method_resolution};
use crate::{
Adt, AsAssocItem, AssocItem, BuiltinType, Const, ConstParam, DocLinkDef, Enum, ExternCrateDecl,
@ -261,7 +257,7 @@ fn resolve_impl_trait_item<'db>(
name: &Name,
ns: Option<Namespace>,
) -> Option<DocLinkDef> {
let canonical = ty.canonical();
let canonical = ty.canonical(db);
let krate = ty.krate(db);
let environment = resolver
.generic_def()
@ -275,11 +271,7 @@ fn resolve_impl_trait_item<'db>(
//
// FIXME: resolve type aliases (which are not yielded by iterate_path_candidates)
_ = method_resolution::iterate_path_candidates(
&canonical.to_nextsolver(DbInterner::new_with(
db,
Some(environment.krate),
environment.block,
)),
&canonical,
db,
environment,
&traits_in_scope,

Some files were not shown because too many files have changed in this diff Show more