Merge pull request #20454 from jackh726/next-trait-solver-next

Convert some things from chalk_ir types to rustc_type_ir types
This commit is contained in:
Shoyu Vanilla (Flint) 2025-08-17 16:23:17 +00:00 committed by GitHub
commit 37a352d3ec
No known key found for this signature in database
GPG key ID: B5690EEEBB952194
52 changed files with 1872 additions and 1685 deletions

View file

@ -1,44 +1,13 @@
//! The implementation of `RustIrDatabase` for Chalk, which provides information
//! about the code that Chalk needs.
use std::sync::Arc;
use hir_def::{CallableDefId, GenericDefId};
use tracing::debug;
use chalk_ir::{cast::Caster, fold::shift::Shift};
use chalk_solve::rust_ir::{self, WellKnownTrait};
use base_db::Crate;
use hir_def::{
AssocItemId, CallableDefId, GenericDefId, HasModule, ItemContainerId, Lookup, TypeAliasId,
VariantId,
lang_item::LangItem,
signatures::{ImplFlags, StructFlags, TraitFlags},
};
use crate::{
AliasEq, AliasTy, DebruijnIndex, Interner, ProjectionTyExt, QuantifiedWhereClause,
Substitution, TraitRefExt, Ty, TyBuilder, TyExt, TyKind, WhereClause,
db::HirDatabase,
from_assoc_type_id, from_chalk_trait_id,
generics::generics,
lower::LifetimeElisionKind,
make_binders,
mapping::{ToChalk, TypeAliasAsValue, from_chalk},
to_assoc_type_id, to_chalk_trait_id,
};
pub(crate) type AssociatedTyDatum = chalk_solve::rust_ir::AssociatedTyDatum<Interner>;
pub(crate) type TraitDatum = chalk_solve::rust_ir::TraitDatum<Interner>;
pub(crate) type AdtDatum = chalk_solve::rust_ir::AdtDatum<Interner>;
pub(crate) type ImplDatum = chalk_solve::rust_ir::ImplDatum<Interner>;
use crate::{Interner, Substitution, db::HirDatabase, mapping::from_chalk};
pub(crate) type AssocTypeId = chalk_ir::AssocTypeId<Interner>;
pub(crate) type TraitId = chalk_ir::TraitId<Interner>;
pub(crate) type AdtId = chalk_ir::AdtId<Interner>;
pub(crate) type ImplId = chalk_ir::ImplId<Interner>;
pub(crate) type AssociatedTyValueId = chalk_solve::rust_ir::AssociatedTyValueId<Interner>;
pub(crate) type AssociatedTyValue = chalk_solve::rust_ir::AssociatedTyValue<Interner>;
pub(crate) type FnDefDatum = chalk_solve::rust_ir::FnDefDatum<Interner>;
pub(crate) type Variances = chalk_ir::Variances<Interner>;
impl chalk_ir::UnificationDatabase<Interner> for &dyn HirDatabase {
@ -54,340 +23,6 @@ impl chalk_ir::UnificationDatabase<Interner> for &dyn HirDatabase {
}
}
/// Computes the Chalk `AssociatedTyDatum` for an associated type declared in a
/// trait: its lowered bounds plus an implicit `Sized` bound unless `?Sized` was
/// written.
pub(crate) fn associated_ty_data_query(
    db: &dyn HirDatabase,
    type_alias: TypeAliasId,
) -> Arc<AssociatedTyDatum> {
    debug!("associated_ty_data {:?}", type_alias);
    // Only trait-declared associated types reach this query; impl-side type
    // aliases are handled by the associated-ty-value path instead.
    let trait_ = match type_alias.lookup(db).container {
        ItemContainerId::TraitId(t) => t,
        _ => panic!("associated type not in trait"),
    };

    // Lower bounds -- we could/should maybe move this to a separate query in `lower`
    let type_alias_data = db.type_alias_signature(type_alias);
    let generic_params = generics(db, type_alias.into());
    let resolver = hir_def::resolver::HasResolver::resolver(type_alias, db);
    let mut ctx = crate::TyLoweringContext::new(
        db,
        &resolver,
        &type_alias_data.store,
        type_alias.into(),
        LifetimeElisionKind::AnonymousReportError,
    )
    .with_type_param_mode(crate::lower::ParamLoweringMode::Variable);

    // Build the projection `<Self as Trait<...>>::Assoc` filled with bound
    // variables; this is the self type the written bounds apply to.
    let trait_subst = TyBuilder::subst_for_def(db, trait_, None)
        .fill_with_bound_vars(crate::DebruijnIndex::INNERMOST, 0)
        .build();
    let pro_ty = TyBuilder::assoc_type_projection(db, type_alias, Some(trait_subst))
        .fill_with_bound_vars(
            crate::DebruijnIndex::INNERMOST,
            generic_params.parent_generics().map_or(0, |it| it.len()),
        )
        .build();
    let self_ty = TyKind::Alias(AliasTy::Projection(pro_ty)).intern(Interner);

    // Lower each written bound; predicates that cannot be expressed as Chalk
    // inline bounds (e.g. wrong self type) are silently dropped by
    // `generic_predicate_to_inline_bound` returning `None`.
    let mut bounds = Vec::new();
    for bound in &type_alias_data.bounds {
        ctx.lower_type_bound(bound, self_ty.clone(), false).for_each(|pred| {
            if let Some(pred) = generic_predicate_to_inline_bound(db, &pred, &self_ty) {
                bounds.push(pred);
            }
        });
    }

    // Unless the lowering recorded this type as `?Sized`, append the implicit
    // `Sized` bound (when the `Sized` lang item resolves in this crate).
    if !ctx.unsized_types.contains(&self_ty) {
        let sized_trait =
            LangItem::Sized.resolve_trait(db, resolver.krate()).map(to_chalk_trait_id);
        let sized_bound = sized_trait.into_iter().map(|sized_trait| {
            let trait_bound =
                rust_ir::TraitBound { trait_id: sized_trait, args_no_self: Default::default() };
            let inline_bound = rust_ir::InlineBound::TraitBound(trait_bound);
            chalk_ir::Binders::empty(Interner, inline_bound)
        });
        bounds.extend(sized_bound);
        bounds.shrink_to_fit();
    }

    // FIXME: Re-enable where clauses on associated types when an upstream chalk bug is fixed.
    // (rust-analyzer#9052)
    // let where_clauses = convert_where_clauses(db, type_alias.into(), &bound_vars);
    let bound_data = rust_ir::AssociatedTyDatumBound { bounds, where_clauses: vec![] };
    let datum = AssociatedTyDatum {
        trait_id: to_chalk_trait_id(trait_),
        id: to_assoc_type_id(type_alias),
        name: type_alias,
        binders: make_binders(db, &generic_params, bound_data),
    };
    Arc::new(datum)
}
/// Builds the Chalk `TraitDatum` describing one trait to the solver: its
/// flags, where clauses, associated types, and well-known-trait mapping.
pub(crate) fn trait_datum_query(
    db: &dyn HirDatabase,
    krate: Crate,
    trait_id: TraitId,
) -> Arc<TraitDatum> {
    debug!("trait_datum {:?}", trait_id);
    let trait_ = from_chalk_trait_id(trait_id);
    let signature = db.trait_signature(trait_);
    debug!("trait {:?} = {:?}", trait_id, signature.name);

    let params = generics(db, trait_.into());
    let subst = params.bound_vars_subst(db, DebruijnIndex::INNERMOST);

    let flags = rust_ir::TraitFlags {
        auto: signature.flags.contains(TraitFlags::AUTO),
        // "Upstream" means the trait lives in a crate other than the one we are
        // currently solving for.
        upstream: trait_.lookup(db).container.krate() != krate,
        non_enumerable: true,
        coinductive: false, // only relevant for Chalk testing
        // FIXME: set these flags correctly
        marker: false,
        fundamental: signature.flags.contains(TraitFlags::FUNDAMENTAL),
    };

    let bound = rust_ir::TraitDatumBound {
        where_clauses: convert_where_clauses(db, trait_.into(), &subst),
    };
    let assoc_ty_ids = trait_.trait_items(db).associated_types().map(to_assoc_type_id).collect();
    // Lang-item traits (`Sized`, `Fn`, ...) map onto Chalk's built-in knowledge.
    let well_known = db.lang_attr(trait_.into()).and_then(well_known_trait_from_lang_item);

    Arc::new(TraitDatum {
        id: trait_id,
        binders: make_binders(db, &params, bound),
        flags,
        associated_ty_ids: assoc_ty_ids,
        well_known,
    })
}
/// Maps a lang item to the Chalk well-known trait it corresponds to, or `None`
/// when Chalk has no special knowledge of that item.
fn well_known_trait_from_lang_item(item: LangItem) -> Option<WellKnownTrait> {
    match item {
        LangItem::AsyncFn => Some(WellKnownTrait::AsyncFn),
        LangItem::AsyncFnMut => Some(WellKnownTrait::AsyncFnMut),
        LangItem::AsyncFnOnce => Some(WellKnownTrait::AsyncFnOnce),
        LangItem::Clone => Some(WellKnownTrait::Clone),
        LangItem::CoerceUnsized => Some(WellKnownTrait::CoerceUnsized),
        LangItem::Copy => Some(WellKnownTrait::Copy),
        LangItem::Coroutine => Some(WellKnownTrait::Coroutine),
        LangItem::DiscriminantKind => Some(WellKnownTrait::DiscriminantKind),
        LangItem::DispatchFromDyn => Some(WellKnownTrait::DispatchFromDyn),
        LangItem::Drop => Some(WellKnownTrait::Drop),
        LangItem::Fn => Some(WellKnownTrait::Fn),
        LangItem::FnMut => Some(WellKnownTrait::FnMut),
        LangItem::FnOnce => Some(WellKnownTrait::FnOnce),
        LangItem::FnPtrTrait => Some(WellKnownTrait::FnPtr),
        LangItem::Future => Some(WellKnownTrait::Future),
        LangItem::PointeeTrait => Some(WellKnownTrait::Pointee),
        LangItem::Sized => Some(WellKnownTrait::Sized),
        LangItem::Tuple => Some(WellKnownTrait::Tuple),
        LangItem::Unpin => Some(WellKnownTrait::Unpin),
        LangItem::Unsize => Some(WellKnownTrait::Unsize),
        _ => None,
    }
}
/// Computes the Chalk `AdtDatum` (kind, flags, variants, where clauses) for a
/// struct, enum, or union.
pub(crate) fn adt_datum_query(
    db: &dyn HirDatabase,
    krate: Crate,
    chalk_ir::AdtId(adt_id): AdtId,
) -> Arc<AdtDatum> {
    debug!("adt_datum {:?}", adt_id);
    let generic_params = generics(db, adt_id.into());
    let bound_vars_subst = generic_params.bound_vars_subst(db, DebruijnIndex::INNERMOST);
    let where_clauses = convert_where_clauses(db, adt_id.into(), &bound_vars_subst);

    // `fundamental` / `phantom_data` flags are read off struct signatures only.
    let (fundamental, phantom_data) = match adt_id {
        hir_def::AdtId::StructId(s) => {
            let flags = db.struct_signature(s).flags;
            (flags.contains(StructFlags::FUNDAMENTAL), flags.contains(StructFlags::IS_PHANTOM_DATA))
        }
        // FIXME set fundamental flags correctly
        hir_def::AdtId::UnionId(_) => (false, false),
        hir_def::AdtId::EnumId(_) => (false, false),
    };
    let flags = rust_ir::AdtFlags {
        upstream: adt_id.module(db).krate() != krate,
        fundamental,
        phantom_data,
    };

    // Full field lowering, deliberately disabled (kept for reference):
    // this slows down rust-analyzer by quite a bit unfortunately, so enabling this is currently not worth it
    let _variant_id_to_fields = |id: VariantId| {
        let variant_data = &id.fields(db);
        let fields = if variant_data.fields().is_empty() {
            vec![]
        } else {
            let field_types = db.field_types(id);
            variant_data
                .fields()
                .iter()
                .map(|(idx, _)| field_types[idx].clone().substitute(Interner, &bound_vars_subst))
                .filter(|it| !it.contains_unknown())
                .collect()
        };
        rust_ir::AdtVariantDatum { fields }
    };
    // Chalk is instead handed variants with empty field lists (see the disabled
    // closure above for the performance rationale).
    let variant_id_to_fields = |_: VariantId| rust_ir::AdtVariantDatum { fields: vec![] };

    let (kind, variants) = match adt_id {
        hir_def::AdtId::StructId(id) => {
            (rust_ir::AdtKind::Struct, vec![variant_id_to_fields(id.into())])
        }
        hir_def::AdtId::EnumId(id) => {
            let variants = id
                .enum_variants(db)
                .variants
                .iter()
                .map(|&(variant_id, _, _)| variant_id_to_fields(variant_id.into()))
                .collect();
            (rust_ir::AdtKind::Enum, variants)
        }
        hir_def::AdtId::UnionId(id) => {
            (rust_ir::AdtKind::Union, vec![variant_id_to_fields(id.into())])
        }
    };

    let struct_datum_bound = rust_ir::AdtDatumBound { variants, where_clauses };
    let struct_datum = AdtDatum {
        kind,
        id: chalk_ir::AdtId(adt_id),
        binders: make_binders(db, &generic_params, struct_datum_bound),
        flags,
    };
    Arc::new(struct_datum)
}
/// Salsa entry point for Chalk `ImplDatum`s: translates the Chalk impl id back
/// into a HIR id and delegates to `impl_def_datum`.
pub(crate) fn impl_datum_query(
    db: &dyn HirDatabase,
    krate: Crate,
    impl_id: ImplId,
) -> Arc<ImplDatum> {
    let _p = tracing::info_span!("impl_datum_query").entered();
    debug!("impl_datum {:?}", impl_id);
    let hir_impl: hir_def::ImplId = from_chalk(db, impl_id);
    impl_def_datum(db, krate, hir_impl)
}
/// Builds the Chalk `ImplDatum` for an impl: its trait ref, where clauses,
/// polarity, locality, and the associated type values it provides.
fn impl_def_datum(db: &dyn HirDatabase, krate: Crate, impl_id: hir_def::ImplId) -> Arc<ImplDatum> {
    let trait_ref = db
        .impl_trait(impl_id)
        // ImplIds for impls where the trait ref can't be resolved should never reach Chalk
        .expect("invalid impl passed to Chalk")
        .into_value_and_skipped_binders()
        .0;
    let impl_data = db.impl_signature(impl_id);

    let generic_params = generics(db, impl_id.into());
    let bound_vars = generic_params.bound_vars_subst(db, DebruijnIndex::INNERMOST);
    let trait_ = trait_ref.hir_trait_id();
    // Chalk distinguishes impls in the crate being solved for from external ones.
    let impl_type = if impl_id.lookup(db).container.krate() == krate {
        rust_ir::ImplType::Local
    } else {
        rust_ir::ImplType::External
    };
    let where_clauses = convert_where_clauses(db, impl_id.into(), &bound_vars);
    let negative = impl_data.flags.contains(ImplFlags::NEGATIVE);
    let polarity = if negative { rust_ir::Polarity::Negative } else { rust_ir::Polarity::Positive };

    let impl_datum_bound = rust_ir::ImplDatumBound { trait_ref, where_clauses };
    let trait_data = trait_.trait_items(db);
    // Collect the impl's type aliases, keeping only those that correspond by
    // name to an associated type actually declared on the trait.
    let associated_ty_value_ids = impl_id
        .impl_items(db)
        .items
        .iter()
        .filter_map(|(_, item)| match item {
            AssocItemId::TypeAliasId(type_alias) => Some(*type_alias),
            _ => None,
        })
        .filter(|&type_alias| {
            // don't include associated types that don't exist in the trait
            let name = &db.type_alias_signature(type_alias).name;
            trait_data.associated_type_by_name(name).is_some()
        })
        .map(|type_alias| TypeAliasAsValue(type_alias).to_chalk(db))
        .collect();
    debug!("impl_datum: {:?}", impl_datum_bound);
    let impl_datum = ImplDatum {
        binders: make_binders(db, &generic_params, impl_datum_bound),
        impl_type,
        polarity,
        associated_ty_value_ids,
    };
    Arc::new(impl_datum)
}
pub(crate) fn associated_ty_value_query(
db: &dyn HirDatabase,
krate: Crate,
id: AssociatedTyValueId,
) -> Arc<AssociatedTyValue> {
let type_alias: TypeAliasAsValue = from_chalk(db, id);
type_alias_associated_ty_value(db, krate, type_alias.0)
}
/// Computes the `AssociatedTyValue` for a type alias defined inside an impl,
/// i.e. the concrete type an impl assigns to one of the trait's associated
/// types.
fn type_alias_associated_ty_value(
    db: &dyn HirDatabase,
    _krate: Crate,
    type_alias: TypeAliasId,
) -> Arc<AssociatedTyValue> {
    let type_alias_data = db.type_alias_signature(type_alias);
    // This path is only reached for impl items; trait-side associated types go
    // through `associated_ty_data_query`.
    let impl_id = match type_alias.lookup(db).container {
        ItemContainerId::ImplId(it) => it,
        _ => panic!("assoc ty value should be in impl"),
    };

    let trait_ref = db
        .impl_trait(impl_id)
        .expect("assoc ty value should not exist")
        .into_value_and_skipped_binders()
        .0; // we don't return any assoc ty values if the impl'd trait can't be resolved

    // Resolve which trait associated type this impl item corresponds to.
    let assoc_ty = trait_ref
        .hir_trait_id()
        .trait_items(db)
        .associated_type_by_name(&type_alias_data.name)
        .expect("assoc ty value should not exist"); // validated when building the impl data as well
    let (ty, binders) = db.ty(type_alias.into()).into_value_and_skipped_binders();
    let value_bound = rust_ir::AssociatedTyValueBound { ty };
    let value = rust_ir::AssociatedTyValue {
        impl_id: impl_id.to_chalk(db),
        associated_ty_id: to_assoc_type_id(assoc_ty),
        value: chalk_ir::Binders::new(binders, value_bound),
    };
    Arc::new(value)
}
/// Builds the Chalk `FnDefDatum` for a callable definition: its signature
/// (ABI, variadic-ness), where clauses, and input/output types.
pub(crate) fn fn_def_datum_query(
    db: &dyn HirDatabase,
    callable_def: CallableDefId,
) -> Arc<FnDefDatum> {
    let generic_def = GenericDefId::from_callable(db, callable_def);
    let generic_params = generics(db, generic_def);
    let (sig, binders) = db.callable_item_signature(callable_def).into_value_and_skipped_binders();
    let bound_vars = generic_params.bound_vars_subst(db, DebruijnIndex::INNERMOST);
    let where_clauses = convert_where_clauses(db, generic_def, &bound_vars);
    let bound = rust_ir::FnDefDatumBound {
        // Note: Chalk doesn't actually use this information yet as far as I am aware, but we provide it anyway
        inputs_and_output: chalk_ir::Binders::empty(
            Interner,
            rust_ir::FnDefInputsAndOutputDatum {
                argument_types: sig.params().to_vec(),
                return_type: sig.ret().clone(),
            }
            // Shifted in because the empty binder level is wrapped around it.
            .shifted_in(Interner),
        ),
        where_clauses,
    };
    let datum = FnDefDatum {
        id: callable_def.to_chalk(db),
        sig: chalk_ir::FnSig {
            abi: sig.abi,
            // NOTE(review): safety is always reported to Chalk as `Safe` here —
            // presumably unsafety is irrelevant to trait solving; confirm.
            safety: chalk_ir::Safety::Safe,
            variadic: sig.is_varargs,
        },
        binders: chalk_ir::Binders::new(binders, bound),
    };
    Arc::new(datum)
}
pub(crate) fn fn_def_variance_query(
db: &dyn HirDatabase,
callable_def: CallableDefId,
@ -431,59 +66,3 @@ pub(super) fn convert_where_clauses(
.map(|pred| pred.substitute(Interner, substs))
.collect()
}
/// Tries to convert a generic predicate (where clause) into a Chalk
/// `InlineBound`, i.e. a bound written directly on an associated type.
///
/// Returns `None` when the predicate's self type is not `self_ty` (such
/// predicates cannot be expressed without the self type) or when the clause
/// kind has no inline-bound equivalent.
pub(super) fn generic_predicate_to_inline_bound(
    db: &dyn HirDatabase,
    pred: &QuantifiedWhereClause,
    self_ty: &Ty,
) -> Option<chalk_ir::Binders<rust_ir::InlineBound<Interner>>> {
    // An InlineBound is like a GenericPredicate, except the self type is left out.
    // We don't have a special type for this, but Chalk does.
    // Shift `self_ty` in by one binder level so it can be compared with the
    // predicate's contents, which live under `pred`'s binders.
    let self_ty_shifted_in = self_ty.clone().shifted_in_from(Interner, DebruijnIndex::ONE);
    let (pred, binders) = pred.as_ref().into_value_and_skipped_binders();
    match pred {
        WhereClause::Implemented(trait_ref) => {
            if trait_ref.self_type_parameter(Interner) != self_ty_shifted_in {
                // we can only convert predicates back to type bounds if they
                // have the expected self type
                return None;
            }
            // Chalk's TraitBound stores the trait arguments without the self
            // parameter, which sits at index 0 of the substitution.
            let args_no_self = trait_ref.substitution.as_slice(Interner)[1..]
                .iter()
                .cloned()
                .casted(Interner)
                .collect();
            let trait_bound = rust_ir::TraitBound { trait_id: trait_ref.trait_id, args_no_self };
            Some(chalk_ir::Binders::new(binders, rust_ir::InlineBound::TraitBound(trait_bound)))
        }
        WhereClause::AliasEq(AliasEq { alias: AliasTy::Projection(projection_ty), ty }) => {
            // A projection equality becomes an `AliasEqBound`. Split the
            // projection's substitution into the parent (trait) arguments and
            // the associated type's own arguments.
            let generics = generics(db, from_assoc_type_id(projection_ty.associated_ty_id).into());
            let parent_len = generics.parent_generics().map_or(0, |g| g.len_self());
            let (trait_args, assoc_args) =
                projection_ty.substitution.as_slice(Interner).split_at(parent_len);
            let (self_ty, args_no_self) =
                trait_args.split_first().expect("projection without trait self type");
            if self_ty.assert_ty_ref(Interner) != &self_ty_shifted_in {
                return None;
            }
            let args_no_self = args_no_self.iter().cloned().casted(Interner).collect();
            let parameters = assoc_args.to_vec();

            let alias_eq_bound = rust_ir::AliasEqBound {
                value: ty.clone(),
                trait_bound: rust_ir::TraitBound {
                    trait_id: to_chalk_trait_id(projection_ty.trait_(db)),
                    args_no_self,
                },
                associated_ty_id: projection_ty.associated_ty_id,
                parameters,
            };
            Some(chalk_ir::Binders::new(
                binders,
                rust_ir::InlineBound::AliasEqBound(alias_eq_bound),
            ))
        }
        _ => None,
    }
}

View file

@ -15,8 +15,14 @@ use triomphe::Arc;
use crate::{
Const, ConstData, ConstScalar, ConstValue, GenericArg, Interner, MemoryMap, Substitution,
TraitEnvironment, Ty, TyBuilder, db::HirDatabase, display::DisplayTarget, generics::Generics,
infer::InferenceContext, lower::ParamLoweringMode, to_placeholder_idx,
TraitEnvironment, Ty, TyBuilder,
db::HirDatabase,
display::DisplayTarget,
generics::Generics,
infer::InferenceContext,
lower::ParamLoweringMode,
next_solver::{DbInterner, mapping::ChalkToNextSolver},
to_placeholder_idx,
};
use super::mir::{MirEvalError, MirLowerError, interpret_mir, lower_to_mir, pad16};
@ -157,7 +163,8 @@ pub fn intern_const_ref(
ty: Ty,
krate: Crate,
) -> Const {
let layout = || db.layout_of_ty(ty.clone(), TraitEnvironment::empty(krate));
let interner = DbInterner::new_with(db, Some(krate), None);
let layout = || db.layout_of_ty(ty.to_nextsolver(interner), TraitEnvironment::empty(krate));
let bytes = match value {
LiteralConstRef::Int(i) => {
// FIXME: We should handle failure of layout better.

View file

@ -76,7 +76,7 @@ fn check_str(#[rust_analyzer::rust_fixture] ra_fixture: &str, answer: &str) {
#[track_caller]
fn check_answer(
#[rust_analyzer::rust_fixture] ra_fixture: &str,
check: impl FnOnce(&[u8], &MemoryMap),
check: impl FnOnce(&[u8], &MemoryMap<'_>),
) {
let (db, file_ids) = TestDB::with_many_files(ra_fixture);
let file_id = *file_ids.last().unwrap();

View file

@ -92,7 +92,7 @@ pub fn intern_const_ref<'a>(
krate: Crate,
) -> Const<'a> {
let interner = DbInterner::new_with(db, Some(krate), None);
let layout = db.layout_of_ty_ns(ty, TraitEnvironment::empty(krate));
let layout = db.layout_of_ty(ty, TraitEnvironment::empty(krate));
let kind = match value {
LiteralConstRef::Int(i) => {
// FIXME: We should handle failure of layout better.
@ -132,9 +132,9 @@ pub fn usize_const<'db>(db: &'db dyn HirDatabase, value: Option<u128>, krate: Cr
)
}
pub fn try_const_usize<'db>(db: &'db dyn HirDatabase, c: &Const<'db>) -> Option<u128> {
pub fn try_const_usize<'db>(db: &'db dyn HirDatabase, c: Const<'db>) -> Option<u128> {
let interner = DbInterner::new_with(db, None, None);
match (*c).kind() {
match c.kind() {
ConstKind::Param(_) => None,
ConstKind::Infer(_) => None,
ConstKind::Bound(_, _) => None,
@ -147,7 +147,7 @@ pub fn try_const_usize<'db>(db: &'db dyn HirDatabase, c: &Const<'db>) -> Option<
};
let subst = convert_args_for_result(interner, unevaluated_const.args.as_slice());
let ec = db.const_eval(c, subst, None).ok()?.to_nextsolver(interner);
try_const_usize(db, &ec)
try_const_usize(db, ec)
}
ConstKind::Value(val) => Some(u128::from_le_bytes(pad16(&val.value.inner().0, false))),
ConstKind::Error(_) => None,
@ -212,7 +212,7 @@ pub(crate) fn const_eval_discriminant_variant(
let c = if is_signed {
try_const_isize(db, &c).unwrap()
} else {
try_const_usize(db, &c).unwrap() as i128
try_const_usize(db, c).unwrap() as i128
};
Ok(c)
}

View file

@ -1,8 +1,6 @@
//! The home of `HirDatabase`, which is the Salsa database containing all the
//! type inference-related queries.
use std::sync;
use base_db::Crate;
use hir_def::{
AdtId, BlockId, CallableDefId, ConstParamId, DefWithBodyId, EnumVariantId, FunctionId,
@ -94,16 +92,20 @@ pub trait HirDatabase: DefDatabase + std::fmt::Debug {
#[salsa::invoke(crate::layout::layout_of_adt_query)]
#[salsa::cycle(cycle_result = crate::layout::layout_of_adt_cycle_result)]
fn layout_of_adt(
&self,
fn layout_of_adt<'db>(
&'db self,
def: AdtId,
subst: Substitution,
env: Arc<TraitEnvironment>,
args: crate::next_solver::GenericArgs<'db>,
trait_env: Arc<TraitEnvironment>,
) -> Result<Arc<Layout>, LayoutError>;
#[salsa::invoke(crate::layout::layout_of_ty_query)]
#[salsa::cycle(cycle_result = crate::layout::layout_of_ty_cycle_result)]
fn layout_of_ty(&self, ty: Ty, env: Arc<TraitEnvironment>) -> Result<Arc<Layout>, LayoutError>;
fn layout_of_ty<'db>(
&'db self,
ty: crate::next_solver::Ty<'db>,
env: Arc<TraitEnvironment>,
) -> Result<Arc<Layout>, LayoutError>;
#[salsa::invoke(crate::layout::target_data_layout_query)]
fn target_data_layout(&self, krate: Crate) -> Result<Arc<TargetDataLayout>, Arc<str>>;
@ -179,16 +181,6 @@ pub trait HirDatabase: DefDatabase + std::fmt::Debug {
#[salsa::invoke(crate::lower::generic_predicates_query)]
fn generic_predicates(&self, def: GenericDefId) -> GenericPredicates;
#[salsa::invoke(crate::lower::generic_predicates_without_parent_with_diagnostics_query)]
fn generic_predicates_without_parent_with_diagnostics(
&self,
def: GenericDefId,
) -> (GenericPredicates, Diagnostics);
#[salsa::invoke(crate::lower::generic_predicates_without_parent_query)]
#[salsa::transparent]
fn generic_predicates_without_parent(&self, def: GenericDefId) -> GenericPredicates;
#[salsa::invoke(crate::lower::trait_environment_for_body_query)]
#[salsa::transparent]
fn trait_environment_for_body(&self, def: DefWithBodyId) -> Arc<TraitEnvironment>;
@ -246,26 +238,6 @@ pub trait HirDatabase: DefDatabase + std::fmt::Debug {
#[salsa::interned]
fn intern_coroutine(&self, id: InternedCoroutine) -> InternedCoroutineId;
#[salsa::invoke(chalk_db::associated_ty_data_query)]
fn associated_ty_data(&self, id: TypeAliasId) -> sync::Arc<chalk_db::AssociatedTyDatum>;
#[salsa::invoke(chalk_db::trait_datum_query)]
fn trait_datum(
&self,
krate: Crate,
trait_id: chalk_db::TraitId,
) -> sync::Arc<chalk_db::TraitDatum>;
#[salsa::invoke(chalk_db::adt_datum_query)]
fn adt_datum(&self, krate: Crate, struct_id: chalk_db::AdtId) -> sync::Arc<chalk_db::AdtDatum>;
#[salsa::invoke(chalk_db::impl_datum_query)]
fn impl_datum(&self, krate: Crate, impl_id: chalk_db::ImplId)
-> sync::Arc<chalk_db::ImplDatum>;
#[salsa::invoke(chalk_db::fn_def_datum_query)]
fn fn_def_datum(&self, fn_def_id: CallableDefId) -> sync::Arc<chalk_db::FnDefDatum>;
#[salsa::invoke(chalk_db::fn_def_variance_query)]
fn fn_def_variance(&self, fn_def_id: CallableDefId) -> chalk_db::Variances;
@ -280,13 +252,6 @@ pub trait HirDatabase: DefDatabase + std::fmt::Debug {
)]
fn variances_of(&self, def: GenericDefId) -> Option<Arc<[crate::variance::Variance]>>;
#[salsa::invoke(chalk_db::associated_ty_value_query)]
fn associated_ty_value(
&self,
krate: Crate,
id: chalk_db::AssociatedTyValueId,
) -> sync::Arc<chalk_db::AssociatedTyValue>;
#[salsa::invoke(crate::traits::normalize_projection_query)]
#[salsa::transparent]
fn normalize_projection(
@ -310,23 +275,6 @@ pub trait HirDatabase: DefDatabase + std::fmt::Debug {
// next trait solver
#[salsa::invoke(crate::layout::layout_of_adt_ns_query)]
#[salsa::cycle(cycle_result = crate::layout::layout_of_adt_ns_cycle_result)]
fn layout_of_adt_ns<'db>(
&'db self,
def: AdtId,
args: crate::next_solver::GenericArgs<'db>,
trait_env: Arc<TraitEnvironment>,
) -> Result<Arc<Layout>, LayoutError>;
#[salsa::invoke(crate::layout::layout_of_ty_ns_query)]
#[salsa::cycle(cycle_result = crate::layout::layout_of_ty_ns_cycle_result)]
fn layout_of_ty_ns<'db>(
&'db self,
ty: crate::next_solver::Ty<'db>,
env: Arc<TraitEnvironment>,
) -> Result<Arc<Layout>, LayoutError>;
#[salsa::invoke(crate::lower_nextsolver::ty_query)]
#[salsa::transparent]
fn ty_ns<'db>(

View file

@ -5,7 +5,6 @@
use std::fmt;
use base_db::Crate;
use chalk_solve::rust_ir::AdtKind;
use either::Either;
use hir_def::{
AdtId, AssocItemId, DefWithBodyId, HasModule, ItemContainerId, Lookup,
@ -300,11 +299,7 @@ impl ExprValidator {
value_or_partial.is_none_or(|v| !matches!(v, ValueNs::StaticId(_)))
}
Expr::Field { expr, .. } => match self.infer.type_of_expr[*expr].kind(Interner) {
TyKind::Adt(adt, ..)
if db.adt_datum(self.owner.krate(db), *adt).kind == AdtKind::Union =>
{
false
}
TyKind::Adt(adt, ..) if matches!(adt.0, AdtId::UnionId(_)) => false,
_ => self.is_known_valid_scrutinee(*expr, db),
},
Expr::Index { base, .. } => self.is_known_valid_scrutinee(*base, db),

File diff suppressed because it is too large Load diff

View file

@ -2,27 +2,28 @@
use std::ops::ControlFlow;
use chalk_ir::{
DebruijnIndex,
cast::Cast,
visit::{TypeSuperVisitable, TypeVisitable, TypeVisitor},
};
use chalk_solve::rust_ir::InlineBound;
use hir_def::{
AssocItemId, ConstId, CrateRootModuleId, FunctionId, GenericDefId, HasModule, TraitId,
TypeAliasId, lang_item::LangItem, signatures::TraitFlags,
};
use intern::Symbol;
use rustc_hash::FxHashSet;
use rustc_type_ir::{
AliasTyKind, ClauseKind, PredicatePolarity, TypeSuperVisitable as _, TypeVisitable as _,
Upcast, elaborate,
inherent::{IntoKind, SliceLike},
};
use smallvec::SmallVec;
use crate::{
AliasEq, AliasTy, Binders, BoundVar, CallableSig, DomainGoal, GoalData, ImplTraitId, Interner,
OpaqueTyId, ProjectionTyExt, Substitution, TraitRef, Ty, TyKind, WhereClause, all_super_traits,
db::HirDatabase,
from_assoc_type_id, from_chalk_trait_id,
generics::{generics, trait_self_param_idx},
to_chalk_trait_id,
utils::elaborate_clause_supertraits,
ImplTraitId,
db::{HirDatabase, InternedOpaqueTyId},
lower_nextsolver::associated_ty_item_bounds,
next_solver::{
Clause, Clauses, DbInterner, GenericArgs, ParamEnv, SolverDefId, TraitPredicate,
TypingMode, infer::DbInternerInferExt, mk_param,
},
traits::next_trait_solve_in_ctxt,
};
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
@ -52,13 +53,22 @@ pub fn dyn_compatibility(
db: &dyn HirDatabase,
trait_: TraitId,
) -> Option<DynCompatibilityViolation> {
for super_trait in all_super_traits(db, trait_).into_iter().skip(1).rev() {
if db.dyn_compatibility_of_trait(super_trait).is_some() {
return Some(DynCompatibilityViolation::HasNonCompatibleSuperTrait(super_trait));
let interner = DbInterner::new_with(db, Some(trait_.krate(db)), None);
for super_trait in elaborate::supertrait_def_ids(interner, SolverDefId::TraitId(trait_)) {
let super_trait = match super_trait {
SolverDefId::TraitId(id) => id,
_ => unreachable!(),
};
if let Some(v) = db.dyn_compatibility_of_trait(super_trait) {
return if super_trait == trait_ {
Some(v)
} else {
Some(DynCompatibilityViolation::HasNonCompatibleSuperTrait(super_trait))
};
}
}
db.dyn_compatibility_of_trait(trait_)
None
}
pub fn dyn_compatibility_with_callback<F>(
@ -69,7 +79,13 @@ pub fn dyn_compatibility_with_callback<F>(
where
F: FnMut(DynCompatibilityViolation) -> ControlFlow<()>,
{
for super_trait in all_super_traits(db, trait_).into_iter().skip(1).rev() {
let interner = DbInterner::new_with(db, Some(trait_.krate(db)), None);
for super_trait in elaborate::supertrait_def_ids(interner, SolverDefId::TraitId(trait_)).skip(1)
{
let super_trait = match super_trait {
SolverDefId::TraitId(id) => id,
_ => unreachable!(),
};
if db.dyn_compatibility_of_trait(super_trait).is_some() {
cb(DynCompatibilityViolation::HasNonCompatibleSuperTrait(trait_))?;
}
@ -128,27 +144,23 @@ pub fn generics_require_sized_self(db: &dyn HirDatabase, def: GenericDefId) -> b
return false;
};
let Some(trait_self_param_idx) = trait_self_param_idx(db, def) else {
return false;
};
let predicates = &*db.generic_predicates(def);
let predicates = predicates.iter().map(|p| p.skip_binders().skip_binders().clone());
elaborate_clause_supertraits(db, predicates).any(|pred| match pred {
WhereClause::Implemented(trait_ref) => {
if from_chalk_trait_id(trait_ref.trait_id) == sized
&& let TyKind::BoundVar(it) =
*trait_ref.self_type_parameter(Interner).kind(Interner)
{
// Since `generic_predicates` is `Binder<Binder<..>>`, the `DebrujinIndex` of
// self-parameter is `1`
return it
.index_if_bound_at(DebruijnIndex::ONE)
.is_some_and(|idx| idx == trait_self_param_idx);
let interner = DbInterner::new_with(db, Some(krate), None);
let predicates = db.generic_predicates_ns(def);
elaborate::elaborate(interner, predicates.iter().copied()).any(|pred| {
match pred.kind().skip_binder() {
ClauseKind::Trait(trait_pred) => {
if SolverDefId::TraitId(sized) == trait_pred.def_id()
&& let rustc_type_ir::TyKind::Param(param_ty) =
trait_pred.trait_ref.self_ty().kind()
&& param_ty.index == 0
{
true
} else {
false
}
}
false
_ => false,
}
_ => false,
})
}
@ -156,7 +168,7 @@ pub fn generics_require_sized_self(db: &dyn HirDatabase, def: GenericDefId) -> b
// but we don't have good way to render such locations.
// So, just return single boolean value for existence of such `Self` reference
fn predicates_reference_self(db: &dyn HirDatabase, trait_: TraitId) -> bool {
db.generic_predicates(trait_.into())
db.generic_predicates_ns(trait_.into())
.iter()
.any(|pred| predicate_references_self(db, trait_, pred, AllowSelfProjection::No))
}
@ -168,37 +180,18 @@ fn bounds_reference_self(db: &dyn HirDatabase, trait_: TraitId) -> bool {
.items
.iter()
.filter_map(|(_, it)| match *it {
AssocItemId::TypeAliasId(id) => {
let assoc_ty_data = db.associated_ty_data(id);
Some(assoc_ty_data)
}
AssocItemId::TypeAliasId(id) => Some(associated_ty_item_bounds(db, id)),
_ => None,
})
.any(|assoc_ty_data| {
assoc_ty_data.binders.skip_binders().bounds.iter().any(|bound| {
let def = from_assoc_type_id(assoc_ty_data.id).into();
match bound.skip_binders() {
InlineBound::TraitBound(it) => it.args_no_self.iter().any(|arg| {
contains_illegal_self_type_reference(
db,
def,
trait_,
arg,
DebruijnIndex::ONE,
AllowSelfProjection::Yes,
)
}),
InlineBound::AliasEqBound(it) => it.parameters.iter().any(|arg| {
contains_illegal_self_type_reference(
db,
def,
trait_,
arg,
DebruijnIndex::ONE,
AllowSelfProjection::Yes,
)
}),
}
.any(|bounds| {
bounds.skip_binder().iter().any(|pred| match pred.skip_binder() {
rustc_type_ir::ExistentialPredicate::Trait(it) => it.args.iter().any(|arg| {
contains_illegal_self_type_reference(db, trait_, &arg, AllowSelfProjection::Yes)
}),
rustc_type_ir::ExistentialPredicate::Projection(it) => it.args.iter().any(|arg| {
contains_illegal_self_type_reference(db, trait_, &arg, AllowSelfProjection::Yes)
}),
rustc_type_ir::ExistentialPredicate::AutoTrait(_) => false,
})
})
}
@ -209,114 +202,86 @@ enum AllowSelfProjection {
No,
}
fn predicate_references_self(
db: &dyn HirDatabase,
fn predicate_references_self<'db>(
db: &'db dyn HirDatabase,
trait_: TraitId,
predicate: &Binders<Binders<WhereClause>>,
predicate: &Clause<'db>,
allow_self_projection: AllowSelfProjection,
) -> bool {
match predicate.skip_binders().skip_binders() {
WhereClause::Implemented(trait_ref) => {
trait_ref.substitution.iter(Interner).skip(1).any(|arg| {
contains_illegal_self_type_reference(
db,
trait_.into(),
trait_,
arg,
DebruijnIndex::ONE,
allow_self_projection,
)
})
}
WhereClause::AliasEq(AliasEq { alias: AliasTy::Projection(proj), .. }) => {
proj.substitution.iter(Interner).skip(1).any(|arg| {
contains_illegal_self_type_reference(
db,
trait_.into(),
trait_,
arg,
DebruijnIndex::ONE,
allow_self_projection,
)
match predicate.kind().skip_binder() {
ClauseKind::Trait(trait_pred) => trait_pred.trait_ref.args.iter().skip(1).any(|arg| {
contains_illegal_self_type_reference(db, trait_, &arg, allow_self_projection)
}),
ClauseKind::Projection(proj_pred) => {
proj_pred.projection_term.args.iter().skip(1).any(|arg| {
contains_illegal_self_type_reference(db, trait_, &arg, allow_self_projection)
})
}
_ => false,
}
}
fn contains_illegal_self_type_reference<T: TypeVisitable<Interner>>(
db: &dyn HirDatabase,
def: GenericDefId,
fn contains_illegal_self_type_reference<'db, T: rustc_type_ir::TypeVisitable<DbInterner<'db>>>(
db: &'db dyn HirDatabase,
trait_: TraitId,
t: &T,
outer_binder: DebruijnIndex,
allow_self_projection: AllowSelfProjection,
) -> bool {
let Some(trait_self_param_idx) = trait_self_param_idx(db, def) else {
return false;
};
struct IllegalSelfTypeVisitor<'a> {
db: &'a dyn HirDatabase,
struct IllegalSelfTypeVisitor<'db> {
db: &'db dyn HirDatabase,
trait_: TraitId,
super_traits: Option<SmallVec<[TraitId; 4]>>,
trait_self_param_idx: usize,
allow_self_projection: AllowSelfProjection,
}
impl TypeVisitor<Interner> for IllegalSelfTypeVisitor<'_> {
type BreakTy = ();
impl<'db> rustc_type_ir::TypeVisitor<DbInterner<'db>> for IllegalSelfTypeVisitor<'db> {
type Result = ControlFlow<()>;
fn as_dyn(&mut self) -> &mut dyn TypeVisitor<Interner, BreakTy = Self::BreakTy> {
self
}
fn interner(&self) -> Interner {
Interner
}
fn visit_ty(&mut self, ty: &Ty, outer_binder: DebruijnIndex) -> ControlFlow<Self::BreakTy> {
match ty.kind(Interner) {
TyKind::BoundVar(BoundVar { debruijn, index }) => {
if *debruijn == outer_binder && *index == self.trait_self_param_idx {
ControlFlow::Break(())
} else {
ty.super_visit_with(self.as_dyn(), outer_binder)
}
}
TyKind::Alias(AliasTy::Projection(proj)) => match self.allow_self_projection {
fn visit_ty(
&mut self,
ty: <DbInterner<'db> as rustc_type_ir::Interner>::Ty,
) -> Self::Result {
let interner = DbInterner::new_with(self.db, None, None);
match ty.kind() {
rustc_type_ir::TyKind::Param(param) if param.index == 0 => ControlFlow::Break(()),
rustc_type_ir::TyKind::Param(_) => ControlFlow::Continue(()),
rustc_type_ir::TyKind::Alias(AliasTyKind::Projection, proj) => match self
.allow_self_projection
{
AllowSelfProjection::Yes => {
let trait_ = proj.trait_(self.db);
let trait_ = proj.trait_def_id(DbInterner::new_with(self.db, None, None));
let trait_ = match trait_ {
SolverDefId::TraitId(id) => id,
_ => unreachable!(),
};
if self.super_traits.is_none() {
self.super_traits = Some(all_super_traits(self.db, self.trait_));
self.super_traits = Some(
elaborate::supertrait_def_ids(
interner,
SolverDefId::TraitId(self.trait_),
)
.map(|super_trait| match super_trait {
SolverDefId::TraitId(id) => id,
_ => unreachable!(),
})
.collect(),
)
}
if self.super_traits.as_ref().is_some_and(|s| s.contains(&trait_)) {
ControlFlow::Continue(())
} else {
ty.super_visit_with(self.as_dyn(), outer_binder)
ty.super_visit_with(self)
}
}
AllowSelfProjection::No => ty.super_visit_with(self.as_dyn(), outer_binder),
AllowSelfProjection::No => ty.super_visit_with(self),
},
_ => ty.super_visit_with(self.as_dyn(), outer_binder),
_ => ty.super_visit_with(self),
}
}
fn visit_const(
&mut self,
constant: &chalk_ir::Const<Interner>,
outer_binder: DebruijnIndex,
) -> std::ops::ControlFlow<Self::BreakTy> {
constant.data(Interner).ty.super_visit_with(self.as_dyn(), outer_binder)
}
}
let mut visitor = IllegalSelfTypeVisitor {
db,
trait_,
super_traits: None,
trait_self_param_idx,
allow_self_projection,
};
t.visit_with(visitor.as_dyn(), outer_binder).is_break()
let mut visitor =
IllegalSelfTypeVisitor { db, trait_, super_traits: None, allow_self_projection };
t.visit_with(&mut visitor).is_break()
}
fn dyn_compatibility_violation_for_assoc_item<F>(
@ -375,26 +340,21 @@ where
cb(MethodViolationCode::AsyncFn)?;
}
let sig = db.callable_item_signature(func.into());
if sig.skip_binders().params().iter().skip(1).any(|ty| {
contains_illegal_self_type_reference(
db,
func.into(),
trait_,
ty,
DebruijnIndex::INNERMOST,
AllowSelfProjection::Yes,
)
}) {
let sig = db.callable_item_signature_ns(func.into());
if sig
.skip_binder()
.inputs()
.iter()
.skip(1)
.any(|ty| contains_illegal_self_type_reference(db, trait_, &ty, AllowSelfProjection::Yes))
{
cb(MethodViolationCode::ReferencesSelfInput)?;
}
if contains_illegal_self_type_reference(
db,
func.into(),
trait_,
sig.skip_binders().ret(),
DebruijnIndex::INNERMOST,
&sig.skip_binder().output(),
AllowSelfProjection::Yes,
) {
cb(MethodViolationCode::ReferencesSelfOutput)?;
@ -415,40 +375,33 @@ where
cb(MethodViolationCode::UndispatchableReceiver)?;
}
let predicates = &*db.generic_predicates_without_parent(func.into());
let trait_self_idx = trait_self_param_idx(db, func.into());
let predicates = &*db.generic_predicates_without_parent_ns(func.into());
for pred in predicates {
let pred = pred.skip_binders().skip_binders();
let pred = pred.kind().skip_binder();
if matches!(pred, WhereClause::TypeOutlives(_)) {
if matches!(pred, ClauseKind::TypeOutlives(_)) {
continue;
}
// Allow `impl AutoTrait` predicates
if let WhereClause::Implemented(TraitRef { trait_id, substitution }) = pred {
let trait_data = db.trait_signature(from_chalk_trait_id(*trait_id));
if trait_data.flags.contains(TraitFlags::AUTO)
&& substitution
.as_slice(Interner)
.first()
.and_then(|arg| arg.ty(Interner))
.and_then(|ty| ty.bound_var(Interner))
.is_some_and(|b| {
b.debruijn == DebruijnIndex::ONE && Some(b.index) == trait_self_idx
})
{
continue;
}
let interner = DbInterner::new_with(db, Some(trait_.krate(db)), None);
if let ClauseKind::Trait(TraitPredicate {
trait_ref: pred_trait_ref,
polarity: PredicatePolarity::Positive,
}) = pred
&& let SolverDefId::TraitId(trait_id) = pred_trait_ref.def_id
&& let trait_data = db.trait_signature(trait_id)
&& trait_data.flags.contains(TraitFlags::AUTO)
&& pred_trait_ref.self_ty()
== crate::next_solver::Ty::new(
interner,
rustc_type_ir::TyKind::Param(crate::next_solver::ParamTy { index: 0 }),
)
{
continue;
}
if contains_illegal_self_type_reference(
db,
func.into(),
trait_,
pred,
DebruijnIndex::ONE,
AllowSelfProjection::Yes,
) {
if contains_illegal_self_type_reference(db, trait_, &pred, AllowSelfProjection::Yes) {
cb(MethodViolationCode::WhereClauseReferencesSelf)?;
break;
}
@ -457,34 +410,30 @@ where
ControlFlow::Continue(())
}
fn receiver_is_dispatchable(
fn receiver_is_dispatchable<'db>(
db: &dyn HirDatabase,
trait_: TraitId,
func: FunctionId,
sig: &Binders<CallableSig>,
sig: &crate::next_solver::EarlyBinder<
'db,
crate::next_solver::Binder<'db, rustc_type_ir::FnSig<DbInterner<'db>>>,
>,
) -> bool {
let Some(trait_self_idx) = trait_self_param_idx(db, func.into()) else {
return false;
};
let sig = sig.instantiate_identity();
let interner: DbInterner<'_> = DbInterner::new_with(db, Some(trait_.krate(db)), None);
let self_param_ty = crate::next_solver::Ty::new(
interner,
rustc_type_ir::TyKind::Param(crate::next_solver::ParamTy { index: 0 }),
);
// `self: Self` can't be dispatched on, but this is already considered dyn-compatible
// See rustc's comment on https://github.com/rust-lang/rust/blob/3f121b9461cce02a703a0e7e450568849dfaa074/compiler/rustc_trait_selection/src/traits/object_safety.rs#L433-L437
if sig
.skip_binders()
.params()
.first()
.and_then(|receiver| receiver.bound_var(Interner))
.is_some_and(|b| {
b == BoundVar { debruijn: DebruijnIndex::INNERMOST, index: trait_self_idx }
})
{
if sig.inputs().iter().next().is_some_and(|p| p.skip_binder() == self_param_ty) {
return true;
}
let placeholder_subst = generics(db, func.into()).placeholder_subst(db);
let substituted_sig = sig.clone().substitute(Interner, &placeholder_subst);
let Some(receiver_ty) = substituted_sig.params().first() else {
let Some(&receiver_ty) = sig.inputs().skip_binder().as_slice().first() else {
return false;
};
@ -497,118 +446,118 @@ fn receiver_is_dispatchable(
return false;
};
// Type `U`
let unsized_self_ty =
TyKind::Scalar(chalk_ir::Scalar::Uint(chalk_ir::UintTy::U32)).intern(Interner);
// `Receiver[Self => U]`
let Some(unsized_receiver_ty) = receiver_for_self_ty(db, func, unsized_self_ty.clone()) else {
let meta_sized_did = LangItem::MetaSized.resolve_trait(db, krate);
let Some(meta_sized_did) = meta_sized_did else {
return false;
};
let self_ty = placeholder_subst.as_slice(Interner)[trait_self_idx].assert_ty_ref(Interner);
let unsized_predicate = WhereClause::Implemented(TraitRef {
trait_id: to_chalk_trait_id(unsize_did),
substitution: Substitution::from_iter(Interner, [self_ty.clone(), unsized_self_ty.clone()]),
});
let unsized_predicate =
Binders::empty(Interner, unsized_predicate.cast::<DomainGoal>(Interner));
let trait_predicate = WhereClause::Implemented(TraitRef {
trait_id: to_chalk_trait_id(trait_),
substitution: Substitution::from_iter(
Interner,
std::iter::once(unsized_self_ty.cast(Interner))
.chain(placeholder_subst.iter(Interner).skip(1).cloned()),
),
});
let trait_predicate = Binders::empty(Interner, trait_predicate.cast::<DomainGoal>(Interner));
// Type `U`
let unsized_self_ty = crate::next_solver::Ty::new_param(interner, u32::MAX, Symbol::empty());
// `Receiver[Self => U]`
let unsized_receiver_ty = receiver_for_self_ty(interner, func, receiver_ty, unsized_self_ty);
let generic_predicates = &*db.generic_predicates(func.into());
let param_env = {
let generic_predicates = &*db.generic_predicates_ns(func.into());
let goals = std::iter::once(unsized_predicate).chain(std::iter::once(trait_predicate)).chain(
generic_predicates.iter().map(|pred| {
pred.clone()
.substitute(Interner, &placeholder_subst)
.map(|g| g.cast::<DomainGoal>(Interner))
}),
// Self: Unsize<U>
let unsize_predicate = crate::next_solver::TraitRef::new(
interner,
SolverDefId::TraitId(unsize_did),
[self_param_ty, unsized_self_ty],
);
// U: Trait<Arg1, ..., ArgN>
let trait_def_id = SolverDefId::TraitId(trait_);
let args = GenericArgs::for_item(interner, trait_def_id, |name, index, kind, _| {
if index == 0 { unsized_self_ty.into() } else { mk_param(interner, index, name, kind) }
});
let trait_predicate =
crate::next_solver::TraitRef::new_from_args(interner, trait_def_id, args);
let meta_sized_predicate = crate::next_solver::TraitRef::new(
interner,
SolverDefId::TraitId(meta_sized_did),
[unsized_self_ty],
);
ParamEnv {
clauses: Clauses::new_from_iter(
interner,
generic_predicates.iter().copied().chain([
unsize_predicate.upcast(interner),
trait_predicate.upcast(interner),
meta_sized_predicate.upcast(interner),
]),
),
}
};
// Receiver: DispatchFromDyn<Receiver[Self => U]>
let predicate = crate::next_solver::TraitRef::new(
interner,
SolverDefId::TraitId(dispatch_from_dyn_did),
[receiver_ty, unsized_receiver_ty],
);
let clauses = chalk_ir::ProgramClauses::from_iter(
Interner,
goals.into_iter().map(|g| {
chalk_ir::ProgramClause::new(
Interner,
chalk_ir::ProgramClauseData(g.map(|g| chalk_ir::ProgramClauseImplication {
consequence: g,
conditions: chalk_ir::Goals::empty(Interner),
constraints: chalk_ir::Constraints::empty(Interner),
priority: chalk_ir::ClausePriority::High,
})),
)
}),
);
let env: chalk_ir::Environment<Interner> = chalk_ir::Environment { clauses };
let goal = crate::next_solver::Goal::new(interner, param_env, predicate);
let obligation = WhereClause::Implemented(TraitRef {
trait_id: to_chalk_trait_id(dispatch_from_dyn_did),
substitution: Substitution::from_iter(Interner, [receiver_ty.clone(), unsized_receiver_ty]),
});
let goal = GoalData::DomainGoal(chalk_ir::DomainGoal::Holds(obligation)).intern(Interner);
let in_env = chalk_ir::InEnvironment::new(&env, goal);
let mut table = chalk_solve::infer::InferenceTable::<Interner>::new();
let canonicalized = table.canonicalize(Interner, in_env);
db.trait_solve(krate, None, canonicalized.quantified).certain()
let infcx = interner.infer_ctxt().build(TypingMode::non_body_analysis());
// the receiver is dispatchable iff the obligation holds
let res = next_trait_solve_in_ctxt(&infcx, goal);
res.map_or(false, |res| matches!(res.1, rustc_type_ir::solve::Certainty::Yes))
}
fn receiver_for_self_ty(db: &dyn HirDatabase, func: FunctionId, ty: Ty) -> Option<Ty> {
let generics = generics(db, func.into());
let trait_self_idx = trait_self_param_idx(db, func.into())?;
let subst = generics.placeholder_subst(db);
let subst = Substitution::from_iter(
Interner,
subst.iter(Interner).enumerate().map(|(idx, arg)| {
if idx == trait_self_idx { ty.clone().cast(Interner) } else { arg.clone() }
}),
fn receiver_for_self_ty<'db>(
interner: DbInterner<'db>,
func: FunctionId,
receiver_ty: crate::next_solver::Ty<'db>,
self_ty: crate::next_solver::Ty<'db>,
) -> crate::next_solver::Ty<'db> {
let args = crate::next_solver::GenericArgs::for_item(
interner,
SolverDefId::FunctionId(func),
|name, index, kind, _| {
if index == 0 { self_ty.into() } else { mk_param(interner, index, name, kind) }
},
);
let sig = db.callable_item_signature(func.into());
let sig = sig.substitute(Interner, &subst);
sig.params_and_return.first().cloned()
crate::next_solver::EarlyBinder::bind(receiver_ty).instantiate(interner, args)
}
fn contains_illegal_impl_trait_in_trait(
db: &dyn HirDatabase,
sig: &Binders<CallableSig>,
fn contains_illegal_impl_trait_in_trait<'db>(
db: &'db dyn HirDatabase,
sig: &crate::next_solver::EarlyBinder<
'db,
crate::next_solver::Binder<'db, rustc_type_ir::FnSig<DbInterner<'db>>>,
>,
) -> Option<MethodViolationCode> {
struct OpaqueTypeCollector(FxHashSet<OpaqueTyId>);
struct OpaqueTypeCollector(FxHashSet<InternedOpaqueTyId>);
impl TypeVisitor<Interner> for OpaqueTypeCollector {
type BreakTy = ();
impl<'db> rustc_type_ir::TypeVisitor<DbInterner<'db>> for OpaqueTypeCollector {
type Result = ControlFlow<()>;
fn as_dyn(&mut self) -> &mut dyn TypeVisitor<Interner, BreakTy = Self::BreakTy> {
self
}
fn interner(&self) -> Interner {
Interner
}
fn visit_ty(&mut self, ty: &Ty, outer_binder: DebruijnIndex) -> ControlFlow<Self::BreakTy> {
if let TyKind::OpaqueType(opaque_ty_id, _) = ty.kind(Interner) {
self.0.insert(*opaque_ty_id);
fn visit_ty(
&mut self,
ty: <DbInterner<'db> as rustc_type_ir::Interner>::Ty,
) -> Self::Result {
if let rustc_type_ir::TyKind::Alias(AliasTyKind::Opaque, op) = ty.kind() {
let id = match op.def_id {
SolverDefId::InternedOpaqueTyId(id) => id,
_ => unreachable!(),
};
self.0.insert(id);
}
ty.super_visit_with(self.as_dyn(), outer_binder)
ty.super_visit_with(self)
}
}
let ret = sig.skip_binders().ret();
let ret = sig.skip_binder().output();
let mut visitor = OpaqueTypeCollector(FxHashSet::default());
_ = ret.visit_with(visitor.as_dyn(), DebruijnIndex::INNERMOST);
_ = ret.visit_with(&mut visitor);
// Since we haven't implemented RPITIT in proper way like rustc yet,
// just check whether `ret` contains RPIT for now
for opaque_ty in visitor.0 {
let impl_trait_id = db.lookup_intern_impl_trait_id(opaque_ty.into());
let impl_trait_id = db.lookup_intern_impl_trait_id(opaque_ty);
if matches!(impl_trait_id, ImplTraitId::ReturnTypeImplTrait(..)) {
return Some(MethodViolationCode::ReferencesImplTraitInTrait);
}

View file

@ -1,12 +1,12 @@
//! Type cast logic. Basically coercion + additional casts.
use chalk_ir::{Mutability, Scalar, TyVariableKind, UintTy};
use hir_def::{AdtId, hir::ExprId};
use hir_def::{AdtId, hir::ExprId, signatures::TraitFlags};
use stdx::never;
use crate::{
Adjustment, Binders, DynTy, InferenceDiagnostic, Interner, PlaceholderIndex,
QuantifiedWhereClauses, Ty, TyExt, TyKind, TypeFlags, WhereClause,
QuantifiedWhereClauses, Ty, TyExt, TyKind, TypeFlags, WhereClause, from_chalk_trait_id,
infer::{coerce::CoerceNever, unify::InferenceTable},
};
@ -290,10 +290,12 @@ impl CastCheck {
return Ok(());
}
let src_principal =
table.db.trait_datum(table.trait_env.krate, src_principal);
table.db.trait_signature(from_chalk_trait_id(src_principal));
let dst_principal =
table.db.trait_datum(table.trait_env.krate, dst_principal);
if src_principal.is_auto_trait() && dst_principal.is_auto_trait() {
table.db.trait_signature(from_chalk_trait_id(dst_principal));
if src_principal.flags.contains(TraitFlags::AUTO)
&& dst_principal.flags.contains(TraitFlags::AUTO)
{
Ok(())
} else {
Err(CastError::DifferingKinds)

View file

@ -9,7 +9,6 @@ use chalk_ir::{
visit::{TypeSuperVisitable, TypeVisitable, TypeVisitor},
};
use either::Either;
use hir_def::Lookup;
use hir_def::{
DefWithBodyId, FieldId, HasModule, TupleFieldId, TupleId, VariantId,
expr_store::path::Path,
@ -22,6 +21,7 @@ use hir_def::{
resolver::ValueNs,
type_ref::TypeRefId,
};
use hir_def::{ItemContainerId, Lookup, TraitId};
use hir_expand::name::Name;
use intern::sym;
use rustc_hash::{FxHashMap, FxHashSet};
@ -30,16 +30,16 @@ use stdx::{format_to, never};
use syntax::utils::is_raw_identifier;
use crate::{
Adjust, Adjustment, AliasEq, AliasTy, Binders, BindingMode, ChalkTraitId, ClosureId, DynTy,
DynTyExt, FnAbi, FnPointer, FnSig, Interner, OpaqueTy, ProjectionTy, ProjectionTyExt,
Substitution, Ty, TyBuilder, TyExt, WhereClause,
Adjust, Adjustment, AliasEq, AliasTy, Binders, BindingMode, ClosureId, DynTy, DynTyExt, FnAbi,
FnPointer, FnSig, Interner, OpaqueTy, ProjectionTy, ProjectionTyExt, Substitution, Ty,
TyBuilder, TyExt, WhereClause,
db::{HirDatabase, InternedClosure, InternedCoroutine},
error_lifetime, from_assoc_type_id, from_chalk_trait_id, from_placeholder_idx,
generics::Generics,
infer::{BreakableKind, CoerceMany, Diverges, coerce::CoerceNever},
make_binders,
mir::{BorrowKind, MirSpan, MutBorrowKind, ProjectionElem},
to_assoc_type_id, to_chalk_trait_id,
to_assoc_type_id,
traits::FnTrait,
utils::{self, elaborate_clause_supertraits},
};
@ -321,10 +321,8 @@ impl InferenceContext<'_> {
fn deduce_sig_from_dyn_ty(&self, dyn_ty: &DynTy) -> Option<FnPointer> {
// Search for a predicate like `<$self as FnX<Args>>::Output == Ret`
let fn_traits: SmallVec<[ChalkTraitId; 3]> =
utils::fn_traits(self.db, self.owner.module(self.db).krate())
.map(to_chalk_trait_id)
.collect();
let fn_traits: SmallVec<[TraitId; 3]> =
utils::fn_traits(self.db, self.owner.module(self.db).krate()).collect();
let self_ty = self.result.standard_types.unknown.clone();
let bounds = dyn_ty.bounds.clone().substitute(Interner, &[self_ty.cast(Interner)]);
@ -333,9 +331,13 @@ impl InferenceContext<'_> {
if let WhereClause::AliasEq(AliasEq { alias: AliasTy::Projection(projection), ty }) =
bound.skip_binders()
{
let assoc_data =
self.db.associated_ty_data(from_assoc_type_id(projection.associated_ty_id));
if !fn_traits.contains(&assoc_data.trait_id) {
let trait_ =
match from_assoc_type_id(projection.associated_ty_id).lookup(self.db).container
{
ItemContainerId::TraitId(t) => t,
_ => panic!("associated type not in trait"),
};
if !fn_traits.contains(&trait_) {
return None;
}

View file

@ -31,11 +31,8 @@ use crate::{
},
};
pub(crate) use self::adt::{layout_of_adt_cycle_result, layout_of_adt_ns_cycle_result};
pub use self::{
adt::{layout_of_adt_ns_query, layout_of_adt_query},
target::target_data_layout_query,
};
pub(crate) use self::adt::layout_of_adt_cycle_result;
pub use self::{adt::layout_of_adt_query, target::target_data_layout_query};
pub(crate) mod adt;
pub(crate) mod target;
@ -153,24 +150,14 @@ fn layout_of_simd_ty<'db>(
return Err(LayoutError::InvalidSimdType);
};
let e_len = try_const_usize(db, &e_len).ok_or(LayoutError::HasErrorConst)? as u64;
let e_ly = db.layout_of_ty_ns(e_ty, env)?;
let e_len = try_const_usize(db, e_len).ok_or(LayoutError::HasErrorConst)? as u64;
let e_ly = db.layout_of_ty(e_ty, env)?;
let cx = LayoutCx::new(dl);
Ok(Arc::new(cx.calc.simd_type(e_ly, e_len, repr_packed)?))
}
pub fn layout_of_ty_query(
db: &dyn HirDatabase,
ty: crate::Ty,
trait_env: Arc<TraitEnvironment>,
) -> Result<Arc<Layout>, LayoutError> {
let krate = trait_env.krate;
let interner = DbInterner::new_with(db, Some(krate), trait_env.block);
db.layout_of_ty_ns(ty.to_nextsolver(interner), trait_env)
}
pub fn layout_of_ty_ns_query<'db>(
pub fn layout_of_ty_query<'db>(
db: &'db dyn HirDatabase,
ty: Ty<'db>,
trait_env: Arc<TraitEnvironment>,
@ -197,7 +184,7 @@ pub fn layout_of_ty_ns_query<'db>(
}
_ => {}
}
return db.layout_of_adt_ns(def.inner().id, args, trait_env);
return db.layout_of_adt(def.inner().id, args, trait_env);
}
TyKind::Bool => Layout::scalar(
dl,
@ -265,19 +252,19 @@ pub fn layout_of_ty_ns_query<'db>(
let fields = tys
.iter()
.map(|k| db.layout_of_ty_ns(k, trait_env.clone()))
.map(|k| db.layout_of_ty(k, trait_env.clone()))
.collect::<Result<Vec<_>, _>>()?;
let fields = fields.iter().map(|it| &**it).collect::<Vec<_>>();
let fields = fields.iter().collect::<IndexVec<_, _>>();
cx.calc.univariant(&fields, &ReprOptions::default(), kind)?
}
TyKind::Array(element, count) => {
let count = try_const_usize(db, &count).ok_or(LayoutError::HasErrorConst)? as u64;
let element = db.layout_of_ty_ns(element, trait_env)?;
let count = try_const_usize(db, count).ok_or(LayoutError::HasErrorConst)? as u64;
let element = db.layout_of_ty(element, trait_env)?;
cx.calc.array_like::<_, _, ()>(&element, Some(count))?
}
TyKind::Slice(element) => {
let element = db.layout_of_ty_ns(element, trait_env)?;
let element = db.layout_of_ty(element, trait_env)?;
cx.calc.array_like::<_, _, ()>(&element, None)?
}
TyKind::Str => {
@ -349,7 +336,7 @@ pub fn layout_of_ty_ns_query<'db>(
let ty =
convert_binder_to_early_binder(interner, it.ty.to_nextsolver(interner))
.instantiate(interner, args);
db.layout_of_ty_ns(ty, trait_env.clone())
db.layout_of_ty(ty, trait_env.clone())
})
.collect::<Result<Vec<_>, _>>()?;
let fields = fields.iter().map(|it| &**it).collect::<Vec<_>>();
@ -379,15 +366,7 @@ pub fn layout_of_ty_ns_query<'db>(
Ok(Arc::new(result))
}
pub(crate) fn layout_of_ty_cycle_result(
_: &dyn HirDatabase,
_: crate::Ty,
_: Arc<TraitEnvironment>,
) -> Result<Arc<Layout>, LayoutError> {
Err(LayoutError::RecursiveTypeWithoutIndirection)
}
pub(crate) fn layout_of_ty_ns_cycle_result<'db>(
pub(crate) fn layout_of_ty_cycle_result<'db>(
_: &dyn HirDatabase,
_: Ty<'db>,
_: Arc<TraitEnvironment>,

View file

@ -13,23 +13,13 @@ use smallvec::SmallVec;
use triomphe::Arc;
use crate::{
Substitution, TraitEnvironment,
TraitEnvironment,
db::HirDatabase,
layout::{Layout, LayoutCx, LayoutError, field_ty},
next_solver::{DbInterner, GenericArgs, mapping::ChalkToNextSolver},
next_solver::GenericArgs,
};
pub fn layout_of_adt_query(
db: &dyn HirDatabase,
def: AdtId,
subst: Substitution,
trait_env: Arc<TraitEnvironment>,
) -> Result<Arc<Layout>, LayoutError> {
let interner = DbInterner::new_with(db, Some(trait_env.krate), trait_env.block);
db.layout_of_adt_ns(def, subst.to_nextsolver(interner), trait_env)
}
pub fn layout_of_adt_ns_query<'db>(
pub fn layout_of_adt_query<'db>(
db: &'db dyn HirDatabase,
def: AdtId,
args: GenericArgs<'db>,
@ -44,7 +34,7 @@ pub fn layout_of_adt_ns_query<'db>(
let handle_variant = |def: VariantId, var: &VariantFields| {
var.fields()
.iter()
.map(|(fd, _)| db.layout_of_ty_ns(field_ty(db, def, fd, &args), trait_env.clone()))
.map(|(fd, _)| db.layout_of_ty(field_ty(db, def, fd, &args), trait_env.clone()))
.collect::<Result<Vec<_>, _>>()
};
let (variants, repr, is_special_no_niche) = match def {
@ -105,16 +95,7 @@ pub fn layout_of_adt_ns_query<'db>(
Ok(Arc::new(result))
}
pub(crate) fn layout_of_adt_cycle_result(
_: &dyn HirDatabase,
_: AdtId,
_: Substitution,
_: Arc<TraitEnvironment>,
) -> Result<Arc<Layout>, LayoutError> {
Err(LayoutError::RecursiveTypeWithoutIndirection)
}
pub(crate) fn layout_of_adt_ns_cycle_result<'db>(
pub(crate) fn layout_of_adt_cycle_result<'db>(
_: &'db dyn HirDatabase,
_def: AdtId,
_args: GenericArgs<'db>,

View file

@ -11,6 +11,7 @@ use crate::{
Interner, Substitution,
db::HirDatabase,
layout::{Layout, LayoutError},
next_solver::{DbInterner, mapping::ChalkToNextSolver},
setup_tracing,
test_db::TestDB,
};
@ -85,13 +86,16 @@ fn eval_goal(
db.ty(ty_id.into()).substitute(Interner, &Substitution::empty(Interner))
}
};
db.layout_of_ty(
goal_ty,
db.trait_environment(match adt_or_type_alias_id {
Either::Left(adt) => hir_def::GenericDefId::AdtId(adt),
Either::Right(ty) => hir_def::GenericDefId::TypeAliasId(ty),
}),
)
salsa::attach(&db, || {
let interner = DbInterner::new_with(&db, None, None);
db.layout_of_ty(
goal_ty.to_nextsolver(interner),
db.trait_environment(match adt_or_type_alias_id {
Either::Left(adt) => hir_def::GenericDefId::AdtId(adt),
Either::Right(ty) => hir_def::GenericDefId::TypeAliasId(ty),
}),
)
})
}
/// A version of `eval_goal` for types that can not be expressed in ADTs, like closures and `impl Trait`
@ -128,7 +132,10 @@ fn eval_expr(
.0;
let infer = db.infer(function_id.into());
let goal_ty = infer.type_of_binding[b].clone();
db.layout_of_ty(goal_ty, db.trait_environment(function_id.into()))
salsa::attach(&db, || {
let interner = DbInterner::new_with(&db, None, None);
db.layout_of_ty(goal_ty.to_nextsolver(interner), db.trait_environment(function_id.into()))
})
}
#[track_caller]

View file

@ -90,6 +90,7 @@ use intern::{Symbol, sym};
use la_arena::{Arena, Idx};
use mir::{MirEvalError, VTableMap};
use rustc_hash::{FxBuildHasher, FxHashMap, FxHashSet};
use rustc_type_ir::inherent::SliceLike;
use syntax::ast::{ConstArg, make};
use traits::FnTrait;
use triomphe::Arc;
@ -100,6 +101,7 @@ use crate::{
display::{DisplayTarget, HirDisplay},
generics::Generics,
infer::unify::InferenceTable,
next_solver::{DbInterner, mapping::convert_ty_for_result},
};
pub use autoderef::autoderef;
@ -116,8 +118,9 @@ pub use infer::{
pub use interner::Interner;
pub use lower::{
ImplTraitLoweringMode, LifetimeElisionKind, ParamLoweringMode, TyDefId, TyLoweringContext,
ValueTyDefId, associated_type_shorthand_candidates, diagnostics::*,
ValueTyDefId, diagnostics::*,
};
pub use lower_nextsolver::associated_type_shorthand_candidates;
pub use mapping::{
ToChalk, from_assoc_type_id, from_chalk_trait_id, from_foreign_def_id, from_placeholder_idx,
lt_from_placeholder_idx, lt_to_placeholder_idx, to_assoc_type_id, to_chalk_trait_id,
@ -210,20 +213,20 @@ pub(crate) type ProgramClause = chalk_ir::ProgramClause<Interner>;
/// the necessary bits of memory of the const eval session to keep the constant
/// meaningful.
#[derive(Debug, Default, Clone, PartialEq, Eq)]
pub enum MemoryMap {
pub enum MemoryMap<'db> {
#[default]
Empty,
Simple(Box<[u8]>),
Complex(Box<ComplexMemoryMap>),
Complex(Box<ComplexMemoryMap<'db>>),
}
#[derive(Debug, Default, Clone, PartialEq, Eq)]
pub struct ComplexMemoryMap {
pub struct ComplexMemoryMap<'db> {
memory: IndexMap<usize, Box<[u8]>, FxBuildHasher>,
vtable: VTableMap,
vtable: VTableMap<'db>,
}
impl ComplexMemoryMap {
impl ComplexMemoryMap<'_> {
fn insert(&mut self, addr: usize, val: Box<[u8]>) {
match self.memory.entry(addr) {
Entry::Occupied(mut e) => {
@ -238,8 +241,8 @@ impl ComplexMemoryMap {
}
}
impl MemoryMap {
pub fn vtable_ty(&self, id: usize) -> Result<&Ty, MirEvalError> {
impl<'db> MemoryMap<'db> {
pub fn vtable_ty(&self, id: usize) -> Result<crate::next_solver::Ty<'db>, MirEvalError> {
match self {
MemoryMap::Empty | MemoryMap::Simple(_) => Err(MirEvalError::InvalidVTableId(id)),
MemoryMap::Complex(cm) => cm.vtable.ty(id),
@ -289,10 +292,11 @@ impl MemoryMap {
}
}
// FIXME(next-solver): add a lifetime to this
/// A concrete constant value
#[derive(Debug, Clone, PartialEq, Eq)]
pub enum ConstScalar {
Bytes(Box<[u8]>, MemoryMap),
Bytes(Box<[u8]>, MemoryMap<'static>),
// FIXME: this is a hack to get around chalk not being able to represent unevaluatable
// constants
UnevaluatedConst(GeneralConstId, Substitution),
@ -313,6 +317,30 @@ impl Hash for ConstScalar {
}
}
/// A concrete constant value
#[derive(Debug, Clone, PartialEq, Eq)]
pub enum ConstScalarNs<'db> {
Bytes(Box<[u8]>, MemoryMap<'db>),
// FIXME: this is a hack to get around chalk not being able to represent unevaluatable
// constants
UnevaluatedConst(GeneralConstId, Substitution),
/// Case of an unknown value that rustc might know but we don't
// FIXME: this is a hack to get around chalk not being able to represent unevaluatable
// constants
// https://github.com/rust-lang/rust-analyzer/pull/8813#issuecomment-840679177
// https://rust-lang.zulipchat.com/#narrow/stream/144729-wg-traits/topic/Handling.20non.20evaluatable.20constants'.20equality/near/238386348
Unknown,
}
impl Hash for ConstScalarNs<'_> {
fn hash<H: std::hash::Hasher>(&self, state: &mut H) {
core::mem::discriminant(self).hash(state);
if let ConstScalarNs::Bytes(b, _) = self {
b.hash(state)
}
}
}
/// Return an index of a parameter in the generic type parameter list by it's id.
pub fn param_idx(db: &dyn HirDatabase, id: TypeOrConstParamId) -> Option<usize> {
generics::generics(db, id.parent).type_or_const_param_idx(id)
@ -560,6 +588,27 @@ impl CallableSig {
abi: fn_ptr.sig.abi,
}
}
pub fn from_fn_sig_and_header<'db>(
interner: DbInterner<'db>,
sig: crate::next_solver::Binder<'db, rustc_type_ir::FnSigTys<DbInterner<'db>>>,
header: rustc_type_ir::FnHeader<DbInterner<'db>>,
) -> CallableSig {
CallableSig {
// FIXME: what to do about lifetime params? -> return PolyFnSig
params_and_return: Arc::from_iter(
sig.skip_binder()
.inputs_and_output
.iter()
.map(|t| convert_ty_for_result(interner, t)),
),
is_varargs: header.c_variadic,
safety: match header.safety {
next_solver::abi::Safety::Safe => chalk_ir::Safety::Safe,
next_solver::abi::Safety::Unsafe => chalk_ir::Safety::Unsafe,
},
abi: header.abi,
}
}
pub fn to_fn_ptr(&self) -> FnPointer {
FnPointer {

View file

@ -804,15 +804,6 @@ pub(crate) fn callable_item_signature_query(db: &dyn HirDatabase, def: CallableD
}
}
pub fn associated_type_shorthand_candidates<R>(
db: &dyn HirDatabase,
def: GenericDefId,
res: TypeNs,
mut cb: impl FnMut(&Name, TypeAliasId) -> Option<R>,
) -> Option<R> {
named_associated_type_shorthand_candidates(db, def, res, None, |name, _, id| cb(name, id))
}
fn named_associated_type_shorthand_candidates<R>(
db: &dyn HirDatabase,
// If the type parameter is defined in an impl and we're in a method, there
@ -1179,22 +1170,6 @@ pub(crate) fn generic_predicates_query(
generic_predicates_filtered_by(db, def, |_, _| true).0
}
pub(crate) fn generic_predicates_without_parent_query(
db: &dyn HirDatabase,
def: GenericDefId,
) -> GenericPredicates {
db.generic_predicates_without_parent_with_diagnostics(def).0
}
/// Resolve the where clause(s) of an item with generics,
/// except the ones inherited from the parent
pub(crate) fn generic_predicates_without_parent_with_diagnostics_query(
db: &dyn HirDatabase,
def: GenericDefId,
) -> (GenericPredicates, Diagnostics) {
generic_predicates_filtered_by(db, def, |_, d| d == def)
}
/// Resolve the where clause(s) of an item with generics,
/// with a given filter
fn generic_predicates_filtered_by<F>(

View file

@ -12,14 +12,14 @@ pub(crate) mod path;
use std::{
cell::OnceCell,
iter, mem,
ops::{self, Not as _},
ops::{self, Deref, Not as _},
};
use base_db::Crate;
use either::Either;
use hir_def::{
AdtId, AssocItemId, CallableDefId, ConstParamId, EnumVariantId, FunctionId, GenericDefId,
GenericParamId, ImplId, ItemContainerId, LocalFieldId, Lookup, StructId, TypeAliasId,
GenericParamId, ImplId, ItemContainerId, LocalFieldId, Lookup, StructId, TraitId, TypeAliasId,
TypeOrConstParamId, VariantId,
expr_store::{
ExpressionStore,
@ -49,6 +49,7 @@ use rustc_type_ir::{
inherent::{GenericArg as _, GenericArgs as _, IntoKind as _, Region as _, SliceLike, Ty as _},
};
use salsa::plumbing::AsId;
use smallvec::{SmallVec, smallvec};
use stdx::never;
use triomphe::Arc;
@ -61,9 +62,10 @@ use crate::{
lower::{Diagnostics, PathDiagnosticCallbackData, create_diagnostics},
next_solver::{
AdtDef, AliasTy, Binder, BoundExistentialPredicates, BoundRegionKind, BoundTyKind,
BoundVarKind, BoundVarKinds, Clause, Const, DbInterner, EarlyBinder, EarlyParamRegion,
ErrorGuaranteed, GenericArgs, PolyFnSig, Predicate, Region, SolverDefId, TraitPredicate,
TraitRef, Ty, Tys, abi::Safety, generics::GenericParamDefKind, mapping::ChalkToNextSolver,
BoundVarKind, BoundVarKinds, Clause, Clauses, Const, DbInterner, EarlyBinder,
EarlyParamRegion, ErrorGuaranteed, GenericArgs, PolyFnSig, Predicate, Region, SolverDefId,
TraitPredicate, TraitRef, Ty, Tys, abi::Safety, generics::GenericParamDefKind,
mapping::ChalkToNextSolver,
},
};
@ -1592,6 +1594,96 @@ fn fn_sig_for_enum_variant_constructor<'db>(
}))
}
// FIXME(next-solver): should merge this with `explicit_item_bounds` in some way
pub(crate) fn associated_ty_item_bounds<'db>(
db: &'db dyn HirDatabase,
type_alias: TypeAliasId,
) -> EarlyBinder<'db, BoundExistentialPredicates<'db>> {
let trait_ = match type_alias.lookup(db).container {
ItemContainerId::TraitId(t) => t,
_ => panic!("associated type not in trait"),
};
let type_alias_data = db.type_alias_signature(type_alias);
let resolver = hir_def::resolver::HasResolver::resolver(type_alias, db);
let interner = DbInterner::new_with(db, Some(resolver.krate()), None);
let mut ctx = TyLoweringContext::new(
db,
&resolver,
&type_alias_data.store,
type_alias.into(),
LifetimeElisionKind::AnonymousReportError,
);
// FIXME: we should never create non-existential predicates in the first place
// For now, use an error type so we don't run into dummy binder issues
let self_ty = Ty::new_error(interner, ErrorGuaranteed);
let mut bounds = Vec::new();
for bound in &type_alias_data.bounds {
ctx.lower_type_bound(bound, self_ty, false).for_each(|pred| {
if let Some(bound) = pred
.kind()
.map_bound(|c| match c {
rustc_type_ir::ClauseKind::Trait(t) => {
let id = t.def_id();
let id = match id {
SolverDefId::TraitId(id) => id,
_ => unreachable!(),
};
let is_auto = db.trait_signature(id).flags.contains(TraitFlags::AUTO);
if is_auto {
Some(ExistentialPredicate::AutoTrait(t.def_id()))
} else {
Some(ExistentialPredicate::Trait(ExistentialTraitRef::new_from_args(
interner,
t.def_id(),
GenericArgs::new_from_iter(
interner,
t.trait_ref.args.iter().skip(1),
),
)))
}
}
rustc_type_ir::ClauseKind::Projection(p) => Some(
ExistentialPredicate::Projection(ExistentialProjection::new_from_args(
interner,
p.def_id(),
GenericArgs::new_from_iter(
interner,
p.projection_term.args.iter().skip(1),
),
p.term,
)),
),
rustc_type_ir::ClauseKind::TypeOutlives(outlives_predicate) => None,
rustc_type_ir::ClauseKind::RegionOutlives(_)
| rustc_type_ir::ClauseKind::ConstArgHasType(_, _)
| rustc_type_ir::ClauseKind::WellFormed(_)
| rustc_type_ir::ClauseKind::ConstEvaluatable(_)
| rustc_type_ir::ClauseKind::HostEffect(_)
| rustc_type_ir::ClauseKind::UnstableFeature(_) => unreachable!(),
})
.transpose()
{
bounds.push(bound);
}
});
}
if !ctx.unsized_types.contains(&self_ty) {
let sized_trait = LangItem::Sized.resolve_trait(db, resolver.krate());
let sized_clause = Binder::dummy(ExistentialPredicate::Trait(ExistentialTraitRef::new(
interner,
SolverDefId::TraitId(trait_),
[] as [crate::next_solver::GenericArg<'_>; 0],
)));
bounds.push(sized_clause);
bounds.shrink_to_fit();
}
EarlyBinder::bind(BoundExistentialPredicates::new_from_iter(interner, bounds))
}
pub(crate) fn associated_type_by_name_including_super_traits<'db>(
db: &'db dyn HirDatabase,
trait_ref: TraitRef<'db>,
@ -1607,3 +1699,133 @@ pub(crate) fn associated_type_by_name_including_super_traits<'db>(
Some((t.skip_binder(), assoc_type))
})
}
/// Enumerates associated-type candidates for the resolution `res`,
/// including those found on super-traits, invoking `cb` for each one.
///
/// Returns the first `TypeAliasId` for which `cb` answers `true`, or
/// `None` if the callback rejects every candidate. This is a thin public
/// wrapper over `named_associated_type_shorthand_candidates` that applies
/// no name-based filtering (`assoc_name` is `None`).
pub fn associated_type_shorthand_candidates(
    db: &dyn HirDatabase,
    def: GenericDefId,
    res: TypeNs,
    mut cb: impl FnMut(&Name, TypeAliasId) -> bool,
) -> Option<TypeAliasId> {
    // No particular crate/block context is required here.
    let interner = DbInterner::new_with(db, None, None);
    // Adapt the boolean callback to the `Option`-returning shape the
    // inner search expects; the trait ref argument is not needed.
    named_associated_type_shorthand_candidates(interner, def, res, None, |name, _, alias| {
        if cb(name, alias) { Some(alias) } else { None }
    })
}
/// Searches for an associated type reachable from the resolution `res`,
/// walking the starting trait and its super-traits, and returns the first
/// `Some` produced by `check_alias`.
///
/// * `def` — the generic item in whose scope the path is being resolved;
///   used to look up the predicates that constrain a type parameter.
/// * `res` — what the path's qualifier resolved to. Only `Self` inside an
///   impl (`TypeNs::SelfType`) and type parameters (`TypeNs::GenericParam`)
///   can yield candidates; every other resolution returns `None`.
/// * `assoc_name` — optional name hint forwarded to
///   `generic_predicates_for_param_ns` to narrow the predicate query.
/// * `check_alias` — called with each candidate's name, the trait ref it was
///   found through, and its `TypeAliasId`; its first `Some` is the result.
#[tracing::instrument(skip(interner, check_alias))]
fn named_associated_type_shorthand_candidates<'db, R>(
    interner: DbInterner<'db>,
    // If the type parameter is defined in an impl and we're in a method, there
    // might be additional where clauses to consider
    def: GenericDefId,
    res: TypeNs,
    assoc_name: Option<Name>,
    mut check_alias: impl FnMut(&Name, TraitRef<'db>, TypeAliasId) -> Option<R>,
) -> Option<R> {
    let db = interner.db;
    // Depth-first search over `t`'s trait and all of its super-traits,
    // offering every associated type alias to `check_alias`.
    let mut search = |t: TraitRef<'db>| -> Option<R> {
        let trait_id = match t.def_id {
            SolverDefId::TraitId(id) => id,
            _ => unreachable!(),
        };
        // Guards against revisiting a trait (diamond super-trait graphs and
        // cyclic bounds would otherwise loop forever).
        let mut checked_traits = FxHashSet::default();
        let mut check_trait = |trait_id: TraitId| {
            let name = &db.trait_signature(trait_id).name;
            tracing::debug!(?trait_id, ?name);
            if !checked_traits.insert(trait_id) {
                // Already visited — nothing new to offer.
                return None;
            }
            let data = trait_id.trait_items(db);
            tracing::debug!(?data.items);
            for (name, assoc_id) in &data.items {
                if let &AssocItemId::TypeAliasId(alias) = assoc_id
                    && let Some(ty) = check_alias(name, t, alias)
                {
                    return Some(ty);
                }
            }
            None
        };
        // Worklist of traits still to inspect, seeded with the starting trait.
        let mut stack: SmallVec<[_; 4]> = smallvec![trait_id];
        while let Some(trait_def_id) = stack.pop() {
            if let Some(alias) = check_trait(trait_def_id) {
                return Some(alias);
            }
            // Collect this trait's super-traits from its `Self`-bounds and
            // queue them for inspection.
            for pred in generic_predicates_filtered_by(
                db,
                GenericDefId::TraitId(trait_def_id),
                PredicateFilter::SelfTrait,
                |pred| pred == GenericDefId::TraitId(trait_def_id),
            )
            .0
            .deref()
            {
                tracing::debug!(?pred);
                // Only trait predicates contribute super-traits; skip the rest.
                let trait_id = match pred.kind().skip_binder() {
                    rustc_type_ir::ClauseKind::Trait(pred) => pred.def_id(),
                    _ => continue,
                };
                let trait_id = match trait_id {
                    SolverDefId::TraitId(trait_id) => trait_id,
                    _ => continue,
                };
                stack.push(trait_id);
            }
            tracing::debug!(?stack);
        }
        None
    };
    match res {
        TypeNs::SelfType(impl_id) => {
            let trait_ref = db.impl_trait_ns(impl_id)?;
            // FIXME(next-solver): same method in `lower` checks for impl or not
            // Is that needed here?
            // we're _in_ the impl -- the binders get added back later. Correct,
            // but it would be nice to make this more explicit
            search(trait_ref.skip_binder())
        }
        TypeNs::GenericParam(param_id) => {
            // Handle `Self::Type` referring to own associated type in trait definitions.
            // This *must* be done first to avoid cycles with
            // `generic_predicates_for_param`, though it is not certain that it
            // is sufficient in all cases.
            if let GenericDefId::TraitId(trait_id) = param_id.parent() {
                let trait_name = &db.trait_signature(trait_id).name;
                tracing::debug!(?trait_name);
                let trait_generics = generics(db, trait_id.into());
                tracing::debug!(?trait_generics);
                // `param_id` is the trait's implicit `Self` parameter: search
                // the trait itself with its identity substitution.
                if trait_generics[param_id.local_id()].is_trait_self() {
                    let args = crate::next_solver::GenericArgs::identity_for_item(
                        interner,
                        trait_id.into(),
                    );
                    let trait_ref = TraitRef::new_from_args(interner, trait_id.into(), args);
                    tracing::debug!(?args, ?trait_ref);
                    return search(trait_ref);
                }
            }
            // Otherwise, look through the `where`-clauses constraining this
            // type parameter for a trait bound to search.
            let predicates =
                db.generic_predicates_for_param_ns(def, param_id.into(), assoc_name.clone());
            predicates
                .iter()
                .find_map(|pred| match (*pred).kind().skip_binder() {
                    rustc_type_ir::ClauseKind::Trait(trait_predicate) => Some(trait_predicate),
                    _ => None,
                })
                .and_then(|trait_predicate| {
                    let trait_ref = trait_predicate.trait_ref;
                    assert!(
                        !trait_ref.has_escaping_bound_vars(),
                        "FIXME unexpected higher-ranked trait bound"
                    );
                    search(trait_ref)
                })
        }
        _ => None,
    }
}

View file

@ -4,7 +4,7 @@ use std::ops::Deref;
use either::Either;
use hir_def::{
AssocItemId, GenericDefId, GenericParamId, Lookup, TraitId,
AssocItemId, GenericDefId, GenericParamId, Lookup, TraitId, TypeAliasId,
builtin_type::BuiltinType,
expr_store::{
ExpressionStore, HygieneId,
@ -17,6 +17,7 @@ use hir_def::{
signatures::TraitFlags,
type_ref::{TypeRef, TypeRefId},
};
use hir_expand::name::Name;
use intern::sym;
use rustc_hash::FxHashSet;
use rustc_type_ir::{
@ -33,7 +34,10 @@ use crate::{
db::HirDatabase,
generics::{Generics, generics},
lower::PathDiagnosticCallbackData,
lower_nextsolver::{LifetimeElisionKind, PredicateFilter, generic_predicates_filtered_by},
lower_nextsolver::{
LifetimeElisionKind, PredicateFilter, generic_predicates_filtered_by,
named_associated_type_shorthand_candidates,
},
next_solver::{
AdtDef, Binder, Clause, Const, DbInterner, ErrorGuaranteed, Predicate, ProjectionPredicate,
Region, SolverDefId, TraitRef, Ty,
@ -501,137 +505,40 @@ impl<'a, 'b, 'db> PathLoweringContext<'a, 'b, 'db> {
let Some(res) = res else {
return Ty::new_error(self.ctx.interner, ErrorGuaranteed);
};
let segment = self.current_or_prev_segment;
let assoc_name = segment.name;
let db = self.ctx.db;
let def = self.ctx.def;
let mut search = |t: TraitRef<'db>| {
let trait_id = match t.def_id {
SolverDefId::TraitId(id) => id,
_ => unreachable!(),
};
let mut checked_traits = FxHashSet::default();
let mut check_trait = |trait_id: TraitId| {
let name = &db.trait_signature(trait_id).name;
tracing::debug!(?trait_id, ?name);
if !checked_traits.insert(trait_id) {
return None;
}
let data = trait_id.trait_items(db);
tracing::debug!(?data.items);
for (name, assoc_id) in &data.items {
if let &AssocItemId::TypeAliasId(alias) = assoc_id {
if name != assoc_name {
continue;
}
// FIXME: `substs_from_path_segment()` pushes `TyKind::Error` for every parent
// generic params. It's inefficient to splice the `Substitution`s, so we may want
// that method to optionally take parent `Substitution` as we already know them at
// this point (`t.substitution`).
let substs = self.substs_from_path_segment(alias.into(), false, None, true);
let substs = crate::next_solver::GenericArgs::new_from_iter(
interner,
t.args.iter().chain(substs.iter().skip(t.args.len())),
);
return Some(Ty::new_alias(
interner,
AliasTyKind::Projection,
AliasTy::new(interner, alias.into(), substs),
));
}
}
None
};
let mut stack: SmallVec<[_; 4]> = smallvec![trait_id];
while let Some(trait_def_id) = stack.pop() {
if let Some(alias) = check_trait(trait_def_id) {
return alias;
}
for pred in generic_predicates_filtered_by(
db,
GenericDefId::TraitId(trait_def_id),
PredicateFilter::SelfTrait,
|pred| pred == GenericDefId::TraitId(trait_def_id),
)
.0
.deref()
{
tracing::debug!(?pred);
let trait_id = match pred.kind().skip_binder() {
rustc_type_ir::ClauseKind::Trait(pred) => pred.def_id(),
_ => continue,
};
let trait_id = match trait_id {
SolverDefId::TraitId(trait_id) => trait_id,
_ => continue,
};
stack.push(trait_id);
}
tracing::debug!(?stack);
let segment = self.current_or_prev_segment;
let assoc_name = segment.name;
let mut check_alias = |name: &Name, t: TraitRef<'db>, associated_ty: TypeAliasId| {
if name != assoc_name {
return None;
}
Ty::new_error(interner, ErrorGuaranteed)
// FIXME: `substs_from_path_segment()` pushes `TyKind::Error` for every parent
// generic params. It's inefficient to splice the `Substitution`s, so we may want
// that method to optionally take parent `Substitution` as we already know them at
// this point (`t.substitution`).
let substs = self.substs_from_path_segment(associated_ty.into(), false, None, true);
let substs = crate::next_solver::GenericArgs::new_from_iter(
interner,
t.args.iter().chain(substs.iter().skip(t.args.len())),
);
Some(Ty::new_alias(
interner,
AliasTyKind::Projection,
AliasTy::new(interner, associated_ty.into(), substs),
))
};
match res {
TypeNs::SelfType(impl_id) => {
let trait_ref = db.impl_trait_ns(impl_id);
let Some(trait_ref) = trait_ref else {
return Ty::new_error(interner, ErrorGuaranteed);
};
// we're _in_ the impl -- the binders get added back later. Correct,
// but it would be nice to make this more explicit
search(trait_ref.skip_binder())
}
TypeNs::GenericParam(param_id) => {
// Handle `Self::Type` referring to own associated type in trait definitions
// This *must* be done first to avoid cycles with
// `generic_predicates_for_param`, but not sure that it's sufficient,
// see FIXME in `search`.
if let GenericDefId::TraitId(trait_id) = param_id.parent() {
let trait_name = &db.trait_signature(trait_id).name;
tracing::debug!(?trait_name);
let trait_generics = generics(db, trait_id.into());
tracing::debug!(?trait_generics);
if trait_generics[param_id.local_id()].is_trait_self() {
let args = crate::next_solver::GenericArgs::identity_for_item(
interner,
trait_id.into(),
);
let trait_ref = TraitRef::new_from_args(interner, trait_id.into(), args);
tracing::debug!(?args, ?trait_ref);
return search(trait_ref);
}
}
let predicates = db.generic_predicates_for_param_ns(
def,
param_id.into(),
Some(segment.name.clone()),
);
predicates
.iter()
.find_map(|pred| match (*pred).kind().skip_binder() {
rustc_type_ir::ClauseKind::Trait(trait_predicate) => Some(trait_predicate),
_ => None,
})
.map(|trait_predicate| {
let trait_ref = trait_predicate.trait_ref;
assert!(
!trait_ref.has_escaping_bound_vars(),
"FIXME unexpected higher-ranked trait bound"
);
search(trait_ref)
})
.unwrap_or_else(|| Ty::new_error(interner, ErrorGuaranteed))
}
_ => Ty::new_error(interner, ErrorGuaranteed),
}
named_associated_type_shorthand_candidates(
interner,
def,
res,
Some(assoc_name.clone()),
check_alias,
)
.unwrap_or_else(|| Ty::new_error(interner, ErrorGuaranteed))
}
fn lower_path_inner(&mut self, typeable: TyDefId, infer_args: bool) -> Ty<'db> {

View file

@ -3,8 +3,6 @@
//! Chalk (in both directions); plus some helper functions for more specialized
//! conversions.
use chalk_solve::rust_ir;
use hir_def::{LifetimeParamId, TraitId, TypeAliasId, TypeOrConstParamId};
use salsa::{
Id,
@ -54,23 +52,6 @@ impl ToChalk for CallableDefId {
}
}
pub(crate) struct TypeAliasAsValue(pub(crate) TypeAliasId);
impl ToChalk for TypeAliasAsValue {
type Chalk = chalk_db::AssociatedTyValueId;
fn to_chalk(self, _db: &dyn HirDatabase) -> chalk_db::AssociatedTyValueId {
rust_ir::AssociatedTyValueId(self.0.as_id())
}
fn from_chalk(
_db: &dyn HirDatabase,
assoc_ty_value_id: chalk_db::AssociatedTyValueId,
) -> TypeAliasAsValue {
TypeAliasAsValue(TypeAliasId::from_id(assoc_ty_value_id.0))
}
}
impl From<OpaqueTyId> for crate::db::InternedOpaqueTyId {
fn from(id: OpaqueTyId) -> Self {
FromId::from_id(id.0)

View file

@ -107,7 +107,7 @@ pub enum OperandKind {
}
impl Operand {
fn from_concrete_const(data: Box<[u8]>, memory_map: MemoryMap, ty: Ty) -> Self {
fn from_concrete_const(data: Box<[u8]>, memory_map: MemoryMap<'static>, ty: Ty) -> Self {
Operand {
kind: OperandKind::Constant(intern_const_scalar(
ConstScalar::Bytes(data, memory_map),

View file

@ -25,21 +25,26 @@ use rustc_apfloat::{
ieee::{Half as f16, Quad as f128},
};
use rustc_hash::{FxHashMap, FxHashSet};
use rustc_type_ir::inherent::{AdtDef, IntoKind, SliceLike};
use span::FileId;
use stdx::never;
use syntax::{SyntaxNodePtr, TextRange};
use triomphe::Arc;
use crate::{
AliasTy, CallableDefId, ClosureId, ComplexMemoryMap, Const, ConstData, ConstScalar, FnDefId,
Interner, MemoryMap, Substitution, ToChalk, TraitEnvironment, Ty, TyBuilder, TyExt, TyKind,
AliasTy, CallableDefId, ClosureId, ComplexMemoryMap, Const, ConstData, ConstScalar, Interner,
MemoryMap, Substitution, ToChalk, TraitEnvironment, Ty, TyBuilder, TyExt, TyKind,
consteval::{ConstEvalError, intern_const_scalar, try_const_usize},
consteval_nextsolver,
db::{HirDatabase, InternedClosure},
display::{ClosureStyle, DisplayTarget, HirDisplay},
infer::PointerCast,
layout::{Layout, LayoutError, RustcEnumVariantIdx},
mapping::from_chalk,
method_resolution::{is_dyn_method, lookup_impl_const},
next_solver::{
Ctor, DbInterner, SolverDefId,
mapping::{ChalkToNextSolver, convert_args_for_result, convert_ty_for_result},
},
static_lifetime,
traits::FnTrait,
utils::{ClosureSubst, detect_variant_from_bytes},
@ -78,31 +83,31 @@ macro_rules! not_supported {
}
#[derive(Debug, Default, Clone, PartialEq, Eq)]
pub struct VTableMap {
ty_to_id: FxHashMap<Ty, usize>,
id_to_ty: Vec<Ty>,
pub struct VTableMap<'db> {
ty_to_id: FxHashMap<crate::next_solver::Ty<'db>, usize>,
id_to_ty: Vec<crate::next_solver::Ty<'db>>,
}
impl VTableMap {
impl<'db> VTableMap<'db> {
const OFFSET: usize = 1000; // We should add some offset to ids to make 0 (null) an invalid id.
fn id(&mut self, ty: Ty) -> usize {
fn id(&mut self, ty: crate::next_solver::Ty<'db>) -> usize {
if let Some(it) = self.ty_to_id.get(&ty) {
return *it;
}
let id = self.id_to_ty.len() + VTableMap::OFFSET;
self.id_to_ty.push(ty.clone());
self.id_to_ty.push(ty);
self.ty_to_id.insert(ty, id);
id
}
pub(crate) fn ty(&self, id: usize) -> Result<&Ty> {
pub(crate) fn ty(&self, id: usize) -> Result<crate::next_solver::Ty<'db>> {
id.checked_sub(VTableMap::OFFSET)
.and_then(|id| self.id_to_ty.get(id))
.and_then(|id| self.id_to_ty.get(id).copied())
.ok_or(MirEvalError::InvalidVTableId(id))
}
fn ty_of_bytes(&self, bytes: &[u8]) -> Result<&Ty> {
fn ty_of_bytes(&self, bytes: &[u8]) -> Result<crate::next_solver::Ty<'db>> {
let id = from_bytes!(usize, bytes);
self.ty(id)
}
@ -170,12 +175,12 @@ pub struct Evaluator<'a> {
/// We don't really have function pointers, i.e. pointers to some assembly instructions that we can run. Instead, we
/// store the type as an interned id in place of function and vtable pointers, and we recover back the type at the
/// time of use.
vtable_map: VTableMap,
vtable_map: VTableMap<'a>,
thread_local_storage: TlsData,
random_state: oorandom::Rand64,
stdout: Vec<u8>,
stderr: Vec<u8>,
layout_cache: RefCell<FxHashMap<Ty, Arc<Layout>>>,
layout_cache: RefCell<FxHashMap<crate::next_solver::Ty<'a>, Arc<Layout>>>,
projected_ty_cache: RefCell<FxHashMap<(Ty, PlaceElem), Ty>>,
not_special_fn_cache: RefCell<FxHashSet<FunctionId>>,
mir_or_dyn_index_cache: RefCell<FxHashMap<(FunctionId, Substitution), MirOrDynIndex>>,
@ -224,7 +229,7 @@ impl Interval {
Self { addr, size }
}
fn get<'a>(&self, memory: &'a Evaluator<'a>) -> Result<&'a [u8]> {
fn get<'a, 'db>(&self, memory: &'a Evaluator<'db>) -> Result<&'a [u8]> {
memory.read_memory(self.addr, self.size)
}
@ -242,7 +247,7 @@ impl Interval {
}
impl IntervalAndTy {
fn get<'a>(&self, memory: &'a Evaluator<'a>) -> Result<&'a [u8]> {
fn get<'a, 'db>(&self, memory: &'a Evaluator<'db>) -> Result<&'a [u8]> {
memory.read_memory(self.interval.addr, self.interval.size)
}
@ -269,7 +274,7 @@ impl From<Interval> for IntervalOrOwned {
}
impl IntervalOrOwned {
fn get<'a>(&'a self, memory: &'a Evaluator<'a>) -> Result<&'a [u8]> {
fn get<'a, 'db>(&'a self, memory: &'a Evaluator<'db>) -> Result<&'a [u8]> {
Ok(match self {
IntervalOrOwned::Owned(o) => o,
IntervalOrOwned::Borrowed(b) => b.get(memory)?,
@ -608,7 +613,13 @@ pub fn interpret_mir(
memory_map.vtable.shrink_to_fit();
MemoryMap::Complex(Box::new(memory_map))
};
Ok(intern_const_scalar(ConstScalar::Bytes(bytes, memory_map), ty))
// SAFETY: will never use this without a db
Ok(intern_const_scalar(
ConstScalar::Bytes(bytes, unsafe {
std::mem::transmute::<MemoryMap<'_>, MemoryMap<'static>>(memory_map)
}),
ty,
))
})();
Ok((it, MirOutput { stdout: evaluator.stdout, stderr: evaluator.stderr }))
}
@ -618,7 +629,7 @@ const EXECUTION_LIMIT: usize = 100_000;
#[cfg(not(test))]
const EXECUTION_LIMIT: usize = 10_000_000;
impl Evaluator<'_> {
impl<'db> Evaluator<'db> {
pub fn new(
db: &dyn HirDatabase,
owner: DefWithBodyId,
@ -719,6 +730,7 @@ impl Evaluator<'_> {
p: &Place,
locals: &'a Locals,
) -> Result<(Address, Ty, Option<IntervalOrOwned>)> {
let interner = DbInterner::new_with(self.db, None, None);
let mut addr = locals.ptr[p.local].addr;
let mut ty: Ty = locals.body.locals[p.local].ty.clone();
let mut metadata: Option<IntervalOrOwned> = None; // locals are always sized
@ -791,19 +803,19 @@ impl Evaluator<'_> {
addr = addr.offset(ty_size * (from as usize));
}
&ProjectionElem::ClosureField(f) => {
let layout = self.layout(&prev_ty)?;
let layout = self.layout(prev_ty.to_nextsolver(interner))?;
let offset = layout.fields.offset(f).bytes_usize();
addr = addr.offset(offset);
metadata = None;
}
ProjectionElem::Field(Either::Right(f)) => {
let layout = self.layout(&prev_ty)?;
let layout = self.layout(prev_ty.to_nextsolver(interner))?;
let offset = layout.fields.offset(f.index as usize).bytes_usize();
addr = addr.offset(offset);
metadata = None; // tuple field is always sized FIXME: This is wrong, the tail can be unsized
}
ProjectionElem::Field(Either::Left(f)) => {
let layout = self.layout(&prev_ty)?;
let layout = self.layout(prev_ty.to_nextsolver(interner))?;
let variant_layout = match &layout.variants {
Variants::Single { .. } | Variants::Empty => &layout,
Variants::Multiple { variants, .. } => {
@ -835,20 +847,28 @@ impl Evaluator<'_> {
Ok((addr, ty, metadata))
}
fn layout(&self, ty: &Ty) -> Result<Arc<Layout>> {
if let Some(x) = self.layout_cache.borrow().get(ty) {
fn layout(&self, ty: crate::next_solver::Ty<'db>) -> Result<Arc<Layout>> {
if let Some(x) = self.layout_cache.borrow().get(&ty) {
return Ok(x.clone());
}
let interner = DbInterner::new_with(self.db, None, None);
let r = self
.db
.layout_of_ty(ty.clone(), self.trait_env.clone())
.map_err(|e| MirEvalError::LayoutError(e, ty.clone()))?;
self.layout_cache.borrow_mut().insert(ty.clone(), r.clone());
.layout_of_ty(ty, self.trait_env.clone())
.map_err(|e| MirEvalError::LayoutError(e, convert_ty_for_result(interner, ty)))?;
self.layout_cache.borrow_mut().insert(ty, r.clone());
Ok(r)
}
fn layout_adt(&self, adt: AdtId, subst: Substitution) -> Result<Arc<Layout>> {
self.layout(&TyKind::Adt(chalk_ir::AdtId(adt), subst).intern(Interner))
let interner = DbInterner::new_with(self.db, None, None);
self.layout(crate::next_solver::Ty::new(
interner,
rustc_type_ir::TyKind::Adt(
crate::next_solver::AdtDef::new(adt, interner),
subst.to_nextsolver(interner),
),
))
}
fn place_ty<'a>(&'a self, p: &Place, locals: &'a Locals) -> Result<Ty> {
@ -952,7 +972,7 @@ impl Evaluator<'_> {
)?
}
TyKind::FnDef(def, generic_args) => self.exec_fn_def(
*def,
CallableDefId::from_chalk(self.db, *def),
generic_args,
destination_interval,
&args,
@ -1113,6 +1133,7 @@ impl Evaluator<'_> {
}
fn eval_rvalue(&mut self, r: &Rvalue, locals: &mut Locals) -> Result<IntervalOrOwned> {
let interner = DbInterner::new_with(self.db, None, None);
use IntervalOrOwned::*;
Ok(match r {
Rvalue::Use(it) => Borrowed(self.eval_operand(it, locals)?),
@ -1436,7 +1457,7 @@ impl Evaluator<'_> {
Owned(r)
}
AggregateKind::Tuple(ty) => {
let layout = self.layout(ty)?;
let layout = self.layout(ty.to_nextsolver(interner))?;
Owned(self.construct_with_layout(
layout.size.bytes_usize(),
&layout,
@ -1467,7 +1488,7 @@ impl Evaluator<'_> {
)?)
}
AggregateKind::Closure(ty) => {
let layout = self.layout(ty)?;
let layout = self.layout(ty.to_nextsolver(interner))?;
Owned(self.construct_with_layout(
layout.size.bytes_usize(),
&layout,
@ -1484,6 +1505,8 @@ impl Evaluator<'_> {
if let TyKind::FnDef(_, _) | TyKind::Closure(_, _) =
&current_ty.kind(Interner)
{
let interner = DbInterner::new_with(self.db, None, None);
let current_ty = current_ty.to_nextsolver(interner);
let id = self.vtable_map.id(current_ty);
let ptr_size = self.ptr_size();
Owned(id.to_le_bytes()[0..ptr_size].to_vec())
@ -1623,7 +1646,8 @@ impl Evaluator<'_> {
}
fn compute_discriminant(&self, ty: Ty, bytes: &[u8]) -> Result<i128> {
let layout = self.layout(&ty)?;
let interner = DbInterner::new_with(self.db, None, None);
let layout = self.layout(ty.to_nextsolver(interner))?;
let &TyKind::Adt(chalk_ir::AdtId(AdtId::EnumId(e)), _) = ty.kind(Interner) else {
return Ok(0);
};
@ -1732,6 +1756,8 @@ impl Evaluator<'_> {
}
},
TyKind::Dyn(_) => {
let interner = DbInterner::new_with(self.db, None, None);
let current_ty = current_ty.to_nextsolver(interner);
let vtable = self.vtable_map.id(current_ty);
let mut r = Vec::with_capacity(16);
let addr = addr.get(self)?;
@ -1777,6 +1803,7 @@ impl Evaluator<'_> {
subst: Substitution,
locals: &Locals,
) -> Result<(usize, Arc<Layout>, Option<(usize, usize, i128)>)> {
let interner = DbInterner::new_with(self.db, None, None);
let adt = it.adt_id(self.db);
if let DefWithBodyId::VariantId(f) = locals.body.owner
&& let VariantId::EnumVariantId(it) = it
@ -1786,7 +1813,11 @@ impl Evaluator<'_> {
// Computing the exact size of enums require resolving the enum discriminants. In order to prevent loops (and
// infinite sized type errors) we use a dummy layout
let i = self.const_eval_discriminant(it)?;
return Ok((16, self.layout(&TyBuilder::unit())?, Some((0, 16, i))));
return Ok((
16,
self.layout(crate::next_solver::Ty::new_empty_tuple(interner))?,
Some((0, 16, i)),
));
}
let layout = self.layout_adt(adt, subst)?;
Ok(match &layout.variants {
@ -1885,6 +1916,7 @@ impl Evaluator<'_> {
#[allow(clippy::double_parens)]
fn allocate_const_in_heap(&mut self, locals: &Locals, konst: &Const) -> Result<Interval> {
let interner = DbInterner::new_with(self.db, None, None);
let ConstData { ty, value: chalk_ir::ConstValue::Concrete(c) } = &konst.data(Interner)
else {
not_supported!("evaluating non concrete constant");
@ -1945,7 +1977,7 @@ impl Evaluator<'_> {
MemoryMap::Complex(cm) => cm.vtable.ty_of_bytes(bytes),
},
addr,
ty,
ty.to_nextsolver(interner),
locals,
)?;
Ok(Interval::new(addr, size))
@ -2048,7 +2080,8 @@ impl Evaluator<'_> {
}
fn size_align_of(&self, ty: &Ty, locals: &Locals) -> Result<Option<(usize, usize)>> {
if let Some(layout) = self.layout_cache.borrow().get(ty) {
let interner = DbInterner::new_with(self.db, None, None);
if let Some(layout) = self.layout_cache.borrow().get(&ty.to_nextsolver(interner)) {
return Ok(layout
.is_sized()
.then(|| (layout.size.bytes_usize(), layout.align.abi.bytes() as usize)));
@ -2061,7 +2094,7 @@ impl Evaluator<'_> {
// infinite sized type errors) we use a dummy size
return Ok(Some((16, 16)));
}
let layout = self.layout(ty);
let layout = self.layout(ty.to_nextsolver(interner));
if self.assert_placeholder_ty_is_unused
&& matches!(layout, Err(MirEvalError::LayoutError(LayoutError::HasPlaceholder, _)))
{
@ -2129,15 +2162,16 @@ impl Evaluator<'_> {
bytes: &[u8],
ty: &Ty,
locals: &Locals,
) -> Result<ComplexMemoryMap> {
fn rec(
this: &Evaluator<'_>,
) -> Result<ComplexMemoryMap<'db>> {
fn rec<'db>(
this: &Evaluator<'db>,
bytes: &[u8],
ty: &Ty,
locals: &Locals,
mm: &mut ComplexMemoryMap,
mm: &mut ComplexMemoryMap<'db>,
stack_depth_limit: usize,
) -> Result<()> {
let interner = DbInterner::new_with(this.db, None, None);
if stack_depth_limit.checked_sub(1).is_none() {
return Err(MirEvalError::StackOverflow);
}
@ -2158,13 +2192,14 @@ impl Evaluator<'_> {
let element_size = match t.kind(Interner) {
TyKind::Str => 1,
TyKind::Slice(t) => {
check_inner = Some(t);
check_inner = Some(t.clone());
this.size_of_sized(t, locals, "slice inner type")?
}
TyKind::Dyn(_) => {
let t = this.vtable_map.ty_of_bytes(meta)?;
check_inner = Some(t);
this.size_of_sized(t, locals, "dyn concrete type")?
let t = convert_ty_for_result(interner, t);
check_inner = Some(t.clone());
this.size_of_sized(&t, locals, "dyn concrete type")?
}
_ => return Ok(()),
};
@ -2176,7 +2211,7 @@ impl Evaluator<'_> {
let addr = Address::from_bytes(addr)?;
let b = this.read_memory(addr, size)?;
mm.insert(addr.to_usize(), b.into());
if let Some(ty) = check_inner {
if let Some(ty) = &check_inner {
for i in 0..count {
let offset = element_size * i;
rec(
@ -2211,11 +2246,11 @@ impl Evaluator<'_> {
}
}
TyKind::Tuple(_, subst) => {
let layout = this.layout(ty)?;
let layout = this.layout(ty.to_nextsolver(interner))?;
for (id, ty) in subst.iter(Interner).enumerate() {
let ty = ty.assert_ty_ref(Interner); // Tuple only has type argument
let offset = layout.fields.offset(id).bytes_usize();
let size = this.layout(ty)?.size.bytes_usize();
let size = this.layout(ty.to_nextsolver(interner))?.size.bytes_usize();
rec(
this,
&bytes[offset..offset + size],
@ -2229,7 +2264,7 @@ impl Evaluator<'_> {
TyKind::Adt(adt, subst) => match adt.0 {
AdtId::StructId(s) => {
let data = s.fields(this.db);
let layout = this.layout(ty)?;
let layout = this.layout(ty.to_nextsolver(interner))?;
let field_types = this.db.field_types(s.into());
for (f, _) in data.fields().iter() {
let offset = layout
@ -2237,7 +2272,7 @@ impl Evaluator<'_> {
.offset(u32::from(f.into_raw()) as usize)
.bytes_usize();
let ty = &field_types[f].clone().substitute(Interner, subst);
let size = this.layout(ty)?.size.bytes_usize();
let size = this.layout(ty.to_nextsolver(interner))?.size.bytes_usize();
rec(
this,
&bytes[offset..offset + size],
@ -2249,7 +2284,7 @@ impl Evaluator<'_> {
}
}
AdtId::EnumId(e) => {
let layout = this.layout(ty)?;
let layout = this.layout(ty.to_nextsolver(interner))?;
if let Some((v, l)) = detect_variant_from_bytes(
&layout,
this.db,
@ -2263,7 +2298,8 @@ impl Evaluator<'_> {
let offset =
l.fields.offset(u32::from(f.into_raw()) as usize).bytes_usize();
let ty = &field_types[f].clone().substitute(Interner, subst);
let size = this.layout(ty)?.size.bytes_usize();
let size =
this.layout(ty.to_nextsolver(interner))?.size.bytes_usize();
rec(
this,
&bytes[offset..offset + size],
@ -2290,20 +2326,26 @@ impl Evaluator<'_> {
Ok(mm)
}
fn patch_addresses<'vtable>(
fn patch_addresses(
&mut self,
patch_map: &FxHashMap<usize, usize>,
ty_of_bytes: impl Fn(&[u8]) -> Result<&'vtable Ty> + Copy,
ty_of_bytes: impl Fn(&[u8]) -> Result<crate::next_solver::Ty<'db>> + Copy,
addr: Address,
ty: &Ty,
ty: crate::next_solver::Ty<'db>,
locals: &Locals,
) -> Result<()> {
let interner = DbInterner::new_with(self.db, None, None);
// FIXME: support indirect references
let layout = self.layout(ty)?;
let my_size = self.size_of_sized(ty, locals, "value to patch address")?;
match ty.kind(Interner) {
TyKind::Ref(_, _, t) => {
let size = self.size_align_of(t, locals)?;
let my_size = self.size_of_sized(
&convert_ty_for_result(interner, ty),
locals,
"value to patch address",
)?;
use rustc_type_ir::TyKind;
match ty.kind() {
TyKind::Ref(_, t, _) => {
let size = self.size_align_of(&convert_ty_for_result(interner, t), locals)?;
match size {
Some(_) => {
let current = from_bytes!(usize, self.read_memory(addr, my_size)?);
@ -2319,27 +2361,27 @@ impl Evaluator<'_> {
}
}
}
TyKind::Function(_) => {
let ty = ty_of_bytes(self.read_memory(addr, my_size)?)?.clone();
TyKind::FnPtr(_, _) => {
let ty = ty_of_bytes(self.read_memory(addr, my_size)?)?;
let new_id = self.vtable_map.id(ty);
self.write_memory(addr, &new_id.to_le_bytes())?;
}
TyKind::Adt(id, subst) => match id.0 {
AdtId::StructId(s) => {
for (i, (_, ty)) in self.db.field_types(s.into()).iter().enumerate() {
TyKind::Adt(id, args) => match id.def_id() {
SolverDefId::AdtId(AdtId::StructId(s)) => {
for (i, (_, ty)) in self.db.field_types_ns(s.into()).iter().enumerate() {
let offset = layout.fields.offset(i).bytes_usize();
let ty = ty.clone().substitute(Interner, subst);
let ty = ty.instantiate(interner, args);
self.patch_addresses(
patch_map,
ty_of_bytes,
addr.offset(offset),
&ty,
ty,
locals,
)?;
}
}
AdtId::UnionId(_) => (),
AdtId::EnumId(e) => {
SolverDefId::AdtId(AdtId::UnionId(_)) => (),
SolverDefId::AdtId(AdtId::EnumId(e)) => {
if let Some((ev, layout)) = detect_variant_from_bytes(
&layout,
self.db,
@ -2347,33 +2389,37 @@ impl Evaluator<'_> {
self.read_memory(addr, layout.size.bytes_usize())?,
e,
) {
for (i, (_, ty)) in self.db.field_types(ev.into()).iter().enumerate() {
for (i, (_, ty)) in self.db.field_types_ns(ev.into()).iter().enumerate() {
let offset = layout.fields.offset(i).bytes_usize();
let ty = ty.clone().substitute(Interner, subst);
let ty = ty.instantiate(interner, args);
self.patch_addresses(
patch_map,
ty_of_bytes,
addr.offset(offset),
&ty,
ty,
locals,
)?;
}
}
}
_ => unreachable!(),
},
TyKind::Tuple(_, subst) => {
for (id, ty) in subst.iter(Interner).enumerate() {
let ty = ty.assert_ty_ref(Interner); // Tuple only has type argument
TyKind::Tuple(tys) => {
for (id, ty) in tys.iter().enumerate() {
let offset = layout.fields.offset(id).bytes_usize();
self.patch_addresses(patch_map, ty_of_bytes, addr.offset(offset), ty, locals)?;
}
}
TyKind::Array(inner, len) => {
let len = match try_const_usize(self.db, len) {
let len = match consteval_nextsolver::try_const_usize(self.db, len) {
Some(it) => it as usize,
None => not_supported!("non evaluatable array len in patching addresses"),
};
let size = self.size_of_sized(inner, locals, "inner of array")?;
let size = self.size_of_sized(
&convert_ty_for_result(interner, inner),
locals,
"inner of array",
)?;
for i in 0..len {
self.patch_addresses(
patch_map,
@ -2384,11 +2430,13 @@ impl Evaluator<'_> {
)?;
}
}
TyKind::AssociatedType(_, _)
| TyKind::Scalar(_)
TyKind::Bool
| TyKind::Char
| TyKind::Int(_)
| TyKind::Uint(_)
| TyKind::Float(_)
| TyKind::Slice(_)
| TyKind::Raw(_, _)
| TyKind::OpaqueType(_, _)
| TyKind::RawPtr(_, _)
| TyKind::FnDef(_, _)
| TyKind::Str
| TyKind::Never
@ -2396,12 +2444,16 @@ impl Evaluator<'_> {
| TyKind::Coroutine(_, _)
| TyKind::CoroutineWitness(_, _)
| TyKind::Foreign(_)
| TyKind::Error
| TyKind::Error(_)
| TyKind::Placeholder(_)
| TyKind::Dyn(_)
| TyKind::Alias(_)
| TyKind::BoundVar(_)
| TyKind::InferenceVar(_, _) => (),
| TyKind::Dynamic(_, _, _)
| TyKind::Alias(_, _)
| TyKind::Bound(_, _)
| TyKind::Infer(_)
| TyKind::Pat(_, _)
| TyKind::Param(_)
| TyKind::UnsafeBinder(_)
| TyKind::CoroutineClosure(_, _) => (),
}
Ok(())
}
@ -2416,13 +2468,41 @@ impl Evaluator<'_> {
span: MirSpan,
) -> Result<Option<StackFrame>> {
let id = from_bytes!(usize, bytes.get(self)?);
let next_ty = self.vtable_map.ty(id)?.clone();
match next_ty.kind(Interner) {
let next_ty = self.vtable_map.ty(id)?;
let interner = DbInterner::new_with(self.db, None, None);
use rustc_type_ir::TyKind;
match next_ty.kind() {
TyKind::FnDef(def, generic_args) => {
self.exec_fn_def(*def, generic_args, destination, args, locals, target_bb, span)
let def = match def {
SolverDefId::FunctionId(id) => CallableDefId::FunctionId(id),
SolverDefId::Ctor(Ctor::Struct(s)) => CallableDefId::StructId(s),
SolverDefId::Ctor(Ctor::Enum(e)) => CallableDefId::EnumVariantId(e),
_ => unreachable!(),
};
self.exec_fn_def(
def,
&convert_args_for_result(interner, generic_args.as_slice()),
destination,
args,
locals,
target_bb,
span,
)
}
TyKind::Closure(id, subst) => {
self.exec_closure(*id, bytes.slice(0..0), subst, destination, args, locals, span)
TyKind::Closure(id, generic_args) => {
let id = match id {
SolverDefId::InternedClosureId(id) => id,
_ => unreachable!(),
};
self.exec_closure(
id.into(),
bytes.slice(0..0),
&convert_args_for_result(interner, generic_args.as_slice()),
destination,
args,
locals,
span,
)
}
_ => Err(MirEvalError::InternalError("function pointer to non function".into())),
}
@ -2469,7 +2549,7 @@ impl Evaluator<'_> {
fn exec_fn_def(
&mut self,
def: FnDefId,
def: CallableDefId,
generic_args: &Substitution,
destination: Interval,
args: &[IntervalAndTy],
@ -2477,7 +2557,6 @@ impl Evaluator<'_> {
target_bb: Option<BasicBlockId>,
span: MirSpan,
) -> Result<Option<StackFrame>> {
let def: CallableDefId = from_chalk(self.db, def);
let generic_args = generic_args.clone();
match def {
CallableDefId::FunctionId(def) => {
@ -2574,6 +2653,7 @@ impl Evaluator<'_> {
target_bb: Option<BasicBlockId>,
span: MirSpan,
) -> Result<Option<StackFrame>> {
let interner = DbInterner::new_with(self.db, None, None);
if self.detect_and_exec_special_function(
def,
args,
@ -2600,6 +2680,7 @@ impl Evaluator<'_> {
.vtable_map
.ty_of_bytes(&first_arg[self.ptr_size()..self.ptr_size() * 2])?;
let mut args_for_target = args.to_vec();
let ty = convert_ty_for_result(interner, ty);
args_for_target[0] = IntervalAndTy {
interval: args_for_target[0].interval.slice(0..self.ptr_size()),
ty: ty.clone(),
@ -2672,6 +2753,7 @@ impl Evaluator<'_> {
target_bb: Option<BasicBlockId>,
span: MirSpan,
) -> Result<Option<StackFrame>> {
let interner = DbInterner::new_with(self.db, None, None);
let func = args
.first()
.ok_or_else(|| MirEvalError::InternalError("fn trait with no arg".into()))?;
@ -2683,15 +2765,21 @@ impl Evaluator<'_> {
let id =
from_bytes!(usize, &func_data.get(self)?[self.ptr_size()..self.ptr_size() * 2]);
func_data = func_data.slice(0..self.ptr_size());
func_ty = self.vtable_map.ty(id)?.clone();
func_ty = convert_ty_for_result(interner, self.vtable_map.ty(id)?);
}
let size = self.size_of_sized(&func_ty, locals, "self type of fn trait")?;
func_data = Interval { addr: Address::from_bytes(func_data.get(self)?)?, size };
}
match &func_ty.kind(Interner) {
TyKind::FnDef(def, subst) => {
self.exec_fn_def(*def, subst, destination, &args[1..], locals, target_bb, span)
}
TyKind::FnDef(def, subst) => self.exec_fn_def(
CallableDefId::from_chalk(self.db, *def),
subst,
destination,
&args[1..],
locals,
target_bb,
span,
),
TyKind::Function(_) => {
self.exec_fn_pointer(func_data, destination, &args[1..], locals, target_bb, span)
}
@ -2714,7 +2802,7 @@ impl Evaluator<'_> {
Substitution::from_iter(Interner, args.iter().map(|it| it.ty.clone())),
)
.intern(Interner);
let layout = self.layout(&ty)?;
let layout = self.layout(ty.to_nextsolver(interner))?;
let result = self.construct_with_layout(
layout.size.bytes_usize(),
&layout,
@ -2901,6 +2989,7 @@ pub fn render_const_using_debug_impl(
owner: DefWithBodyId,
c: &Const,
) -> Result<String> {
let interner = DbInterner::new_with(db, None, None);
let mut evaluator = Evaluator::new(db, owner, false, None)?;
let locals = &Locals {
ptr: ArenaMap::new(),
@ -2933,7 +3022,8 @@ pub fn render_const_using_debug_impl(
CallableDefId::FunctionId(debug_fmt_fn).to_chalk(db),
Substitution::from1(Interner, c.data(Interner).ty.clone()),
)
.intern(Interner));
.intern(Interner)
.to_nextsolver(interner));
evaluator.write_memory(a2.offset(evaluator.ptr_size()), &debug_fmt_fn_ptr.to_le_bytes())?;
// a3 = ::core::fmt::Arguments::new_v1(a1, a2)
// FIXME: similarly, we should call function here, not directly working with memory.

View file

@ -23,6 +23,10 @@ use crate::{
LangItem, Layout, Locals, Lookup, MirEvalError, MirSpan, Mutability, Result, Substitution,
Ty, TyBuilder, TyExt, pad16,
},
next_solver::{
DbInterner,
mapping::{ChalkToNextSolver, convert_ty_for_result},
},
};
mod simd;
@ -171,6 +175,7 @@ impl Evaluator<'_> {
destination: Interval,
span: MirSpan,
) -> Result<()> {
let interner = DbInterner::new_with(self.db, None, None);
match self_ty.kind(Interner) {
TyKind::Function(_) => {
let [arg] = args else {
@ -188,7 +193,7 @@ impl Evaluator<'_> {
let InternedClosure(closure_owner, _) = self.db.lookup_intern_closure((*id).into());
let infer = self.db.infer(closure_owner);
let (captures, _) = infer.closure_info(id);
let layout = self.layout(&self_ty)?;
let layout = self.layout(self_ty.to_nextsolver(interner))?;
let ty_iter = captures.iter().map(|c| c.ty(subst));
self.exec_clone_for_fields(ty_iter, layout, addr, def, locals, destination, span)?;
}
@ -197,7 +202,7 @@ impl Evaluator<'_> {
not_supported!("wrong arg count for clone");
};
let addr = Address::from_bytes(arg.get(self)?)?;
let layout = self.layout(&self_ty)?;
let layout = self.layout(self_ty.to_nextsolver(interner))?;
let ty_iter = subst.iter(Interner).map(|ga| ga.assert_ty_ref(Interner).clone());
self.exec_clone_for_fields(ty_iter, layout, addr, def, locals, destination, span)?;
}
@ -226,8 +231,9 @@ impl Evaluator<'_> {
destination: Interval,
span: MirSpan,
) -> Result<()> {
let interner = DbInterner::new_with(self.db, None, None);
for (i, ty) in ty_iter.enumerate() {
let size = self.layout(&ty)?.size.bytes_usize();
let size = self.layout(ty.to_nextsolver(interner))?.size.bytes_usize();
let tmp = self.heap_allocate(self.ptr_size(), self.ptr_size())?;
let arg = IntervalAndTy {
interval: Interval { addr: tmp, size: self.ptr_size() },
@ -592,6 +598,7 @@ impl Evaluator<'_> {
span: MirSpan,
needs_override: bool,
) -> Result<bool> {
let interner = DbInterner::new_with(self.db, None, None);
if let Some(name) = name.strip_prefix("atomic_") {
return self
.exec_atomic_intrinsic(name, args, generic_args, destination, locals, span)
@ -769,7 +776,7 @@ impl Evaluator<'_> {
"align_of generic arg is not provided".into(),
));
};
let align = self.layout(ty)?.align.abi.bytes();
let align = self.layout(ty.to_nextsolver(interner))?.align.abi.bytes();
destination.write_from_bytes(self, &align.to_le_bytes()[0..destination.size])
}
"size_of_val" => {
@ -1025,7 +1032,7 @@ impl Evaluator<'_> {
let is_overflow = u128overflow
|| ans.to_le_bytes()[op_size..].iter().any(|&it| it != 0 && it != 255);
let is_overflow = vec![u8::from(is_overflow)];
let layout = self.layout(&result_ty)?;
let layout = self.layout(result_ty.to_nextsolver(interner))?;
let result = self.construct_with_layout(
layout.size.bytes_usize(),
&layout,
@ -1249,7 +1256,7 @@ impl Evaluator<'_> {
"const_eval_select arg[0] is not a tuple".into(),
));
};
let layout = self.layout(&tuple.ty)?;
let layout = self.layout(tuple.ty.to_nextsolver(interner))?;
for (i, field) in fields.iter(Interner).enumerate() {
let field = field.assert_ty_ref(Interner).clone();
let offset = layout.fields.offset(i).bytes_usize();
@ -1408,6 +1415,7 @@ impl Evaluator<'_> {
metadata: Interval,
locals: &Locals,
) -> Result<(usize, usize)> {
let interner = DbInterner::new_with(self.db, None, None);
Ok(match ty.kind(Interner) {
TyKind::Str => (from_bytes!(usize, metadata.get(self)?), 1),
TyKind::Slice(inner) => {
@ -1416,7 +1424,7 @@ impl Evaluator<'_> {
(size * len, align)
}
TyKind::Dyn(_) => self.size_align_of_sized(
self.vtable_map.ty_of_bytes(metadata.get(self)?)?,
&convert_ty_for_result(interner, self.vtable_map.ty_of_bytes(metadata.get(self)?)?),
locals,
"dyn concrete type",
)?,
@ -1463,6 +1471,7 @@ impl Evaluator<'_> {
locals: &Locals,
_span: MirSpan,
) -> Result<()> {
let interner = DbInterner::new_with(self.db, None, None);
// We are a single threaded runtime with no UB checking and no optimization, so
// we can implement atomic intrinsics as normal functions.
@ -1560,7 +1569,7 @@ impl Evaluator<'_> {
Substitution::from_iter(Interner, [ty.clone(), TyBuilder::bool()]),
)
.intern(Interner);
let layout = self.layout(&result_ty)?;
let layout = self.layout(result_ty.to_nextsolver(interner))?;
let result = self.construct_with_layout(
layout.size.bytes_usize(),
&layout,

View file

@ -37,7 +37,7 @@ fn eval_main(db: &TestDB, file_id: EditionedFileId) -> Result<(String, String),
)
.map_err(|e| MirEvalError::MirLowerError(func_id, e))?;
let (result, output) = interpret_mir(db, body, false, None)?;
let (result, output) = salsa::attach(db, || interpret_mir(db, body, false, None))?;
result?;
Ok((output.stdout().into_owned(), output.stderr().into_owned()))
}

View file

@ -43,6 +43,7 @@ use crate::{
Terminator, TerminatorKind, TupleFieldId, Ty, UnOp, VariantId, intern_const_scalar,
return_slot,
},
next_solver::{DbInterner, mapping::ChalkToNextSolver},
static_lifetime,
traits::FnTrait,
utils::ClosureSubst,
@ -1411,8 +1412,12 @@ impl<'ctx> MirLowerCtx<'ctx> {
}
fn lower_literal_to_operand(&mut self, ty: Ty, l: &Literal) -> Result<Operand> {
let size =
|| self.db.layout_of_ty(ty.clone(), self.env.clone()).map(|it| it.size.bytes_usize());
let interner = DbInterner::new_with(self.db, None, None);
let size = || {
self.db
.layout_of_ty(ty.to_nextsolver(interner), self.env.clone())
.map(|it| it.size.bytes_usize())
};
const USIZE_SIZE: usize = size_of::<usize>();
let bytes: Box<[_]> = match l {
hir_def::hir::Literal::String(b) => {

View file

@ -103,7 +103,7 @@ pub struct ValueConst<'db> {
}
impl<'db> ValueConst<'db> {
pub fn new(ty: Ty<'db>, bytes: ConstBytes) -> Self {
pub fn new(ty: Ty<'db>, bytes: ConstBytes<'db>) -> Self {
let value = Valtree::new(bytes);
ValueConst { ty, value }
}
@ -141,9 +141,9 @@ impl<'db> rustc_type_ir::TypeFoldable<DbInterner<'db>> for ValueConst<'db> {
}
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct ConstBytes(pub Box<[u8]>, pub MemoryMap);
pub struct ConstBytes<'db>(pub Box<[u8]>, pub MemoryMap<'db>);
impl Hash for ConstBytes {
impl Hash for ConstBytes<'_> {
fn hash<H: std::hash::Hasher>(&self, state: &mut H) {
self.0.hash(state)
}
@ -152,11 +152,11 @@ impl Hash for ConstBytes {
#[salsa::interned(constructor = new_, debug)]
pub struct Valtree<'db> {
#[returns(ref)]
bytes_: ConstBytes,
bytes_: ConstBytes<'db>,
}
impl<'db> Valtree<'db> {
pub fn new(bytes: ConstBytes) -> Self {
pub fn new(bytes: ConstBytes<'db>) -> Self {
salsa::with_attached_database(|db| unsafe {
// SAFETY: ¯\_(ツ)_/¯
std::mem::transmute(Valtree::new_(db, bytes))
@ -164,7 +164,7 @@ impl<'db> Valtree<'db> {
.unwrap()
}
pub fn inner(&self) -> &ConstBytes {
pub fn inner(&self) -> &ConstBytes<'db> {
salsa::with_attached_database(|db| {
let inner = self.bytes_(db);
// SAFETY: The caller already has access to a `Valtree<'db>`, so borrowchecking will

View file

@ -35,6 +35,29 @@ impl<'db> std::fmt::Debug for GenericArg<'db> {
}
}
impl<'db> GenericArg<'db> {
pub fn ty(self) -> Option<Ty<'db>> {
match self.kind() {
GenericArgKind::Type(ty) => Some(ty),
_ => None,
}
}
pub fn expect_ty(self) -> Ty<'db> {
match self.kind() {
GenericArgKind::Type(ty) => ty,
_ => panic!("Expected ty, got {:?}", self),
}
}
pub fn region(self) -> Option<Region<'db>> {
match self.kind() {
GenericArgKind::Lifetime(r) => Some(r),
_ => None,
}
}
}
impl<'db> From<Term<'db>> for GenericArg<'db> {
fn from(value: Term<'db>) -> Self {
match value {
@ -263,7 +286,9 @@ impl<'db> rustc_type_ir::inherent::GenericArgs<DbInterner<'db>> for GenericArgs<
interner: DbInterner<'db>,
def_id: <DbInterner<'db> as rustc_type_ir::Interner>::DefId,
) -> <DbInterner<'db> as rustc_type_ir::Interner>::GenericArgs {
Self::for_item(interner, def_id, |name, index, kind, _| mk_param(index, name, kind))
Self::for_item(interner, def_id, |name, index, kind, _| {
mk_param(interner, index, name, kind)
})
}
fn extend_with_error(
@ -383,16 +408,19 @@ impl<'db> rustc_type_ir::inherent::GenericArgs<DbInterner<'db>> for GenericArgs<
}
}
pub fn mk_param<'db>(index: u32, name: &Symbol, kind: GenericParamDefKind) -> GenericArg<'db> {
pub fn mk_param<'db>(
interner: DbInterner<'db>,
index: u32,
name: &Symbol,
kind: GenericParamDefKind,
) -> GenericArg<'db> {
let name = name.clone();
match kind {
GenericParamDefKind::Lifetime => {
Region::new_early_param(DbInterner::conjure(), EarlyParamRegion { index }).into()
}
GenericParamDefKind::Type => Ty::new_param(DbInterner::conjure(), index, name).into(),
GenericParamDefKind::Const => {
Const::new_param(DbInterner::conjure(), ParamConst { index }).into()
Region::new_early_param(interner, EarlyParamRegion { index }).into()
}
GenericParamDefKind::Type => Ty::new_param(interner, index, name).into(),
GenericParamDefKind::Const => Const::new_param(interner, ParamConst { index }).into(),
}
}

View file

@ -18,13 +18,14 @@ use rustc_type_ir::{
shift_vars,
solve::Goal,
};
use salsa::plumbing::AsId;
use salsa::plumbing::FromId;
use salsa::{Id, plumbing::AsId};
use crate::{
ConcreteConst, ConstScalar, ImplTraitId, Interner,
ConcreteConst, ConstScalar, ImplTraitId, Interner, MemoryMap,
db::{
HirDatabase, InternedClosureId, InternedCoroutineId, InternedOpaqueTyId,
InternedTypeOrConstParamId,
HirDatabase, InternedClosureId, InternedCoroutineId, InternedLifetimeParamId,
InternedOpaqueTyId, InternedTypeOrConstParamId,
},
from_assoc_type_id, from_chalk_trait_id,
mapping::ToChalk,
@ -55,6 +56,24 @@ pub fn to_placeholder_idx<T: Clone + std::fmt::Debug>(
}
}
pub fn bound_var_to_type_or_const_param_idx(
db: &dyn HirDatabase,
var: rustc_type_ir::BoundVar,
) -> TypeOrConstParamId {
// SAFETY: We cannot really encapsulate this unfortunately, so just hope this is sound.
let interned_id = InternedTypeOrConstParamId::from_id(unsafe { Id::from_index(var.as_u32()) });
interned_id.loc(db)
}
pub fn bound_var_to_lifetime_idx(
db: &dyn HirDatabase,
var: rustc_type_ir::BoundVar,
) -> LifetimeParamId {
// SAFETY: We cannot really encapsulate this unfortunately, so just hope this is sound.
let interned_id = InternedLifetimeParamId::from_id(unsafe { Id::from_index(var.as_u32()) });
interned_id.loc(db)
}
pub fn convert_binder_to_early_binder<'db, T: rustc_type_ir::TypeFoldable<DbInterner<'db>>>(
interner: DbInterner<'db>,
binder: rustc_type_ir::Binder<DbInterner<'db>, T>,
@ -1290,7 +1309,10 @@ pub(crate) fn convert_ty_for_result<'db>(interner: DbInterner<'db>, ty: Ty<'db>)
.intern(Interner)
}
fn convert_const_for_result<'db>(interner: DbInterner<'db>, const_: Const<'db>) -> crate::Const {
pub fn convert_const_for_result<'db>(
interner: DbInterner<'db>,
const_: Const<'db>,
) -> crate::Const {
let value: chalk_ir::ConstValue<Interner> = match const_.kind() {
rustc_type_ir::ConstKind::Param(_) => unimplemented!(),
rustc_type_ir::ConstKind::Infer(rustc_type_ir::InferConst::Var(var)) => {
@ -1325,7 +1347,10 @@ fn convert_const_for_result<'db>(interner: DbInterner<'db>, const_: Const<'db>)
rustc_type_ir::ConstKind::Value(value_const) => {
let bytes = value_const.value.inner();
let value = chalk_ir::ConstValue::Concrete(chalk_ir::ConcreteConst {
interned: ConstScalar::Bytes(bytes.0.clone(), bytes.1.clone()),
// SAFETY: we will never actually use this without a database
interned: ConstScalar::Bytes(bytes.0.clone(), unsafe {
std::mem::transmute::<MemoryMap<'db>, MemoryMap<'static>>(bytes.1.clone())
}),
});
return chalk_ir::ConstData {
ty: convert_ty_for_result(interner, value_const.ty),
@ -1343,7 +1368,7 @@ fn convert_const_for_result<'db>(interner: DbInterner<'db>, const_: Const<'db>)
chalk_ir::ConstData { ty: crate::TyKind::Error.intern(Interner), value }.intern(Interner)
}
fn convert_region_for_result<'db>(region: Region<'db>) -> crate::Lifetime {
pub fn convert_region_for_result<'db>(region: Region<'db>) -> crate::Lifetime {
match region.kind() {
rustc_type_ir::RegionKind::ReEarlyParam(early) => unimplemented!(),
rustc_type_ir::RegionKind::ReBound(db, bound) => chalk_ir::Lifetime::new(

View file

@ -11,16 +11,25 @@ use rustc_type_ir::{
use crate::next_solver::{
Binder, Const, ConstKind, DbInterner, Goal, ParamEnv, Predicate, PredicateKind, Span, Term, Ty,
TyKind,
TyKind, TypingMode,
fulfill::{FulfillmentCtxt, NextSolverError},
infer::{
InferCtxt,
DbInternerInferExt, InferCtxt,
at::At,
traits::{Obligation, ObligationCause},
},
util::PlaceholderReplacer,
};
pub fn normalize<'db, T>(interner: DbInterner<'db>, param_env: ParamEnv<'db>, value: T) -> T
where
T: TypeFoldable<DbInterner<'db>>,
{
let infer_ctxt = interner.infer_ctxt().build(TypingMode::non_body_analysis());
let cause = ObligationCause::dummy();
deeply_normalize(infer_ctxt.at(&cause, param_env), value.clone()).unwrap_or(value)
}
/// Deeply normalize all aliases in `value`. This does not handle inference and expects
/// its input to be already fully resolved.
pub fn deeply_normalize<'db, T>(at: At<'_, 'db>, value: T) -> Result<T, Vec<NextSolverError<'db>>>

View file

@ -9,6 +9,7 @@ use rustc_type_ir::{
WithCachedTypeInfo,
inherent::{
AdtDef, BoundVarLike, GenericArgs as _, IntoKind, ParamLike, PlaceholderLike, SliceLike,
Ty as _,
},
relate::Relate,
solve::SizedTraitKind,
@ -107,6 +108,10 @@ impl<'db> Ty<'db> {
Ty::new_infer(interner, InferTy::FreshFloatTy(n))
}
pub fn new_empty_tuple(interner: DbInterner<'db>) -> Self {
Ty::new_tup(interner, &[])
}
/// Returns the `Size` for primitive types (bool, uint, int, char, float).
pub fn primitive_size(self, interner: DbInterner<'db>) -> Size {
match self.kind() {

View file

@ -34,6 +34,40 @@ pub fn float_ty_to_string(ty: FloatTy) -> &'static str {
}
}
pub fn int_ty_to_string_ns(ty: rustc_type_ir::IntTy) -> &'static str {
use rustc_type_ir::IntTy;
match ty {
IntTy::Isize => "isize",
IntTy::I8 => "i8",
IntTy::I16 => "i16",
IntTy::I32 => "i32",
IntTy::I64 => "i64",
IntTy::I128 => "i128",
}
}
pub fn uint_ty_to_string_ns(ty: rustc_type_ir::UintTy) -> &'static str {
use rustc_type_ir::UintTy;
match ty {
UintTy::Usize => "usize",
UintTy::U8 => "u8",
UintTy::U16 => "u16",
UintTy::U32 => "u32",
UintTy::U64 => "u64",
UintTy::U128 => "u128",
}
}
pub fn float_ty_to_string_ns(ty: rustc_type_ir::FloatTy) -> &'static str {
use rustc_type_ir::FloatTy;
match ty {
FloatTy::F16 => "f16",
FloatTy::F32 => "f32",
FloatTy::F64 => "f64",
FloatTy::F128 => "f128",
}
}
pub(super) fn int_ty_from_builtin(t: BuiltinInt) -> IntTy {
match t {
BuiltinInt::Isize => IntTy::Isize,

View file

@ -157,11 +157,13 @@ fn check_impl(
};
let range = node.as_ref().original_file_range_rooted(&db);
if let Some(expected) = types.remove(&range) {
let actual = if display_source {
ty.display_source_code(&db, def.module(&db), true).unwrap()
} else {
ty.display_test(&db, display_target).to_string()
};
let actual = salsa::attach(&db, || {
if display_source {
ty.display_source_code(&db, def.module(&db), true).unwrap()
} else {
ty.display_test(&db, display_target).to_string()
}
});
assert_eq!(actual, expected, "type annotation differs at {:#?}", range.range);
}
}
@ -173,11 +175,13 @@ fn check_impl(
};
let range = node.as_ref().original_file_range_rooted(&db);
if let Some(expected) = types.remove(&range) {
let actual = if display_source {
ty.display_source_code(&db, def.module(&db), true).unwrap()
} else {
ty.display_test(&db, display_target).to_string()
};
let actual = salsa::attach(&db, || {
if display_source {
ty.display_source_code(&db, def.module(&db), true).unwrap()
} else {
ty.display_test(&db, display_target).to_string()
}
});
assert_eq!(actual, expected, "type annotation differs at {:#?}", range.range);
}
if let Some(expected) = adjustments.remove(&range) {
@ -203,11 +207,13 @@ fn check_impl(
continue;
};
let range = node.as_ref().original_file_range_rooted(&db);
let actual = format!(
"expected {}, got {}",
mismatch.expected.display_test(&db, display_target),
mismatch.actual.display_test(&db, display_target)
);
let actual = salsa::attach(&db, || {
format!(
"expected {}, got {}",
mismatch.expected.display_test(&db, display_target),
mismatch.actual.display_test(&db, display_target)
)
});
match mismatches.remove(&range) {
Some(annotation) => assert_eq!(actual, annotation),
None => format_to!(unexpected_type_mismatches, "{:?}: {}\n", range.range, actual),
@ -402,7 +408,9 @@ fn infer_with_mismatches(content: &str, include_mismatches: bool) -> String {
for (def, krate) in defs {
let (body, source_map) = db.body_with_source_map(def);
let infer = db.infer(def);
infer_def(infer, body, source_map, krate);
salsa::attach(&db, || {
infer_def(infer, body, source_map, krate);
})
}
buf.truncate(buf.trim_end().len());

View file

@ -67,11 +67,13 @@ fn check_closure_captures(#[rust_analyzer::rust_fixture] ra_fixture: &str, expec
.join(", "),
};
let place = capture.display_place(closure.0, db);
let capture_ty = capture
.ty
.skip_binders()
.display_test(db, DisplayTarget::from_crate(db, module.krate()))
.to_string();
let capture_ty = salsa::attach(db, || {
capture
.ty
.skip_binders()
.display_test(db, DisplayTarget::from_crate(db, module.krate()))
.to_string()
});
let spans = capture
.spans()
.iter()

View file

@ -71,7 +71,7 @@ fn test() {
let x = S3.baz();
//^ Binary<impl Foo + ?Sized, Unary<impl Bar + ?Sized>>
let y = x.1.0.bar();
//^ Unary<Bar::Item<impl Bar + ?Sized>>
//^ Unary<<impl Bar + ?Sized as Bar>::Item>
}
"#,
);

View file

@ -2299,10 +2299,10 @@ trait Foo {
}
"#,
expect![[r#"
83..86 'bar': Foo::Bar<Self, A, B>
83..86 'bar': <Self as Foo>::Bar<A, B>
105..133 '{ ... }': ()
119..120 '_': Foo::Bar<Self, A, B>
123..126 'bar': Foo::Bar<Self, A, B>
119..120 '_': <Self as Foo>::Bar<A, B>
123..126 'bar': <Self as Foo>::Bar<A, B>
"#]],
);
}

View file

@ -408,11 +408,11 @@ fn test<T: Iterable>() {
let x: <S as Iterable>::Item = 1;
// ^ u32
let y: <T as Iterable>::Item = u;
// ^ Iterable::Item<T>
// ^ <T as Iterable>::Item
let z: T::Item = u;
// ^ Iterable::Item<T>
// ^ <T as Iterable>::Item
let a: <T>::Item = u;
// ^ Iterable::Item<T>
// ^ <T as Iterable>::Item
}"#,
);
}
@ -454,7 +454,7 @@ impl<T> S<T> {
fn test<T: Iterable>() {
let s: S<T>;
s.foo();
// ^^^^^^^ Iterable::Item<T>
// ^^^^^^^ <T as Iterable>::Item
}"#,
);
}
@ -470,7 +470,7 @@ trait Foo {
type A;
fn test(a: Self::A, _: impl Bar) {
a;
//^ Foo::A<Self>
//^ <Self as Foo>::A
}
}"#,
);
@ -969,7 +969,7 @@ impl<T> ApplyL for RefMutL<T> {
fn test<T: ApplyL>() {
let y: <RefMutL<T> as ApplyL>::Out = no_matter;
y;
} //^ ApplyL::Out<T>
} //^ <T as ApplyL>::Out
"#,
);
}
@ -986,7 +986,7 @@ fn foo<T: ApplyL>(t: T) -> <T as ApplyL>::Out;
fn test<T: ApplyL>(t: T) {
let y = foo(t);
y;
} //^ ApplyL::Out<T>
} //^ <T as ApplyL>::Out
"#,
);
}
@ -1695,7 +1695,7 @@ fn test<T: Trait<Type = u32>>(x: T, y: impl Trait<Type = i64>) {
}"#,
expect![[r#"
49..50 't': T
77..79 '{}': Trait::Type<T>
77..79 '{}': <T as Trait>::Type
111..112 't': T
122..124 '{}': U
154..155 't': T
@ -2293,7 +2293,7 @@ impl Trait for S2 {
}"#,
expect![[r#"
40..44 'self': &'? Self
46..47 'x': Trait::Item<Self>
46..47 'x': <Self as Trait>::Item
126..130 'self': &'? S
132..133 'x': u32
147..161 '{ let y = x; }': ()
@ -2410,7 +2410,7 @@ trait Trait2<T> {}
fn test<T: Trait>() where T: Trait2<T::Item> {
let x: T::Item = no_matter;
} //^^^^^^^^^ Trait::Item<T>
} //^^^^^^^^^ <T as Trait>::Item
"#,
);
}
@ -2445,7 +2445,7 @@ trait Trait {
fn test<T>() where T: Trait<OtherItem = T::Item> {
let x: T::Item = no_matter;
} //^^^^^^^^^ Trait::Item<T>
} //^^^^^^^^^ <T as Trait>::Item
"#,
);
}
@ -2475,7 +2475,7 @@ fn test<T>(t: T) where T: UnificationStoreMut {
t.push(x);
let y: Key<T>;
(x, y);
} //^^^^^^ (UnificationStoreBase::Key<T>, UnificationStoreBase::Key<T>)
} //^^^^^^ (<T as UnificationStoreBase>::Key, <T as UnificationStoreBase>::Key)
"#,
);
}
@ -3488,7 +3488,7 @@ fn foo() {
let x = <F as Bar>::boo();
}"#,
expect![[r#"
132..163 '{ ... }': Bar::Output<Self>
132..163 '{ ... }': <Self as Bar>::Output
146..153 'loop {}': !
151..153 '{}': ()
306..358 '{ ...o(); }': ()
@ -4213,7 +4213,7 @@ fn g<'a, T: 'a>(v: impl Trait<Assoc<T> = &'a T>) {
let a = v.get::<T>();
//^ &'a T
let a = v.get::<()>();
//^ Trait::Assoc<impl Trait<Assoc<T> = &'a T>, ()>
//^ <impl Trait<Assoc<T> = &'a T> as Trait>::Assoc<()>
}
fn h<'a>(v: impl Trait<Assoc<i32> = &'a i32> + Trait<Assoc<i64> = &'a i64>) {
let a = v.get::<i32>();
@ -4280,7 +4280,7 @@ where
let a = t.get::<isize>();
//^ usize
let a = t.get::<()>();
//^ Trait::Assoc<T, ()>
//^ <T as Trait>::Assoc<()>
}
"#,
@ -4857,29 +4857,29 @@ async fn baz<T: AsyncFnOnce(u32) -> i32>(c: T) {
37..38 'a': T
43..83 '{ ...ait; }': ()
43..83 '{ ...ait; }': impl Future<Output = ()>
53..57 'fut1': AsyncFnMut::CallRefFuture<'?, T, (u32,)>
53..57 'fut1': <T as AsyncFnMut<(u32,)>>::CallRefFuture<'?>
60..61 'a': T
60..64 'a(0)': AsyncFnMut::CallRefFuture<'?, T, (u32,)>
60..64 'a(0)': <T as AsyncFnMut<(u32,)>>::CallRefFuture<'?>
62..63 '0': u32
70..74 'fut1': AsyncFnMut::CallRefFuture<'?, T, (u32,)>
70..74 'fut1': <T as AsyncFnMut<(u32,)>>::CallRefFuture<'?>
70..80 'fut1.await': i32
124..129 'mut b': T
134..174 '{ ...ait; }': ()
134..174 '{ ...ait; }': impl Future<Output = ()>
144..148 'fut2': AsyncFnMut::CallRefFuture<'?, T, (u32,)>
144..148 'fut2': <T as AsyncFnMut<(u32,)>>::CallRefFuture<'?>
151..152 'b': T
151..155 'b(0)': AsyncFnMut::CallRefFuture<'?, T, (u32,)>
151..155 'b(0)': <T as AsyncFnMut<(u32,)>>::CallRefFuture<'?>
153..154 '0': u32
161..165 'fut2': AsyncFnMut::CallRefFuture<'?, T, (u32,)>
161..165 'fut2': <T as AsyncFnMut<(u32,)>>::CallRefFuture<'?>
161..171 'fut2.await': i32
216..217 'c': T
222..262 '{ ...ait; }': ()
222..262 '{ ...ait; }': impl Future<Output = ()>
232..236 'fut3': AsyncFnOnce::CallOnceFuture<T, (u32,)>
232..236 'fut3': <T as AsyncFnOnce<(u32,)>>::CallOnceFuture
239..240 'c': T
239..243 'c(0)': AsyncFnOnce::CallOnceFuture<T, (u32,)>
239..243 'c(0)': <T as AsyncFnOnce<(u32,)>>::CallOnceFuture
241..242 '0': u32
249..253 'fut3': AsyncFnOnce::CallOnceFuture<T, (u32,)>
249..253 'fut3': <T as AsyncFnOnce<(u32,)>>::CallOnceFuture
249..259 'fut3.await': i32
"#]],
);
@ -4947,7 +4947,7 @@ where
"#,
expect![[r#"
84..86 'de': D
135..138 '{ }': Deserializer::Error<'de, D>
135..138 '{ }': <D as Deserializer<'de>>::Error
"#]],
);
}
@ -5020,7 +5020,7 @@ fn main() {
294..298 'iter': Box<dyn Iterator<Item = &'? [u8]> + 'static>
294..310 'iter.i...iter()': Box<dyn Iterator<Item = &'? [u8]> + 'static>
152..156 'self': &'? mut Box<I>
177..208 '{ ... }': Option<Iterator::Item<I>>
177..208 '{ ... }': Option<<I as Iterator>::Item>
191..198 'loop {}': !
196..198 '{}': ()
"#]],

View file

@ -4,10 +4,7 @@
use std::{cell::LazyCell, iter};
use base_db::Crate;
use chalk_ir::{
DebruijnIndex,
fold::{FallibleTypeFolder, Shift},
};
use chalk_ir::{DebruijnIndex, fold::FallibleTypeFolder};
use hir_def::{
EnumId, EnumVariantId, FunctionId, Lookup, TraitId, TypeAliasId, TypeOrConstParamId,
db::DefDatabase,
@ -20,6 +17,7 @@ use hir_expand::name::Name;
use intern::sym;
use rustc_abi::TargetDataLayout;
use rustc_hash::FxHashSet;
use rustc_type_ir::inherent::{IntoKind, SliceLike};
use smallvec::{SmallVec, smallvec};
use span::Edition;
use stdx::never;
@ -31,6 +29,11 @@ use crate::{
db::HirDatabase,
layout::{Layout, TagEncoding},
mir::pad16,
next_solver::{
DbInterner,
mapping::{ChalkToNextSolver, convert_args_for_result},
},
to_chalk_trait_id,
};
pub(crate) fn fn_traits(db: &dyn DefDatabase, krate: Crate) -> impl Iterator<Item = TraitId> + '_ {
@ -191,25 +194,37 @@ fn direct_super_traits_cb(db: &dyn DefDatabase, trait_: TraitId, cb: impl FnMut(
}
fn direct_super_trait_refs(db: &dyn HirDatabase, trait_ref: &TraitRef, cb: impl FnMut(TraitRef)) {
let interner = DbInterner::new_with(db, None, None);
let generic_params = db.generic_params(trait_ref.hir_trait_id().into());
let trait_self = match generic_params.trait_self_param() {
Some(p) => TypeOrConstParamId { parent: trait_ref.hir_trait_id().into(), local_id: p },
None => return,
};
db.generic_predicates_for_param(trait_self.parent, trait_self, None)
let trait_ref_args: crate::next_solver::GenericArgs<'_> =
trait_ref.substitution.to_nextsolver(interner);
db.generic_predicates_for_param_ns(trait_self.parent, trait_self, None)
.iter()
.filter_map(|pred| {
pred.as_ref().filter_map(|pred| match pred.skip_binders() {
// FIXME: how to correctly handle higher-ranked bounds here?
WhereClause::Implemented(tr) => Some(
tr.clone()
.shifted_out_to(Interner, DebruijnIndex::ONE)
.expect("FIXME unexpected higher-ranked trait bound"),
),
let pred = pred.kind();
// FIXME: how to correctly handle higher-ranked bounds here?
let pred = pred.no_bound_vars().expect("FIXME unexpected higher-ranked trait bound");
match pred {
rustc_type_ir::ClauseKind::Trait(t) => {
let t =
rustc_type_ir::EarlyBinder::bind(t).instantiate(interner, trait_ref_args);
let trait_id = match t.def_id() {
crate::next_solver::SolverDefId::TraitId(id) => to_chalk_trait_id(id),
_ => unreachable!(),
};
let substitution =
convert_args_for_result(interner, t.trait_ref.args.as_slice());
let tr = chalk_ir::TraitRef { trait_id, substitution };
Some(tr)
}
_ => None,
})
}
})
.map(|pred| pred.substitute(Interner, &trait_ref.substitution))
.for_each(cb);
}

View file

@ -66,7 +66,7 @@ use hir_def::{
},
per_ns::PerNs,
resolver::{HasResolver, Resolver},
signatures::{ImplFlags, StaticFlags, TraitFlags, VariantFields},
signatures::{ImplFlags, StaticFlags, StructFlags, TraitFlags, VariantFields},
src::HasSource as _,
visibility::visibility_from_ast,
};
@ -85,7 +85,10 @@ use hir_ty::{
layout::{Layout as TyLayout, RustcEnumVariantIdx, RustcFieldIdx, TagEncoding},
method_resolution,
mir::{MutBorrowKind, interpret_mir},
next_solver::{DbInterner, GenericArgs, SolverDefId, infer::InferCtxt},
next_solver::{
ClauseKind, DbInterner, GenericArgs, SolverDefId, infer::InferCtxt,
mapping::ChalkToNextSolver,
},
primitive::UintTy,
traits::FnTrait,
};
@ -112,6 +115,7 @@ pub use crate::{
VisibleTraits,
},
};
use rustc_type_ir::inherent::{IntoKind, SliceLike};
// Be careful with these re-exports.
//
@ -1385,8 +1389,9 @@ impl Field {
}
pub fn layout(&self, db: &dyn HirDatabase) -> Result<Layout, LayoutError> {
let interner = DbInterner::new_with(db, None, None);
db.layout_of_ty(
self.ty(db).ty,
self.ty(db).ty.to_nextsolver(interner),
db.trait_environment(match hir_def::VariantId::from(self.parent) {
hir_def::VariantId::EnumVariantId(id) => {
GenericDefId::AdtId(id.lookup(db).parent.into())
@ -1814,12 +1819,15 @@ impl Adt {
}
pub fn layout(self, db: &dyn HirDatabase) -> Result<Layout, LayoutError> {
let env = db.trait_environment(self.into());
let interner = DbInterner::new_with(db, Some(env.krate), env.block);
db.layout_of_adt(
self.into(),
TyBuilder::adt(db, self.into())
.fill_with_defaults(db, || TyKind::Error.intern(Interner))
.build_into_subst(),
db.trait_environment(self.into()),
.build_into_subst()
.to_nextsolver(interner),
env,
)
.map(|layout| Layout(layout, db.target_data_layout(self.krate(db).id).unwrap()))
}
@ -3750,7 +3758,7 @@ impl GenericDef {
push_ty_diagnostics(
db,
acc,
db.generic_predicates_without_parent_with_diagnostics(def).1,
db.generic_predicates_without_parent_with_diagnostics_ns(def).1,
&source_map,
);
for (param_id, param) in generics.iter_type_or_consts() {
@ -4240,11 +4248,15 @@ impl TypeParam {
/// parameter, not additional bounds that might be added e.g. by a method if
/// the parameter comes from an impl!
pub fn trait_bounds(self, db: &dyn HirDatabase) -> Vec<Trait> {
db.generic_predicates_for_param(self.id.parent(), self.id.into(), None)
db.generic_predicates_for_param_ns(self.id.parent(), self.id.into(), None)
.iter()
.filter_map(|pred| match &pred.skip_binders().skip_binders() {
hir_ty::WhereClause::Implemented(trait_ref) => {
Some(Trait::from(trait_ref.hir_trait_id()))
.filter_map(|pred| match &pred.kind().skip_binder() {
ClauseKind::Trait(trait_ref) => {
let trait_ = match trait_ref.def_id() {
SolverDefId::TraitId(t) => t,
_ => unreachable!(),
};
Some(Trait::from(trait_))
}
_ => None,
})
@ -4501,14 +4513,17 @@ impl Impl {
}
pub fn trait_(self, db: &dyn HirDatabase) -> Option<Trait> {
let trait_ref = db.impl_trait(self.id)?;
let id = trait_ref.skip_binders().hir_trait_id();
let trait_ref = db.impl_trait_ns(self.id)?;
let id = trait_ref.skip_binder().def_id;
let id = match id {
SolverDefId::TraitId(id) => id,
_ => unreachable!(),
};
Some(Trait { id })
}
pub fn trait_ref(self, db: &dyn HirDatabase) -> Option<TraitRef<'_>> {
let substs = TyBuilder::placeholder_subst(db, self.id);
let trait_ref = db.impl_trait(self.id)?.substitute(Interner, &substs);
let trait_ref = db.impl_trait_ns(self.id)?.instantiate_identity();
let resolver = self.id.resolver(db);
Some(TraitRef::new_with_resolver(db, &resolver, trait_ref))
}
@ -4577,7 +4592,7 @@ impl Impl {
#[derive(Clone, PartialEq, Eq, Debug, Hash)]
pub struct TraitRef<'db> {
env: Arc<TraitEnvironment>,
trait_ref: hir_ty::TraitRef,
trait_ref: hir_ty::next_solver::TraitRef<'db>,
_pd: PhantomCovariantLifetime<'db>,
}
@ -4585,7 +4600,7 @@ impl<'db> TraitRef<'db> {
pub(crate) fn new_with_resolver(
db: &'db dyn HirDatabase,
resolver: &Resolver<'_>,
trait_ref: hir_ty::TraitRef,
trait_ref: hir_ty::next_solver::TraitRef<'db>,
) -> Self {
let env = resolver
.generic_def()
@ -4594,25 +4609,26 @@ impl<'db> TraitRef<'db> {
}
pub fn trait_(&self) -> Trait {
let id = self.trait_ref.hir_trait_id();
let id = match self.trait_ref.def_id {
SolverDefId::TraitId(id) => id,
_ => unreachable!(),
};
Trait { id }
}
pub fn self_ty(&self) -> Type<'_> {
let ty = self.trait_ref.self_type_parameter(Interner);
Type { env: self.env.clone(), ty, _pd: PhantomCovariantLifetime::new() }
pub fn self_ty(&self) -> TypeNs<'_> {
let ty = self.trait_ref.self_ty();
TypeNs { env: self.env.clone(), ty, _pd: PhantomCovariantLifetime::new() }
}
/// Returns `idx`-th argument of this trait reference if it is a type argument. Note that the
/// first argument is the `Self` type.
pub fn get_type_argument(&self, idx: usize) -> Option<Type<'db>> {
self.trait_ref
.substitution
.as_slice(Interner)
.get(idx)
.and_then(|arg| arg.ty(Interner))
.cloned()
.map(|ty| Type { env: self.env.clone(), ty, _pd: PhantomCovariantLifetime::new() })
pub fn get_type_argument(&self, idx: usize) -> Option<TypeNs<'db>> {
self.trait_ref.args.as_slice().get(idx).and_then(|arg| arg.ty()).map(|ty| TypeNs {
env: self.env.clone(),
ty,
_pd: PhantomCovariantLifetime::new(),
})
}
}
@ -4929,42 +4945,79 @@ impl<'db> Type<'db> {
}
pub fn contains_reference(&self, db: &'db dyn HirDatabase) -> bool {
return go(db, self.env.krate, &self.ty);
return go(db, &self.ty);
fn go(db: &dyn HirDatabase, krate: base_db::Crate, ty: &Ty) -> bool {
fn is_phantom_data(db: &dyn HirDatabase, adt_id: AdtId) -> bool {
match adt_id {
hir_def::AdtId::StructId(s) => {
let flags = db.struct_signature(s).flags;
flags.contains(StructFlags::IS_PHANTOM_DATA)
}
hir_def::AdtId::UnionId(_) => false,
hir_def::AdtId::EnumId(_) => false,
}
}
fn go(db: &dyn HirDatabase, ty: &Ty) -> bool {
match ty.kind(Interner) {
// Reference itself
TyKind::Ref(_, _, _) => true,
// For non-phantom_data adts we check variants/fields as well as generic parameters
TyKind::Adt(adt_id, substitution)
if !db.adt_datum(krate, *adt_id).flags.phantom_data =>
{
let adt_datum = &db.adt_datum(krate, *adt_id);
let adt_datum_bound =
adt_datum.binders.clone().substitute(Interner, substitution);
adt_datum_bound
.variants
TyKind::Adt(adt_id, substitution) if !is_phantom_data(db, adt_id.0) => {
let _variant_id_to_fields = |id: VariantId| {
let variant_data = &id.fields(db);
if variant_data.fields().is_empty() {
vec![]
} else {
let field_types = db.field_types(id);
variant_data
.fields()
.iter()
.map(|(idx, _)| {
field_types[idx].clone().substitute(Interner, substitution)
})
.filter(|it| !it.contains_unknown())
.collect()
}
};
let variant_id_to_fields = |_: VariantId| vec![];
let variants = match adt_id.0 {
hir_def::AdtId::StructId(id) => {
vec![variant_id_to_fields(id.into())]
}
hir_def::AdtId::EnumId(id) => id
.enum_variants(db)
.variants
.iter()
.map(|&(variant_id, _, _)| variant_id_to_fields(variant_id.into()))
.collect(),
hir_def::AdtId::UnionId(id) => {
vec![variant_id_to_fields(id.into())]
}
};
variants
.into_iter()
.flat_map(|variant| variant.fields.into_iter())
.any(|ty| go(db, krate, &ty))
.flat_map(|variant| variant.into_iter())
.any(|ty| go(db, &ty))
|| substitution
.iter(Interner)
.filter_map(|x| x.ty(Interner))
.any(|ty| go(db, krate, ty))
.any(|ty| go(db, ty))
}
// And for `PhantomData<T>`, we check `T`.
TyKind::Adt(_, substitution)
| TyKind::Tuple(_, substitution)
| TyKind::OpaqueType(_, substitution)
| TyKind::AssociatedType(_, substitution)
| TyKind::FnDef(_, substitution) => substitution
.iter(Interner)
.filter_map(|x| x.ty(Interner))
.any(|ty| go(db, krate, ty)),
| TyKind::FnDef(_, substitution) => {
substitution.iter(Interner).filter_map(|x| x.ty(Interner)).any(|ty| go(db, ty))
}
// For `[T]` or `*T` we check `T`
TyKind::Array(ty, _) | TyKind::Slice(ty) | TyKind::Raw(_, ty) => go(db, krate, ty),
TyKind::Array(ty, _) | TyKind::Slice(ty) | TyKind::Raw(_, ty) => go(db, ty),
// Consider everything else as not reference
_ => false,
@ -5895,7 +5948,8 @@ impl<'db> Type<'db> {
}
pub fn layout(&self, db: &'db dyn HirDatabase) -> Result<Layout, LayoutError> {
db.layout_of_ty(self.ty.clone(), self.env.clone())
let interner = DbInterner::new_with(db, None, None);
db.layout_of_ty(self.ty.to_nextsolver(interner), self.env.clone())
.map(|layout| Layout(layout, db.target_data_layout(self.env.krate).unwrap()))
}

View file

@ -2288,18 +2288,19 @@ impl<'db> SemanticsScope<'db> {
/// Iterates over associated types that may be specified after the given path (using
/// `Ty::Assoc` syntax).
pub fn assoc_type_shorthand_candidates<R>(
pub fn assoc_type_shorthand_candidates(
&self,
resolution: &PathResolution,
mut cb: impl FnMut(&Name, TypeAlias) -> Option<R>,
) -> Option<R> {
let def = self.resolver.generic_def()?;
hir_ty::associated_type_shorthand_candidates(
self.db,
def,
resolution.in_type_ns()?,
|name, id| cb(name, id.into()),
)
mut cb: impl FnMut(TypeAlias),
) {
let (Some(def), Some(resolution)) = (self.resolver.generic_def(), resolution.in_type_ns())
else {
return;
};
hir_ty::associated_type_shorthand_candidates(self.db, def, resolution, |_, id| {
cb(id.into());
false
});
}
pub fn generic_def(&self) -> Option<crate::GenericDef> {

View file

@ -14,6 +14,7 @@ use crate::{
db::HirDatabase,
semantics::{PathResolution, PathResolutionPerNs},
};
use base_db::salsa;
use either::Either;
use hir_def::{
AdtId, AssocItemId, CallableDefId, ConstId, DefWithBodyId, FieldId, FunctionId, GenericDefId,
@ -1593,12 +1594,14 @@ fn resolve_hir_path_(
Some(unresolved) => resolver
.generic_def()
.and_then(|def| {
hir_ty::associated_type_shorthand_candidates(
db,
def,
res.in_type_ns()?,
|name, id| (name == unresolved.name).then_some(id),
)
salsa::attach(db, || {
hir_ty::associated_type_shorthand_candidates(
db,
def,
res.in_type_ns()?,
|name, _| name == unresolved.name,
)
})
})
.map(TypeAlias::from)
.map(Into::into)
@ -1746,7 +1749,7 @@ fn resolve_hir_path_qualifier(
db,
def,
res.in_type_ns()?,
|name, id| (name == unresolved.name).then_some(id),
|name, _| name == unresolved.name,
)
})
.map(TypeAlias::from)

View file

@ -140,9 +140,8 @@ pub(crate) fn complete_expr_path(
Qualified::With { resolution: None, .. } => {}
Qualified::With { resolution: Some(resolution), .. } => {
// Add associated types on type parameters and `Self`.
ctx.scope.assoc_type_shorthand_candidates(resolution, |_, alias| {
ctx.scope.assoc_type_shorthand_candidates(resolution, |alias| {
acc.add_type_alias(ctx, alias);
None::<()>
});
match resolution {
hir::PathResolution::Def(hir::ModuleDef::Module(module)) => {

View file

@ -77,9 +77,8 @@ pub(crate) fn complete_type_path(
Qualified::With { resolution: None, .. } => {}
Qualified::With { resolution: Some(resolution), .. } => {
// Add associated types on type parameters and `Self`.
ctx.scope.assoc_type_shorthand_candidates(resolution, |_, alias| {
ctx.scope.assoc_type_shorthand_candidates(resolution, |alias| {
acc.add_type_alias(ctx, alias);
None::<()>
});
match resolution {

View file

@ -1,3 +1,4 @@
use base_db::salsa;
use expect_test::{Expect, expect};
use hir::HirDisplay;
@ -13,7 +14,11 @@ fn check_expected_type_and_name(#[rust_analyzer::rust_fixture] ra_fixture: &str,
let ty = completion_context
.expected_type
.map(|t| t.display_test(&db, completion_context.krate.to_display_target(&db)).to_string())
.map(|t| {
salsa::attach(&db, || {
t.display_test(&db, completion_context.krate.to_display_target(&db)).to_string()
})
})
.unwrap_or("?".to_owned());
let name =

View file

@ -137,18 +137,20 @@ pub(crate) fn hover(
let edition =
sema.attach_first_edition(file_id).map(|it| it.edition(db)).unwrap_or(Edition::CURRENT);
let display_target = sema.first_crate(file_id)?.to_display_target(db);
let mut res = if range.is_empty() {
hover_offset(
sema,
FilePosition { file_id, offset: range.start() },
file,
config,
edition,
display_target,
)
} else {
hover_ranged(sema, frange, file, config, edition, display_target)
}?;
let mut res = salsa::attach(sema.db, || {
if range.is_empty() {
hover_offset(
sema,
FilePosition { file_id, offset: range.start() },
file,
config,
edition,
display_target,
)
} else {
hover_ranged(sema, frange, file, config, edition, display_target)
}
})?;
if let HoverDocFormat::PlainText = config.format {
res.info.markup = remove_markdown(res.info.markup.as_str()).into();
@ -581,11 +583,13 @@ fn goto_type_action_for_def(
});
}
if let Ok(generic_def) = GenericDef::try_from(def) {
generic_def.type_or_const_params(db).into_iter().for_each(|it| {
walk_and_push_ty(db, &it.ty(db), &mut push_new_def);
});
}
salsa::attach(db, || {
if let Ok(generic_def) = GenericDef::try_from(def) {
generic_def.type_or_const_params(db).into_iter().for_each(|it| {
walk_and_push_ty(db, &it.ty(db), &mut push_new_def);
});
}
});
let ty = match def {
Definition::Local(it) => Some(it.ty(db)),

View file

@ -912,7 +912,7 @@ pub(super) fn literal(
};
let ty = ty.display(sema.db, display_target);
let mut s = format!("```rust\n{ty}\n```\n___\n\n");
let mut s = salsa::attach(sema.db, || format!("```rust\n{ty}\n```\n___\n\n"));
match value {
Ok(value) => {
let backtick_len = value.chars().filter(|c| *c == '`').count();

View file

@ -738,44 +738,46 @@ fn label_of_ty(
config: &InlayHintsConfig,
display_target: DisplayTarget,
) -> Result<(), HirDisplayError> {
let iter_item_type = hint_iterator(sema, famous_defs, ty);
match iter_item_type {
Some((iter_trait, item, ty)) => {
const LABEL_START: &str = "impl ";
const LABEL_ITERATOR: &str = "Iterator";
const LABEL_MIDDLE: &str = "<";
const LABEL_ITEM: &str = "Item";
const LABEL_MIDDLE2: &str = " = ";
const LABEL_END: &str = ">";
salsa::attach(sema.db, || {
let iter_item_type = hint_iterator(sema, famous_defs, ty);
match iter_item_type {
Some((iter_trait, item, ty)) => {
const LABEL_START: &str = "impl ";
const LABEL_ITERATOR: &str = "Iterator";
const LABEL_MIDDLE: &str = "<";
const LABEL_ITEM: &str = "Item";
const LABEL_MIDDLE2: &str = " = ";
const LABEL_END: &str = ">";
max_length = max_length.map(|len| {
len.saturating_sub(
LABEL_START.len()
+ LABEL_ITERATOR.len()
+ LABEL_MIDDLE.len()
+ LABEL_MIDDLE2.len()
+ LABEL_END.len(),
)
});
max_length = max_length.map(|len| {
len.saturating_sub(
LABEL_START.len()
+ LABEL_ITERATOR.len()
+ LABEL_MIDDLE.len()
+ LABEL_MIDDLE2.len()
+ LABEL_END.len(),
)
});
label_builder.write_str(LABEL_START)?;
label_builder.start_location_link(ModuleDef::from(iter_trait).into());
label_builder.write_str(LABEL_ITERATOR)?;
label_builder.end_location_link();
label_builder.write_str(LABEL_MIDDLE)?;
label_builder.start_location_link(ModuleDef::from(item).into());
label_builder.write_str(LABEL_ITEM)?;
label_builder.end_location_link();
label_builder.write_str(LABEL_MIDDLE2)?;
rec(sema, famous_defs, max_length, &ty, label_builder, config, display_target)?;
label_builder.write_str(LABEL_END)?;
Ok(())
label_builder.write_str(LABEL_START)?;
label_builder.start_location_link(ModuleDef::from(iter_trait).into());
label_builder.write_str(LABEL_ITERATOR)?;
label_builder.end_location_link();
label_builder.write_str(LABEL_MIDDLE)?;
label_builder.start_location_link(ModuleDef::from(item).into());
label_builder.write_str(LABEL_ITEM)?;
label_builder.end_location_link();
label_builder.write_str(LABEL_MIDDLE2)?;
rec(sema, famous_defs, max_length, &ty, label_builder, config, display_target)?;
label_builder.write_str(LABEL_END)?;
Ok(())
}
None => ty
.display_truncated(sema.db, max_length, display_target)
.with_closure_style(config.closure_style)
.write_to(label_builder),
}
None => ty
.display_truncated(sema.db, max_length, display_target)
.with_closure_style(config.closure_style)
.write_to(label_builder),
}
})
}
let mut label_builder = InlayHintLabelBuilder {

View file

@ -10,7 +10,7 @@ use hir::{
Adjust, Adjustment, AutoBorrow, DisplayTarget, HirDisplay, Mutability, OverloadedDeref,
PointerCast, Safety,
};
use ide_db::famous_defs::FamousDefs;
use ide_db::{base_db::salsa, famous_defs::FamousDefs};
use ide_db::text_edit::TextEditBuilder;
use syntax::ast::{self, AstNode, prec::ExprPrecedence};
@ -201,13 +201,15 @@ pub(super) fn hints(
text: if postfix { format!(".{}", text.trim_end()) } else { text.to_owned() },
linked_location: None,
tooltip: Some(config.lazy_tooltip(|| {
InlayTooltip::Markdown(format!(
"`{}` → `{}`\n\n**{}**\n\n{}",
source.display(sema.db, display_target),
target.display(sema.db, display_target),
coercion,
detailed_tooltip
))
salsa::attach(sema.db, || {
InlayTooltip::Markdown(format!(
"`{}` → `{}`\n\n**{}**\n\n{}",
source.display(sema.db, display_target),
target.display(sema.db, display_target),
coercion,
detailed_tooltip
))
})
})),
};
if postfix { &mut post } else { &mut pre }.label.append_part(label);

View file

@ -67,7 +67,7 @@ use ide_db::{
FxHashMap, FxIndexSet, LineIndexDatabase,
base_db::{
CrateOrigin, CrateWorkspaceData, Env, FileSet, RootQueryDb, SourceDatabase, VfsPath,
salsa::Cancelled,
salsa::{self, Cancelled},
},
prime_caches, symbol_index,
};
@ -461,7 +461,9 @@ impl Analysis {
hasher: impl Fn(&InlayHint) -> u64 + Send + UnwindSafe,
) -> Cancellable<Option<InlayHint>> {
self.with_db(|db| {
inlay_hints::inlay_hints_resolve(db, file_id, resolve_range, hash, config, hasher)
salsa::attach(db, || {
inlay_hints::inlay_hints_resolve(db, file_id, resolve_range, hash, config, hasher)
})
})
}
@ -526,7 +528,9 @@ impl Analysis {
let search_scope = AssertUnwindSafe(search_scope);
self.with_db(|db| {
let _ = &search_scope;
references::find_all_refs(&Semantics::new(db), position, search_scope.0)
salsa::attach(db, || {
references::find_all_refs(&Semantics::new(db), position, search_scope.0)
})
})
}
@ -536,7 +540,7 @@ impl Analysis {
config: &HoverConfig,
range: FileRange,
) -> Cancellable<Option<RangeInfo<HoverResult>>> {
self.with_db(|db| hover::hover(db, range, config))
self.with_db(|db| salsa::attach(db, || hover::hover(db, range, config)))
}
/// Returns moniker of symbol at position.
@ -544,7 +548,7 @@ impl Analysis {
&self,
position: FilePosition,
) -> Cancellable<Option<RangeInfo<Vec<moniker::MonikerResult>>>> {
self.with_db(|db| moniker::moniker(db, position))
self.with_db(|db| salsa::attach(db, || moniker::moniker(db, position)))
}
/// Returns URL(s) for the documentation of the symbol under the cursor.
@ -572,7 +576,7 @@ impl Analysis {
&self,
position: FilePosition,
) -> Cancellable<Option<RangeInfo<Vec<NavigationTarget>>>> {
self.with_db(|db| call_hierarchy::call_hierarchy(db, position))
self.with_db(|db| salsa::attach(db, || call_hierarchy::call_hierarchy(db, position)))
}
/// Computes incoming calls for the given file position.
@ -640,7 +644,7 @@ impl Analysis {
/// Returns the set of possible targets to run for the current file.
pub fn runnables(&self, file_id: FileId) -> Cancellable<Vec<Runnable>> {
self.with_db(|db| runnables::runnables(db, file_id))
self.with_db(|db| salsa::attach(db, || runnables::runnables(db, file_id)))
}
/// Returns the set of tests for the given file position.
@ -672,7 +676,9 @@ impl Analysis {
position: FilePosition,
) -> Cancellable<Option<Vec<HighlightedRange>>> {
self.with_db(|db| {
highlight_related::highlight_related(&Semantics::new(db), config, position)
salsa::attach(db, || {
highlight_related::highlight_related(&Semantics::new(db), config, position)
})
})
}

View file

@ -10,6 +10,7 @@ use hir::{
};
use ide_db::{
FileId, FileRange, RootDatabase, SymbolKind,
base_db::salsa,
defs::Definition,
documentation::{Documentation, HasDocs},
};
@ -377,8 +378,9 @@ where
)
.map(|mut res| {
res.docs = self.docs(db);
res.description =
Some(self.display(db, self.krate(db).to_display_target(db)).to_string());
res.description = salsa::attach(db, || {
Some(self.display(db, self.krate(db).to_display_target(db)).to_string())
});
res.container_name = self.container_name(db);
res
}),
@ -485,8 +487,9 @@ impl TryToNav for hir::Field {
NavigationTarget::from_named(db, src.with_value(it), SymbolKind::Field).map(
|mut res| {
res.docs = self.docs(db);
res.description =
Some(self.display(db, krate.to_display_target(db)).to_string());
res.description = salsa::attach(db, || {
Some(self.display(db, krate.to_display_target(db)).to_string())
});
res
},
)

View file

@ -10,7 +10,7 @@ use hir::{
use ide_assists::utils::{has_test_related_attribute, test_related_attribute_syn};
use ide_db::{
FilePosition, FxHashMap, FxIndexMap, FxIndexSet, RootDatabase, SymbolKind,
base_db::RootQueryDb,
base_db::{RootQueryDb, salsa},
defs::Definition,
documentation::docs_from_attrs,
helpers::visit_file_defs,
@ -414,11 +414,13 @@ pub(crate) fn runnable_impl(
let ty = def.self_ty(sema.db);
let adt_name = ty.as_adt()?.name(sema.db);
let mut ty_args = ty.generic_parameters(sema.db, display_target).peekable();
let params = if ty_args.peek().is_some() {
format!("<{}>", ty_args.format_with(",", |ty, cb| cb(&ty)))
} else {
String::new()
};
let params = salsa::attach(sema.db, || {
if ty_args.peek().is_some() {
format!("<{}>", ty_args.format_with(",", |ty, cb| cb(&ty)))
} else {
String::new()
}
});
let mut test_id = format!("{}{params}", adt_name.display(sema.db, edition));
test_id.retain(|c| c != ' ');
let test_id = TestId::Path(test_id);
@ -521,7 +523,9 @@ fn module_def_doctest(db: &RootDatabase, def: Definition) -> Option<Runnable> {
let mut ty_args = ty.generic_parameters(db, display_target).peekable();
format_to!(path, "{}", name.display(db, edition));
if ty_args.peek().is_some() {
format_to!(path, "<{}>", ty_args.format_with(",", |ty, cb| cb(&ty)));
salsa::attach(db, || {
format_to!(path, "<{}>", ty_args.format_with(",", |ty, cb| cb(&ty)));
});
}
format_to!(path, "::{}", def_name.display(db, edition));
path.retain(|c| c != ' ');

View file

@ -11,6 +11,7 @@ use hir::{
use ide_db::{
FilePosition, FxIndexMap,
active_parameter::{callable_for_arg_list, generic_def_for_node},
base_db::salsa,
documentation::{Documentation, HasDocs},
};
use itertools::Itertools;
@ -266,12 +267,12 @@ fn signature_help_for_call(
// In that case, fall back to render definitions of the respective parameters.
// This is overly conservative: we do not substitute known type vars
// (see FIXME in tests::impl_trait) and falling back on any unknowns.
match (p.ty().contains_unknown(), fn_params.as_deref()) {
salsa::attach(db, || match (p.ty().contains_unknown(), fn_params.as_deref()) {
(true, Some(fn_params)) => {
format_to!(buf, "{}", fn_params[idx].ty().display(db, display_target))
}
_ => format_to!(buf, "{}", p.ty().display(db, display_target)),
}
});
res.push_call_param(&buf);
}
}

View file

@ -5,7 +5,7 @@ use arrayvec::ArrayVec;
use hir::{Crate, Module, Semantics, db::HirDatabase};
use ide_db::{
FileId, FileRange, FxHashMap, FxHashSet, RootDatabase,
base_db::{RootQueryDb, SourceDatabase, VfsPath},
base_db::{RootQueryDb, SourceDatabase, VfsPath, salsa},
defs::{Definition, IdentClass},
documentation::Documentation,
famous_defs::FamousDefs,
@ -227,30 +227,32 @@ impl StaticIndex<'_> {
let id = if let Some(it) = self.def_map.get(&def) {
*it
} else {
let it = self.tokens.insert(TokenStaticData {
documentation: documentation_for_definition(&sema, def, scope_node),
hover: Some(hover_for_definition(
&sema,
file_id,
def,
None,
scope_node,
None,
false,
&hover_config,
edition,
display_target,
)),
definition: def.try_to_nav(self.db).map(UpmappingResult::call_site).map(|it| {
FileRange { file_id: it.file_id, range: it.focus_or_full_range() }
}),
references: vec![],
moniker: current_crate.and_then(|cc| def_to_moniker(self.db, def, cc)),
display_name: def
.name(self.db)
.map(|name| name.display(self.db, edition).to_string()),
signature: Some(def.label(self.db, display_target)),
kind: def_to_kind(self.db, def),
let it = salsa::attach(sema.db, || {
self.tokens.insert(TokenStaticData {
documentation: documentation_for_definition(&sema, def, scope_node),
hover: Some(hover_for_definition(
&sema,
file_id,
def,
None,
scope_node,
None,
false,
&hover_config,
edition,
display_target,
)),
definition: def.try_to_nav(self.db).map(UpmappingResult::call_site).map(
|it| FileRange { file_id: it.file_id, range: it.focus_or_full_range() },
),
references: vec![],
moniker: current_crate.and_then(|cc| def_to_moniker(self.db, def, cc)),
display_name: def
.name(self.db)
.map(|name| name.display(self.db, edition).to_string()),
signature: Some(def.label(self.db, display_target)),
kind: def_to_kind(self.db, def),
})
});
self.def_map.insert(def, it);
it

View file

@ -3,6 +3,7 @@ use std::fmt;
use hir::{DisplayTarget, Field, HirDisplay, Layout, Semantics, Type};
use ide_db::{
RootDatabase,
base_db::salsa,
defs::Definition,
helpers::{get_definition, pick_best_token},
};
@ -138,10 +139,12 @@ pub(crate) fn view_memory_layout(
nodes[parent_idx].children_len = fields.len() as u64;
for (field, child_ty) in fields.iter() {
if let Ok(child_layout) = child_ty.layout(db) {
if let Ok(child_layout) = salsa::attach(db, || child_ty.layout(db)) {
nodes.push(MemoryLayoutNode {
item_name: field.name(db),
typename: child_ty.display(db, display_target).to_string(),
typename: salsa::attach(db, || {
child_ty.display(db, display_target).to_string()
}),
size: child_layout.size(),
alignment: child_layout.align(),
offset: match *field {
@ -169,13 +172,13 @@ pub(crate) fn view_memory_layout(
}
for (i, (_, child_ty)) in fields.iter().enumerate() {
if let Ok(child_layout) = child_ty.layout(db) {
if let Ok(child_layout) = salsa::attach(db, || child_ty.layout(db)) {
read_layout(nodes, db, child_ty, &child_layout, children_start + i, display_target);
}
}
}
ty.layout(db)
salsa::attach(db, || ty.layout(db))
.map(|layout| {
let item_name = match def {
// def is a datatype
@ -188,7 +191,7 @@ pub(crate) fn view_memory_layout(
def => def.name(db).map(|n| n.as_str().to_owned()).unwrap_or("[ROOT]".to_owned()),
};
let typename = ty.display(db, display_target).to_string();
let typename = salsa::attach(db, || ty.display(db, display_target).to_string());
let mut nodes = vec![MemoryLayoutNode {
item_name,

View file

@ -10,15 +10,17 @@ use std::{
use cfg::{CfgAtom, CfgDiff};
use hir::{
Adt, AssocItem, Crate, DefWithBody, HasSource, HirDisplay, ImportPathConfig, ModuleDef, Name,
Adt, AssocItem, Crate, DefWithBody, HasCrate, HasSource, HirDisplay, ImportPathConfig,
ModuleDef, Name,
db::{DefDatabase, ExpandDatabase, HirDatabase},
next_solver::{DbInterner, GenericArgs},
};
use hir_def::{
SyntheticSyntax,
expr_store::BodySourceMap,
hir::{ExprId, PatId},
};
use hir_ty::{Interner, Substitution, TyExt, TypeFlags};
use hir_ty::{Interner, TyExt, TypeFlags};
use ide::{
Analysis, AnalysisHost, AnnotationConfig, DiagnosticsConfig, Edition, InlayFieldsToResolve,
InlayHintsConfig, LineCol, RootDatabase,
@ -361,6 +363,7 @@ impl flags::AnalysisStats {
let mut all = 0;
let mut fail = 0;
for &a in adts {
let interner = DbInterner::new_with(db, Some(a.krate(db).base()), None);
let generic_params = db.generic_params(a.into());
if generic_params.iter_type_or_consts().next().is_some()
|| generic_params.iter_lt().next().is_some()
@ -371,7 +374,7 @@ impl flags::AnalysisStats {
all += 1;
let Err(e) = db.layout_of_adt(
hir_def::AdtId::from(a),
Substitution::empty(Interner),
GenericArgs::new_from_iter(interner, []),
db.trait_environment(a.into()),
) else {
continue;