Auto merge of #147210 - lnicola:sync-from-ra, r=lnicola

`rust-analyzer` subtree update

Subtree update of `rust-analyzer` to a6bc4a4bbe.

Created using https://github.com/rust-lang/josh-sync.

r? `@ghost`
This commit is contained in:
bors 2025-10-01 21:58:22 +00:00
commit 3369e82c6b
118 changed files with 3732 additions and 1515 deletions

View file

@ -56,8 +56,8 @@ jobs:
# Install a pinned rustc commit to avoid surprises
- name: Install Rust toolchain
run: |
RUSTC_VERSION=`cat rust-version`
rustup-toolchain-install-master ${RUSTC_VERSION} -c rust-src -c rustfmt
RUSTC_VERSION=$(cat rust-version)
rustup-toolchain-install-master ${RUSTC_VERSION} -c cargo -c rust-src -c rustfmt
rustup default ${RUSTC_VERSION}
# Emulate a nightly toolchain, because the toolchain installed above does not have "nightly"
@ -98,9 +98,9 @@ jobs:
run: |
rustup update --no-self-update stable
rustup default stable
rustup component add --toolchain stable rust-src clippy
# We always use a nightly rustfmt, regardless of channel, because we need
# --file-lines.
rustup component add --toolchain stable rust-src clippy rustfmt
# We also install a nightly rustfmt, because we use `--file-lines` in
# a test.
rustup toolchain install nightly --profile minimal --component rustfmt
# https://github.com/actions-rust-lang/setup-rust-toolchain/blob/main/rust.json
- name: Install Rust Problem Matcher

View file

@ -545,6 +545,12 @@ dependencies = [
"windows-sys 0.59.0",
]
[[package]]
name = "fixedbitset"
version = "0.5.7"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1d674e81391d1e1ab681a28d99df07927c6d4aa5b027d7da16ba32d1d21ecd99"
[[package]]
name = "flate2"
version = "1.1.2"
@ -775,6 +781,7 @@ dependencies = [
"itertools",
"la-arena 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)",
"oorandom",
"petgraph",
"project-model",
"query-group-macro",
"ra-ap-rustc_abi",
@ -1327,9 +1334,9 @@ dependencies = [
[[package]]
name = "memchr"
version = "2.7.5"
version = "2.7.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "32a282da65faaf38286cf3be983213fcf1d2e2a58700e808f83f4ea9a4804bc0"
checksum = "f52b00d39961fc5b2736ea853c9cc86238e165017a493d1d5c8eac6bdc4cc273"
[[package]]
name = "memmap2"
@ -1594,6 +1601,17 @@ dependencies = [
"libc",
]
[[package]]
name = "petgraph"
version = "0.8.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "54acf3a685220b533e437e264e4d932cfbdc4cc7ec0cd232ed73c08d03b8a7ca"
dependencies = [
"fixedbitset",
"hashbrown 0.15.4",
"indexmap",
]
[[package]]
name = "pin-project-lite"
version = "0.2.16"

View file

@ -170,6 +170,7 @@ tracing-subscriber = { version = "0.3.20", default-features = false, features =
triomphe = { version = "0.1.14", default-features = false, features = ["std"] }
url = "2.5.4"
xshell = "0.2.7"
petgraph = { version = "0.8.2", default-features = false }
# We need to freeze the version of the crate, as the raw-api feature is considered unstable
dashmap = { version = "=6.1.0", features = ["raw-api", "inline"] }

View file

@ -34,6 +34,7 @@ rustc_apfloat = "0.2.3"
query-group.workspace = true
salsa.workspace = true
salsa-macros.workspace = true
petgraph.workspace = true
ra-ap-rustc_abi.workspace = true
ra-ap-rustc_index.workspace = true

View file

@ -32,11 +32,11 @@ const AUTODEREF_RECURSION_LIMIT: usize = 20;
/// - the yielded types don't contain inference variables (but may contain `TyKind::Error`).
/// - a type won't be yielded more than once; in other words, the returned iterator will stop if it
/// detects a cycle in the deref chain.
pub fn autoderef(
db: &dyn HirDatabase,
env: Arc<TraitEnvironment>,
pub fn autoderef<'db>(
db: &'db dyn HirDatabase,
env: Arc<TraitEnvironment<'db>>,
ty: crate::Canonical<crate::Ty>,
) -> impl Iterator<Item = crate::Ty> {
) -> impl Iterator<Item = crate::Ty> + use<> {
let mut table = InferenceTable::new(db, env);
let interner = table.interner;
let ty = table.instantiate_canonical(ty);
@ -298,7 +298,7 @@ fn structurally_normalize_ty<'db>(
) -> Option<(Ty<'db>, PredicateObligations<'db>)> {
let mut ocx = ObligationCtxt::new(&table.infer_ctxt);
let Ok(normalized_ty) =
ocx.structurally_normalize_ty(&ObligationCause::misc(), table.param_env, ty)
ocx.structurally_normalize_ty(&ObligationCause::misc(), table.trait_env.env, ty)
else {
// We shouldn't have errors here in the old solver, except for
// evaluate/fulfill mismatches, but that's not a reason for an ICE.

View file

@ -3,17 +3,20 @@
use chalk_ir::{
AdtId, DebruijnIndex, Scalar,
cast::{Cast, CastTo, Caster},
fold::TypeFoldable,
interner::HasInterner,
};
use hir_def::{GenericDefId, GenericParamId, TraitId, TypeAliasId, builtin_type::BuiltinType};
use smallvec::SmallVec;
use crate::{
Binders, BoundVar, CallableSig, GenericArg, GenericArgData, Interner, ProjectionTy,
Substitution, TraitRef, Ty, TyDefId, TyExt, TyKind, consteval::unknown_const_as_generic,
db::HirDatabase, error_lifetime, generics::generics, infer::unify::InferenceTable, primitive,
to_assoc_type_id, to_chalk_trait_id,
BoundVar, CallableSig, GenericArg, GenericArgData, Interner, ProjectionTy, Substitution,
TraitRef, Ty, TyDefId, TyExt, TyKind,
consteval::unknown_const_as_generic,
db::HirDatabase,
error_lifetime,
generics::generics,
infer::unify::InferenceTable,
next_solver::{DbInterner, EarlyBinder, mapping::ChalkToNextSolver},
primitive, to_assoc_type_id, to_chalk_trait_id,
};
#[derive(Debug, Clone, PartialEq, Eq)]
@ -345,19 +348,20 @@ impl TyBuilder<TypeAliasId> {
}
}
impl<T: HasInterner<Interner = Interner> + TypeFoldable<Interner>> TyBuilder<Binders<T>> {
pub fn build(self) -> T {
impl<'db, T: rustc_type_ir::TypeFoldable<DbInterner<'db>>> TyBuilder<EarlyBinder<'db, T>> {
pub fn build(self, interner: DbInterner<'db>) -> T {
let (b, subst) = self.build_internal();
b.substitute(Interner, &subst)
let args: crate::next_solver::GenericArgs<'db> = subst.to_nextsolver(interner);
b.instantiate(interner, args)
}
}
impl TyBuilder<Binders<Ty>> {
impl<'db> TyBuilder<EarlyBinder<'db, crate::next_solver::Ty<'db>>> {
pub fn def_ty(
db: &dyn HirDatabase,
db: &'db dyn HirDatabase,
def: TyDefId,
parent_subst: Option<Substitution>,
) -> TyBuilder<Binders<Ty>> {
) -> TyBuilder<EarlyBinder<'db, crate::next_solver::Ty<'db>>> {
let poly_ty = db.ty(def);
let id: GenericDefId = match def {
TyDefId::BuiltinType(_) => {
@ -370,7 +374,10 @@ impl TyBuilder<Binders<Ty>> {
TyBuilder::subst_for_def(db, id, parent_subst).with_data(poly_ty)
}
pub fn impl_self_ty(db: &dyn HirDatabase, def: hir_def::ImplId) -> TyBuilder<Binders<Ty>> {
pub fn impl_self_ty(
db: &'db dyn HirDatabase,
def: hir_def::ImplId,
) -> TyBuilder<EarlyBinder<'db, crate::next_solver::Ty<'db>>> {
TyBuilder::subst_for_def(db, def, None).with_data(db.impl_self_ty(def))
}
}

View file

@ -15,8 +15,13 @@ use crate::{
AdtId, AliasEq, AliasTy, Binders, CallableDefId, CallableSig, Canonical, CanonicalVarKinds,
ClosureId, DynTy, FnPointer, ImplTraitId, InEnvironment, Interner, Lifetime, ProjectionTy,
QuantifiedWhereClause, Substitution, ToChalk, TraitRef, Ty, TyBuilder, TyKind, TypeFlags,
WhereClause, db::HirDatabase, from_assoc_type_id, from_chalk_trait_id, from_foreign_def_id,
from_placeholder_idx, generics::generics, to_chalk_trait_id, utils::ClosureSubst,
WhereClause,
db::HirDatabase,
from_assoc_type_id, from_chalk_trait_id, from_foreign_def_id, from_placeholder_idx,
generics::generics,
next_solver::{DbInterner, mapping::NextSolverToChalk},
to_chalk_trait_id,
utils::ClosureSubst,
};
pub trait TyExt {
@ -372,7 +377,10 @@ impl TyExt for Ty {
let trait_ref = TyBuilder::trait_ref(db, copy_trait).push(self).build();
let env = db.trait_environment_for_body(owner);
let goal = Canonical {
value: InEnvironment::new(&env.env, trait_ref.cast(Interner)),
value: InEnvironment::new(
&env.env.to_chalk(DbInterner::new_with(db, Some(env.krate), env.block)),
trait_ref.cast(Interner),
),
binders: CanonicalVarKinds::empty(Interner),
};
!db.trait_solve(crate_id, None, goal).no_solution()

View file

@ -229,7 +229,7 @@ pub(crate) fn const_eval_cycle_result(
_: &dyn HirDatabase,
_: GeneralConstId,
_: Substitution,
_: Option<Arc<TraitEnvironment>>,
_: Option<Arc<TraitEnvironment<'_>>>,
) -> Result<Const, ConstEvalError> {
Err(ConstEvalError::MirLowerError(MirLowerError::Loop))
}
@ -252,7 +252,7 @@ pub(crate) fn const_eval_query(
db: &dyn HirDatabase,
def: GeneralConstId,
subst: Substitution,
trait_env: Option<Arc<TraitEnvironment>>,
trait_env: Option<Arc<TraitEnvironment<'_>>>,
) -> Result<Const, ConstEvalError> {
let body = match def {
GeneralConstId::ConstId(c) => {
@ -327,7 +327,7 @@ pub(crate) fn eval_to_const(
debruijn: DebruijnIndex,
) -> Const {
let db = ctx.db;
let infer = ctx.clone().resolve_all();
let infer = ctx.fixme_resolve_all_clone();
fn has_closure(body: &Body, expr: ExprId) -> bool {
if matches!(body[expr], Expr::Closure { .. }) {
return true;

View file

@ -36,12 +36,12 @@ fn check_fail(
error: impl FnOnce(ConstEvalError) -> bool,
) {
let (db, file_id) = TestDB::with_single_file(ra_fixture);
match eval_goal(&db, file_id) {
salsa::attach(&db, || match eval_goal(&db, file_id) {
Ok(_) => panic!("Expected fail, but it succeeded"),
Err(e) => {
assert!(error(simplify(e.clone())), "Actual error was: {}", pretty_print_err(e, db))
assert!(error(simplify(e.clone())), "Actual error was: {}", pretty_print_err(e, &db))
}
}
})
}
#[track_caller]
@ -79,36 +79,38 @@ fn check_answer(
check: impl FnOnce(&[u8], &MemoryMap<'_>),
) {
let (db, file_ids) = TestDB::with_many_files(ra_fixture);
let file_id = *file_ids.last().unwrap();
let r = match eval_goal(&db, file_id) {
Ok(t) => t,
Err(e) => {
let err = pretty_print_err(e, db);
panic!("Error in evaluating goal: {err}");
}
};
match &r.data(Interner).value {
chalk_ir::ConstValue::Concrete(c) => match &c.interned {
ConstScalar::Bytes(b, mm) => {
check(b, mm);
salsa::attach(&db, || {
let file_id = *file_ids.last().unwrap();
let r = match eval_goal(&db, file_id) {
Ok(t) => t,
Err(e) => {
let err = pretty_print_err(e, &db);
panic!("Error in evaluating goal: {err}");
}
x => panic!("Expected number but found {x:?}"),
},
_ => panic!("result of const eval wasn't a concrete const"),
}
};
match &r.data(Interner).value {
chalk_ir::ConstValue::Concrete(c) => match &c.interned {
ConstScalar::Bytes(b, mm) => {
check(b, mm);
}
x => panic!("Expected number but found {x:?}"),
},
_ => panic!("result of const eval wasn't a concrete const"),
}
});
}
fn pretty_print_err(e: ConstEvalError, db: TestDB) -> String {
fn pretty_print_err(e: ConstEvalError, db: &TestDB) -> String {
let mut err = String::new();
let span_formatter = |file, range| format!("{file:?} {range:?}");
let display_target =
DisplayTarget::from_crate(&db, *db.all_crates().last().expect("no crate graph present"));
DisplayTarget::from_crate(db, *db.all_crates().last().expect("no crate graph present"));
match e {
ConstEvalError::MirLowerError(e) => {
e.pretty_print(&mut err, &db, span_formatter, display_target)
e.pretty_print(&mut err, db, span_formatter, display_target)
}
ConstEvalError::MirEvalError(e) => {
e.pretty_print(&mut err, &db, span_formatter, display_target)
e.pretty_print(&mut err, db, span_formatter, display_target)
}
}
.unwrap();

View file

@ -222,7 +222,7 @@ pub(crate) fn const_eval_discriminant_variant(
// and make this function private. See the fixme comment on `InferenceContext::resolve_all`.
pub(crate) fn eval_to_const<'db>(expr: ExprId, ctx: &mut InferenceContext<'db>) -> Const<'db> {
let interner = DbInterner::new_with(ctx.db, None, None);
let infer = ctx.clone().resolve_all();
let infer = ctx.fixme_resolve_all_clone();
fn has_closure(body: &Body, expr: ExprId) -> bool {
if matches!(body[expr], Expr::Closure { .. }) {
return true;

View file

@ -16,8 +16,8 @@ use smallvec::SmallVec;
use triomphe::Arc;
use crate::{
Binders, Const, ImplTraitId, ImplTraits, InferenceResult, PolyFnSig, Substitution,
TraitEnvironment, TraitRef, Ty, TyDefId, ValueTyDefId, chalk_db,
Binders, Const, ImplTraitId, ImplTraits, InferenceResult, Substitution, TraitEnvironment, Ty,
TyDefId, ValueTyDefId, chalk_db,
consteval::ConstEvalError,
drop::DropGlue,
dyn_compatibility::DynCompatibilityViolation,
@ -49,7 +49,7 @@ pub trait HirDatabase: DefDatabase + std::fmt::Debug {
&self,
def: DefWithBodyId,
subst: Substitution,
env: Arc<TraitEnvironment>,
env: Arc<TraitEnvironment<'_>>,
) -> Result<Arc<MirBody>, MirLowerError>;
#[salsa::invoke(crate::mir::monomorphized_mir_body_for_closure_query)]
@ -57,7 +57,7 @@ pub trait HirDatabase: DefDatabase + std::fmt::Debug {
&self,
def: InternedClosureId,
subst: Substitution,
env: Arc<TraitEnvironment>,
env: Arc<TraitEnvironment<'_>>,
) -> Result<Arc<MirBody>, MirLowerError>;
#[salsa::invoke(crate::mir::borrowck_query)]
@ -70,7 +70,7 @@ pub trait HirDatabase: DefDatabase + std::fmt::Debug {
&self,
def: GeneralConstId,
subst: Substitution,
trait_env: Option<Arc<TraitEnvironment>>,
trait_env: Option<Arc<TraitEnvironment<'_>>>,
) -> Result<Const, ConstEvalError>;
#[salsa::invoke(crate::consteval::const_eval_static_query)]
@ -84,7 +84,7 @@ pub trait HirDatabase: DefDatabase + std::fmt::Debug {
#[salsa::invoke(crate::method_resolution::lookup_impl_method_query)]
fn lookup_impl_method(
&self,
env: Arc<TraitEnvironment>,
env: Arc<TraitEnvironment<'_>>,
func: FunctionId,
fn_subst: Substitution,
) -> (FunctionId, Substitution);
@ -97,7 +97,7 @@ pub trait HirDatabase: DefDatabase + std::fmt::Debug {
&'db self,
def: AdtId,
args: crate::next_solver::GenericArgs<'db>,
trait_env: Arc<TraitEnvironment>,
trait_env: Arc<TraitEnvironment<'db>>,
) -> Result<Arc<Layout>, LayoutError>;
#[salsa::invoke(crate::layout::layout_of_ty_query)]
@ -105,7 +105,7 @@ pub trait HirDatabase: DefDatabase + std::fmt::Debug {
fn layout_of_ty<'db>(
&'db self,
ty: crate::next_solver::Ty<'db>,
env: Arc<TraitEnvironment>,
env: Arc<TraitEnvironment<'db>>,
) -> Result<Arc<Layout>, LayoutError>;
#[salsa::invoke(crate::layout::target_data_layout_query)]
@ -114,55 +114,94 @@ pub trait HirDatabase: DefDatabase + std::fmt::Debug {
#[salsa::invoke(crate::dyn_compatibility::dyn_compatibility_of_trait_query)]
fn dyn_compatibility_of_trait(&self, trait_: TraitId) -> Option<DynCompatibilityViolation>;
#[salsa::invoke(crate::lower::ty_query)]
#[salsa::invoke(crate::lower_nextsolver::ty_query)]
#[salsa::transparent]
fn ty(&self, def: TyDefId) -> Binders<Ty>;
fn ty<'db>(
&'db self,
def: TyDefId,
) -> crate::next_solver::EarlyBinder<'db, crate::next_solver::Ty<'db>>;
#[salsa::invoke(crate::lower::type_for_type_alias_with_diagnostics_query)]
#[salsa::cycle(cycle_result = crate::lower::type_for_type_alias_with_diagnostics_cycle_result)]
fn type_for_type_alias_with_diagnostics(&self, def: TypeAliasId) -> (Binders<Ty>, Diagnostics);
#[salsa::invoke(crate::lower_nextsolver::type_for_type_alias_with_diagnostics_query)]
#[salsa::cycle(cycle_result = crate::lower_nextsolver::type_for_type_alias_with_diagnostics_cycle_result)]
fn type_for_type_alias_with_diagnostics<'db>(
&'db self,
def: TypeAliasId,
) -> (crate::next_solver::EarlyBinder<'db, crate::next_solver::Ty<'db>>, Diagnostics);
/// Returns the type of the value of the given constant, or `None` if the `ValueTyDefId` is
/// a `StructId` or `EnumVariantId` with a record constructor.
#[salsa::invoke(crate::lower::value_ty_query)]
fn value_ty(&self, def: ValueTyDefId) -> Option<Binders<Ty>>;
#[salsa::invoke(crate::lower_nextsolver::value_ty_query)]
fn value_ty<'db>(
&'db self,
def: ValueTyDefId,
) -> Option<crate::next_solver::EarlyBinder<'db, crate::next_solver::Ty<'db>>>;
#[salsa::invoke(crate::lower::impl_self_ty_with_diagnostics_query)]
#[salsa::cycle(cycle_result = crate::lower::impl_self_ty_with_diagnostics_cycle_result)]
fn impl_self_ty_with_diagnostics(&self, def: ImplId) -> (Binders<Ty>, Diagnostics);
#[salsa::invoke(crate::lower_nextsolver::impl_self_ty_with_diagnostics_query)]
#[salsa::cycle(cycle_result = crate::lower_nextsolver::impl_self_ty_with_diagnostics_cycle_result)]
fn impl_self_ty_with_diagnostics<'db>(
&'db self,
def: ImplId,
) -> (crate::next_solver::EarlyBinder<'db, crate::next_solver::Ty<'db>>, Diagnostics);
#[salsa::invoke(crate::lower::impl_self_ty_query)]
#[salsa::invoke(crate::lower_nextsolver::impl_self_ty_query)]
#[salsa::transparent]
fn impl_self_ty(&self, def: ImplId) -> Binders<Ty>;
fn impl_self_ty<'db>(
&'db self,
def: ImplId,
) -> crate::next_solver::EarlyBinder<'db, crate::next_solver::Ty<'db>>;
// FIXME: Make this a non-interned query.
#[salsa::invoke_interned(crate::lower::const_param_ty_with_diagnostics_query)]
#[salsa::cycle(cycle_result = crate::lower::const_param_ty_with_diagnostics_cycle_result)]
fn const_param_ty_with_diagnostics(&self, def: ConstParamId) -> (Ty, Diagnostics);
#[salsa::invoke_interned(crate::lower_nextsolver::const_param_ty_with_diagnostics_query)]
#[salsa::cycle(cycle_result = crate::lower_nextsolver::const_param_ty_with_diagnostics_cycle_result)]
fn const_param_ty_with_diagnostics<'db>(
&'db self,
def: ConstParamId,
) -> (crate::next_solver::Ty<'db>, Diagnostics);
#[salsa::invoke(crate::lower::const_param_ty_query)]
#[salsa::transparent]
// FIXME: Make this a non-interned query.
#[salsa::invoke_interned(crate::lower::const_param_ty_query)]
#[salsa::cycle(cycle_result = crate::lower::const_param_ty_cycle_result)]
fn const_param_ty(&self, def: ConstParamId) -> Ty;
#[salsa::invoke(crate::lower::impl_trait_with_diagnostics_query)]
fn impl_trait_with_diagnostics(&self, def: ImplId) -> Option<(Binders<TraitRef>, Diagnostics)>;
#[salsa::invoke(crate::lower_nextsolver::impl_trait_with_diagnostics_query)]
fn impl_trait_with_diagnostics<'db>(
&'db self,
def: ImplId,
) -> Option<(
crate::next_solver::EarlyBinder<'db, crate::next_solver::TraitRef<'db>>,
Diagnostics,
)>;
#[salsa::invoke(crate::lower::impl_trait_query)]
#[salsa::invoke(crate::lower_nextsolver::impl_trait_query)]
#[salsa::transparent]
fn impl_trait(&self, def: ImplId) -> Option<Binders<TraitRef>>;
fn impl_trait<'db>(
&'db self,
def: ImplId,
) -> Option<crate::next_solver::EarlyBinder<'db, crate::next_solver::TraitRef<'db>>>;
#[salsa::invoke(crate::lower::field_types_with_diagnostics_query)]
fn field_types_with_diagnostics(
&self,
#[salsa::invoke(crate::lower_nextsolver::field_types_with_diagnostics_query)]
fn field_types_with_diagnostics<'db>(
&'db self,
var: VariantId,
) -> (Arc<ArenaMap<LocalFieldId, Binders<Ty>>>, Diagnostics);
) -> (
Arc<
ArenaMap<
LocalFieldId,
crate::next_solver::EarlyBinder<'db, crate::next_solver::Ty<'db>>,
>,
>,
Diagnostics,
);
#[salsa::invoke(crate::lower::field_types_query)]
#[salsa::transparent]
fn field_types(&self, var: VariantId) -> Arc<ArenaMap<LocalFieldId, Binders<Ty>>>;
#[salsa::invoke(crate::lower::callable_item_signature_query)]
fn callable_item_signature(&self, def: CallableDefId) -> PolyFnSig;
#[salsa::invoke(crate::lower_nextsolver::callable_item_signature_query)]
fn callable_item_signature<'db>(
&'db self,
def: CallableDefId,
) -> crate::next_solver::EarlyBinder<'db, crate::next_solver::PolyFnSig<'db>>;
#[salsa::invoke(crate::lower::return_type_impl_traits)]
fn return_type_impl_traits(&self, def: FunctionId) -> Option<Arc<Binders<ImplTraits>>>;
@ -182,12 +221,28 @@ pub trait HirDatabase: DefDatabase + std::fmt::Debug {
#[salsa::invoke(crate::lower::generic_predicates_query)]
fn generic_predicates(&self, def: GenericDefId) -> GenericPredicates;
#[salsa::invoke(crate::lower::trait_environment_for_body_query)]
#[salsa::transparent]
fn trait_environment_for_body(&self, def: DefWithBodyId) -> Arc<TraitEnvironment>;
#[salsa::invoke(
crate::lower_nextsolver::generic_predicates_without_parent_with_diagnostics_query
)]
fn generic_predicates_without_parent_with_diagnostics<'db>(
&'db self,
def: GenericDefId,
) -> (crate::lower_nextsolver::GenericPredicates<'db>, Diagnostics);
#[salsa::invoke(crate::lower::trait_environment_query)]
fn trait_environment(&self, def: GenericDefId) -> Arc<TraitEnvironment>;
#[salsa::invoke(crate::lower_nextsolver::generic_predicates_without_parent_query)]
#[salsa::transparent]
fn generic_predicates_without_parent<'db>(
&'db self,
def: GenericDefId,
) -> crate::lower_nextsolver::GenericPredicates<'db>;
#[salsa::invoke(crate::lower_nextsolver::trait_environment_for_body_query)]
#[salsa::transparent]
fn trait_environment_for_body<'db>(&'db self, def: DefWithBodyId)
-> Arc<TraitEnvironment<'db>>;
#[salsa::invoke(crate::lower_nextsolver::trait_environment_query)]
fn trait_environment<'db>(&'db self, def: GenericDefId) -> Arc<TraitEnvironment<'db>>;
#[salsa::invoke(crate::lower::generic_defaults_with_diagnostics_query)]
#[salsa::cycle(cycle_result = crate::lower::generic_defaults_with_diagnostics_cycle_result)]
@ -258,7 +313,7 @@ pub trait HirDatabase: DefDatabase + std::fmt::Debug {
fn normalize_projection(
&self,
projection: crate::ProjectionTy,
env: Arc<TraitEnvironment>,
env: Arc<TraitEnvironment<'_>>,
) -> Ty;
#[salsa::invoke(crate::traits::trait_solve_query)]
@ -272,87 +327,14 @@ pub trait HirDatabase: DefDatabase + std::fmt::Debug {
#[salsa::invoke(crate::drop::has_drop_glue)]
#[salsa::cycle(cycle_result = crate::drop::has_drop_glue_cycle_result)]
fn has_drop_glue(&self, ty: Ty, env: Arc<TraitEnvironment>) -> DropGlue;
fn has_drop_glue(&self, ty: Ty, env: Arc<TraitEnvironment<'_>>) -> DropGlue;
// next trait solver
#[salsa::invoke(crate::lower_nextsolver::ty_query)]
#[salsa::transparent]
fn ty_ns<'db>(
&'db self,
def: TyDefId,
) -> crate::next_solver::EarlyBinder<'db, crate::next_solver::Ty<'db>>;
/// Returns the type of the value of the given constant, or `None` if the `ValueTyDefId` is
/// a `StructId` or `EnumVariantId` with a record constructor.
#[salsa::invoke(crate::lower_nextsolver::value_ty_query)]
fn value_ty_ns<'db>(
&'db self,
def: ValueTyDefId,
) -> Option<crate::next_solver::EarlyBinder<'db, crate::next_solver::Ty<'db>>>;
#[salsa::invoke(crate::lower_nextsolver::type_for_type_alias_with_diagnostics_query)]
#[salsa::cycle(cycle_result = crate::lower_nextsolver::type_for_type_alias_with_diagnostics_cycle_result)]
fn type_for_type_alias_with_diagnostics_ns<'db>(
&'db self,
def: TypeAliasId,
) -> (crate::next_solver::EarlyBinder<'db, crate::next_solver::Ty<'db>>, Diagnostics);
#[salsa::invoke(crate::lower_nextsolver::impl_self_ty_with_diagnostics_query)]
#[salsa::cycle(cycle_result = crate::lower_nextsolver::impl_self_ty_with_diagnostics_cycle_result)]
fn impl_self_ty_with_diagnostics_ns<'db>(
&'db self,
def: ImplId,
) -> (crate::next_solver::EarlyBinder<'db, crate::next_solver::Ty<'db>>, Diagnostics);
#[salsa::invoke(crate::lower_nextsolver::impl_self_ty_query)]
#[salsa::transparent]
fn impl_self_ty_ns<'db>(
&'db self,
def: ImplId,
) -> crate::next_solver::EarlyBinder<'db, crate::next_solver::Ty<'db>>;
// FIXME: Make this a non-interned query.
#[salsa::invoke_interned(crate::lower_nextsolver::const_param_ty_with_diagnostics_query)]
fn const_param_ty_with_diagnostics_ns<'db>(
&'db self,
def: ConstParamId,
) -> (crate::next_solver::Ty<'db>, Diagnostics);
#[salsa::invoke(crate::lower_nextsolver::const_param_ty_query)]
#[salsa::transparent]
fn const_param_ty_ns<'db>(&'db self, def: ConstParamId) -> crate::next_solver::Ty<'db>;
#[salsa::invoke(crate::lower_nextsolver::impl_trait_with_diagnostics_query)]
fn impl_trait_with_diagnostics_ns<'db>(
&'db self,
def: ImplId,
) -> Option<(
crate::next_solver::EarlyBinder<'db, crate::next_solver::TraitRef<'db>>,
Diagnostics,
)>;
#[salsa::invoke(crate::lower_nextsolver::impl_trait_query)]
#[salsa::transparent]
fn impl_trait_ns<'db>(
&'db self,
def: ImplId,
) -> Option<crate::next_solver::EarlyBinder<'db, crate::next_solver::TraitRef<'db>>>;
#[salsa::invoke(crate::lower_nextsolver::field_types_with_diagnostics_query)]
fn field_types_with_diagnostics_ns<'db>(
&'db self,
var: VariantId,
) -> (
Arc<
ArenaMap<
LocalFieldId,
crate::next_solver::EarlyBinder<'db, crate::next_solver::Ty<'db>>,
>,
>,
Diagnostics,
);
#[salsa::invoke(crate::lower_nextsolver::field_types_query)]
#[salsa::transparent]
fn field_types_ns<'db>(
@ -362,12 +344,6 @@ pub trait HirDatabase: DefDatabase + std::fmt::Debug {
ArenaMap<LocalFieldId, crate::next_solver::EarlyBinder<'db, crate::next_solver::Ty<'db>>>,
>;
#[salsa::invoke(crate::lower_nextsolver::callable_item_signature_query)]
fn callable_item_signature_ns<'db>(
&'db self,
def: CallableDefId,
) -> crate::next_solver::EarlyBinder<'db, crate::next_solver::PolyFnSig<'db>>;
#[salsa::invoke(crate::lower_nextsolver::return_type_impl_traits)]
fn return_type_impl_traits_ns<'db>(
&'db self,
@ -394,21 +370,6 @@ pub trait HirDatabase: DefDatabase + std::fmt::Debug {
&'db self,
def: GenericDefId,
) -> crate::lower_nextsolver::GenericPredicates<'db>;
#[salsa::invoke(
crate::lower_nextsolver::generic_predicates_without_parent_with_diagnostics_query
)]
fn generic_predicates_without_parent_with_diagnostics_ns<'db>(
&'db self,
def: GenericDefId,
) -> (crate::lower_nextsolver::GenericPredicates<'db>, Diagnostics);
#[salsa::invoke(crate::lower_nextsolver::generic_predicates_without_parent_query)]
#[salsa::transparent]
fn generic_predicates_without_parent_ns<'db>(
&'db self,
def: GenericDefId,
) -> crate::lower_nextsolver::GenericPredicates<'db>;
}
#[test]

View file

@ -81,17 +81,17 @@ impl BodyValidationDiagnostic {
}
}
struct ExprValidator {
struct ExprValidator<'db> {
owner: DefWithBodyId,
body: Arc<Body>,
infer: Arc<InferenceResult>,
env: Arc<TraitEnvironment>,
env: Arc<TraitEnvironment<'db>>,
diagnostics: Vec<BodyValidationDiagnostic>,
validate_lints: bool,
}
impl ExprValidator {
fn validate_body(&mut self, db: &dyn HirDatabase) {
impl<'db> ExprValidator<'db> {
fn validate_body(&mut self, db: &'db dyn HirDatabase) {
let mut filter_map_next_checker = None;
// we'll pass &mut self while iterating over body.exprs, so they need to be disjoint
let body = Arc::clone(&self.body);

View file

@ -70,7 +70,7 @@ pub(crate) struct MatchCheckCtx<'db> {
body: DefWithBodyId,
pub(crate) db: &'db dyn HirDatabase,
exhaustive_patterns: bool,
env: Arc<TraitEnvironment>,
env: Arc<TraitEnvironment<'db>>,
}
impl<'db> MatchCheckCtx<'db> {
@ -78,7 +78,7 @@ impl<'db> MatchCheckCtx<'db> {
module: ModuleId,
body: DefWithBodyId,
db: &'db dyn HirDatabase,
env: Arc<TraitEnvironment>,
env: Arc<TraitEnvironment<'db>>,
) -> Self {
let def_map = module.crate_def_map(db);
let exhaustive_patterns = def_map.is_unstable_feature_enabled(&sym::exhaustive_patterns);

View file

@ -315,6 +315,22 @@ impl<'db> UnsafeVisitor<'db> {
}
_ => (),
}
let mut peeled = *expr;
while let Expr::Field { expr: lhs, .. } = &self.body[peeled] {
if let Some(Either::Left(FieldId { parent: VariantId::UnionId(_), .. })) =
self.infer.field_resolution(peeled)
{
peeled = *lhs;
} else {
break;
}
}
// Walk the peeled expression (the LHS of the union field chain)
self.walk_expr(peeled);
// Return so we don't recurse directly onto the union field access(es)
return;
}
Expr::MethodCall { .. } => {
if let Some((func, _)) = self.infer.method_resolution(current) {

View file

@ -46,8 +46,8 @@ use span::Edition;
use stdx::never;
use triomphe::Arc;
use crate::next_solver::infer::DbInternerInferExt;
use crate::next_solver::infer::traits::ObligationCause;
use crate::next_solver::{infer::DbInternerInferExt, mapping::NextSolverToChalk};
use crate::{
AliasEq, AliasTy, Binders, CallableDefId, CallableSig, ConcreteConst, Const, ConstScalar,
ConstValue, DomainGoal, FnAbi, GenericArg, ImplTraitId, Interner, Lifetime, LifetimeData,
@ -792,19 +792,16 @@ fn render_const_scalar_ns(
let trait_env = TraitEnvironment::empty(f.krate());
let interner = DbInterner::new_with(f.db, Some(trait_env.krate), trait_env.block);
let infcx = interner.infer_ctxt().build(rustc_type_ir::TypingMode::PostAnalysis);
let ty = infcx
.at(&ObligationCause::new(), trait_env.env.to_nextsolver(interner))
.deeply_normalize(ty)
.unwrap_or(ty);
let ty = infcx.at(&ObligationCause::new(), trait_env.env).deeply_normalize(ty).unwrap_or(ty);
render_const_scalar_inner(f, b, memory_map, ty, trait_env)
}
fn render_const_scalar_inner(
fn render_const_scalar_inner<'db>(
f: &mut HirFormatter<'_>,
b: &[u8],
memory_map: &MemoryMap<'_>,
ty: crate::next_solver::Ty<'_>,
trait_env: Arc<TraitEnvironment>,
ty: crate::next_solver::Ty<'db>,
trait_env: Arc<TraitEnvironment<'db>>,
) -> Result<(), HirDisplayError> {
use rustc_type_ir::TyKind;
match ty.kind() {
@ -1068,11 +1065,11 @@ fn render_const_scalar_inner(
}
}
fn render_variant_after_name(
fn render_variant_after_name<'db>(
data: &VariantFields,
f: &mut HirFormatter<'_>,
field_types: &ArenaMap<LocalFieldId, Binders<Ty>>,
trait_env: Arc<TraitEnvironment>,
trait_env: Arc<TraitEnvironment<'db>>,
layout: &Layout,
args: GenericArgs<'_>,
b: &[u8],
@ -1301,7 +1298,9 @@ impl<'db> HirDisplay for crate::next_solver::Ty<'db> {
let def = def.0;
let sig = db
.callable_item_signature(def)
.substitute(Interner, &convert_args_for_result(interner, args.as_slice()));
.instantiate(interner, args)
.skip_binder()
.to_chalk(interner);
if f.display_kind.is_source_code() {
// `FnDef` is anonymous and there's no surface syntax for it. Show it as a

View file

@ -7,6 +7,8 @@ use hir_def::signatures::StructFlags;
use stdx::never;
use triomphe::Arc;
use crate::next_solver::DbInterner;
use crate::next_solver::mapping::NextSolverToChalk;
use crate::{
AliasTy, Canonical, CanonicalVarKinds, ConcreteConst, ConstScalar, ConstValue, InEnvironment,
Interner, ProjectionTy, TraitEnvironment, Ty, TyBuilder, TyKind, db::HirDatabase,
@ -43,7 +45,11 @@ pub enum DropGlue {
HasDropGlue,
}
pub(crate) fn has_drop_glue(db: &dyn HirDatabase, ty: Ty, env: Arc<TraitEnvironment>) -> DropGlue {
pub(crate) fn has_drop_glue(
db: &dyn HirDatabase,
ty: Ty,
env: Arc<TraitEnvironment<'_>>,
) -> DropGlue {
match ty.kind(Interner) {
TyKind::Adt(adt, subst) => {
if has_destructor(db, adt.0) {
@ -165,7 +171,7 @@ pub(crate) fn has_drop_glue(db: &dyn HirDatabase, ty: Ty, env: Arc<TraitEnvironm
fn projection_has_drop_glue(
db: &dyn HirDatabase,
env: Arc<TraitEnvironment>,
env: Arc<TraitEnvironment<'_>>,
projection: ProjectionTy,
ty: Ty,
) -> DropGlue {
@ -178,13 +184,16 @@ fn projection_has_drop_glue(
}
}
fn is_copy(db: &dyn HirDatabase, ty: Ty, env: Arc<TraitEnvironment>) -> bool {
fn is_copy(db: &dyn HirDatabase, ty: Ty, env: Arc<TraitEnvironment<'_>>) -> bool {
let Some(copy_trait) = LangItem::Copy.resolve_trait(db, env.krate) else {
return false;
};
let trait_ref = TyBuilder::trait_ref(db, copy_trait).push(ty).build();
let goal = Canonical {
value: InEnvironment::new(&env.env, trait_ref.cast(Interner)),
value: InEnvironment::new(
&env.env.to_chalk(DbInterner::new_with(db, Some(env.krate), env.block)),
trait_ref.cast(Interner),
),
binders: CanonicalVarKinds::empty(Interner),
};
db.trait_solve(env.krate, env.block, goal).certain()
@ -193,7 +202,7 @@ fn is_copy(db: &dyn HirDatabase, ty: Ty, env: Arc<TraitEnvironment>) -> bool {
pub(crate) fn has_drop_glue_cycle_result(
_db: &dyn HirDatabase,
_ty: Ty,
_env: Arc<TraitEnvironment>,
_env: Arc<TraitEnvironment<'_>>,
) -> DropGlue {
DropGlue::None
}

View file

@ -329,7 +329,7 @@ where
cb(MethodViolationCode::AsyncFn)?;
}
let sig = db.callable_item_signature_ns(func.into());
let sig = db.callable_item_signature(func.into());
if sig
.skip_binder()
.inputs()
@ -364,7 +364,7 @@ where
cb(MethodViolationCode::UndispatchableReceiver)?;
}
let predicates = &*db.generic_predicates_without_parent_ns(func.into());
let predicates = &*db.generic_predicates_without_parent(func.into());
for pred in predicates {
let pred = pred.kind().skip_binder();

View file

@ -19,6 +19,7 @@ pub(crate) mod closure;
mod coerce;
pub(crate) mod diagnostics;
mod expr;
mod fallback;
mod mutability;
mod pat;
mod path;
@ -53,16 +54,16 @@ use indexmap::IndexSet;
use intern::sym;
use la_arena::{ArenaMap, Entry};
use rustc_hash::{FxHashMap, FxHashSet};
use rustc_type_ir::inherent::Ty as _;
use stdx::{always, never};
use triomphe::Arc;
use crate::db::InternedClosureId;
use crate::{
AliasEq, AliasTy, Binders, ClosureId, Const, DomainGoal, GenericArg, ImplTraitId, ImplTraitIdx,
IncorrectGenericsLenKind, Interner, Lifetime, OpaqueTyId, ParamLoweringMode,
PathLoweringDiagnostic, ProjectionTy, Substitution, TargetFeatures, TraitEnvironment, Ty,
TyBuilder, TyExt,
db::HirDatabase,
db::{HirDatabase, InternedClosureId},
fold_tys,
generics::Generics,
infer::{
@ -75,6 +76,7 @@ use crate::{
mir::MirSpan,
next_solver::{
self, DbInterner,
infer::{DefineOpaqueTypes, traits::ObligationCause},
mapping::{ChalkToNextSolver, NextSolverToChalk},
},
static_lifetime, to_assoc_type_id,
@ -138,6 +140,20 @@ pub(crate) fn infer_query(db: &dyn HirDatabase, def: DefWithBodyId) -> Arc<Infer
ctx.infer_mut_body();
ctx.type_inference_fallback();
// Comment from rustc:
// Even though coercion casts provide type hints, we check casts after fallback for
// backwards compatibility. This makes fallback a stronger type hint than a cast coercion.
let cast_checks = std::mem::take(&mut ctx.deferred_cast_checks);
for mut cast in cast_checks.into_iter() {
if let Err(diag) = cast.check(&mut ctx) {
ctx.diagnostics.push(diag);
}
}
ctx.table.select_obligations_where_possible();
ctx.infer_closures();
Arc::new(ctx.resolve_all())
@ -152,7 +168,7 @@ pub(crate) fn infer_cycle_result(_: &dyn HirDatabase, _: DefWithBodyId) -> Arc<I
/// This is appropriate to use only after type-check: it assumes
/// that normalization will succeed, for example.
#[tracing::instrument(level = "debug", skip(db))]
pub(crate) fn normalize(db: &dyn HirDatabase, trait_env: Arc<TraitEnvironment>, ty: Ty) -> Ty {
pub(crate) fn normalize(db: &dyn HirDatabase, trait_env: Arc<TraitEnvironment<'_>>, ty: Ty) -> Ty {
// FIXME: TypeFlags::HAS_CT_PROJECTION is not implemented in chalk, so TypeFlags::HAS_PROJECTION only
// works for the type case, so we check array unconditionally. Remove the array part
// when the bug in chalk becomes fixed.
@ -165,7 +181,6 @@ pub(crate) fn normalize(db: &dyn HirDatabase, trait_env: Arc<TraitEnvironment>,
let ty_with_vars = table.normalize_associated_types_in(ty);
table.select_obligations_where_possible();
table.propagate_diverging_flag();
table.resolve_completely(ty_with_vars)
}
@ -632,6 +647,26 @@ impl InferenceResult {
pub fn binding_mode(&self, id: PatId) -> Option<BindingMode> {
self.binding_modes.get(id).copied()
}
// This method is consumed by external tools to run rust-analyzer as a library. Don't remove, please.
pub fn expression_types(&self) -> impl Iterator<Item = (ExprId, &Ty)> {
self.type_of_expr.iter()
}
// This method is consumed by external tools to run rust-analyzer as a library. Don't remove, please.
pub fn pattern_types(&self) -> impl Iterator<Item = (PatId, &Ty)> {
self.type_of_pat.iter()
}
// This method is consumed by external tools to run rust-analyzer as a library. Don't remove, please.
pub fn binding_types(&self) -> impl Iterator<Item = (BindingId, &Ty)> {
self.type_of_binding.iter()
}
// This method is consumed by external tools to run rust-analyzer as a library. Don't remove, please.
pub fn return_position_impl_trait_types(&self) -> impl Iterator<Item = (ImplTraitIdx, &Ty)> {
self.type_of_rpit.iter()
}
}
impl Index<ExprId> for InferenceResult {
@ -666,6 +701,25 @@ impl Index<BindingId> for InferenceResult {
}
}
#[derive(Debug, Clone)]
struct InternedStandardTypesNextSolver<'db> {
unit: crate::next_solver::Ty<'db>,
never: crate::next_solver::Ty<'db>,
i32: crate::next_solver::Ty<'db>,
f64: crate::next_solver::Ty<'db>,
}
impl<'db> InternedStandardTypesNextSolver<'db> {
fn new(interner: DbInterner<'db>) -> Self {
Self {
unit: crate::next_solver::Ty::new_unit(interner),
never: crate::next_solver::Ty::new(interner, crate::next_solver::TyKind::Never),
i32: crate::next_solver::Ty::new_int(interner, rustc_type_ir::IntTy::I32),
f64: crate::next_solver::Ty::new_float(interner, rustc_type_ir::FloatTy::F64),
}
}
}
/// The inference context contains all information needed during type inference.
#[derive(Clone, Debug)]
pub(crate) struct InferenceContext<'db> {
@ -698,6 +752,7 @@ pub(crate) struct InferenceContext<'db> {
resume_yield_tys: Option<(Ty, Ty)>,
diverges: Diverges,
breakables: Vec<BreakableContext<'db>>,
types: InternedStandardTypesNextSolver<'db>,
/// Whether we are inside the pattern of a destructuring assignment.
inside_assignment: bool,
@ -778,11 +833,13 @@ impl<'db> InferenceContext<'db> {
resolver: Resolver<'db>,
) -> Self {
let trait_env = db.trait_environment_for_body(owner);
let table = unify::InferenceTable::new(db, trait_env);
InferenceContext {
types: InternedStandardTypesNextSolver::new(table.interner),
target_features: OnceCell::new(),
generics: OnceCell::new(),
result: InferenceResult::default(),
table: unify::InferenceTable::new(db, trait_env),
table,
tuple_field_accesses_rev: Default::default(),
return_ty: TyKind::Error.intern(Interner), // set in collect_* calls
resume_yield_tys: None,
@ -845,24 +902,33 @@ impl<'db> InferenceContext<'db> {
self.result.has_errors = true;
}
// FIXME: This function should be private in module. It is currently only used in the consteval, since we need
// `InferenceResult` in the middle of inference. See the fixme comment in `consteval::eval_to_const`. If you
// used this function for another workaround, mention it here. If you really need this function and believe that
// there is no problem in it being `pub(crate)`, remove this comment.
pub(crate) fn resolve_all(mut self) -> InferenceResult {
self.table.select_obligations_where_possible();
self.table.fallback_if_possible();
/// Clones `self` and calls `resolve_all()` on it.
// FIXME: Remove this.
pub(crate) fn fixme_resolve_all_clone(&self) -> InferenceResult {
let mut ctx = self.clone();
ctx.type_inference_fallback();
// Comment from rustc:
// Even though coercion casts provide type hints, we check casts after fallback for
// backwards compatibility. This makes fallback a stronger type hint than a cast coercion.
let cast_checks = std::mem::take(&mut self.deferred_cast_checks);
let cast_checks = std::mem::take(&mut ctx.deferred_cast_checks);
for mut cast in cast_checks.into_iter() {
if let Err(diag) = cast.check(&mut self) {
self.diagnostics.push(diag);
if let Err(diag) = cast.check(&mut ctx) {
ctx.diagnostics.push(diag);
}
}
ctx.table.select_obligations_where_possible();
ctx.resolve_all()
}
// FIXME: This function should be private in module. It is currently only used in the consteval, since we need
// `InferenceResult` in the middle of inference. See the fixme comment in `consteval::eval_to_const`. If you
// used this function for another workaround, mention it here. If you really need this function and believe that
// there is no problem in it being `pub(crate)`, remove this comment.
pub(crate) fn resolve_all(self) -> InferenceResult {
let InferenceContext {
mut table, mut result, tuple_field_accesses_rev, diagnostics, ..
} = self;
@ -894,11 +960,6 @@ impl<'db> InferenceContext<'db> {
diagnostics: _,
} = &mut result;
// FIXME resolve obligations as well (use Guidance if necessary)
table.select_obligations_where_possible();
// make sure diverging type variables are marked as such
table.propagate_diverging_flag();
for ty in type_of_expr.values_mut() {
*ty = table.resolve_completely(ty.clone());
*has_errors = *has_errors || ty.contains_unknown();
@ -1653,6 +1714,22 @@ impl<'db> InferenceContext<'db> {
self.resolve_associated_type_with_params(inner_ty, assoc_ty, &[])
}
fn demand_eqtype(
&mut self,
expected: crate::next_solver::Ty<'db>,
actual: crate::next_solver::Ty<'db>,
) {
let result = self
.table
.infer_ctxt
.at(&ObligationCause::new(), self.table.trait_env.env)
.eq(DefineOpaqueTypes::Yes, expected, actual)
.map(|infer_ok| self.table.register_infer_ok(infer_ok));
if let Err(_err) = result {
// FIXME: Emit diagnostic.
}
}
fn resolve_associated_type_with_params(
&mut self,
inner_ty: Ty,
@ -1708,6 +1785,7 @@ impl<'db> InferenceContext<'db> {
LifetimeElisionKind::Infer,
);
let mut path_ctx = ctx.at_path(path, node);
let interner = DbInterner::conjure();
let (resolution, unresolved) = if value_ns {
let Some(res) = path_ctx.resolve_path_in_value_ns(HygieneId::ROOT) else {
return (self.err_ty(), None);
@ -1717,15 +1795,27 @@ impl<'db> InferenceContext<'db> {
ValueNs::EnumVariantId(var) => {
let substs = path_ctx.substs_from_path(var.into(), true, false);
drop(ctx);
let ty = self.db.ty(var.lookup(self.db).parent.into());
let ty = self.insert_type_vars(ty.substitute(Interner, &substs));
let args: crate::next_solver::GenericArgs<'_> =
substs.to_nextsolver(interner);
let ty = self
.db
.ty(var.lookup(self.db).parent.into())
.instantiate(interner, args)
.to_chalk(interner);
let ty = self.insert_type_vars(ty);
return (ty, Some(var.into()));
}
ValueNs::StructId(strukt) => {
let substs = path_ctx.substs_from_path(strukt.into(), true, false);
drop(ctx);
let ty = self.db.ty(strukt.into());
let ty = self.insert_type_vars(ty.substitute(Interner, &substs));
let args: crate::next_solver::GenericArgs<'_> =
substs.to_nextsolver(interner);
let ty = self
.db
.ty(strukt.into())
.instantiate(interner, args)
.to_chalk(interner);
let ty = self.insert_type_vars(ty);
return (ty, Some(strukt.into()));
}
ValueNs::ImplSelf(impl_id) => (TypeNs::SelfType(impl_id), None),
@ -1746,28 +1836,37 @@ impl<'db> InferenceContext<'db> {
TypeNs::AdtId(AdtId::StructId(strukt)) => {
let substs = path_ctx.substs_from_path(strukt.into(), true, false);
drop(ctx);
let ty = self.db.ty(strukt.into());
let ty = self.insert_type_vars(ty.substitute(Interner, &substs));
let args: crate::next_solver::GenericArgs<'_> = substs.to_nextsolver(interner);
let ty = self.db.ty(strukt.into()).instantiate(interner, args).to_chalk(interner);
let ty = self.insert_type_vars(ty);
forbid_unresolved_segments((ty, Some(strukt.into())), unresolved)
}
TypeNs::AdtId(AdtId::UnionId(u)) => {
let substs = path_ctx.substs_from_path(u.into(), true, false);
drop(ctx);
let ty = self.db.ty(u.into());
let ty = self.insert_type_vars(ty.substitute(Interner, &substs));
let args: crate::next_solver::GenericArgs<'_> = substs.to_nextsolver(interner);
let ty = self.db.ty(u.into()).instantiate(interner, args).to_chalk(interner);
let ty = self.insert_type_vars(ty);
forbid_unresolved_segments((ty, Some(u.into())), unresolved)
}
TypeNs::EnumVariantId(var) => {
let substs = path_ctx.substs_from_path(var.into(), true, false);
drop(ctx);
let ty = self.db.ty(var.lookup(self.db).parent.into());
let ty = self.insert_type_vars(ty.substitute(Interner, &substs));
let args: crate::next_solver::GenericArgs<'_> = substs.to_nextsolver(interner);
let ty = self
.db
.ty(var.lookup(self.db).parent.into())
.instantiate(interner, args)
.to_chalk(interner);
let ty = self.insert_type_vars(ty);
forbid_unresolved_segments((ty, Some(var.into())), unresolved)
}
TypeNs::SelfType(impl_id) => {
let generics = crate::generics::generics(self.db, impl_id.into());
let substs = generics.placeholder_subst(self.db);
let mut ty = self.db.impl_self_ty(impl_id).substitute(Interner, &substs);
let args: crate::next_solver::GenericArgs<'_> = substs.to_nextsolver(interner);
let mut ty =
self.db.impl_self_ty(impl_id).instantiate(interner, args).to_chalk(interner);
let Some(remaining_idx) = unresolved else {
drop(ctx);
@ -1844,8 +1943,10 @@ impl<'db> InferenceContext<'db> {
};
let substs = path_ctx.substs_from_path_segment(it.into(), true, None, false);
drop(ctx);
let ty = self.db.ty(it.into());
let ty = self.insert_type_vars(ty.substitute(Interner, &substs));
let interner = DbInterner::conjure();
let args: crate::next_solver::GenericArgs<'_> = substs.to_nextsolver(interner);
let ty = self.db.ty(it.into()).instantiate(interner, args).to_chalk(interner);
let ty = self.insert_type_vars(ty);
self.resolve_variant_on_alias(ty, unresolved, mod_path)
}

View file

@ -318,7 +318,7 @@ impl<'db> InferenceContext<'db> {
_ = self
.table
.infer_ctxt
.at(&ObligationCause::new(), self.table.param_env)
.at(&ObligationCause::new(), self.table.trait_env.env)
.eq(DefineOpaqueTypes::Yes, inferred_fnptr_sig, generalized_fnptr_sig)
.map(|infer_ok| self.table.register_infer_ok(infer_ok));
@ -703,7 +703,7 @@ impl<'db> InferenceContext<'db> {
let cause = ObligationCause::new();
let InferOk { value: (), obligations } = table
.infer_ctxt
.at(&cause, table.param_env)
.at(&cause, table.trait_env.env)
.eq(DefineOpaqueTypes::Yes, expected_ty, supplied_ty)?;
all_obligations.extend(obligations);
}
@ -711,7 +711,7 @@ impl<'db> InferenceContext<'db> {
let supplied_output_ty = supplied_sig.output();
let cause = ObligationCause::new();
let InferOk { value: (), obligations } =
table.infer_ctxt.at(&cause, table.param_env).eq(
table.infer_ctxt.at(&cause, table.trait_env.env).eq(
DefineOpaqueTypes::Yes,
expected_sigs.liberated_sig.output(),
supplied_output_ty,

View file

@ -144,7 +144,7 @@ impl<'a, 'b, 'db> Coerce<'a, 'b, 'db> {
fn unify_raw(&mut self, a: Ty<'db>, b: Ty<'db>) -> InferResult<'db, Ty<'db>> {
debug!("unify(a: {:?}, b: {:?}, use_lub: {})", a, b, self.use_lub);
self.commit_if_ok(|this| {
let at = this.infer_ctxt().at(&this.cause, this.table.param_env);
let at = this.infer_ctxt().at(&this.cause, this.table.trait_env.env);
let res = if this.use_lub {
at.lub(b, a)
@ -210,9 +210,8 @@ impl<'a, 'b, 'db> Coerce<'a, 'b, 'db> {
// Coercing from `!` to any type is allowed:
if a.is_never() {
// If we're coercing into an inference var, mark it as possibly diverging.
// FIXME: rustc does this differently.
if let TyKind::Infer(rustc_type_ir::TyVar(b)) = b.kind() {
self.table.set_diverging(b.as_u32().into(), chalk_ir::TyVariableKind::General);
if b.is_infer() {
self.table.set_diverging(b);
}
if self.coerce_never {
@ -330,7 +329,7 @@ impl<'a, 'b, 'db> Coerce<'a, 'b, 'db> {
obligations.push(Obligation::new(
self.interner(),
self.cause.clone(),
self.table.param_env,
self.table.trait_env.env,
Binder::dummy(PredicateKind::Coerce(CoercePredicate {
a: source_ty,
b: target_ty,
@ -718,7 +717,7 @@ impl<'a, 'b, 'db> Coerce<'a, 'b, 'db> {
let mut queue: SmallVec<[PredicateObligation<'db>; 4]> = smallvec![Obligation::new(
self.interner(),
cause,
self.table.param_env,
self.table.trait_env.env,
TraitRef::new(
self.interner(),
coerce_unsized_did.into(),
@ -1114,8 +1113,12 @@ impl<'db> InferenceContext<'db> {
match self.table.commit_if_ok(|table| {
// We need to eagerly handle nested obligations due to lazy norm.
let mut ocx = ObligationCtxt::new(&table.infer_ctxt);
let value =
ocx.lub(&ObligationCause::new(), table.param_env, prev_ty, new_ty)?;
let value = ocx.lub(
&ObligationCause::new(),
table.trait_env.env,
prev_ty,
new_ty,
)?;
if ocx.select_where_possible().is_empty() {
Ok(InferOk { value, obligations: ocx.into_pending_obligations() })
} else {
@ -1158,7 +1161,7 @@ impl<'db> InferenceContext<'db> {
let sig = self
.table
.infer_ctxt
.at(&ObligationCause::new(), self.table.param_env)
.at(&ObligationCause::new(), self.table.trait_env.env)
.lub(a_sig, b_sig)
.map(|ok| self.table.register_infer_ok(ok))?;
@ -1248,7 +1251,7 @@ impl<'db> InferenceContext<'db> {
.commit_if_ok(|table| {
table
.infer_ctxt
.at(&ObligationCause::new(), table.param_env)
.at(&ObligationCause::new(), table.trait_env.env)
.lub(prev_ty, new_ty)
})
.unwrap_err())
@ -1498,7 +1501,7 @@ impl<'db, 'exprs> CoerceMany<'db, 'exprs> {
assert!(expression_ty.is_unit(), "if let hack without unit type");
icx.table
.infer_ctxt
.at(cause, icx.table.param_env)
.at(cause, icx.table.trait_env.env)
.eq(
// needed for tests/ui/type-alias-impl-trait/issue-65679-inst-opaque-ty-from-val-twice.rs
DefineOpaqueTypes::Yes,
@ -1564,9 +1567,9 @@ impl<'db, 'exprs> CoerceMany<'db, 'exprs> {
}
}
pub fn could_coerce(
db: &dyn HirDatabase,
env: Arc<TraitEnvironment>,
pub fn could_coerce<'db>(
db: &'db dyn HirDatabase,
env: Arc<TraitEnvironment<'db>>,
tys: &crate::Canonical<(crate::Ty, crate::Ty)>,
) -> bool {
coerce(db, env, tys).is_ok()
@ -1574,7 +1577,7 @@ pub fn could_coerce(
fn coerce<'db>(
db: &'db dyn HirDatabase,
env: Arc<TraitEnvironment>,
env: Arc<TraitEnvironment<'db>>,
tys: &crate::Canonical<(crate::Ty, crate::Ty)>,
) -> Result<(Vec<Adjustment>, crate::Ty), TypeError<DbInterner<'db>>> {
let mut table = InferenceTable::new(db, env);
@ -1609,16 +1612,21 @@ fn coerce<'db>(
chalk_ir::GenericArgData::Const(c) => c.inference_var(Interner),
} == Some(iv))
};
let fallback = |iv, kind, default, binder| match kind {
chalk_ir::VariableKind::Ty(_ty_kind) => find_var(iv)
.map_or(default, |i| crate::BoundVar::new(binder, i).to_ty(Interner).cast(Interner)),
chalk_ir::VariableKind::Lifetime => find_var(iv).map_or(default, |i| {
crate::BoundVar::new(binder, i).to_lifetime(Interner).cast(Interner)
}),
chalk_ir::VariableKind::Const(ty) => find_var(iv).map_or(default, |i| {
crate::BoundVar::new(binder, i).to_const(Interner, ty).cast(Interner)
}),
let fallback = |iv, kind, binder| match kind {
chalk_ir::VariableKind::Ty(_ty_kind) => find_var(iv).map_or_else(
|| chalk_ir::TyKind::Error.intern(Interner).cast(Interner),
|i| crate::BoundVar::new(binder, i).to_ty(Interner).cast(Interner),
),
chalk_ir::VariableKind::Lifetime => find_var(iv).map_or_else(
|| crate::LifetimeData::Error.intern(Interner).cast(Interner),
|i| crate::BoundVar::new(binder, i).to_lifetime(Interner).cast(Interner),
),
chalk_ir::VariableKind::Const(ty) => find_var(iv).map_or_else(
|| crate::unknown_const(ty.clone()).cast(Interner),
|i| crate::BoundVar::new(binder, i).to_const(Interner, ty.clone()).cast(Interner),
),
};
// FIXME also map the types in the adjustments
// FIXME: We don't fallback correctly since this is done on `InferenceContext` and we only have `InferenceTable`.
Ok((adjustments, table.resolve_with_fallback(ty.to_chalk(table.interner), &fallback)))
}

View file

@ -23,13 +23,13 @@ use syntax::ast::RangeOp;
use tracing::debug;
use crate::autoderef::overloaded_deref_ty;
use crate::next_solver::ErrorGuaranteed;
use crate::next_solver::infer::DefineOpaqueTypes;
use crate::next_solver::obligation_ctxt::ObligationCtxt;
use crate::next_solver::{DbInterner, ErrorGuaranteed};
use crate::{
Adjust, Adjustment, AdtId, AutoBorrow, Binders, CallableDefId, CallableSig, DeclContext,
DeclOrigin, IncorrectGenericsLenKind, Interner, LifetimeElisionKind, Rawness, Scalar,
Substitution, TraitEnvironment, TraitRef, Ty, TyBuilder, TyExt, TyKind, consteval,
Adjust, Adjustment, AdtId, AutoBorrow, CallableDefId, CallableSig, DeclContext, DeclOrigin,
IncorrectGenericsLenKind, Interner, LifetimeElisionKind, Rawness, Scalar, Substitution,
TraitEnvironment, TraitRef, Ty, TyBuilder, TyExt, TyKind, consteval,
generics::generics,
infer::{
AllowTwoPhase, BreakableKind,
@ -1481,7 +1481,10 @@ impl<'db> InferenceContext<'db> {
self.write_method_resolution(tgt_expr, func, subst.clone());
let method_ty = self.db.value_ty(func.into()).unwrap().substitute(Interner, &subst);
let interner = DbInterner::new_with(self.db, None, None);
let args: crate::next_solver::GenericArgs<'_> = subst.to_nextsolver(interner);
let method_ty =
self.db.value_ty(func.into()).unwrap().instantiate(interner, args).to_chalk(interner);
self.register_obligations_for_call(&method_ty);
self.infer_expr_coerce(rhs, &Expectation::has_type(rhs_ty.clone()), ExprIsRead::Yes);
@ -1662,7 +1665,6 @@ impl<'db> InferenceContext<'db> {
});
self.resolver.reset_to_guard(g);
if let Some(prev_env) = prev_env {
self.table.param_env = prev_env.env.to_nextsolver(self.table.interner);
self.table.trait_env = prev_env;
}
@ -1801,11 +1803,17 @@ impl<'db> InferenceContext<'db> {
self.write_expr_adj(receiver, adjustments.into_boxed_slice());
self.write_method_resolution(tgt_expr, func, substs.clone());
let interner = DbInterner::new_with(self.db, None, None);
let args: crate::next_solver::GenericArgs<'_> =
substs.to_nextsolver(interner);
self.check_method_call(
tgt_expr,
&[],
self.db.value_ty(func.into()).unwrap(),
substs,
self.db
.value_ty(func.into())
.unwrap()
.instantiate(interner, args)
.to_chalk(interner),
ty,
expected,
)
@ -1964,11 +1972,16 @@ impl<'db> InferenceContext<'db> {
let substs = self.substs_for_method_call(tgt_expr, func.into(), generic_args);
self.write_method_resolution(tgt_expr, func, substs.clone());
let interner = DbInterner::new_with(self.db, None, None);
let gen_args: crate::next_solver::GenericArgs<'_> = substs.to_nextsolver(interner);
self.check_method_call(
tgt_expr,
args,
self.db.value_ty(func.into()).expect("we have a function def"),
substs,
self.db
.value_ty(func.into())
.expect("we have a function def")
.instantiate(interner, gen_args)
.to_chalk(interner),
ty,
expected,
)
@ -2013,11 +2026,15 @@ impl<'db> InferenceContext<'db> {
let recovered = match assoc_func_with_same_name {
Some(f) => {
let substs = self.substs_for_method_call(tgt_expr, f.into(), generic_args);
let interner = DbInterner::new_with(self.db, None, None);
let args: crate::next_solver::GenericArgs<'_> =
substs.to_nextsolver(interner);
let f = self
.db
.value_ty(f.into())
.expect("we have a function def")
.substitute(Interner, &substs);
.instantiate(interner, args)
.to_chalk(interner);
let sig = f.callable_sig(self.db).expect("we have a function def");
Some((f, sig, true))
}
@ -2057,12 +2074,10 @@ impl<'db> InferenceContext<'db> {
&mut self,
tgt_expr: ExprId,
args: &[ExprId],
method_ty: Binders<Ty>,
substs: Substitution,
method_ty: Ty,
receiver_ty: Ty,
expected: &Expectation,
) -> Ty {
let method_ty = method_ty.substitute(Interner, &substs);
self.register_obligations_for_call(&method_ty);
let interner = self.table.interner;
let ((formal_receiver_ty, param_tys), ret_ty, is_varargs) =
@ -2132,7 +2147,7 @@ impl<'db> InferenceContext<'db> {
let origin = ObligationCause::new();
ocx.sup(
&origin,
self.table.param_env,
self.table.trait_env.env,
expected_output.to_nextsolver(interner),
formal_output,
)?;
@ -2239,7 +2254,7 @@ impl<'db> InferenceContext<'db> {
let formal_ty_error = this
.table
.infer_ctxt
.at(&ObligationCause::new(), this.table.param_env)
.at(&ObligationCause::new(), this.table.trait_env.env)
.eq(DefineOpaqueTypes::Yes, formal_input_ty, coerced_ty);
// If neither check failed, the types are compatible

View file

@ -0,0 +1,439 @@
//! Fallback of infer vars to `!` and `i32`/`f64`.
use intern::sym;
use petgraph::{
Graph,
visit::{Dfs, Walker},
};
use rustc_hash::{FxBuildHasher, FxHashMap, FxHashSet};
use rustc_type_ir::{
TyVid,
inherent::{IntoKind, Ty as _},
};
use tracing::debug;
use crate::{
infer::InferenceContext,
next_solver::{CoercePredicate, PredicateKind, SubtypePredicate, Ty, TyKind},
};
/// Controls what an unconstrained, diverging type variable falls back to.
///
/// Which variant is used is decided per crate by
/// `InferenceContext::diverging_fallback_behavior`: edition 2024 selects
/// `ToNever`, the unstable `never_type_fallback` feature selects
/// `ContextDependent`, and everything else selects `ToUnit`.
#[derive(Copy, Clone)]
pub(crate) enum DivergingFallbackBehavior {
    /// Always fallback to `()` (aka "always spontaneous decay")
    ToUnit,
    /// Sometimes fallback to `!`, but mainly fallback to `()` so that most of the crates are not broken.
    ContextDependent,
    /// Always fallback to `!` (which should be equivalent to never falling back + not making
    /// never-to-any coercions unless necessary)
    ToNever,
}
impl<'db> InferenceContext<'db> {
pub(super) fn type_inference_fallback(&mut self) {
debug!(
"type-inference-fallback start obligations: {:#?}",
self.table.fulfillment_cx.pending_obligations()
);
// All type checking constraints were added, try to fallback unsolved variables.
self.table.select_obligations_where_possible();
debug!(
"type-inference-fallback post selection obligations: {:#?}",
self.table.fulfillment_cx.pending_obligations()
);
let fallback_occurred = self.fallback_types();
if !fallback_occurred {
return;
}
// We now see if we can make progress. This might cause us to
// unify inference variables for opaque types, since we may
// have unified some other type variables during the first
// phase of fallback. This means that we only replace
// inference variables with their underlying opaque types as a
// last resort.
//
// In code like this:
//
// ```rust
// type MyType = impl Copy;
// fn produce() -> MyType { true }
// fn bad_produce() -> MyType { panic!() }
// ```
//
// we want to unify the opaque inference variable in `bad_produce`
// with the diverging fallback for `panic!` (e.g. `()` or `!`).
// This will produce a nice error message about conflicting concrete
// types for `MyType`.
//
// If we had tried to fallback the opaque inference variable to `MyType`,
// we will generate a confusing type-check error that does not explicitly
// refer to opaque types.
self.table.select_obligations_where_possible();
}
fn diverging_fallback_behavior(&self) -> DivergingFallbackBehavior {
if self.krate().data(self.db).edition.at_least_2024() {
return DivergingFallbackBehavior::ToNever;
}
if self.resolver.def_map().is_unstable_feature_enabled(&sym::never_type_fallback) {
return DivergingFallbackBehavior::ContextDependent;
}
DivergingFallbackBehavior::ToUnit
}
fn fallback_types(&mut self) -> bool {
// Check if we have any unresolved variables. If not, no need for fallback.
let unresolved_variables = self.table.infer_ctxt.unresolved_variables();
if unresolved_variables.is_empty() {
return false;
}
let diverging_fallback_behavior = self.diverging_fallback_behavior();
let diverging_fallback =
self.calculate_diverging_fallback(&unresolved_variables, diverging_fallback_behavior);
// We do fallback in two passes, to try to generate
// better error messages.
// The first time, we do *not* replace opaque types.
let mut fallback_occurred = false;
for ty in unresolved_variables {
debug!("unsolved_variable = {:?}", ty);
fallback_occurred |= self.fallback_if_possible(ty, &diverging_fallback);
}
fallback_occurred
}
// Tries to apply a fallback to `ty` if it is an unsolved variable.
//
// - Unconstrained ints are replaced with `i32`.
//
// - Unconstrained floats are replaced with `f64`.
//
// - Non-numerics may get replaced with `()` or `!`, depending on
// how they were categorized by `calculate_diverging_fallback`
// (and the setting of `#![feature(never_type_fallback)]`).
//
// Fallback becomes very dubious if we have encountered
// type-checking errors. In that case, fallback to Error.
//
// Sets `FnCtxt::fallback_has_occurred` if fallback is performed
// during this call.
fn fallback_if_possible(
&mut self,
ty: Ty<'db>,
diverging_fallback: &FxHashMap<Ty<'db>, Ty<'db>>,
) -> bool {
// Careful: we do NOT shallow-resolve `ty`. We know that `ty`
// is an unsolved variable, and we determine its fallback
// based solely on how it was created, not what other type
// variables it may have been unified with since then.
//
// The reason this matters is that other attempts at fallback
// may (in principle) conflict with this fallback, and we wish
// to generate a type error in that case. (However, this
// actually isn't true right now, because we're only using the
// builtin fallback rules. This would be true if we were using
// user-supplied fallbacks. But it's still useful to write the
// code to detect bugs.)
//
// (Note though that if we have a general type variable `?T`
// that is then unified with an integer type variable `?I`
// that ultimately never gets resolved to a special integral
// type, `?T` is not considered unsolved, but `?I` is. The
// same is true for float variables.)
let fallback = match ty.kind() {
TyKind::Infer(rustc_type_ir::IntVar(_)) => self.types.i32,
TyKind::Infer(rustc_type_ir::FloatVar(_)) => self.types.f64,
_ => match diverging_fallback.get(&ty) {
Some(&fallback_ty) => fallback_ty,
None => return false,
},
};
debug!("fallback_if_possible(ty={:?}): defaulting to `{:?}`", ty, fallback);
self.demand_eqtype(ty, fallback);
true
}
/// The "diverging fallback" system is rather complicated. This is
/// a result of our need to balance 'do the right thing' with
/// backwards compatibility.
///
/// "Diverging" type variables are variables created when we
/// coerce a `!` type into an unbound type variable `?X`. If they
/// never wind up being constrained, the "right and natural" thing
/// is that `?X` should "fallback" to `!`. This means that e.g. an
/// expression like `Some(return)` will ultimately wind up with a
/// type like `Option<!>` (presuming it is not assigned or
/// constrained to have some other type).
///
/// However, the fallback used to be `()` (before the `!` type was
/// added). Moreover, there are cases where the `!` type 'leaks
/// out' from dead code into type variables that affect live
/// code. The most common case is something like this:
///
/// ```rust
/// # fn foo() -> i32 { 4 }
/// match foo() {
/// 22 => Default::default(), // call this type `?D`
/// _ => return, // return has type `!`
/// } // call the type of this match `?M`
/// ```
///
/// Here, coercing the type `!` into `?M` will create a diverging
/// type variable `?X` where `?X <: ?M`. We also have that `?D <:
/// ?M`. If `?M` winds up unconstrained, then `?X` will
/// fallback. If it falls back to `!`, then all the type variables
/// will wind up equal to `!` -- this includes the type `?D`
/// (since `!` doesn't implement `Default`, we wind up a "trait
/// not implemented" error in code like this). But since the
/// original fallback was `()`, this code used to compile with `?D
/// = ()`. This is somewhat surprising, since `Default::default()`
/// on its own would give an error because the types are
/// insufficiently constrained.
///
/// Our solution to this dilemma is to modify diverging variables
/// so that they can *either* fallback to `!` (the default) or to
/// `()` (the backwards compatibility case). We decide which
/// fallback to use based on whether there is a coercion pattern
/// like this:
///
/// ```ignore (not-rust)
/// ?Diverging -> ?V
/// ?NonDiverging -> ?V
/// ?V != ?NonDiverging
/// ```
///
/// Here `?Diverging` represents some diverging type variable and
/// `?NonDiverging` represents some non-diverging type
/// variable. `?V` can be any type variable (diverging or not), so
/// long as it is not equal to `?NonDiverging`.
///
/// Intuitively, what we are looking for is a case where a
/// "non-diverging" type variable (like `?M` in our example above)
/// is coerced *into* some variable `?V` that would otherwise
/// fallback to `!`. In that case, we make `?V` fallback to `!`,
/// along with anything that would flow into `?V`.
///
/// The algorithm we use:
/// * Identify all variables that are coerced *into* by a
/// diverging variable. Do this by iterating over each
/// diverging, unsolved variable and finding all variables
/// reachable from there. Call that set `D`.
/// * Walk over all unsolved, non-diverging variables, and find
/// any variable that has an edge into `D`.
fn calculate_diverging_fallback(
    &self,
    unresolved_variables: &[Ty<'db>],
    behavior: DivergingFallbackBehavior,
) -> FxHashMap<Ty<'db>, Ty<'db>> {
    debug!("calculate_diverging_fallback({:?})", unresolved_variables);

    // Construct a coercion graph where an edge `A -> B` indicates
    // that the type variable `A` is coerced into `B`.
    let coercion_graph = self.create_coercion_graph();

    // Extract the unsolved type inference variable vids; note that some
    // unsolved variables are integer/float variables and are excluded.
    let unsolved_vids = unresolved_variables.iter().filter_map(|ty| ty.ty_vid());

    // Compute the diverging root vids D -- that is, the root vid of
    // those type variables that (a) are the target of a coercion from
    // a `!` type and (b) have not yet been solved.
    //
    // These variables are the ones that are targets for fallback to
    // either `!` or `()`.
    let diverging_roots: FxHashSet<TyVid> = self
        .table
        .diverging_type_vars
        .iter()
        // Resolve each recorded diverging type shallowly; anything that no
        // longer resolves to an infer var has been solved and is dropped.
        .map(|&ty| self.shallow_resolve(ty))
        .filter_map(|ty| ty.ty_vid())
        // Normalize to the unification root so membership checks below
        // agree with `root_var` lookups.
        .map(|vid| self.table.infer_ctxt.root_var(vid))
        .collect();
    debug!(
        "calculate_diverging_fallback: diverging_type_vars={:?}",
        self.table.diverging_type_vars
    );
    debug!("calculate_diverging_fallback: diverging_roots={:?}", diverging_roots);

    // Find all type variables that are reachable from a diverging
    // type variable. These will typically default to `!`, unless
    // we find later that they are *also* reachable from some
    // other type variable outside this set.
    let mut roots_reachable_from_diverging = Dfs::empty(&coercion_graph);
    let mut diverging_vids = vec![];
    let mut non_diverging_vids = vec![];
    for unsolved_vid in unsolved_vids {
        let root_vid = self.table.infer_ctxt.root_var(unsolved_vid);
        debug!(
            "calculate_diverging_fallback: unsolved_vid={:?} root_vid={:?} diverges={:?}",
            unsolved_vid,
            root_vid,
            diverging_roots.contains(&root_vid),
        );
        if diverging_roots.contains(&root_vid) {
            diverging_vids.push(unsolved_vid);
            // Graph nodes are indexed by the root vid's raw u32 value
            // (see `create_coercion_graph`), so this converts vid -> node index.
            roots_reachable_from_diverging.move_to(root_vid.as_u32().into());

            // drain the iterator to visit all nodes reachable from this node
            while roots_reachable_from_diverging.next(&coercion_graph).is_some() {}
        } else {
            non_diverging_vids.push(unsolved_vid);
        }
    }

    debug!(
        "calculate_diverging_fallback: roots_reachable_from_diverging={:?}",
        roots_reachable_from_diverging,
    );

    // Find all type variables N0 that are not reachable from a
    // diverging variable, and then compute the set reachable from
    // N0, which we call N. These are the *non-diverging* type
    // variables. (Note that this set consists of "root variables".)
    let mut roots_reachable_from_non_diverging = Dfs::empty(&coercion_graph);
    for &non_diverging_vid in &non_diverging_vids {
        let root_vid = self.table.infer_ctxt.root_var(non_diverging_vid);
        // Skip roots already discovered by the diverging DFS: they are
        // reachable from a diverging variable and so do not belong to N0.
        if roots_reachable_from_diverging.discovered.contains(root_vid.as_usize()) {
            continue;
        }
        roots_reachable_from_non_diverging.move_to(root_vid.as_u32().into());
        while roots_reachable_from_non_diverging.next(&coercion_graph).is_some() {}
    }
    debug!(
        "calculate_diverging_fallback: roots_reachable_from_non_diverging={:?}",
        roots_reachable_from_non_diverging,
    );

    debug!("obligations: {:#?}", self.table.fulfillment_cx.pending_obligations());

    // For each diverging variable, figure out whether it can
    // reach a member of N. If so, it falls back to `()`. Else
    // `!`.
    let mut diverging_fallback =
        FxHashMap::with_capacity_and_hasher(diverging_vids.len(), FxBuildHasher);
    for &diverging_vid in &diverging_vids {
        let diverging_ty = Ty::new_var(self.table.interner, diverging_vid);
        let root_vid = self.table.infer_ctxt.root_var(diverging_vid);
        // A fresh DFS from this root; a single overlap with the
        // non-diverging reachable set is enough to force `()` fallback.
        let can_reach_non_diverging = Dfs::new(&coercion_graph, root_vid.as_u32().into())
            .iter(&coercion_graph)
            .any(|n| roots_reachable_from_non_diverging.discovered.contains(n.index()));

        let mut fallback_to = |ty| {
            diverging_fallback.insert(diverging_ty, ty);
        };

        match behavior {
            DivergingFallbackBehavior::ToUnit => {
                debug!("fallback to () - legacy: {:?}", diverging_vid);
                fallback_to(self.types.unit);
            }
            DivergingFallbackBehavior::ContextDependent => {
                // FIXME: rustc does the following, but given this is only relevant when the unstable
                // `never_type_fallback` feature is active, I chose to not port this.
                // if found_infer_var_info.self_in_trait && found_infer_var_info.output {
                //     // This case falls back to () to ensure that the code pattern in
                //     // tests/ui/never_type/fallback-closure-ret.rs continues to
                //     // compile when never_type_fallback is enabled.
                //     //
                //     // This rule is not readily explainable from first principles,
                //     // but is rather intended as a patchwork fix to ensure code
                //     // which compiles before the stabilization of never type
                //     // fallback continues to work.
                //     //
                //     // Typically this pattern is encountered in a function taking a
                //     // closure as a parameter, where the return type of that closure
                //     // (checked by `relationship.output`) is expected to implement
                //     // some trait (checked by `relationship.self_in_trait`). This
                //     // can come up in non-closure cases too, so we do not limit this
                //     // rule to specifically `FnOnce`.
                //     //
                //     // When the closure's body is something like `panic!()`, the
                //     // return type would normally be inferred to `!`. However, it
                //     // needs to fall back to `()` in order to still compile, as the
                //     // trait is specifically implemented for `()` but not `!`.
                //     //
                //     // For details on the requirements for these relationships to be
                //     // set, see the relationship finding module in
                //     // compiler/rustc_trait_selection/src/traits/relationships.rs.
                //     debug!("fallback to () - found trait and projection: {:?}", diverging_vid);
                //     fallback_to(self.types.unit);
                // }
                if can_reach_non_diverging {
                    debug!("fallback to () - reached non-diverging: {:?}", diverging_vid);
                    fallback_to(self.types.unit);
                } else {
                    debug!("fallback to ! - all diverging: {:?}", diverging_vid);
                    fallback_to(self.types.never);
                }
            }
            DivergingFallbackBehavior::ToNever => {
                debug!(
                    "fallback to ! - `rustc_never_type_mode = \"fallback_to_never\")`: {:?}",
                    diverging_vid
                );
                fallback_to(self.types.never);
            }
        }
    }

    diverging_fallback
}
/// Returns a graph whose nodes are (unresolved) inference variables and where
/// an edge `?A -> ?B` indicates that the variable `?A` is coerced to `?B`.
fn create_coercion_graph(&self) -> Graph<(), ()> {
    let pending_obligations = self.table.fulfillment_cx.pending_obligations();
    // Remember the obligation count up front: it serves as the edge-capacity
    // hint, and the obligations are consumed by `into_iter` below.
    let edge_capacity = pending_obligations.len();
    debug!("create_coercion_graph: pending_obligations={:?}", pending_obligations);

    let coercion_edges = pending_obligations.into_iter().filter_map(|obligation| {
        // The predicates we are looking for look like `Coerce(?A -> ?B)`.
        // They will have no bound variables.
        let atom = obligation.predicate.kind().no_bound_vars()?;

        // We consider both subtyping and coercion to imply 'flow' from
        // some position in the code `a` to a different position `b`.
        // This is then used to determine which variables interact with
        // live code, and as such must fall back to `()` to preserve
        // soundness.
        //
        // In practice currently the two ways that this happens is
        // coercion and subtyping.
        let (source, target) = match atom {
            PredicateKind::Coerce(CoercePredicate { a, b }) => (a, b),
            PredicateKind::Subtype(SubtypePredicate { a_is_expected: _, a, b }) => (a, b),
            _ => return None,
        };

        // Only edges between (still unresolved) inference variables are kept;
        // node indices are the root vids' raw u32 values.
        Some((self.root_vid(source)?.as_u32(), self.root_vid(target)?.as_u32()))
    });

    // Allocate one node per type variable so every root vid has a valid index,
    // even for variables that participate in no coercion at all.
    let num_ty_vars = self.table.infer_ctxt.num_ty_vars();
    let mut graph = Graph::with_capacity(num_ty_vars, edge_capacity);
    for _ in 0..num_ty_vars {
        graph.add_node(());
    }
    graph.extend_with_edges(coercion_edges);
    graph
}
/// If `ty` is an unresolved type variable, returns its root vid.
fn root_vid(&self, ty: Ty<'db>) -> Option<TyVid> {
    let vid = self.shallow_resolve(ty).ty_vid()?;
    Some(self.table.infer_ctxt.root_var(vid))
}
}

View file

@ -17,7 +17,10 @@ use crate::{
generics::generics,
infer::diagnostics::InferenceTyLoweringContext as TyLoweringContext,
method_resolution::{self, VisibleFromModule},
next_solver::mapping::ChalkToNextSolver,
next_solver::{
DbInterner,
mapping::{ChalkToNextSolver, NextSolverToChalk},
},
to_chalk_trait_id,
};
@ -36,7 +39,9 @@ impl<'db> InferenceContext<'db> {
self.add_required_obligations_for_value_path(generic_def, &substs);
let ty = self.db.value_ty(value_def)?.substitute(Interner, &substs);
let interner = DbInterner::new_with(self.db, None, None);
let args: crate::next_solver::GenericArgs<'_> = substs.to_nextsolver(interner);
let ty = self.db.value_ty(value_def)?.instantiate(interner, args).to_chalk(interner);
let ty = self.process_remote_user_written_ty(ty);
Some(ty)
}
@ -69,8 +74,11 @@ impl<'db> InferenceContext<'db> {
}
ValueNs::ImplSelf(impl_id) => {
let generics = crate::generics::generics(self.db, impl_id.into());
let interner = DbInterner::new_with(self.db, None, None);
let substs = generics.placeholder_subst(self.db);
let ty = self.db.impl_self_ty(impl_id).substitute(Interner, &substs);
let args: crate::next_solver::GenericArgs<'_> = substs.to_nextsolver(interner);
let ty =
self.db.impl_self_ty(impl_id).instantiate(interner, args).to_chalk(interner);
return if let Some((AdtId::StructId(struct_id), substs)) = ty.as_adt() {
Some(ValuePathResolution::GenericDef(
struct_id.into(),
@ -89,9 +97,9 @@ impl<'db> InferenceContext<'db> {
let generic_def = value_def.to_generic_def_id(self.db);
if let GenericDefId::StaticId(_) = generic_def {
let interner = DbInterner::new_with(self.db, None, None);
// `Static` is the kind of item that can never be generic currently. We can just skip the binders to get its type.
let (ty, binders) = self.db.value_ty(value_def)?.into_value_and_skipped_binders();
stdx::always!(binders.is_empty(Interner), "non-empty binders for non-generic def",);
let ty = self.db.value_ty(value_def)?.skip_binder().to_chalk(interner);
return Some(ValuePathResolution::NonGeneric(ty));
};
@ -354,10 +362,13 @@ impl<'db> InferenceContext<'db> {
};
let substs = match container {
ItemContainerId::ImplId(impl_id) => {
let interner = DbInterner::new_with(self.db, None, None);
let impl_substs = TyBuilder::subst_for_def(self.db, impl_id, None)
.fill_with_inference_vars(&mut self.table)
.build();
let impl_self_ty = self.db.impl_self_ty(impl_id).substitute(Interner, &impl_substs);
let args: crate::next_solver::GenericArgs<'_> = impl_substs.to_nextsolver(interner);
let impl_self_ty =
self.db.impl_self_ty(impl_id).instantiate(interner, args).to_chalk(interner);
self.unify(&impl_self_ty, &ty);
impl_substs
}

View file

@ -3,47 +3,42 @@
use std::fmt;
use chalk_ir::{
CanonicalVarKind, FloatTy, IntTy, TyVariableKind, cast::Cast, fold::TypeFoldable,
interner::HasInterner,
CanonicalVarKind, TyVariableKind, cast::Cast, fold::TypeFoldable, interner::HasInterner,
};
use either::Either;
use hir_def::{AdtId, lang_item::LangItem};
use hir_expand::name::Name;
use intern::sym;
use rustc_hash::{FxHashMap, FxHashSet};
use rustc_type_ir::inherent::Ty as _;
use rustc_type_ir::{
FloatVid, IntVid, TyVid, TypeVisitableExt,
inherent::{IntoKind, Span, Term as _},
TyVid, TypeVisitableExt, UpcastFrom,
inherent::{IntoKind, Span, Term as _, Ty as _},
relate::{Relate, solver_relating::RelateExt},
solve::{Certainty, GoalSource, NoSolution},
solve::{Certainty, GoalSource},
};
use smallvec::SmallVec;
use triomphe::Arc;
use super::{InferResult, InferenceContext, TypeError};
use crate::next_solver::ErrorGuaranteed;
use crate::{
AliasTy, BoundVar, Canonical, Const, ConstValue, DebruijnIndex, GenericArg, GenericArgData,
Goal, GoalData, InEnvironment, InferenceVar, Interner, Lifetime, OpaqueTyId, ParamKind,
ProjectionTy, Scalar, Substitution, TraitEnvironment, TraitRef, Ty, TyBuilder, TyExt, TyKind,
VariableKind, WhereClause,
InferenceVar, Interner, Lifetime, OpaqueTyId, ProjectionTy, Substitution, TraitEnvironment, Ty,
TyExt, TyKind, VariableKind,
consteval::unknown_const,
db::HirDatabase,
fold_generic_args, fold_tys_and_consts,
next_solver::infer::InferOk,
next_solver::{
self, ClauseKind, DbInterner, ParamEnv, Predicate, PredicateKind, SolverDefIds, Term,
self, ClauseKind, DbInterner, ErrorGuaranteed, Predicate, PredicateKind, SolverDefIds,
Term, TraitRef,
fulfill::FulfillmentCtxt,
infer::{
DbInternerInferExt, InferCtxt,
DbInternerInferExt, InferCtxt, InferOk,
snapshot::CombinedSnapshot,
traits::{Obligation, ObligationCause},
},
inspect::{InspectConfig, InspectGoal, ProofTreeVisitor},
mapping::{ChalkToNextSolver, NextSolverToChalk},
},
to_chalk_trait_id,
traits::{
FnTrait, NextTraitSolveResult, next_trait_solve_canonical_in_ctxt, next_trait_solve_in_ctxt,
},
@ -125,7 +120,7 @@ impl<'a, 'db> ProofTreeVisitor<'db> for NestedObligationsForSelfTy<'a, 'db> {
/// unresolved goal `T = U`.
pub fn could_unify(
db: &dyn HirDatabase,
env: Arc<TraitEnvironment>,
env: Arc<TraitEnvironment<'_>>,
tys: &Canonical<(Ty, Ty)>,
) -> bool {
unify(db, env, tys).is_some()
@ -137,7 +132,7 @@ pub fn could_unify(
/// them. For example `Option<T>` and `Option<U>` do not unify as we cannot show that `T = U`
pub fn could_unify_deeply(
db: &dyn HirDatabase,
env: Arc<TraitEnvironment>,
env: Arc<TraitEnvironment<'_>>,
tys: &Canonical<(Ty, Ty)>,
) -> bool {
let mut table = InferenceTable::new(db, env);
@ -147,7 +142,6 @@ pub fn could_unify_deeply(
let ty1_with_vars = table.normalize_associated_types_in(ty1_with_vars);
let ty2_with_vars = table.normalize_associated_types_in(ty2_with_vars);
table.select_obligations_where_possible();
table.propagate_diverging_flag();
let ty1_with_vars = table.resolve_completely(ty1_with_vars);
let ty2_with_vars = table.resolve_completely(ty2_with_vars);
table.unify_deeply(&ty1_with_vars, &ty2_with_vars)
@ -155,7 +149,7 @@ pub fn could_unify_deeply(
pub(crate) fn unify(
db: &dyn HirDatabase,
env: Arc<TraitEnvironment>,
env: Arc<TraitEnvironment<'_>>,
tys: &Canonical<(Ty, Ty)>,
) -> Option<Substitution> {
let mut table = InferenceTable::new(db, env);
@ -174,13 +168,19 @@ pub(crate) fn unify(
GenericArgData::Const(c) => c.inference_var(Interner),
} == Some(iv))
};
let fallback = |iv, kind, default, binder| match kind {
chalk_ir::VariableKind::Ty(_ty_kind) => find_var(iv)
.map_or(default, |i| BoundVar::new(binder, i).to_ty(Interner).cast(Interner)),
chalk_ir::VariableKind::Lifetime => find_var(iv)
.map_or(default, |i| BoundVar::new(binder, i).to_lifetime(Interner).cast(Interner)),
chalk_ir::VariableKind::Const(ty) => find_var(iv)
.map_or(default, |i| BoundVar::new(binder, i).to_const(Interner, ty).cast(Interner)),
let fallback = |iv, kind, binder| match kind {
chalk_ir::VariableKind::Ty(_ty_kind) => find_var(iv).map_or_else(
|| TyKind::Error.intern(Interner).cast(Interner),
|i| BoundVar::new(binder, i).to_ty(Interner).cast(Interner),
),
chalk_ir::VariableKind::Lifetime => find_var(iv).map_or_else(
|| crate::error_lifetime().cast(Interner),
|i| BoundVar::new(binder, i).to_lifetime(Interner).cast(Interner),
),
chalk_ir::VariableKind::Const(ty) => find_var(iv).map_or_else(
|| crate::unknown_const(ty.clone()).cast(Interner),
|i| BoundVar::new(binder, i).to_const(Interner, ty.clone()).cast(Interner),
),
};
Some(Substitution::from_iter(
Interner,
@ -216,22 +216,20 @@ bitflags::bitflags! {
pub(crate) struct InferenceTable<'db> {
pub(crate) db: &'db dyn HirDatabase,
pub(crate) interner: DbInterner<'db>,
pub(crate) trait_env: Arc<TraitEnvironment>,
pub(crate) param_env: ParamEnv<'db>,
pub(crate) trait_env: Arc<TraitEnvironment<'db>>,
pub(crate) tait_coercion_table: Option<FxHashMap<OpaqueTyId, Ty>>,
pub(crate) infer_ctxt: InferCtxt<'db>,
diverging_tys: FxHashSet<Ty>,
pub(super) fulfillment_cx: FulfillmentCtxt<'db>,
pub(super) diverging_type_vars: FxHashSet<crate::next_solver::Ty<'db>>,
}
pub(crate) struct InferenceTableSnapshot<'db> {
ctxt_snapshot: CombinedSnapshot,
obligations: FulfillmentCtxt<'db>,
diverging_tys: FxHashSet<Ty>,
}
impl<'db> InferenceTable<'db> {
pub(crate) fn new(db: &'db dyn HirDatabase, trait_env: Arc<TraitEnvironment>) -> Self {
pub(crate) fn new(db: &'db dyn HirDatabase, trait_env: Arc<TraitEnvironment<'db>>) -> Self {
let interner = DbInterner::new_with(db, Some(trait_env.krate), trait_env.block);
let infer_ctxt = interner.infer_ctxt().build(rustc_type_ir::TypingMode::Analysis {
defining_opaque_types_and_generators: SolverDefIds::new_from_iter(interner, []),
@ -239,12 +237,11 @@ impl<'db> InferenceTable<'db> {
InferenceTable {
db,
interner,
param_env: trait_env.env.to_nextsolver(interner),
trait_env,
tait_coercion_table: None,
fulfillment_cx: FulfillmentCtxt::new(&infer_ctxt),
infer_ctxt,
diverging_tys: FxHashSet::default(),
diverging_type_vars: FxHashSet::default(),
}
}
@ -327,74 +324,8 @@ impl<'db> InferenceTable<'db> {
}
}
/// Chalk doesn't know about the `diverging` flag, so when it unifies two
/// type variables of which one is diverging, the chosen root might not be
/// diverging and we have no way of marking it as such at that time. This
/// function goes through all type variables and make sure their root is
/// marked as diverging if necessary, so that resolving them gives the right
/// result.
pub(super) fn propagate_diverging_flag(&mut self) {
let mut new_tys = FxHashSet::default();
for ty in self.diverging_tys.iter() {
match ty.kind(Interner) {
TyKind::InferenceVar(var, kind) => match kind {
TyVariableKind::General => {
let root = InferenceVar::from(
self.infer_ctxt.root_var(TyVid::from_u32(var.index())).as_u32(),
);
if root.index() != var.index() {
new_tys.insert(TyKind::InferenceVar(root, *kind).intern(Interner));
}
}
TyVariableKind::Integer => {
let root = InferenceVar::from(
self.infer_ctxt
.inner
.borrow_mut()
.int_unification_table()
.find(IntVid::from_usize(var.index() as usize))
.as_u32(),
);
if root.index() != var.index() {
new_tys.insert(TyKind::InferenceVar(root, *kind).intern(Interner));
}
}
TyVariableKind::Float => {
let root = InferenceVar::from(
self.infer_ctxt
.inner
.borrow_mut()
.float_unification_table()
.find(FloatVid::from_usize(var.index() as usize))
.as_u32(),
);
if root.index() != var.index() {
new_tys.insert(TyKind::InferenceVar(root, *kind).intern(Interner));
}
}
},
_ => {}
}
}
self.diverging_tys.extend(new_tys);
}
pub(super) fn set_diverging(&mut self, iv: InferenceVar, kind: TyVariableKind) {
self.diverging_tys.insert(TyKind::InferenceVar(iv, kind).intern(Interner));
}
fn fallback_value(&self, iv: InferenceVar, kind: TyVariableKind) -> Ty {
let is_diverging =
self.diverging_tys.contains(&TyKind::InferenceVar(iv, kind).intern(Interner));
if is_diverging {
return TyKind::Never.intern(Interner);
}
match kind {
TyVariableKind::General => TyKind::Error,
TyVariableKind::Integer => TyKind::Scalar(Scalar::Int(IntTy::I32)),
TyVariableKind::Float => TyKind::Scalar(Scalar::Float(FloatTy::F64)),
}
.intern(Interner)
pub(super) fn set_diverging(&mut self, ty: crate::next_solver::Ty<'db>) {
self.diverging_type_vars.insert(ty);
}
pub(crate) fn canonicalize<T>(&mut self, t: T) -> rustc_type_ir::Canonical<DbInterner<'db>, T>
@ -430,7 +361,7 @@ impl<'db> InferenceTable<'db> {
{
let ty = self.resolve_vars_with_obligations(ty);
self.infer_ctxt
.at(&ObligationCause::new(), self.param_env)
.at(&ObligationCause::new(), self.trait_env.env)
.deeply_normalize(ty.clone())
.unwrap_or(ty)
}
@ -535,7 +466,7 @@ impl<'db> InferenceTable<'db> {
let ty = var.to_ty(Interner, kind);
if diverging {
self.diverging_tys.insert(ty.clone());
self.diverging_type_vars.insert(ty.to_nextsolver(self.interner));
}
ty
}
@ -579,7 +510,7 @@ impl<'db> InferenceTable<'db> {
pub(crate) fn resolve_with_fallback<T>(
&mut self,
t: T,
fallback: &dyn Fn(InferenceVar, VariableKind, GenericArg, DebruijnIndex) -> GenericArg,
fallback: &dyn Fn(InferenceVar, VariableKind, DebruijnIndex) -> GenericArg,
) -> T
where
T: HasInterner<Interner = Interner> + TypeFoldable<Interner>,
@ -621,7 +552,7 @@ impl<'db> InferenceTable<'db> {
fn resolve_with_fallback_inner<T>(
&mut self,
t: T,
fallback: &dyn Fn(InferenceVar, VariableKind, GenericArg, DebruijnIndex) -> GenericArg,
fallback: &dyn Fn(InferenceVar, VariableKind, DebruijnIndex) -> GenericArg,
) -> T
where
T: HasInterner<Interner = Interner> + TypeFoldable<Interner>,
@ -638,53 +569,15 @@ impl<'db> InferenceTable<'db> {
T: HasInterner<Interner = Interner> + TypeFoldable<Interner> + ChalkToNextSolver<'db, U>,
U: NextSolverToChalk<'db, T> + rustc_type_ir::TypeFoldable<DbInterner<'db>>,
{
let t = self.resolve_with_fallback(t, &|_, _, d, _| d);
let t = self.normalize_associated_types_in(t);
// let t = self.resolve_opaque_tys_in(t);
// Resolve again, because maybe normalization inserted infer vars.
self.resolve_with_fallback(t, &|_, _, d, _| d)
}
let value = t.to_nextsolver(self.interner);
let value = self.infer_ctxt.resolve_vars_if_possible(value);
/// Apply a fallback to unresolved scalar types. Integer type variables and float type
/// variables are replaced with i32 and f64, respectively.
///
/// This method is only intended to be called just before returning inference results (i.e. in
/// `InferenceContext::resolve_all()`).
///
/// FIXME: This method currently doesn't apply fallback to unconstrained general type variables
/// whereas rustc replaces them with `()` or `!`.
pub(super) fn fallback_if_possible(&mut self) {
let int_fallback = TyKind::Scalar(Scalar::Int(IntTy::I32)).intern(Interner);
let float_fallback = TyKind::Scalar(Scalar::Float(FloatTy::F64)).intern(Interner);
let mut goals = vec![];
let value = value.fold_with(&mut resolve_completely::Resolver::new(self, true, &mut goals));
let int_vars = self.infer_ctxt.inner.borrow_mut().int_unification_table().len();
for v in 0..int_vars {
let var = InferenceVar::from(v as u32).to_ty(Interner, TyVariableKind::Integer);
let maybe_resolved = self.resolve_ty_shallow(&var);
if let TyKind::InferenceVar(_, kind) = maybe_resolved.kind(Interner) {
// I don't think we can ever unify these vars with float vars, but keep this here for now
let fallback = match kind {
TyVariableKind::Integer => &int_fallback,
TyVariableKind::Float => &float_fallback,
TyVariableKind::General => unreachable!(),
};
self.unify(&var, fallback);
}
}
let float_vars = self.infer_ctxt.inner.borrow_mut().float_unification_table().len();
for v in 0..float_vars {
let var = InferenceVar::from(v as u32).to_ty(Interner, TyVariableKind::Float);
let maybe_resolved = self.resolve_ty_shallow(&var);
if let TyKind::InferenceVar(_, kind) = maybe_resolved.kind(Interner) {
// I don't think we can ever unify these vars with float vars, but keep this here for now
let fallback = match kind {
TyVariableKind::Integer => &int_fallback,
TyVariableKind::Float => &float_fallback,
TyVariableKind::General => unreachable!(),
};
self.unify(&var, fallback);
}
}
// FIXME(next-solver): Handle `goals`.
value.to_chalk(self.interner)
}
/// Unify two relatable values (e.g. `Ty`) and register new trait goals that arise from that.
@ -745,7 +638,7 @@ impl<'db> InferenceTable<'db> {
) -> InferResult<'db, ()> {
let variance = rustc_type_ir::Variance::Invariant;
let span = crate::next_solver::Span::dummy();
match self.infer_ctxt.relate(self.param_env, lhs, variance, rhs, span) {
match self.infer_ctxt.relate(self.trait_env.env, lhs, variance, rhs, span) {
Ok(goals) => Ok(crate::infer::InferOk { goals, value: () }),
Err(_) => Err(TypeError),
}
@ -786,7 +679,7 @@ impl<'db> InferenceTable<'db> {
}
pub(crate) fn structurally_resolve_type(&mut self, ty: &Ty) -> Ty {
if let TyKind::Alias(..) = ty.kind(Interner) {
if let TyKind::Alias(chalk_ir::AliasTy::Projection(..)) = ty.kind(Interner) {
self.structurally_normalize_ty(ty)
} else {
self.resolve_vars_with_obligations(ty.to_nextsolver(self.interner))
@ -802,7 +695,7 @@ impl<'db> InferenceTable<'db> {
fn structurally_normalize_term(&mut self, term: Term<'db>) -> Term<'db> {
self.infer_ctxt
.at(&ObligationCause::new(), self.param_env)
.at(&ObligationCause::new(), self.trait_env.env)
.structurally_normalize_term(term, &mut self.fulfillment_cx)
.unwrap_or(term)
}
@ -822,7 +715,7 @@ impl<'db> InferenceTable<'db> {
// in a reentrant borrow, causing an ICE.
let result = self
.infer_ctxt
.at(&ObligationCause::misc(), self.param_env)
.at(&ObligationCause::misc(), self.trait_env.env)
.structurally_normalize_ty(ty, &mut self.fulfillment_cx);
match result {
Ok(normalized_ty) => normalized_ty,
@ -835,15 +728,13 @@ impl<'db> InferenceTable<'db> {
pub(crate) fn snapshot(&mut self) -> InferenceTableSnapshot<'db> {
let ctxt_snapshot = self.infer_ctxt.start_snapshot();
let diverging_tys = self.diverging_tys.clone();
let obligations = self.fulfillment_cx.clone();
InferenceTableSnapshot { ctxt_snapshot, diverging_tys, obligations }
InferenceTableSnapshot { ctxt_snapshot, obligations }
}
#[tracing::instrument(skip_all)]
pub(crate) fn rollback_to(&mut self, snapshot: InferenceTableSnapshot<'db>) {
self.infer_ctxt.rollback_to(snapshot.ctxt_snapshot);
self.diverging_tys = snapshot.diverging_tys;
self.fulfillment_cx = snapshot.obligations;
}
@ -877,26 +768,15 @@ impl<'db> InferenceTable<'db> {
/// whether a trait *might* be implemented before deciding to 'lock in' the
/// choice (during e.g. method resolution or deref).
#[tracing::instrument(level = "debug", skip(self))]
pub(crate) fn try_obligation(&mut self, goal: Goal) -> NextTraitSolveResult {
let in_env = InEnvironment::new(&self.trait_env.env, goal);
let canonicalized = self.canonicalize(in_env.to_nextsolver(self.interner));
pub(crate) fn try_obligation(&mut self, predicate: Predicate<'db>) -> NextTraitSolveResult {
let goal = next_solver::Goal { param_env: self.trait_env.env, predicate };
let canonicalized = self.canonicalize(goal);
next_trait_solve_canonical_in_ctxt(&self.infer_ctxt, canonicalized)
}
#[tracing::instrument(level = "debug", skip(self))]
pub(crate) fn solve_obligation(&mut self, goal: Goal) -> Result<Certainty, NoSolution> {
let goal = InEnvironment::new(&self.trait_env.env, goal);
let goal = goal.to_nextsolver(self.interner);
let result = next_trait_solve_in_ctxt(&self.infer_ctxt, goal);
result.map(|m| m.1)
}
pub(crate) fn register_obligation(&mut self, predicate: Predicate<'db>) {
let goal = next_solver::Goal {
param_env: self.trait_env.env.to_nextsolver(self.interner),
predicate,
};
let goal = next_solver::Goal { param_env: self.trait_env.env, predicate };
self.register_obligation_in_env(goal)
}
@ -984,7 +864,7 @@ impl<'db> InferenceTable<'db> {
&mut self,
ty: &Ty,
num_args: usize,
) -> Option<(FnTrait, Vec<crate::next_solver::Ty<'db>>, crate::next_solver::Ty<'db>)> {
) -> Option<(FnTrait, Vec<next_solver::Ty<'db>>, next_solver::Ty<'db>)> {
for (fn_trait_name, output_assoc_name, subtraits) in [
(FnTrait::FnOnce, sym::Output, &[FnTrait::Fn, FnTrait::FnMut][..]),
(FnTrait::AsyncFnMut, sym::CallRefFuture, &[FnTrait::AsyncFn]),
@ -997,42 +877,34 @@ impl<'db> InferenceTable<'db> {
trait_data.associated_type_by_name(&Name::new_symbol_root(output_assoc_name))?;
let mut arg_tys = Vec::with_capacity(num_args);
let arg_ty = TyBuilder::tuple(num_args)
.fill(|it| {
let arg = match it {
ParamKind::Type => self.new_type_var(),
ParamKind::Lifetime => unreachable!("Tuple with lifetime parameter"),
ParamKind::Const(_) => unreachable!("Tuple with const parameter"),
};
arg_tys.push(arg.to_nextsolver(self.interner));
arg.cast(Interner)
let arg_ty = next_solver::Ty::new_tup_from_iter(
self.interner,
std::iter::repeat_with(|| {
let ty = self.next_ty_var();
arg_tys.push(ty);
ty
})
.build();
.take(num_args),
);
let args = [ty.to_nextsolver(self.interner), arg_ty];
let trait_ref = crate::next_solver::TraitRef::new(self.interner, fn_trait.into(), args);
let b = TyBuilder::trait_ref(self.db, fn_trait);
if b.remaining() != 2 {
return None;
}
let mut trait_ref = b.push(ty.clone()).push(arg_ty).build();
let projection = crate::next_solver::Ty::new_alias(
self.interner,
rustc_type_ir::AliasTyKind::Projection,
crate::next_solver::AliasTy::new(self.interner, output_assoc_type.into(), args),
);
let projection = TyBuilder::assoc_type_projection(
self.db,
output_assoc_type,
Some(trait_ref.substitution.clone()),
)
.fill_with_unknown()
.build();
let goal: Goal = trait_ref.clone().cast(Interner);
if !self.try_obligation(goal.clone()).no_solution() {
self.register_obligation(goal.to_nextsolver(self.interner));
let return_ty =
self.normalize_projection_ty(projection).to_nextsolver(self.interner);
let pred = crate::next_solver::Predicate::upcast_from(trait_ref, self.interner);
if !self.try_obligation(pred).no_solution() {
self.register_obligation(pred);
let return_ty = self.normalize_alias_ty(projection);
for &fn_x in subtraits {
let fn_x_trait = fn_x.get_id(self.db, krate)?;
trait_ref.trait_id = to_chalk_trait_id(fn_x_trait);
let goal = trait_ref.clone().cast(Interner);
if !self.try_obligation(goal).no_solution() {
let trait_ref =
crate::next_solver::TraitRef::new(self.interner, fn_x_trait.into(), args);
let pred = crate::next_solver::Predicate::upcast_from(trait_ref, self.interner);
if !self.try_obligation(pred).no_solution() {
return Some((fn_x, arg_tys, return_ty));
}
}
@ -1171,12 +1043,11 @@ impl<'db> InferenceTable<'db> {
let Some(sized) = LangItem::Sized.resolve_trait(self.db, self.trait_env.krate) else {
return false;
};
let sized_pred = WhereClause::Implemented(TraitRef {
trait_id: to_chalk_trait_id(sized),
substitution: Substitution::from1(Interner, ty),
});
let goal = GoalData::DomainGoal(chalk_ir::DomainGoal::Holds(sized_pred)).intern(Interner);
self.try_obligation(goal).certain()
let sized_pred = Predicate::upcast_from(
TraitRef::new(self.interner, sized.into(), [ty.to_nextsolver(self.interner)]),
self.interner,
);
self.try_obligation(sized_pred).certain()
}
}
@ -1192,14 +1063,10 @@ impl fmt::Debug for InferenceTable<'_> {
mod resolve {
use super::InferenceTable;
use crate::{
ConcreteConst, Const, ConstData, ConstScalar, ConstValue, DebruijnIndex, GenericArg,
InferenceVar, Interner, Lifetime, Ty, TyVariableKind, VariableKind,
next_solver::mapping::NextSolverToChalk,
};
use chalk_ir::{
cast::Cast,
fold::{TypeFoldable, TypeFolder},
Const, DebruijnIndex, GenericArg, InferenceVar, Interner, Lifetime, Ty, TyVariableKind,
VariableKind, next_solver::mapping::NextSolverToChalk,
};
use chalk_ir::fold::{TypeFoldable, TypeFolder};
use rustc_type_ir::{FloatVid, IntVid, TyVid};
#[derive(Debug, Copy, Clone, PartialEq, Eq)]
@ -1213,7 +1080,7 @@ mod resolve {
pub(super) struct Resolver<
'a,
'b,
F: Fn(InferenceVar, VariableKind, GenericArg, DebruijnIndex) -> GenericArg,
F: Fn(InferenceVar, VariableKind, DebruijnIndex) -> GenericArg,
> {
pub(super) table: &'a mut InferenceTable<'b>,
pub(super) var_stack: &'a mut Vec<(InferenceVar, VarKind)>,
@ -1221,7 +1088,7 @@ mod resolve {
}
impl<F> TypeFolder<Interner> for Resolver<'_, '_, F>
where
F: Fn(InferenceVar, VariableKind, GenericArg, DebruijnIndex) -> GenericArg,
F: Fn(InferenceVar, VariableKind, DebruijnIndex) -> GenericArg,
{
fn as_dyn(&mut self) -> &mut dyn TypeFolder<Interner> {
self
@ -1243,8 +1110,7 @@ mod resolve {
let var = InferenceVar::from(vid.as_u32());
if self.var_stack.contains(&(var, VarKind::Ty(kind))) {
// recursive type
let default = self.table.fallback_value(var, kind).cast(Interner);
return (self.fallback)(var, VariableKind::Ty(kind), default, outer_binder)
return (self.fallback)(var, VariableKind::Ty(kind), outer_binder)
.assert_ty_ref(Interner)
.clone();
}
@ -1256,8 +1122,7 @@ mod resolve {
self.var_stack.pop();
result
} else {
let default = self.table.fallback_value(var, kind).cast(Interner);
(self.fallback)(var, VariableKind::Ty(kind), default, outer_binder)
(self.fallback)(var, VariableKind::Ty(kind), outer_binder)
.assert_ty_ref(Interner)
.clone()
}
@ -1273,8 +1138,7 @@ mod resolve {
let var = InferenceVar::from(vid.as_u32());
if self.var_stack.contains(&(var, VarKind::Ty(kind))) {
// recursive type
let default = self.table.fallback_value(var, kind).cast(Interner);
return (self.fallback)(var, VariableKind::Ty(kind), default, outer_binder)
return (self.fallback)(var, VariableKind::Ty(kind), outer_binder)
.assert_ty_ref(Interner)
.clone();
}
@ -1286,8 +1150,7 @@ mod resolve {
self.var_stack.pop();
result
} else {
let default = self.table.fallback_value(var, kind).cast(Interner);
(self.fallback)(var, VariableKind::Ty(kind), default, outer_binder)
(self.fallback)(var, VariableKind::Ty(kind), outer_binder)
.assert_ty_ref(Interner)
.clone()
}
@ -1303,8 +1166,7 @@ mod resolve {
let var = InferenceVar::from(vid.as_u32());
if self.var_stack.contains(&(var, VarKind::Ty(kind))) {
// recursive type
let default = self.table.fallback_value(var, kind).cast(Interner);
return (self.fallback)(var, VariableKind::Ty(kind), default, outer_binder)
return (self.fallback)(var, VariableKind::Ty(kind), outer_binder)
.assert_ty_ref(Interner)
.clone();
}
@ -1316,8 +1178,7 @@ mod resolve {
self.var_stack.pop();
result
} else {
let default = self.table.fallback_value(var, kind).cast(Interner);
(self.fallback)(var, VariableKind::Ty(kind), default, outer_binder)
(self.fallback)(var, VariableKind::Ty(kind), outer_binder)
.assert_ty_ref(Interner)
.clone()
}
@ -1336,15 +1197,9 @@ mod resolve {
.infer_ctxt
.root_const_var(rustc_type_ir::ConstVid::from_u32(var.index()));
let var = InferenceVar::from(vid.as_u32());
let default = ConstData {
ty: ty.clone(),
value: ConstValue::Concrete(ConcreteConst { interned: ConstScalar::Unknown }),
}
.intern(Interner)
.cast(Interner);
if self.var_stack.contains(&(var, VarKind::Const)) {
// recursive
return (self.fallback)(var, VariableKind::Const(ty), default, outer_binder)
return (self.fallback)(var, VariableKind::Const(ty), outer_binder)
.assert_const_ref(Interner)
.clone();
}
@ -1356,7 +1211,7 @@ mod resolve {
self.var_stack.pop();
result
} else {
(self.fallback)(var, VariableKind::Const(ty), default, outer_binder)
(self.fallback)(var, VariableKind::Const(ty), outer_binder)
.assert_const_ref(Interner)
.clone()
}
@ -1375,3 +1230,124 @@ mod resolve {
}
}
}
/// Final resolution of inference results ("writeback"): fully resolves a
/// value against the inference table, optionally deeply normalizing it
/// first, and replaces anything still unresolved with error
/// types/consts/regions so no inference variables escape.
mod resolve_completely {
    use rustc_type_ir::{
        DebruijnIndex, Flags, TypeFolder, TypeSuperFoldable,
        inherent::{Const as _, Ty as _},
    };
    use crate::next_solver::Region;
    use crate::{
        infer::unify::InferenceTable,
        next_solver::{
            Const, DbInterner, ErrorGuaranteed, Goal, Predicate, Term, Ty,
            infer::traits::ObligationCause,
            normalize::deeply_normalize_with_skipped_universes_and_ambiguous_coroutine_goals,
        },
    };
    /// Folder that completely resolves types and consts via the inference table.
    pub(super) struct Resolver<'a, 'db> {
        ctx: &'a mut InferenceTable<'db>,
        /// Whether we should normalize, disabled when resolving predicates.
        should_normalize: bool,
        // Goals left ambiguous by deep normalization are collected here for
        // the caller to handle.
        nested_goals: &'a mut Vec<Goal<'db, Predicate<'db>>>,
    }
    impl<'a, 'db> Resolver<'a, 'db> {
        /// Creates a resolver over `ctx`, pushing any goals produced during
        /// normalization into `nested_goals`.
        pub(super) fn new(
            ctx: &'a mut InferenceTable<'db>,
            should_normalize: bool,
            nested_goals: &'a mut Vec<Goal<'db, Predicate<'db>>>,
        ) -> Resolver<'a, 'db> {
            Resolver { ctx, nested_goals, should_normalize }
        }
        /// Shared path for types and consts: optionally deeply normalizes
        /// `value`, then erases any remaining inference vars to errors.
        fn handle_term<T>(
            &mut self,
            value: T,
            outer_exclusive_binder: impl FnOnce(T) -> DebruijnIndex,
        ) -> T
        where
            T: Into<Term<'db>> + TypeSuperFoldable<DbInterner<'db>> + Copy,
        {
            let value = if self.should_normalize {
                let cause = ObligationCause::new();
                let at = self.ctx.infer_ctxt.at(&cause, self.ctx.trait_env.env);
                // One (unknown) universe slot per binder level the value is
                // nested under, so bound vars inside it are skipped.
                let universes = vec![None; outer_exclusive_binder(value).as_usize()];
                match deeply_normalize_with_skipped_universes_and_ambiguous_coroutine_goals(
                    at, value, universes,
                ) {
                    Ok((value, goals)) => {
                        self.nested_goals.extend(goals);
                        value
                    }
                    Err(_errors) => {
                        // FIXME: Report the error.
                        value
                    }
                }
            } else {
                value
            };
            value.fold_with(&mut ReplaceInferWithError { interner: self.ctx.interner })
        }
    }
    impl<'cx, 'db> TypeFolder<DbInterner<'db>> for Resolver<'cx, 'db> {
        fn cx(&self) -> DbInterner<'db> {
            self.ctx.interner
        }
        // Region inference vars are not resolved here — they are erased to
        // the error region.
        fn fold_region(&mut self, r: Region<'db>) -> Region<'db> {
            if r.is_var() { Region::error(self.ctx.interner) } else { r }
        }
        fn fold_ty(&mut self, ty: Ty<'db>) -> Ty<'db> {
            self.handle_term(ty, |it| it.outer_exclusive_binder())
        }
        fn fold_const(&mut self, ct: Const<'db>) -> Const<'db> {
            self.handle_term(ct, |it| it.outer_exclusive_binder())
        }
        fn fold_predicate(&mut self, predicate: Predicate<'db>) -> Predicate<'db> {
            // Predicates must only be resolved with normalization disabled.
            assert!(
                !self.should_normalize,
                "normalizing predicates in writeback is not generally sound"
            );
            predicate.super_fold_with(self)
        }
    }
    /// Fallback folder: turns any inference variable that survived
    /// resolution into the corresponding error type/const/region.
    struct ReplaceInferWithError<'db> {
        interner: DbInterner<'db>,
    }
    impl<'db> TypeFolder<DbInterner<'db>> for ReplaceInferWithError<'db> {
        fn cx(&self) -> DbInterner<'db> {
            self.interner
        }
        fn fold_ty(&mut self, t: Ty<'db>) -> Ty<'db> {
            if t.is_infer() {
                Ty::new_error(self.interner, ErrorGuaranteed)
            } else {
                t.super_fold_with(self)
            }
        }
        fn fold_const(&mut self, c: Const<'db>) -> Const<'db> {
            if c.is_ct_infer() {
                Const::new_error(self.interner, ErrorGuaranteed)
            } else {
                c.super_fold_with(self)
            }
        }
        fn fold_region(&mut self, r: Region<'db>) -> Region<'db> {
            if r.is_var() { Region::error(self.interner) } else { r }
        }
    }
}

View file

@ -20,7 +20,7 @@ pub(crate) fn is_ty_uninhabited_from(
db: &dyn HirDatabase,
ty: &Ty,
target_mod: ModuleId,
env: Arc<TraitEnvironment>,
env: Arc<TraitEnvironment<'_>>,
) -> bool {
let _p = tracing::info_span!("is_ty_uninhabited_from", ?ty).entered();
let mut uninhabited_from =
@ -36,7 +36,7 @@ pub(crate) fn is_enum_variant_uninhabited_from(
variant: EnumVariantId,
subst: &Substitution,
target_mod: ModuleId,
env: Arc<TraitEnvironment>,
env: Arc<TraitEnvironment<'_>>,
) -> bool {
let _p = tracing::info_span!("is_enum_variant_uninhabited_from").entered();
@ -52,7 +52,7 @@ struct UninhabitedFrom<'a> {
// guard for preventing stack overflow in non trivial non terminating types
max_depth: usize,
db: &'a dyn HirDatabase,
env: Arc<TraitEnvironment>,
env: Arc<TraitEnvironment<'a>>,
}
const CONTINUE_OPAQUELY_INHABITED: ControlFlow<VisiblyUninhabited> = Continue(());

View file

@ -132,7 +132,7 @@ fn layout_of_simd_ty<'db>(
id: StructId,
repr_packed: bool,
args: &GenericArgs<'db>,
env: Arc<TraitEnvironment>,
env: Arc<TraitEnvironment<'db>>,
dl: &TargetDataLayout,
) -> Result<Arc<Layout>, LayoutError> {
// Supported SIMD vectors are homogeneous ADTs with exactly one array field:
@ -160,7 +160,7 @@ fn layout_of_simd_ty<'db>(
pub fn layout_of_ty_query<'db>(
db: &'db dyn HirDatabase,
ty: Ty<'db>,
trait_env: Arc<TraitEnvironment>,
trait_env: Arc<TraitEnvironment<'db>>,
) -> Result<Arc<Layout>, LayoutError> {
let krate = trait_env.krate;
let interner = DbInterner::new_with(db, Some(krate), trait_env.block);
@ -371,7 +371,7 @@ pub fn layout_of_ty_query<'db>(
pub(crate) fn layout_of_ty_cycle_result<'db>(
_: &dyn HirDatabase,
_: Ty<'db>,
_: Arc<TraitEnvironment>,
_: Arc<TraitEnvironment<'db>>,
) -> Result<Arc<Layout>, LayoutError> {
Err(LayoutError::RecursiveTypeWithoutIndirection)
}

View file

@ -23,7 +23,7 @@ pub fn layout_of_adt_query<'db>(
db: &'db dyn HirDatabase,
def: AdtId,
args: GenericArgs<'db>,
trait_env: Arc<TraitEnvironment>,
trait_env: Arc<TraitEnvironment<'db>>,
) -> Result<Arc<Layout>, LayoutError> {
let krate = trait_env.krate;
let Ok(target) = db.target_data_layout(krate) else {
@ -99,7 +99,7 @@ pub(crate) fn layout_of_adt_cycle_result<'db>(
_: &'db dyn HirDatabase,
_def: AdtId,
_args: GenericArgs<'db>,
_trait_env: Arc<TraitEnvironment>,
_trait_env: Arc<TraitEnvironment<'db>>,
) -> Result<Arc<Layout>, LayoutError> {
Err(LayoutError::RecursiveTypeWithoutIndirection)
}

View file

@ -1,18 +1,17 @@
use base_db::target::TargetData;
use chalk_ir::{AdtId, TyKind};
use either::Either;
use hir_def::db::DefDatabase;
use project_model::{Sysroot, toolchain_info::QueryConfig};
use rustc_hash::FxHashMap;
use rustc_type_ir::inherent::{GenericArgs as _, Ty as _};
use syntax::ToSmolStr;
use test_fixture::WithFixture;
use triomphe::Arc;
use crate::{
Interner, Substitution,
db::HirDatabase,
layout::{Layout, LayoutError},
next_solver::{DbInterner, mapping::ChalkToNextSolver},
next_solver::{AdtDef, DbInterner, GenericArgs, mapping::ChalkToNextSolver},
setup_tracing,
test_db::TestDB,
};
@ -80,18 +79,18 @@ fn eval_goal(
Some(adt_or_type_alias_id)
})
.unwrap();
let goal_ty = match adt_or_type_alias_id {
Either::Left(adt_id) => {
TyKind::Adt(AdtId(adt_id), Substitution::empty(Interner)).intern(Interner)
}
Either::Right(ty_id) => {
db.ty(ty_id.into()).substitute(Interner, &Substitution::empty(Interner))
}
};
salsa::attach(&db, || {
let interner = DbInterner::new_with(&db, None, None);
let goal_ty = match adt_or_type_alias_id {
Either::Left(adt_id) => crate::next_solver::Ty::new_adt(
interner,
AdtDef::new(adt_id, interner),
GenericArgs::identity_for_item(interner, adt_id.into()),
),
Either::Right(ty_id) => db.ty(ty_id.into()).instantiate_identity(),
};
db.layout_of_ty(
goal_ty.to_nextsolver(interner),
goal_ty,
db.trait_environment(match adt_or_type_alias_id {
Either::Left(adt) => hir_def::GenericDefId::AdtId(adt),
Either::Right(ty) => hir_def::GenericDefId::TypeAliasId(ty),

View file

@ -72,7 +72,10 @@ use intern::{Symbol, sym};
use la_arena::{Arena, Idx};
use mir::{MirEvalError, VTableMap};
use rustc_hash::{FxBuildHasher, FxHashMap, FxHashSet};
use rustc_type_ir::inherent::SliceLike;
use rustc_type_ir::{
UpcastFrom,
inherent::{SliceLike, Ty as _},
};
use syntax::ast::{ConstArg, make};
use traits::FnTrait;
use triomphe::Arc;
@ -85,7 +88,7 @@ use crate::{
infer::unify::InferenceTable,
next_solver::{
DbInterner,
mapping::{ChalkToNextSolver, convert_ty_for_result},
mapping::{ChalkToNextSolver, NextSolverToChalk, convert_ty_for_result},
},
};
@ -554,8 +557,10 @@ impl CallableSig {
pub fn from_def(db: &dyn HirDatabase, def: FnDefId, substs: &Substitution) -> CallableSig {
let callable_def = ToChalk::from_chalk(db, def);
let interner = DbInterner::new_with(db, None, None);
let args: crate::next_solver::GenericArgs<'_> = substs.to_nextsolver(interner);
let sig = db.callable_item_signature(callable_def);
sig.substitute(Interner, substs)
sig.instantiate(interner, args).skip_binder().to_chalk(interner)
}
pub fn from_fn_ptr(fn_ptr: &FnPointer) -> CallableSig {
CallableSig {
@ -916,10 +921,10 @@ where
Canonical { value, binders: chalk_ir::CanonicalVarKinds::from_iter(Interner, kinds) }
}
pub fn callable_sig_from_fn_trait(
pub fn callable_sig_from_fn_trait<'db>(
self_ty: &Ty,
trait_env: Arc<TraitEnvironment>,
db: &dyn HirDatabase,
trait_env: Arc<TraitEnvironment<'db>>,
db: &'db dyn HirDatabase,
) -> Option<(FnTrait, CallableSig)> {
let krate = trait_env.krate;
let fn_once_trait = FnTrait::FnOnce.get_id(db, krate)?;
@ -936,26 +941,32 @@ pub fn callable_sig_from_fn_trait(
// Register two obligations:
// - Self: FnOnce<?args_ty>
// - <Self as FnOnce<?args_ty>>::Output == ?ret_ty
let args_ty = table.new_type_var();
let mut trait_ref = b.push(self_ty.clone()).push(args_ty.clone()).build();
let projection = TyBuilder::assoc_type_projection(
db,
output_assoc_type,
Some(trait_ref.substitution.clone()),
)
.build();
let args_ty = table.next_ty_var();
let args = [self_ty.to_nextsolver(table.interner), args_ty];
let trait_ref = crate::next_solver::TraitRef::new(table.interner, fn_once_trait.into(), args);
let projection = crate::next_solver::Ty::new_alias(
table.interner,
rustc_type_ir::AliasTyKind::Projection,
crate::next_solver::AliasTy::new(table.interner, output_assoc_type.into(), args),
);
let goal: Goal = trait_ref.clone().cast(Interner);
let pred = goal.to_nextsolver(table.interner);
if !table.try_obligation(goal).no_solution() {
let pred = crate::next_solver::Predicate::upcast_from(trait_ref, table.interner);
if !table.try_obligation(pred).no_solution() {
table.register_obligation(pred);
let return_ty = table.normalize_projection_ty(projection);
let return_ty = table.normalize_alias_ty(projection);
for fn_x in [FnTrait::Fn, FnTrait::FnMut, FnTrait::FnOnce] {
let fn_x_trait = fn_x.get_id(db, krate)?;
trait_ref.trait_id = to_chalk_trait_id(fn_x_trait);
if !table.try_obligation(trait_ref.clone().cast(Interner)).no_solution() {
let ret_ty = table.resolve_completely(return_ty);
let args_ty = table.resolve_completely(args_ty);
let trait_ref =
crate::next_solver::TraitRef::new(table.interner, fn_x_trait.into(), args);
if !table
.try_obligation(crate::next_solver::Predicate::upcast_from(
trait_ref,
table.interner,
))
.no_solution()
{
let ret_ty = table.resolve_completely(return_ty.to_chalk(table.interner));
let args_ty = table.resolve_completely(args_ty.to_chalk(table.interner));
let params = args_ty
.as_tuple()?
.iter(Interner)

View file

@ -24,19 +24,18 @@ use chalk_ir::{
use either::Either;
use hir_def::{
AdtId, AssocItemId, CallableDefId, ConstId, ConstParamId, DefWithBodyId, EnumId, EnumVariantId,
FunctionId, GenericDefId, GenericParamId, HasModule, ImplId, ItemContainerId, LocalFieldId,
Lookup, StaticId, StructId, TypeAliasId, TypeOrConstParamId, UnionId, VariantId,
AdtId, AssocItemId, ConstId, ConstParamId, EnumId, EnumVariantId, FunctionId, GenericDefId,
GenericParamId, ItemContainerId, LocalFieldId, Lookup, StaticId, StructId, TypeAliasId,
TypeOrConstParamId, UnionId, VariantId,
builtin_type::BuiltinType,
expr_store::{ExpressionStore, path::Path},
hir::generics::{GenericParamDataRef, TypeOrConstParamData, WherePredicate},
item_tree::FieldsShape,
lang_item::LangItem,
resolver::{HasResolver, LifetimeNs, Resolver, TypeNs},
signatures::{FunctionSignature, TraitFlags, TypeAliasFlags},
signatures::{FunctionSignature, TraitFlags},
type_ref::{
ConstRef, LifetimeRefId, LiteralConstRef, PathId, TraitBoundModifier,
TraitRef as HirTraitRef, TypeBound, TypeRef, TypeRefId,
ConstRef, LifetimeRefId, LiteralConstRef, PathId, TraitBoundModifier, TypeBound, TypeRef,
TypeRefId,
},
};
use hir_expand::name::Name;
@ -46,11 +45,10 @@ use stdx::{impl_from, never};
use triomphe::{Arc, ThinArc};
use crate::{
AliasTy, Binders, BoundVar, CallableSig, Const, DebruijnIndex, DomainGoal, DynTy, FnAbi,
FnPointer, FnSig, FnSubst, ImplTrait, ImplTraitId, ImplTraits, Interner, Lifetime,
LifetimeData, LifetimeOutlives, PolyFnSig, QuantifiedWhereClause, QuantifiedWhereClauses,
Substitution, TraitEnvironment, TraitRef, TraitRefExt, Ty, TyBuilder, TyKind, WhereClause,
all_super_traits,
AliasTy, Binders, BoundVar, Const, DebruijnIndex, DynTy, FnAbi, FnPointer, FnSig, FnSubst,
ImplTrait, ImplTraitId, ImplTraits, Interner, Lifetime, LifetimeData, LifetimeOutlives,
QuantifiedWhereClause, QuantifiedWhereClauses, Substitution, TraitRef, TraitRefExt, Ty,
TyBuilder, TyKind, WhereClause, all_super_traits,
consteval::{intern_const_ref, path_to_const, unknown_const, unknown_const_as_generic},
db::HirDatabase,
error_lifetime,
@ -60,7 +58,11 @@ use crate::{
path::{PathDiagnosticCallback, PathLoweringContext},
},
make_binders,
mapping::{ToChalk, from_chalk_trait_id, lt_to_placeholder_idx},
mapping::{from_chalk_trait_id, lt_to_placeholder_idx},
next_solver::{
DbInterner,
mapping::{ChalkToNextSolver, NextSolverToChalk},
},
static_lifetime, to_chalk_trait_id, to_placeholder_idx,
utils::all_super_trait_refs,
variable_kinds_from_iter,
@ -567,14 +569,6 @@ impl<'a> TyLoweringContext<'a> {
Some((ctx.lower_trait_ref_from_resolved_path(resolved, explicit_self_ty, false), ctx))
}
    /// Lowers a HIR trait reference to a `TraitRef` using `explicit_self_ty`
    /// as the self type; returns `None` when `lower_trait_ref_from_path` does.
    fn lower_trait_ref(
        &mut self,
        trait_ref: &HirTraitRef,
        explicit_self_ty: Ty,
    ) -> Option<TraitRef> {
        self.lower_trait_ref_from_path(trait_ref.path, explicit_self_ty).map(|it| it.0)
    }
/// When lowering predicates from parents (impl, traits) for children defs (fns, consts, types), `generics` should
/// contain the `Generics` for the **child**, while `predicate_owner` should contain the `GenericDefId` of the
/// **parent**. This is important so we generate the correct bound var/placeholder.
@ -826,15 +820,6 @@ impl<'a> TyLoweringContext<'a> {
}
}
/// Build the signature of a callable item (function, struct or enum variant).
pub(crate) fn callable_item_signature_query(db: &dyn HirDatabase, def: CallableDefId) -> PolyFnSig {
match def {
CallableDefId::FunctionId(f) => fn_sig_for_fn(db, f),
CallableDefId::StructId(s) => fn_sig_for_struct_constructor(db, s),
CallableDefId::EnumVariantId(e) => fn_sig_for_enum_variant_constructor(db, e),
}
}
fn named_associated_type_shorthand_candidates<R>(
db: &dyn HirDatabase,
// If the type parameter is defined in an impl and we're in a method, there
@ -862,21 +847,21 @@ fn named_associated_type_shorthand_candidates<R>(
})
};
let interner = DbInterner::new_with(db, None, None);
match res {
TypeNs::SelfType(impl_id) => {
// we're _in_ the impl -- the binders get added back later. Correct,
// but it would be nice to make this more explicit
let trait_ref = db.impl_trait(impl_id)?.into_value_and_skipped_binders().0;
let trait_ref = db.impl_trait(impl_id)?;
let impl_id_as_generic_def: GenericDefId = impl_id.into();
if impl_id_as_generic_def != def {
let subst = TyBuilder::subst_for_def(db, impl_id, None)
.fill_with_bound_vars(DebruijnIndex::INNERMOST, 0)
.build();
let trait_ref = subst.apply(trait_ref, Interner);
let args: crate::next_solver::GenericArgs<'_> = subst.to_nextsolver(interner);
let trait_ref = trait_ref.instantiate(interner, args).to_chalk(interner);
search(trait_ref)
} else {
search(trait_ref)
search(trait_ref.skip_binder().to_chalk(interner))
}
}
TypeNs::GenericParam(param_id) => {
@ -919,7 +904,7 @@ pub(crate) fn field_types_query(
db: &dyn HirDatabase,
variant_id: VariantId,
) -> Arc<ArenaMap<LocalFieldId, Binders<Ty>>> {
db.field_types_with_diagnostics(variant_id).0
field_types_with_diagnostics_query(db, variant_id).0
}
/// Build the type of all specific fields of a struct or enum variant.
@ -1086,102 +1071,6 @@ pub(crate) fn generic_predicates_for_param_cycle_result(
GenericPredicates(None)
}
/// Returns the trait environment to use when working with the given body.
///
/// Bodies whose `DefWithBodyId` has no corresponding `GenericDefId` get an
/// empty environment for their crate; all others delegate to the
/// per-`GenericDefId` `trait_environment` query.
pub(crate) fn trait_environment_for_body_query(
    db: &dyn HirDatabase,
    def: DefWithBodyId,
) -> Arc<TraitEnvironment> {
    let Some(def) = def.as_generic_def_id(db) else {
        // No generic def — there are no predicates to collect.
        let krate = def.module(db).krate();
        return TraitEnvironment::empty(krate);
    };
    db.trait_environment(def)
}
/// Builds the `TraitEnvironment` for a generic def: the where clauses of the
/// def and all its parents, lowered to chalk program clauses, plus implicit
/// `Self: Trait` and `Sized` clauses where applicable.
pub(crate) fn trait_environment_query(
    db: &dyn HirDatabase,
    def: GenericDefId,
) -> Arc<TraitEnvironment> {
    let generics = generics(db, def);
    // Fast path: nothing to lower at all.
    if generics.has_no_predicates() && generics.is_empty() {
        return TraitEnvironment::empty(def.krate(db));
    }
    let resolver = def.resolver(db);
    let mut ctx = TyLoweringContext::new(
        db,
        &resolver,
        generics.store(),
        def,
        LifetimeElisionKind::AnonymousReportError,
    )
    .with_type_param_mode(ParamLoweringMode::Placeholder);
    let mut traits_in_scope = Vec::new();
    let mut clauses = Vec::new();
    // Walk this def's generics and every parent's, lowering each where
    // predicate into a `FromEnv` program clause. `Implemented` bounds are
    // also recorded in `traits_in_scope`.
    for maybe_parent_generics in
        std::iter::successors(Some(&generics), |generics| generics.parent_generics())
    {
        ctx.store = maybe_parent_generics.store();
        for pred in maybe_parent_generics.where_predicates() {
            for pred in ctx.lower_where_predicate(pred, false) {
                if let WhereClause::Implemented(tr) = pred.skip_binders() {
                    traits_in_scope
                        .push((tr.self_type_parameter(Interner).clone(), tr.hir_trait_id()));
                }
                let program_clause: Binders<DomainGoal> =
                    pred.map(|pred| pred.into_from_env_goal(Interner).cast(Interner));
                clauses.push(program_clause);
            }
        }
    }
    if let Some(trait_id) = def.assoc_trait_container(db) {
        // add `Self: Trait<T1, T2, ...>` to the environment in trait
        // function default implementations (and speculative code
        // inside consts or type aliases)
        cov_mark::hit!(trait_self_implements_self);
        let substs = TyBuilder::placeholder_subst(db, trait_id);
        let trait_ref = TraitRef { trait_id: to_chalk_trait_id(trait_id), substitution: substs };
        let pred = WhereClause::Implemented(trait_ref);
        clauses.push(Binders::empty(
            Interner,
            pred.cast::<DomainGoal>(Interner).into_from_env_goal(Interner),
        ));
    }
    // Add implicit `T: Sized` clauses for params not explicitly `?Sized`.
    let subst = generics.placeholder_subst(db);
    if !subst.is_empty(Interner) {
        let explicitly_unsized_tys = ctx.unsized_types;
        if let Some(implicitly_sized_clauses) =
            implicitly_sized_clauses(db, def, &explicitly_unsized_tys, &subst, &resolver)
        {
            clauses.extend(implicitly_sized_clauses.map(|pred| {
                Binders::empty(
                    Interner,
                    pred.into_from_env_goal(Interner).cast::<DomainGoal>(Interner),
                )
            }));
        };
    }
    // Wrap each goal into a high-priority implication with no conditions.
    let clauses = chalk_ir::ProgramClauses::from_iter(
        Interner,
        clauses.into_iter().map(|g| {
            chalk_ir::ProgramClause::new(
                Interner,
                chalk_ir::ProgramClauseData(g.map(|g| chalk_ir::ProgramClauseImplication {
                    consequence: g,
                    conditions: chalk_ir::Goals::empty(Interner),
                    constraints: chalk_ir::Constraints::empty(Interner),
                    priority: chalk_ir::ClausePriority::High,
                })),
            )
        }),
    );
    let env = chalk_ir::Environment { clauses };
    TraitEnvironment::new(resolver.krate(), None, traits_in_scope.into_boxed_slice(), env)
}
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub struct GenericPredicates(Option<Arc<[Binders<QuantifiedWhereClause>]>>);
@ -1410,208 +1299,6 @@ pub(crate) fn generic_defaults_with_diagnostics_cycle_result(
(GenericDefaults(None), None)
}
/// Builds the polymorphic signature of a function from its declared
/// parameter and return types.
///
/// Parameters and the return type are lowered with separate contexts because
/// they use different lifetime-elision rules; the return type additionally
/// lowers `impl Trait` in opaque mode. A missing return type lowers to unit.
fn fn_sig_for_fn(db: &dyn HirDatabase, def: FunctionId) -> PolyFnSig {
    let data = db.function_signature(def);
    let resolver = def.resolver(db);
    let mut ctx_params = TyLoweringContext::new(
        db,
        &resolver,
        &data.store,
        def.into(),
        LifetimeElisionKind::for_fn_params(&data),
    )
    .with_type_param_mode(ParamLoweringMode::Variable);
    let params = data.params.iter().map(|&tr| ctx_params.lower_ty(tr));
    let ret = match data.ret_type {
        Some(ret_type) => {
            let mut ctx_ret = TyLoweringContext::new(
                db,
                &resolver,
                &data.store,
                def.into(),
                LifetimeElisionKind::for_fn_ret(),
            )
            .with_impl_trait_mode(ImplTraitLoweringMode::Opaque)
            .with_type_param_mode(ParamLoweringMode::Variable);
            ctx_ret.lower_ty(ret_type)
        }
        // No declared return type: `()`.
        None => TyKind::Tuple(0, Substitution::empty(Interner)).intern(Interner),
    };
    let generics = generics(db, def.into());
    let sig = CallableSig::from_params_and_return(
        params,
        ret,
        data.is_varargs(),
        if data.is_unsafe() { Safety::Unsafe } else { Safety::Safe },
        data.abi.as_ref().map_or(FnAbi::Rust, FnAbi::from_symbol),
    );
    make_binders(db, &generics, sig)
}
/// Build the declared type of a function. This should not need to look at the
/// function body.
fn type_for_fn(db: &dyn HirDatabase, def: FunctionId) -> Binders<Ty> {
    let generics = generics(db, def.into());
    // `FnDef` applied to the identity (bound-var) substitution, wrapped in
    // the function's binders.
    let substs = generics.bound_vars_subst(db, DebruijnIndex::INNERMOST);
    make_binders(
        db,
        &generics,
        TyKind::FnDef(CallableDefId::FunctionId(def).to_chalk(db), substs).intern(Interner),
    )
}
/// Build the declared type of a const.
fn type_for_const(db: &dyn HirDatabase, def: ConstId) -> Binders<Ty> {
    let data = db.const_signature(def);
    let generics = generics(db, def.into());
    let resolver = def.resolver(db);
    // Lifetime-elision rules for consts depend on the containing item.
    let parent = def.loc(db).container;
    let mut ctx = TyLoweringContext::new(
        db,
        &resolver,
        &data.store,
        def.into(),
        LifetimeElisionKind::for_const(parent),
    )
    .with_type_param_mode(ParamLoweringMode::Variable);
    make_binders(db, &generics, ctx.lower_ty(data.type_ref))
}
/// Build the declared type of a static.
fn type_for_static(db: &dyn HirDatabase, def: StaticId) -> Binders<Ty> {
    let data = db.static_signature(def);
    let resolver = def.resolver(db);
    // Elided lifetimes in a static's type resolve to `'static`.
    let mut ctx = TyLoweringContext::new(
        db,
        &resolver,
        &data.store,
        def.into(),
        LifetimeElisionKind::Elided(static_lifetime()),
    );
    // Statics get empty binders (no generic parameters are introduced here).
    Binders::empty(Interner, ctx.lower_ty(data.type_ref))
}
/// Builds the signature of a struct's constructor function: the field types
/// as parameters and the ADT itself as the return type, under the ADT's
/// binders.
fn fn_sig_for_struct_constructor(db: &dyn HirDatabase, def: StructId) -> PolyFnSig {
    let field_tys = db.field_types(def.into());
    let params = field_tys.iter().map(|(_, ty)| ty.skip_binders().clone());
    let (ret, binders) = type_for_adt(db, def.into()).into_value_and_skipped_binders();
    Binders::new(
        binders,
        CallableSig::from_params_and_return(params, ret, false, Safety::Safe, FnAbi::RustCall),
    )
}
/// Build the type of a tuple struct constructor.
///
/// Record structs have no constructor value (`None`); unit structs are their
/// own value (the ADT type); tuple structs get a `FnDef` type.
fn type_for_struct_constructor(db: &dyn HirDatabase, def: StructId) -> Option<Binders<Ty>> {
    let struct_data = def.fields(db);
    match struct_data.shape {
        FieldsShape::Record => None,
        FieldsShape::Unit => Some(type_for_adt(db, def.into())),
        FieldsShape::Tuple => {
            let generics = generics(db, AdtId::from(def).into());
            let substs = generics.bound_vars_subst(db, DebruijnIndex::INNERMOST);
            Some(make_binders(
                db,
                &generics,
                TyKind::FnDef(CallableDefId::StructId(def).to_chalk(db), substs).intern(Interner),
            ))
        }
    }
}
/// Builds the signature of an enum variant's constructor function: the
/// variant's field types as parameters, returning the parent enum's ADT
/// type, under the enum's binders.
fn fn_sig_for_enum_variant_constructor(db: &dyn HirDatabase, def: EnumVariantId) -> PolyFnSig {
    let field_tys = db.field_types(def.into());
    let params = field_tys.iter().map(|(_, ty)| ty.skip_binders().clone());
    let parent = def.lookup(db).parent;
    let (ret, binders) = type_for_adt(db, parent.into()).into_value_and_skipped_binders();
    Binders::new(
        binders,
        CallableSig::from_params_and_return(params, ret, false, Safety::Safe, FnAbi::RustCall),
    )
}
/// Build the type of a tuple enum variant constructor.
///
/// Mirrors `type_for_struct_constructor`: record variants have no
/// constructor value, unit variants are the enum type, tuple variants get a
/// `FnDef` type (generics come from the parent enum).
fn type_for_enum_variant_constructor(
    db: &dyn HirDatabase,
    def: EnumVariantId,
) -> Option<Binders<Ty>> {
    let e = def.lookup(db).parent;
    match def.fields(db).shape {
        FieldsShape::Record => None,
        FieldsShape::Unit => Some(type_for_adt(db, e.into())),
        FieldsShape::Tuple => {
            let generics = generics(db, e.into());
            let substs = generics.bound_vars_subst(db, DebruijnIndex::INNERMOST);
            Some(make_binders(
                db,
                &generics,
                TyKind::FnDef(CallableDefId::EnumVariantId(def).to_chalk(db), substs)
                    .intern(Interner),
            ))
        }
    }
}
/// Salsa-tracked wrapper around `type_for_adt`, with cycle recovery via
/// `type_for_adt_cycle_result`.
#[salsa_macros::tracked(cycle_result = type_for_adt_cycle_result)]
fn type_for_adt_tracked(db: &dyn HirDatabase, adt: AdtId) -> Binders<Ty> {
    type_for_adt(db, adt)
}
/// Cycle recovery for `type_for_adt_tracked`: yields the error type wrapped
/// in the ADT's binders.
fn type_for_adt_cycle_result(db: &dyn HirDatabase, adt: AdtId) -> Binders<Ty> {
    let generics = generics(db, adt.into());
    make_binders(db, &generics, TyKind::Error.intern(Interner))
}
/// Builds the declared type of an ADT: the ADT applied to the identity
/// (bound-var) substitution, wrapped in its binders.
fn type_for_adt(db: &dyn HirDatabase, adt: AdtId) -> Binders<Ty> {
    let generics = generics(db, adt.into());
    let subst = generics.bound_vars_subst(db, DebruijnIndex::INNERMOST);
    let ty = TyKind::Adt(crate::AdtId(adt), subst).intern(Interner);
    make_binders(db, &generics, ty)
}
/// Builds the declared type of a type alias together with any lowering
/// diagnostics.
///
/// `extern` type aliases become `Foreign` types and produce no diagnostics;
/// otherwise the aliased type is lowered (an alias without a type lowers to
/// the error type).
pub(crate) fn type_for_type_alias_with_diagnostics_query(
    db: &dyn HirDatabase,
    t: TypeAliasId,
) -> (Binders<Ty>, Diagnostics) {
    let generics = generics(db, t.into());
    let type_alias_data = db.type_alias_signature(t);
    let mut diags = None;
    let inner = if type_alias_data.flags.contains(TypeAliasFlags::IS_EXTERN) {
        TyKind::Foreign(crate::to_foreign_def_id(t)).intern(Interner)
    } else {
        let resolver = t.resolver(db);
        let alias = db.type_alias_signature(t);
        let mut ctx = TyLoweringContext::new(
            db,
            &resolver,
            &alias.store,
            t.into(),
            LifetimeElisionKind::AnonymousReportError,
        )
        .with_impl_trait_mode(ImplTraitLoweringMode::Opaque)
        .with_type_param_mode(ParamLoweringMode::Variable);
        let res = alias
            .ty
            .map(|type_ref| ctx.lower_ty(type_ref))
            .unwrap_or_else(|| TyKind::Error.intern(Interner));
        diags = create_diagnostics(ctx.diagnostics);
        res
    };
    (make_binders(db, &generics, inner), diags)
}
/// Cycle recovery for `type_for_type_alias_with_diagnostics`: yields the
/// error type under the alias's binders, with no diagnostics.
pub(crate) fn type_for_type_alias_with_diagnostics_cycle_result(
    db: &dyn HirDatabase,
    adt: TypeAliasId,
) -> (Binders<Ty>, Diagnostics) {
    let generics = generics(db, adt.into());
    (make_binders(db, &generics, TyKind::Error.intern(Interner)), None)
}
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub enum TyDefId {
BuiltinType(BuiltinType),
@ -1644,64 +1331,8 @@ impl ValueTyDefId {
}
}
/// Build the declared type of an item. This depends on the namespace; e.g. for
/// `struct Foo(usize)`, we have two types: The type of the struct itself, and
/// the constructor function `(usize) -> Foo` which lives in the values
/// namespace.
pub(crate) fn ty_query(db: &dyn HirDatabase, def: TyDefId) -> Binders<Ty> {
    match def {
        TyDefId::BuiltinType(it) => Binders::empty(Interner, TyBuilder::builtin(it)),
        TyDefId::AdtId(it) => type_for_adt_tracked(db, it),
        TyDefId::TypeAliasId(it) => db.type_for_type_alias_with_diagnostics(it).0,
    }
}
/// Builds the type of an item in the *value* namespace.
///
/// Returns `None` for struct/enum-variant defs that have no constructor
/// value (see the `type_for_*_constructor` helpers).
pub(crate) fn value_ty_query(db: &dyn HirDatabase, def: ValueTyDefId) -> Option<Binders<Ty>> {
    match def {
        ValueTyDefId::FunctionId(it) => Some(type_for_fn(db, it)),
        ValueTyDefId::StructId(it) => type_for_struct_constructor(db, it),
        ValueTyDefId::UnionId(it) => Some(type_for_adt(db, it.into())),
        ValueTyDefId::EnumVariantId(it) => type_for_enum_variant_constructor(db, it),
        ValueTyDefId::ConstId(it) => Some(type_for_const(db, it)),
        ValueTyDefId::StaticId(it) => Some(type_for_static(db, it)),
    }
}
/// Returns an impl's self type, discarding the lowering diagnostics.
pub(crate) fn impl_self_ty_query(db: &dyn HirDatabase, impl_id: ImplId) -> Binders<Ty> {
    db.impl_self_ty_with_diagnostics(impl_id).0
}
/// Lowers an impl's declared self type, returning it under the impl's
/// binders together with any lowering diagnostics.
pub(crate) fn impl_self_ty_with_diagnostics_query(
    db: &dyn HirDatabase,
    impl_id: ImplId,
) -> (Binders<Ty>, Diagnostics) {
    let impl_data = db.impl_signature(impl_id);
    let resolver = impl_id.resolver(db);
    let generics = generics(db, impl_id.into());
    let mut ctx = TyLoweringContext::new(
        db,
        &resolver,
        &impl_data.store,
        impl_id.into(),
        LifetimeElisionKind::AnonymousCreateParameter { report_in_path: true },
    )
    .with_type_param_mode(ParamLoweringMode::Variable);
    (
        make_binders(db, &generics, ctx.lower_ty(impl_data.self_ty)),
        create_diagnostics(ctx.diagnostics),
    )
}
/// Cycle recovery for `impl_self_ty_with_diagnostics`: yields the error type
/// under the impl's binders, with no diagnostics.
pub(crate) fn impl_self_ty_with_diagnostics_cycle_result(
    db: &dyn HirDatabase,
    impl_id: ImplId,
) -> (Binders<Ty>, Diagnostics) {
    let generics = generics(db, impl_id.into());
    (make_binders(db, &generics, TyKind::Error.intern(Interner)), None)
}
pub(crate) fn const_param_ty_query(db: &dyn HirDatabase, def: ConstParamId) -> Ty {
db.const_param_ty_with_diagnostics(def).0
const_param_ty_with_diagnostics_query(db, def).0
}
// returns None if def is a type arg
@ -1729,36 +1360,12 @@ pub(crate) fn const_param_ty_with_diagnostics_query(
(ty, create_diagnostics(ctx.diagnostics))
}
pub(crate) fn const_param_ty_with_diagnostics_cycle_result(
pub(crate) fn const_param_ty_cycle_result(
_: &dyn HirDatabase,
_: crate::db::HirDatabaseData,
_: ConstParamId,
) -> (Ty, Diagnostics) {
(TyKind::Error.intern(Interner), None)
}
pub(crate) fn impl_trait_query(db: &dyn HirDatabase, impl_id: ImplId) -> Option<Binders<TraitRef>> {
db.impl_trait_with_diagnostics(impl_id).map(|it| it.0)
}
pub(crate) fn impl_trait_with_diagnostics_query(
db: &dyn HirDatabase,
impl_id: ImplId,
) -> Option<(Binders<TraitRef>, Diagnostics)> {
let impl_data = db.impl_signature(impl_id);
let resolver = impl_id.resolver(db);
let mut ctx = TyLoweringContext::new(
db,
&resolver,
&impl_data.store,
impl_id.into(),
LifetimeElisionKind::AnonymousCreateParameter { report_in_path: true },
)
.with_type_param_mode(ParamLoweringMode::Variable);
let (self_ty, binders) = db.impl_self_ty(impl_id).into_value_and_skipped_binders();
let target_trait = impl_data.target_trait.as_ref()?;
let trait_ref = Binders::new(binders, ctx.lower_trait_ref(target_trait, self_ty)?);
Some((trait_ref, create_diagnostics(ctx.diagnostics)))
) -> Ty {
TyKind::Error.intern(Interner)
}
pub(crate) fn return_type_impl_traits(

View file

@ -28,6 +28,10 @@ use crate::{
error_lifetime,
generics::{Generics, generics},
lower::{LifetimeElisionKind, named_associated_type_shorthand_candidates},
next_solver::{
DbInterner,
mapping::{ChalkToNextSolver, NextSolverToChalk},
},
static_lifetime, to_assoc_type_id, to_chalk_trait_id, to_placeholder_idx,
utils::associated_type_by_name_including_super_traits,
};
@ -251,12 +255,20 @@ impl<'a, 'b> PathLoweringContext<'a, 'b> {
// `def` can be either impl itself or item within, and we need impl itself
// now.
let generics = generics.parent_or_self();
let interner = DbInterner::new_with(self.ctx.db, None, None);
let subst = generics.placeholder_subst(self.ctx.db);
self.ctx.db.impl_self_ty(impl_id).substitute(Interner, &subst)
let args: crate::next_solver::GenericArgs<'_> =
subst.to_nextsolver(interner);
self.ctx
.db
.impl_self_ty(impl_id)
.instantiate(interner, args)
.to_chalk(interner)
}
ParamLoweringMode::Variable => TyBuilder::impl_self_ty(self.ctx.db, impl_id)
.fill_with_bound_vars(self.ctx.in_binders, 0)
.build(),
.build(DbInterner::conjure())
.to_chalk(DbInterner::conjure()),
}
}
TypeNs::AdtSelfType(adt) => {
@ -267,7 +279,9 @@ impl<'a, 'b> PathLoweringContext<'a, 'b> {
generics.bound_vars_subst(self.ctx.db, self.ctx.in_binders)
}
};
self.ctx.db.ty(adt.into()).substitute(Interner, &substs)
let interner = DbInterner::conjure();
let args: crate::next_solver::GenericArgs<'_> = substs.to_nextsolver(interner);
self.ctx.db.ty(adt.into()).instantiate(interner, args).to_chalk(interner)
}
TypeNs::AdtId(it) => self.lower_path_inner(it.into(), infer_args),
@ -537,7 +551,9 @@ impl<'a, 'b> PathLoweringContext<'a, 'b> {
TyDefId::TypeAliasId(it) => it.into(),
};
let substs = self.substs_from_path_segment(generic_def, infer_args, None, false);
self.ctx.db.ty(typeable).substitute(Interner, &substs)
let interner = DbInterner::conjure();
let args: crate::next_solver::GenericArgs<'_> = substs.to_nextsolver(interner);
self.ctx.db.ty(typeable).instantiate(interner, args).to_chalk(interner)
}
/// Collect generic arguments from a path into a `Substs`. See also
@ -603,7 +619,7 @@ impl<'a, 'b> PathLoweringContext<'a, 'b> {
explicit_self_ty: Option<Ty>,
lowering_assoc_type_generics: bool,
) -> Substitution {
let mut lifetime_elision = self.ctx.lifetime_elision.clone();
let old_lifetime_elision = self.ctx.lifetime_elision.clone();
if let Some(args) = self.current_or_prev_segment.args_and_bindings
&& args.parenthesized != GenericArgsParentheses::No
@ -633,19 +649,21 @@ impl<'a, 'b> PathLoweringContext<'a, 'b> {
}
// `Fn()`-style generics are treated like functions for the purpose of lifetime elision.
lifetime_elision =
self.ctx.lifetime_elision =
LifetimeElisionKind::AnonymousCreateParameter { report_in_path: false };
}
self.substs_from_args_and_bindings(
let result = self.substs_from_args_and_bindings(
self.current_or_prev_segment.args_and_bindings,
def,
infer_args,
explicit_self_ty,
PathGenericsSource::Segment(self.current_segment_u32()),
lowering_assoc_type_generics,
lifetime_elision,
)
self.ctx.lifetime_elision.clone(),
);
self.ctx.lifetime_elision = old_lifetime_elision;
result
}
pub(super) fn substs_from_args_and_bindings(

View file

@ -19,9 +19,9 @@ use base_db::Crate;
use either::Either;
use hir_def::item_tree::FieldsShape;
use hir_def::{
AdtId, AssocItemId, CallableDefId, ConstParamId, EnumVariantId, FunctionId, GenericDefId,
GenericParamId, ImplId, ItemContainerId, LocalFieldId, Lookup, StructId, TraitId, TypeAliasId,
TypeOrConstParamId, VariantId,
AdtId, AssocItemId, CallableDefId, ConstParamId, DefWithBodyId, EnumVariantId, FunctionId,
GenericDefId, GenericParamId, HasModule, ImplId, ItemContainerId, LocalFieldId, Lookup,
StructId, TraitId, TypeAliasId, TypeOrConstParamId, VariantId,
expr_store::{
ExpressionStore,
path::{GenericArg, Path},
@ -57,7 +57,7 @@ use triomphe::Arc;
use crate::ValueTyDefId;
use crate::{
FnAbi, ImplTraitId, Interner, ParamKind, TyDefId, TyLoweringDiagnostic,
FnAbi, ImplTraitId, Interner, ParamKind, TraitEnvironment, TyDefId, TyLoweringDiagnostic,
TyLoweringDiagnosticKind,
consteval_nextsolver::{intern_const_ref, path_to_const, unknown_const_as_generic},
db::HirDatabase,
@ -66,8 +66,10 @@ use crate::{
next_solver::{
AdtDef, AliasTy, Binder, BoundExistentialPredicates, BoundRegionKind, BoundTyKind,
BoundVarKind, BoundVarKinds, Clause, Clauses, Const, DbInterner, EarlyBinder,
EarlyParamRegion, ErrorGuaranteed, GenericArgs, PolyFnSig, Predicate, Region, SolverDefId,
TraitPredicate, TraitRef, Ty, Tys, abi::Safety, mapping::ChalkToNextSolver,
EarlyParamRegion, ErrorGuaranteed, GenericArgs, ParamEnv, PolyFnSig, Predicate, Region,
SolverDefId, TraitPredicate, TraitRef, Ty, Tys,
abi::Safety,
mapping::{ChalkToNextSolver, convert_ty_for_result},
},
};
@ -902,7 +904,7 @@ pub(crate) fn impl_trait_query<'db>(
db: &'db dyn HirDatabase,
impl_id: ImplId,
) -> Option<EarlyBinder<'db, TraitRef<'db>>> {
db.impl_trait_with_diagnostics_ns(impl_id).map(|it| it.0)
db.impl_trait_with_diagnostics(impl_id).map(|it| it.0)
}
pub(crate) fn impl_trait_with_diagnostics_query<'db>(
@ -918,7 +920,7 @@ pub(crate) fn impl_trait_with_diagnostics_query<'db>(
impl_id.into(),
LifetimeElisionKind::AnonymousCreateParameter { report_in_path: true },
);
let self_ty = db.impl_self_ty_ns(impl_id).skip_binder();
let self_ty = db.impl_self_ty(impl_id).skip_binder();
let target_trait = impl_data.target_trait.as_ref()?;
let trait_ref = EarlyBinder::bind(ctx.lower_trait_ref(target_trait, self_ty)?);
Some((trait_ref, create_diagnostics(ctx.diagnostics)))
@ -984,7 +986,7 @@ pub(crate) fn ty_query<'db>(db: &'db dyn HirDatabase, def: TyDefId) -> EarlyBind
AdtDef::new(it, interner),
GenericArgs::identity_for_item(interner, it.into()),
)),
TyDefId::TypeAliasId(it) => db.type_for_type_alias_with_diagnostics_ns(it).0,
TyDefId::TypeAliasId(it) => db.type_for_type_alias_with_diagnostics(it).0,
}
}
@ -1129,7 +1131,7 @@ pub(crate) fn impl_self_ty_query<'db>(
db: &'db dyn HirDatabase,
impl_id: ImplId,
) -> EarlyBinder<'db, Ty<'db>> {
db.impl_self_ty_with_diagnostics_ns(impl_id).0
db.impl_self_ty_with_diagnostics(impl_id).0
}
pub(crate) fn impl_self_ty_with_diagnostics_query<'db>(
@ -1160,7 +1162,7 @@ pub(crate) fn impl_self_ty_with_diagnostics_cycle_result(
}
pub(crate) fn const_param_ty_query<'db>(db: &'db dyn HirDatabase, def: ConstParamId) -> Ty<'db> {
db.const_param_ty_with_diagnostics_ns(def).0
db.const_param_ty_with_diagnostics(def).0
}
// returns None if def is a type arg
@ -1189,11 +1191,21 @@ pub(crate) fn const_param_ty_with_diagnostics_query<'db>(
(ty, create_diagnostics(ctx.diagnostics))
}
pub(crate) fn const_param_ty_with_diagnostics_cycle_result<'db>(
db: &'db dyn HirDatabase,
_: crate::db::HirDatabaseData,
def: ConstParamId,
) -> (Ty<'db>, Diagnostics) {
let resolver = def.parent().resolver(db);
let interner = DbInterner::new_with(db, Some(resolver.krate()), None);
(Ty::new_error(interner, ErrorGuaranteed), None)
}
pub(crate) fn field_types_query<'db>(
db: &'db dyn HirDatabase,
variant_id: VariantId,
) -> Arc<ArenaMap<LocalFieldId, EarlyBinder<'db, Ty<'db>>>> {
db.field_types_with_diagnostics_ns(variant_id).0
db.field_types_with_diagnostics(variant_id).0
}
/// Build the type of all specific fields of a struct or enum variant.
@ -1355,6 +1367,18 @@ pub(crate) fn generic_predicates_for_param_cycle_result(
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub struct GenericPredicates<'db>(Option<Arc<[Clause<'db>]>>);
impl<'db> GenericPredicates<'db> {
pub fn instantiate(
&self,
interner: DbInterner<'db>,
args: GenericArgs<'db>,
) -> Option<impl Iterator<Item = Clause<'db>>> {
self.0
.as_ref()
.map(|it| EarlyBinder::bind(it.iter().copied()).iter_instantiated(interner, args))
}
}
impl<'db> ops::Deref for GenericPredicates<'db> {
type Target = [Clause<'db>];
@ -1363,6 +1387,122 @@ impl<'db> ops::Deref for GenericPredicates<'db> {
}
}
pub(crate) fn trait_environment_for_body_query(
db: &dyn HirDatabase,
def: DefWithBodyId,
) -> Arc<TraitEnvironment<'_>> {
let Some(def) = def.as_generic_def_id(db) else {
let krate = def.module(db).krate();
return TraitEnvironment::empty(krate);
};
db.trait_environment(def)
}
pub(crate) fn trait_environment_query<'db>(
db: &'db dyn HirDatabase,
def: GenericDefId,
) -> Arc<TraitEnvironment<'db>> {
let generics = generics(db, def);
if generics.has_no_predicates() && generics.is_empty() {
return TraitEnvironment::empty(def.krate(db));
}
let interner = DbInterner::new_with(db, Some(def.krate(db)), None);
let resolver = def.resolver(db);
let mut ctx = TyLoweringContext::new(
db,
&resolver,
generics.store(),
def,
LifetimeElisionKind::AnonymousReportError,
);
let mut traits_in_scope = Vec::new();
let mut clauses = Vec::new();
for maybe_parent_generics in
std::iter::successors(Some(&generics), |generics| generics.parent_generics())
{
ctx.store = maybe_parent_generics.store();
for pred in maybe_parent_generics.where_predicates() {
for pred in ctx.lower_where_predicate(pred, false, &generics, PredicateFilter::All) {
if let rustc_type_ir::ClauseKind::Trait(tr) = pred.kind().skip_binder() {
traits_in_scope
.push((convert_ty_for_result(interner, tr.self_ty()), tr.def_id().0));
}
clauses.push(pred);
}
}
}
if let Some(trait_id) = def.assoc_trait_container(db) {
// add `Self: Trait<T1, T2, ...>` to the environment in trait
// function default implementations (and speculative code
// inside consts or type aliases)
cov_mark::hit!(trait_self_implements_self);
let trait_ref = TraitRef::identity(ctx.interner, trait_id.into());
let clause = Clause(Predicate::new(
ctx.interner,
Binder::dummy(rustc_type_ir::PredicateKind::Clause(rustc_type_ir::ClauseKind::Trait(
TraitPredicate { trait_ref, polarity: rustc_type_ir::PredicatePolarity::Positive },
))),
));
clauses.push(clause);
}
let explicitly_unsized_tys = ctx.unsized_types;
let sized_trait = LangItem::Sized.resolve_trait(db, resolver.krate());
if let Some(sized_trait) = sized_trait {
let (mut generics, mut def_id) =
(crate::next_solver::generics::generics(db, def.into()), def);
loop {
let self_idx = trait_self_param_idx(db, def_id);
for (idx, p) in generics.own_params.iter().enumerate() {
if let Some(self_idx) = self_idx
&& p.index() as usize == self_idx
{
continue;
}
let GenericParamId::TypeParamId(param_id) = p.id else {
continue;
};
let idx = idx as u32 + generics.parent_count as u32;
let param_ty = Ty::new_param(ctx.interner, param_id, idx, p.name.clone());
if explicitly_unsized_tys.contains(&param_ty) {
continue;
}
let trait_ref = TraitRef::new_from_args(
ctx.interner,
sized_trait.into(),
GenericArgs::new_from_iter(ctx.interner, [param_ty.into()]),
);
let clause = Clause(Predicate::new(
ctx.interner,
Binder::dummy(rustc_type_ir::PredicateKind::Clause(
rustc_type_ir::ClauseKind::Trait(TraitPredicate {
trait_ref,
polarity: rustc_type_ir::PredicatePolarity::Positive,
}),
)),
));
clauses.push(clause);
}
if let Some(g) = generics.parent {
generics = crate::next_solver::generics::generics(db, g.into());
def_id = g;
} else {
break;
}
}
}
let clauses = rustc_type_ir::elaborate::elaborate(ctx.interner, clauses);
let clauses = Clauses::new_from_iter(ctx.interner, clauses);
let env = ParamEnv { clauses };
TraitEnvironment::new(resolver.krate(), None, traits_in_scope.into_boxed_slice(), env)
}
#[derive(Copy, Clone, Debug)]
pub(crate) enum PredicateFilter {
SelfTrait,
@ -1830,7 +1970,8 @@ fn named_associated_type_shorthand_candidates<'db, R>(
let mut search = |t: TraitRef<'db>| -> Option<R> {
let trait_id = t.def_id.0;
let mut checked_traits = FxHashSet::default();
let mut check_trait = |trait_id: TraitId| {
let mut check_trait = |trait_ref: TraitRef<'db>| {
let trait_id = trait_ref.def_id.0;
let name = &db.trait_signature(trait_id).name;
tracing::debug!(?trait_id, ?name);
if !checked_traits.insert(trait_id) {
@ -1841,37 +1982,39 @@ fn named_associated_type_shorthand_candidates<'db, R>(
tracing::debug!(?data.items);
for (name, assoc_id) in &data.items {
if let &AssocItemId::TypeAliasId(alias) = assoc_id
&& let Some(ty) = check_alias(name, t, alias)
&& let Some(ty) = check_alias(name, trait_ref, alias)
{
return Some(ty);
}
}
None
};
let mut stack: SmallVec<[_; 4]> = smallvec![trait_id];
while let Some(trait_def_id) = stack.pop() {
if let Some(alias) = check_trait(trait_def_id) {
let mut stack: SmallVec<[_; 4]> = smallvec![t];
while let Some(trait_ref) = stack.pop() {
if let Some(alias) = check_trait(trait_ref) {
return Some(alias);
}
for pred in generic_predicates_filtered_by(
db,
GenericDefId::TraitId(trait_def_id),
GenericDefId::TraitId(trait_ref.def_id.0),
PredicateFilter::SelfTrait,
// We are likely in the midst of lowering generic predicates of `def`.
// So, if we allow `pred == def` we might fall into an infinite recursion.
// Actually, we have already checked for the case `pred == def` above as we started
// with a stack including `trait_id`
|pred| pred != def && pred == GenericDefId::TraitId(trait_def_id),
|pred| pred != def && pred == GenericDefId::TraitId(trait_ref.def_id.0),
)
.0
.deref()
{
tracing::debug!(?pred);
let trait_id = match pred.kind().skip_binder() {
rustc_type_ir::ClauseKind::Trait(pred) => pred.def_id(),
let sup_trait_ref = match pred.kind().skip_binder() {
rustc_type_ir::ClauseKind::Trait(pred) => pred.trait_ref,
_ => continue,
};
stack.push(trait_id.0);
let sup_trait_ref =
EarlyBinder::bind(sup_trait_ref).instantiate(interner, trait_ref.args);
stack.push(sup_trait_ref);
}
tracing::debug!(?stack);
}
@ -1881,7 +2024,7 @@ fn named_associated_type_shorthand_candidates<'db, R>(
match res {
TypeNs::SelfType(impl_id) => {
let trait_ref = db.impl_trait_ns(impl_id)?;
let trait_ref = db.impl_trait(impl_id)?;
// FIXME(next-solver): same method in `lower` checks for impl or not
// Is that needed here?

View file

@ -287,7 +287,7 @@ impl<'a, 'b, 'db> PathLoweringContext<'a, 'b, 'db> {
}
}
}
TypeNs::SelfType(impl_id) => self.ctx.db.impl_self_ty_ns(impl_id).skip_binder(),
TypeNs::SelfType(impl_id) => self.ctx.db.impl_self_ty(impl_id).skip_binder(),
TypeNs::AdtSelfType(adt) => {
let args = crate::next_solver::GenericArgs::identity_for_item(
self.ctx.interner,
@ -616,7 +616,7 @@ impl<'a, 'b, 'db> PathLoweringContext<'a, 'b, 'db> {
explicit_self_ty: Option<Ty<'db>>,
lowering_assoc_type_generics: bool,
) -> crate::next_solver::GenericArgs<'db> {
let mut lifetime_elision = self.ctx.lifetime_elision.clone();
let old_lifetime_elision = self.ctx.lifetime_elision.clone();
if let Some(args) = self.current_or_prev_segment.args_and_bindings
&& args.parenthesized != GenericArgsParentheses::No
@ -646,19 +646,21 @@ impl<'a, 'b, 'db> PathLoweringContext<'a, 'b, 'db> {
}
// `Fn()`-style generics are treated like functions for the purpose of lifetime elision.
lifetime_elision =
self.ctx.lifetime_elision =
LifetimeElisionKind::AnonymousCreateParameter { report_in_path: false };
}
self.substs_from_args_and_bindings(
let result = self.substs_from_args_and_bindings(
self.current_or_prev_segment.args_and_bindings,
def,
infer_args,
explicit_self_ty,
PathGenericsSource::Segment(self.current_segment_u32()),
lowering_assoc_type_generics,
lifetime_elision,
)
self.ctx.lifetime_elision.clone(),
);
self.ctx.lifetime_elision = old_lifetime_elision;
result
}
pub(super) fn substs_from_args_and_bindings(
@ -915,22 +917,36 @@ impl<'a, 'b, 'db> PathLoweringContext<'a, 'b, 'db> {
binding.type_ref.as_ref().map_or(0, |_| 1) + binding.bounds.len(),
);
if let Some(type_ref) = binding.type_ref {
match (&self.ctx.store[type_ref], self.ctx.impl_trait_mode.mode) {
(TypeRef::ImplTrait(_), ImplTraitLoweringMode::Disallowed) => (),
(_, ImplTraitLoweringMode::Disallowed | ImplTraitLoweringMode::Opaque) => {
let ty = self.ctx.lower_ty(type_ref);
let pred = Clause(Predicate::new(
interner,
Binder::dummy(rustc_type_ir::PredicateKind::Clause(
rustc_type_ir::ClauseKind::Projection(ProjectionPredicate {
projection_term,
term: ty.into(),
}),
)),
));
predicates.push(pred);
let lifetime_elision =
if args_and_bindings.parenthesized == GenericArgsParentheses::ParenSugar {
// `Fn()`-style generics are elided like functions. This is `Output` (we lower to it in hir-def).
LifetimeElisionKind::for_fn_ret(self.ctx.interner)
} else {
self.ctx.lifetime_elision.clone()
};
self.with_lifetime_elision(lifetime_elision, |this| {
match (&this.ctx.store[type_ref], this.ctx.impl_trait_mode.mode) {
(TypeRef::ImplTrait(_), ImplTraitLoweringMode::Disallowed) => (),
(
_,
ImplTraitLoweringMode::Disallowed | ImplTraitLoweringMode::Opaque,
) => {
let ty = this.ctx.lower_ty(type_ref);
let pred = Clause(Predicate::new(
interner,
Binder::dummy(rustc_type_ir::PredicateKind::Clause(
rustc_type_ir::ClauseKind::Projection(
ProjectionPredicate {
projection_term,
term: ty.into(),
},
),
)),
));
predicates.push(pred);
}
}
}
})
}
for bound in binding.bounds.iter() {
predicates.extend(self.ctx.lower_type_bound(

View file

@ -31,10 +31,13 @@ use crate::{
infer::{Adjust, Adjustment, OverloadedDeref, PointerCast, unify::InferenceTable},
lang_items::is_box,
next_solver::{
self, SolverDefId,
fulfill::FulfillmentCtxt,
infer::DefineOpaqueTypes,
self, DbInterner, SolverDefId,
infer::{
DefineOpaqueTypes,
traits::{ObligationCause, PredicateObligation},
},
mapping::{ChalkToNextSolver, NextSolverToChalk},
obligation_ctxt::ObligationCtxt,
},
primitive::{FloatTy, IntTy, UintTy},
to_chalk_trait_id,
@ -294,11 +297,12 @@ impl TraitImpls {
continue;
}
let target_trait = match db.impl_trait(impl_id) {
Some(tr) => tr.skip_binders().hir_trait_id(),
Some(tr) => tr.skip_binder().def_id.0,
None => continue,
};
let self_ty = db.impl_self_ty(impl_id);
let self_ty_fp = TyFingerprint::for_trait_impl(self_ty.skip_binders());
let interner = DbInterner::new_with(db, None, None);
let self_ty = db.impl_self_ty(impl_id).instantiate_identity().to_chalk(interner);
let self_ty_fp = TyFingerprint::for_trait_impl(&self_ty);
map.entry(target_trait).or_default().entry(self_ty_fp).or_default().push(impl_id);
}
@ -411,8 +415,8 @@ impl InherentImpls {
continue;
}
let self_ty = db.impl_self_ty(impl_id);
let self_ty = self_ty.skip_binders();
let interner = DbInterner::new_with(db, None, None);
let self_ty = &db.impl_self_ty(impl_id).instantiate_identity().to_chalk(interner);
match is_inherent_impl_coherent(db, def_map, impl_id, self_ty) {
true => {
@ -542,7 +546,7 @@ pub fn def_crates(db: &dyn HirDatabase, ty: &Ty, cur_crate: Crate) -> Option<Sma
pub(crate) fn lookup_method<'db>(
db: &'db dyn HirDatabase,
ty: &next_solver::Canonical<'db, crate::next_solver::Ty<'db>>,
env: Arc<TraitEnvironment>,
env: Arc<TraitEnvironment<'db>>,
traits_in_scope: &FxHashSet<TraitId>,
visible_from_module: VisibleFromModule,
name: &Name,
@ -711,7 +715,7 @@ impl ReceiverAdjustments {
pub(crate) fn iterate_method_candidates<'db, T>(
ty: &next_solver::Canonical<'db, crate::next_solver::Ty<'db>>,
db: &'db dyn HirDatabase,
env: Arc<TraitEnvironment>,
env: Arc<TraitEnvironment<'db>>,
traits_in_scope: &FxHashSet<TraitId>,
visible_from_module: VisibleFromModule,
name: Option<&Name>,
@ -739,9 +743,9 @@ pub(crate) fn iterate_method_candidates<'db, T>(
slot
}
pub fn lookup_impl_const(
db: &dyn HirDatabase,
env: Arc<TraitEnvironment>,
pub fn lookup_impl_const<'db>(
db: &'db dyn HirDatabase,
env: Arc<TraitEnvironment<'db>>,
const_id: ConstId,
subs: Substitution,
) -> (ConstId, Substitution) {
@ -767,9 +771,9 @@ pub fn lookup_impl_const(
/// Checks if the self parameter of `Trait` method is the `dyn Trait` and we should
/// call the method using the vtable.
pub fn is_dyn_method(
db: &dyn HirDatabase,
_env: Arc<TraitEnvironment>,
pub fn is_dyn_method<'db>(
db: &'db dyn HirDatabase,
_env: Arc<TraitEnvironment<'db>>,
func: FunctionId,
fn_subst: Substitution,
) -> Option<usize> {
@ -809,9 +813,9 @@ pub fn is_dyn_method(
/// Looks up the impl method that actually runs for the trait method `func`.
///
/// Returns `func` if it's not a method defined in a trait or the lookup failed.
pub(crate) fn lookup_impl_method_query(
db: &dyn HirDatabase,
env: Arc<TraitEnvironment>,
pub(crate) fn lookup_impl_method_query<'db>(
db: &'db dyn HirDatabase,
env: Arc<TraitEnvironment<'db>>,
func: FunctionId,
fn_subst: Substitution,
) -> (FunctionId, Substitution) {
@ -842,10 +846,10 @@ pub(crate) fn lookup_impl_method_query(
)
}
fn lookup_impl_assoc_item_for_trait_ref(
fn lookup_impl_assoc_item_for_trait_ref<'db>(
trait_ref: TraitRef,
db: &dyn HirDatabase,
env: Arc<TraitEnvironment>,
db: &'db dyn HirDatabase,
env: Arc<TraitEnvironment<'db>>,
name: &Name,
) -> Option<(AssocItemId, Substitution)> {
let hir_trait_id = trait_ref.hir_trait_id();
@ -894,10 +898,13 @@ fn find_matching_impl(
table.run_in_snapshot(|table| {
let impl_substs =
TyBuilder::subst_for_def(db, impl_, None).fill_with_inference_vars(table).build();
let args: crate::next_solver::GenericArgs<'_> =
impl_substs.to_nextsolver(table.interner);
let trait_ref = db
.impl_trait(impl_)
.expect("non-trait method in find_matching_impl")
.substitute(Interner, &impl_substs);
.instantiate(table.interner, args)
.to_chalk(table.interner);
if !table.unify(&trait_ref, &actual_trait_ref) {
return None;
@ -907,10 +914,11 @@ fn find_matching_impl(
.into_iter()
.map(|b| -> Goal { b.cast(Interner) });
for goal in wcs {
if table.try_obligation(goal.clone()).no_solution() {
let goal = goal.to_nextsolver(table.interner);
if table.try_obligation(goal).no_solution() {
return None;
}
table.register_obligation(goal.to_nextsolver(table.interner));
table.register_obligation(goal);
}
Some((
impl_.impl_items(db),
@ -1014,7 +1022,9 @@ pub fn check_orphan_rules(db: &dyn HirDatabase, impl_: ImplId) -> bool {
let local_crate = impl_.lookup(db).container.krate();
let is_local = |tgt_crate| tgt_crate == local_crate;
let trait_ref = impl_trait.substitute(Interner, &substs);
let interner = DbInterner::new_with(db, None, None);
let args: crate::next_solver::GenericArgs<'_> = substs.to_nextsolver(interner);
let trait_ref = impl_trait.instantiate(interner, args).to_chalk(interner);
let trait_id = from_chalk_trait_id(trait_ref.trait_id);
if is_local(trait_id.module(db).krate()) {
// trait to be implemented is local
@ -1063,7 +1073,7 @@ pub fn check_orphan_rules(db: &dyn HirDatabase, impl_: ImplId) -> bool {
pub fn iterate_path_candidates<'db>(
ty: &next_solver::Canonical<'db, crate::next_solver::Ty<'db>>,
db: &'db dyn HirDatabase,
env: Arc<TraitEnvironment>,
env: Arc<TraitEnvironment<'db>>,
traits_in_scope: &FxHashSet<TraitId>,
visible_from_module: VisibleFromModule,
name: Option<&Name>,
@ -1085,7 +1095,7 @@ pub fn iterate_path_candidates<'db>(
pub fn iterate_method_candidates_dyn<'db>(
ty: &next_solver::Canonical<'db, crate::next_solver::Ty<'db>>,
db: &'db dyn HirDatabase,
env: Arc<TraitEnvironment>,
env: Arc<TraitEnvironment<'db>>,
traits_in_scope: &FxHashSet<TraitId>,
visible_from_module: VisibleFromModule,
name: Option<&Name>,
@ -1347,7 +1357,7 @@ fn iterate_method_candidates_by_receiver<'db>(
fn iterate_method_candidates_for_self_ty<'db>(
self_ty: &next_solver::Canonical<'db, crate::next_solver::Ty<'db>>,
db: &'db dyn HirDatabase,
env: Arc<TraitEnvironment>,
env: Arc<TraitEnvironment<'db>>,
traits_in_scope: &FxHashSet<TraitId>,
visible_from_module: VisibleFromModule,
name: Option<&Name>,
@ -1395,7 +1405,7 @@ fn iterate_trait_method_candidates(
let db = table.db;
let canonical_self_ty = table.canonicalize(self_ty.clone().to_nextsolver(table.interner));
let TraitEnvironment { krate, .. } = *table.trait_env;
let krate = table.trait_env.krate;
'traits: for &t in traits_in_scope {
let data = db.trait_signature(t);
@ -1635,7 +1645,6 @@ pub(crate) fn resolve_indexing_op<'db>(
let ty = table.instantiate_canonical_ns(ty);
let deref_chain = autoderef_method_receiver(table, ty);
for (ty, adj) in deref_chain {
//let goal = generic_implements_goal_ns(db, &table.trait_env, index_trait, &ty);
let goal = generic_implements_goal_ns(table, index_trait, ty);
if !next_trait_solve_canonical_in_ctxt(&table.infer_ctxt, goal).no_solution() {
return Some(adj);
@ -1694,8 +1703,10 @@ fn is_valid_impl_method_candidate(
return IsValidCandidate::NotVisible;
}
let self_ty_matches = table.run_in_snapshot(|table| {
let expected_self_ty =
TyBuilder::impl_self_ty(db, impl_id).fill_with_inference_vars(table).build();
let expected_self_ty = TyBuilder::impl_self_ty(db, impl_id)
.fill_with_inference_vars(table)
.build(DbInterner::conjure())
.to_chalk(DbInterner::conjure());
table.unify(&expected_self_ty, self_ty)
});
if !self_ty_matches {
@ -1741,9 +1752,13 @@ fn is_valid_trait_method_candidate(
.fill_with_inference_vars(table)
.build();
let args: crate::next_solver::GenericArgs<'_> =
fn_subst.to_nextsolver(table.interner);
let sig = db.callable_item_signature(fn_id.into());
let expected_receiver =
sig.map(|s| s.params()[0].clone()).substitute(Interner, &fn_subst);
let expected_receiver = sig
.map_bound(|s| s.skip_binder().inputs_and_output.as_slice()[0])
.instantiate(table.interner, args)
.to_chalk(table.interner);
// FIXME: Clean up this mess with some context struct like rustc's `ProbeContext`
let variance = match mode {
@ -1754,7 +1769,7 @@ fn is_valid_trait_method_candidate(
.infer_ctxt
.at(
&next_solver::infer::traits::ObligationCause::dummy(),
table.trait_env.env.to_nextsolver(table.interner),
table.trait_env.env,
)
.relate(
DefineOpaqueTypes::No,
@ -1767,12 +1782,10 @@ fn is_valid_trait_method_candidate(
};
if !infer_ok.obligations.is_empty() {
let mut ctxt = FulfillmentCtxt::new(&table.infer_ctxt);
for pred in infer_ok.into_obligations() {
ctxt.register_predicate_obligation(&table.infer_ctxt, pred);
}
let mut ctxt = ObligationCtxt::new(&table.infer_ctxt);
ctxt.register_obligations(infer_ok.into_obligations());
// FIXME: Are we doing this correctly? Probably better to follow rustc more closely.
check_that!(ctxt.select_where_possible(&table.infer_ctxt).is_empty());
check_that!(ctxt.select_where_possible().is_empty());
}
check_that!(table.unify(receiver_ty, &expected_receiver));
@ -1815,9 +1828,11 @@ fn is_valid_impl_fn_candidate(
}
table.run_in_snapshot(|table| {
let _p = tracing::info_span!("subst_for_def").entered();
let impl_subst =
TyBuilder::subst_for_def(db, impl_id, None).fill_with_inference_vars(table).build();
let expect_self_ty = db.impl_self_ty(impl_id).substitute(Interner, &impl_subst);
let impl_subst = table.infer_ctxt.fresh_args_for_item(impl_id.into());
let expect_self_ty = db
.impl_self_ty(impl_id)
.instantiate(table.interner, &impl_subst)
.to_chalk(table.interner);
check_that!(table.unify(&expect_self_ty, self_ty));
@ -1825,65 +1840,49 @@ fn is_valid_impl_fn_candidate(
let _p = tracing::info_span!("check_receiver_ty").entered();
check_that!(data.has_self_param());
let fn_subst = TyBuilder::subst_for_def(db, fn_id, Some(impl_subst.clone()))
.fill_with_inference_vars(table)
.build();
let fn_subst: crate::Substitution =
table.infer_ctxt.fresh_args_for_item(fn_id.into()).to_chalk(table.interner);
let args: crate::next_solver::GenericArgs<'_> = fn_subst.to_nextsolver(table.interner);
let sig = db.callable_item_signature(fn_id.into());
let expected_receiver =
sig.map(|s| s.params()[0].clone()).substitute(Interner, &fn_subst);
let expected_receiver = sig
.map_bound(|s| s.skip_binder().inputs_and_output.as_slice()[0])
.instantiate(table.interner, args)
.to_chalk(table.interner);
check_that!(table.unify(receiver_ty, &expected_receiver));
}
// We need to consider the bounds on the impl to distinguish functions of the same name
// for a type.
let predicates = db.generic_predicates(impl_id.into());
let goals = predicates.iter().map(|p| {
let (p, b) = p
.clone()
.substitute(Interner, &impl_subst)
// Skipping the inner binders is ok, as we don't handle quantified where
// clauses yet.
.into_value_and_skipped_binders();
stdx::always!(b.len(Interner) == 0);
let predicates = db.generic_predicates_ns(impl_id.into());
let Some(predicates) = predicates.instantiate(table.interner, impl_subst) else {
return IsValidCandidate::Yes;
};
p.cast::<Goal>(Interner)
});
let mut ctxt = ObligationCtxt::new(&table.infer_ctxt);
for goal in goals.clone() {
match table.solve_obligation(goal) {
Ok(_) => {}
Err(_) => {
return IsValidCandidate::No;
}
}
ctxt.register_obligations(predicates.into_iter().map(|p| {
PredicateObligation::new(
table.interner,
ObligationCause::new(),
table.trait_env.env,
p.0,
)
}));
if ctxt.select_where_possible().is_empty() {
IsValidCandidate::Yes
} else {
IsValidCandidate::No
}
for goal in goals {
if table.try_obligation(goal).no_solution() {
return IsValidCandidate::No;
}
}
IsValidCandidate::Yes
})
}
pub fn implements_trait(
pub fn implements_trait_unique<'db>(
ty: &Canonical<Ty>,
db: &dyn HirDatabase,
env: &TraitEnvironment,
trait_: TraitId,
) -> bool {
let goal = generic_implements_goal(db, env, trait_, ty);
!db.trait_solve(env.krate, env.block, goal.cast(Interner)).no_solution()
}
pub fn implements_trait_unique(
ty: &Canonical<Ty>,
db: &dyn HirDatabase,
env: &TraitEnvironment,
db: &'db dyn HirDatabase,
env: &TraitEnvironment<'db>,
trait_: TraitId,
) -> bool {
let goal = generic_implements_goal(db, env, trait_, ty);
@ -1891,11 +1890,11 @@ pub fn implements_trait_unique(
}
/// This creates Substs for a trait with the given Self type and type variables
/// for all other parameters, to query Chalk with it.
/// for all other parameters, to query next solver with it.
#[tracing::instrument(skip_all)]
fn generic_implements_goal(
db: &dyn HirDatabase,
env: &TraitEnvironment,
fn generic_implements_goal<'db>(
db: &'db dyn HirDatabase,
env: &TraitEnvironment<'db>,
trait_: TraitId,
self_ty: &Canonical<Ty>,
) -> Canonical<InEnvironment<super::DomainGoal>> {
@ -1917,7 +1916,10 @@ fn generic_implements_goal(
let binders = CanonicalVarKinds::from_iter(Interner, kinds);
let obligation = trait_ref.cast(Interner);
let value = InEnvironment::new(&env.env, obligation);
let value = InEnvironment::new(
&env.env.to_chalk(DbInterner::new_with(db, Some(env.krate), env.block)),
obligation,
);
Canonical { binders, value }
}
@ -1934,11 +1936,7 @@ fn generic_implements_goal_ns<'db>(
let trait_ref =
rustc_type_ir::TraitRef::new_from_args(table.infer_ctxt.interner, trait_.into(), args)
.with_replaced_self_ty(table.infer_ctxt.interner, self_ty);
let goal = next_solver::Goal::new(
table.infer_ctxt.interner,
table.trait_env.env.to_nextsolver(table.infer_ctxt.interner),
trait_ref,
);
let goal = next_solver::Goal::new(table.infer_ctxt.interner, table.trait_env.env, trait_ref);
table.canonicalize(goal)
}

View file

@ -165,7 +165,7 @@ enum MirOrDynIndex {
pub struct Evaluator<'a> {
db: &'a dyn HirDatabase,
trait_env: Arc<TraitEnvironment>,
trait_env: Arc<TraitEnvironment<'a>>,
target_data_layout: Arc<TargetDataLayout>,
stack: Vec<u8>,
heap: Vec<u8>,
@ -432,9 +432,12 @@ impl MirEvalError {
let self_ = match func.lookup(db).container {
ItemContainerId::ImplId(impl_id) => Some({
let generics = crate::generics::generics(db, impl_id.into());
let interner = DbInterner::new_with(db, None, None);
let substs = generics.placeholder_subst(db);
let args: crate::next_solver::GenericArgs<'_> =
substs.to_nextsolver(interner);
db.impl_self_ty(impl_id)
.substitute(Interner, &substs)
.instantiate(interner, args)
.display(db, display_target)
.to_string()
}),
@ -582,8 +585,8 @@ impl MirOutput {
}
}
pub fn interpret_mir(
db: &dyn HirDatabase,
pub fn interpret_mir<'db>(
db: &'db dyn HirDatabase,
body: Arc<MirBody>,
// FIXME: This is workaround. Ideally, const generics should have a separate body (issue #7434), but now
// they share their body with their parent, so in MIR lowering we have locals of the parent body, which
@ -591,7 +594,7 @@ pub fn interpret_mir(
// a zero size, hoping that they are all outside of our current body. Even without a fix for #7434, we can
// (and probably should) do better here, for example by excluding bindings outside of the target expression.
assert_placeholder_ty_is_unused: bool,
trait_env: Option<Arc<TraitEnvironment>>,
trait_env: Option<Arc<TraitEnvironment<'db>>>,
) -> Result<(Result<Const>, MirOutput)> {
let ty = body.locals[return_slot()].ty.clone();
let mut evaluator = Evaluator::new(db, body.owner, assert_placeholder_ty_is_unused, trait_env)?;
@ -632,11 +635,11 @@ const EXECUTION_LIMIT: usize = 10_000_000;
impl<'db> Evaluator<'db> {
pub fn new(
db: &dyn HirDatabase,
db: &'db dyn HirDatabase,
owner: DefWithBodyId,
assert_placeholder_ty_is_unused: bool,
trait_env: Option<Arc<TraitEnvironment>>,
) -> Result<Evaluator<'_>> {
trait_env: Option<Arc<TraitEnvironment<'db>>>,
) -> Result<Evaluator<'db>> {
let crate_id = owner.module(db).krate();
let target_data_layout = match db.target_data_layout(crate_id) {
Ok(target_data_layout) => target_data_layout,

View file

@ -14,6 +14,7 @@ use hir_expand::name::Name;
use intern::{Symbol, sym};
use stdx::never;
use crate::next_solver::mapping::NextSolverToChalk;
use crate::{
DropGlue,
display::DisplayTarget,
@ -1371,9 +1372,8 @@ impl Evaluator<'_> {
result = (l as i8).cmp(&(r as i8));
}
if let Some(e) = LangItem::Ordering.resolve_enum(self.db, self.crate_id) {
let ty = self.db.ty(e.into());
let r = self
.compute_discriminant(ty.skip_binders().clone(), &[result as i8 as u8])?;
let ty = self.db.ty(e.into()).skip_binder().to_chalk(interner);
let r = self.compute_discriminant(ty.clone(), &[result as i8 as u8])?;
destination.write_from_bytes(self, &r.to_le_bytes()[0..destination.size])?;
Ok(())
} else {

View file

@ -43,7 +43,10 @@ use crate::{
Terminator, TerminatorKind, TupleFieldId, Ty, UnOp, VariantId, intern_const_scalar,
return_slot,
},
next_solver::{DbInterner, mapping::ChalkToNextSolver},
next_solver::{
DbInterner,
mapping::{ChalkToNextSolver, NextSolverToChalk},
},
static_lifetime,
traits::FnTrait,
utils::ClosureSubst,
@ -82,7 +85,7 @@ struct MirLowerCtx<'db> {
infer: &'db InferenceResult,
resolver: Resolver<'db>,
drop_scopes: Vec<DropScope>,
env: Arc<TraitEnvironment>,
env: Arc<TraitEnvironment<'db>>,
}
// FIXME: Make this smaller, its stored in database queries
@ -2207,8 +2210,13 @@ pub fn lower_to_mir(
// otherwise it's an inline const, and has no parameter
if let DefWithBodyId::FunctionId(fid) = owner {
let substs = TyBuilder::placeholder_subst(db, fid);
let callable_sig =
db.callable_item_signature(fid.into()).substitute(Interner, &substs);
let interner = DbInterner::new_with(db, None, None);
let args: crate::next_solver::GenericArgs<'_> = substs.to_nextsolver(interner);
let callable_sig = db
.callable_item_signature(fid.into())
.instantiate(interner, args)
.skip_binder()
.to_chalk(interner);
let mut params = callable_sig.params().iter();
let self_param = body.self_param.and_then(|id| Some((id, params.next()?.clone())));
break 'b ctx.lower_params_and_bindings(

View file

@ -35,7 +35,7 @@ macro_rules! not_supported {
struct Filler<'a> {
db: &'a dyn HirDatabase,
trait_env: Arc<TraitEnvironment>,
trait_env: Arc<TraitEnvironment<'a>>,
subst: &'a Substitution,
generics: Option<Generics>,
}
@ -301,11 +301,11 @@ impl Filler<'_> {
}
}
pub fn monomorphized_mir_body_query(
db: &dyn HirDatabase,
pub fn monomorphized_mir_body_query<'db>(
db: &'db dyn HirDatabase,
owner: DefWithBodyId,
subst: Substitution,
trait_env: Arc<crate::TraitEnvironment>,
trait_env: Arc<crate::TraitEnvironment<'db>>,
) -> Result<Arc<MirBody>, MirLowerError> {
let generics = owner.as_generic_def_id(db).map(|g_def| generics(db, g_def));
let filler = &mut Filler { db, subst: &subst, trait_env, generics };
@ -315,20 +315,20 @@ pub fn monomorphized_mir_body_query(
Ok(Arc::new(body))
}
pub(crate) fn monomorphized_mir_body_cycle_result(
_db: &dyn HirDatabase,
pub(crate) fn monomorphized_mir_body_cycle_result<'db>(
_db: &'db dyn HirDatabase,
_: DefWithBodyId,
_: Substitution,
_: Arc<crate::TraitEnvironment>,
_: Arc<crate::TraitEnvironment<'db>>,
) -> Result<Arc<MirBody>, MirLowerError> {
Err(MirLowerError::Loop)
}
pub fn monomorphized_mir_body_for_closure_query(
db: &dyn HirDatabase,
pub fn monomorphized_mir_body_for_closure_query<'db>(
db: &'db dyn HirDatabase,
closure: InternedClosureId,
subst: Substitution,
trait_env: Arc<crate::TraitEnvironment>,
trait_env: Arc<crate::TraitEnvironment<'db>>,
) -> Result<Arc<MirBody>, MirLowerError> {
let InternedClosure(owner, _) = db.lookup_intern_closure(closure);
let generics = owner.as_generic_def_id(db).map(|g_def| generics(db, g_def));

View file

@ -13,7 +13,7 @@ pub(crate) mod inspect;
pub mod interner;
mod ir_print;
pub mod mapping;
mod normalize;
pub mod normalize;
pub mod obligation_ctxt;
mod opaques;
pub mod predicate;

View file

@ -1091,23 +1091,21 @@ impl<'db> rustc_type_ir::Interner for DbInterner<'db> {
ItemContainerId::ImplId(it) => it,
_ => panic!("assoc ty value should be in impl"),
};
self.db().ty_ns(id.into())
self.db().ty(id.into())
}
SolverDefId::AdtId(id) => self.db().ty_ns(id.into()),
SolverDefId::AdtId(id) => self.db().ty(id.into()),
// FIXME(next-solver): This uses the types of `query mir_borrowck` in rustc.
//
// We currently always use the type from HIR typeck which ignores regions. This
// should be fine.
SolverDefId::InternedOpaqueTyId(_) => self.type_of_opaque_hir_typeck(def_id),
SolverDefId::FunctionId(id) => self.db.value_ty_ns(id.into()).unwrap(),
SolverDefId::FunctionId(id) => self.db.value_ty(id.into()).unwrap(),
SolverDefId::Ctor(id) => {
let id = match id {
Ctor::Struct(id) => id.into(),
Ctor::Enum(id) => id.into(),
};
self.db
.value_ty_ns(id)
.expect("`SolverDefId::Ctor` should have a function-like ctor")
self.db.value_ty(id).expect("`SolverDefId::Ctor` should have a function-like ctor")
}
_ => panic!("Unexpected def_id `{def_id:?}` provided for `type_of`"),
}
@ -1227,7 +1225,7 @@ impl<'db> rustc_type_ir::Interner for DbInterner<'db> {
self,
def_id: Self::FunctionId,
) -> EarlyBinder<Self, rustc_type_ir::Binder<Self, rustc_type_ir::FnSig<Self>>> {
self.db().callable_item_signature_ns(def_id.0)
self.db().callable_item_signature(def_id.0)
}
fn coroutine_movability(self, def_id: Self::CoroutineId) -> rustc_ast_ir::Movability {
@ -1322,7 +1320,7 @@ impl<'db> rustc_type_ir::Interner for DbInterner<'db> {
self,
def_id: Self::DefId,
) -> EarlyBinder<Self, impl IntoIterator<Item = Self::Clause>> {
let predicates = self.db().generic_predicates_without_parent_ns(def_id.try_into().unwrap());
let predicates = self.db().generic_predicates_without_parent(def_id.try_into().unwrap());
let predicates: Vec<_> = predicates.iter().cloned().collect();
EarlyBinder::bind(predicates.into_iter())
}
@ -1396,7 +1394,7 @@ impl<'db> rustc_type_ir::Interner for DbInterner<'db> {
self,
impl_id: Self::ImplId,
) -> EarlyBinder<Self, impl IntoIterator<Item = Self::Clause>> {
let trait_ref = self.db().impl_trait_ns(impl_id.0).expect("expected an impl of trait");
let trait_ref = self.db().impl_trait(impl_id.0).expect("expected an impl of trait");
trait_ref.map_bound(|trait_ref| {
let clause: Clause<'_> = trait_ref.upcast(self);
Clauses::new_from_iter(
@ -1635,7 +1633,7 @@ impl<'db> rustc_type_ir::Interner for DbInterner<'db> {
|impls| {
for i in impls.for_trait(trait_) {
use rustc_type_ir::TypeVisitable;
let contains_errors = self.db().impl_trait_ns(i).map_or(false, |b| {
let contains_errors = self.db().impl_trait(i).map_or(false, |b| {
b.skip_binder().visit_with(&mut ContainsTypeErrors).is_break()
});
if contains_errors {
@ -1658,7 +1656,7 @@ impl<'db> rustc_type_ir::Interner for DbInterner<'db> {
for fp in fps {
for i in impls.for_trait_and_self_ty(trait_, *fp) {
use rustc_type_ir::TypeVisitable;
let contains_errors = self.db().impl_trait_ns(i).map_or(false, |b| {
let contains_errors = self.db().impl_trait(i).map_or(false, |b| {
b.skip_binder().visit_with(&mut ContainsTypeErrors).is_break()
});
if contains_errors {
@ -1704,7 +1702,7 @@ impl<'db> rustc_type_ir::Interner for DbInterner<'db> {
impl_id: Self::ImplId,
) -> EarlyBinder<Self, rustc_type_ir::TraitRef<Self>> {
let db = self.db();
db.impl_trait_ns(impl_id.0)
db.impl_trait(impl_id.0)
// ImplIds for impls where the trait ref can't be resolved should never reach trait solving
.expect("invalid impl passed to trait solver")
}

View file

@ -575,6 +575,17 @@ impl<
}
}
impl<'db, T: NextSolverToChalk<'db, U>, U: HasInterner<Interner = Interner>>
NextSolverToChalk<'db, chalk_ir::Binders<U>> for rustc_type_ir::Binder<DbInterner<'db>, T>
{
fn to_chalk(self, interner: DbInterner<'db>) -> chalk_ir::Binders<U> {
chalk_ir::Binders::new(
self.bound_vars().to_chalk(interner),
self.skip_binder().to_chalk(interner),
)
}
}
impl<'db> ChalkToNextSolver<'db, BoundVarKinds> for chalk_ir::VariableKinds<Interner> {
fn to_nextsolver(&self, interner: DbInterner<'db>) -> BoundVarKinds {
BoundVarKinds::new_from_iter(
@ -584,6 +595,12 @@ impl<'db> ChalkToNextSolver<'db, BoundVarKinds> for chalk_ir::VariableKinds<Inte
}
}
impl<'db> NextSolverToChalk<'db, chalk_ir::VariableKinds<Interner>> for BoundVarKinds {
fn to_chalk(self, interner: DbInterner<'db>) -> chalk_ir::VariableKinds<Interner> {
chalk_ir::VariableKinds::from_iter(Interner, self.iter().map(|v| v.to_chalk(interner)))
}
}
impl<'db> ChalkToNextSolver<'db, BoundVarKind> for chalk_ir::VariableKind<Interner> {
fn to_nextsolver(&self, interner: DbInterner<'db>) -> BoundVarKind {
match self {
@ -594,6 +611,18 @@ impl<'db> ChalkToNextSolver<'db, BoundVarKind> for chalk_ir::VariableKind<Intern
}
}
impl<'db> NextSolverToChalk<'db, chalk_ir::VariableKind<Interner>> for BoundVarKind {
fn to_chalk(self, interner: DbInterner<'db>) -> chalk_ir::VariableKind<Interner> {
match self {
BoundVarKind::Ty(_) => chalk_ir::VariableKind::Ty(chalk_ir::TyVariableKind::General),
BoundVarKind::Region(_) => chalk_ir::VariableKind::Lifetime,
BoundVarKind::Const => {
chalk_ir::VariableKind::Const(chalk_ir::TyKind::Error.intern(Interner))
}
}
}
}
impl<'db> ChalkToNextSolver<'db, GenericArg<'db>> for chalk_ir::GenericArg<Interner> {
fn to_nextsolver(&self, interner: DbInterner<'db>) -> GenericArg<'db> {
match self.data(Interner) {
@ -1233,6 +1262,22 @@ where
}
}
impl<'db> NextSolverToChalk<'db, crate::CallableSig> for rustc_type_ir::FnSig<DbInterner<'db>> {
fn to_chalk(self, interner: DbInterner<'db>) -> crate::CallableSig {
crate::CallableSig {
abi: self.abi,
is_varargs: self.c_variadic,
safety: match self.safety {
super::abi::Safety::Safe => chalk_ir::Safety::Safe,
super::abi::Safety::Unsafe => chalk_ir::Safety::Unsafe,
},
params_and_return: triomphe::Arc::from_iter(
self.inputs_and_output.iter().map(|ty| convert_ty_for_result(interner, ty)),
),
}
}
}
pub fn convert_canonical_args_for_result<'db>(
interner: DbInterner<'db>,
args: Canonical<'db, Vec<GenericArg<'db>>>,
@ -1266,7 +1311,7 @@ pub fn convert_args_for_result<'db>(
Substitution::from_iter(Interner, substs)
}
pub(crate) fn convert_ty_for_result<'db>(interner: DbInterner<'db>, ty: Ty<'db>) -> crate::Ty {
pub fn convert_ty_for_result<'db>(interner: DbInterner<'db>, ty: Ty<'db>) -> crate::Ty {
use crate::{Scalar, TyKind};
use chalk_ir::{FloatTy, IntTy, UintTy};
match ty.kind() {

View file

@ -15,7 +15,7 @@ use super::{
interner::{BoundVarKind, DbInterner, Placeholder},
};
type RegionKind<'db> = rustc_type_ir::RegionKind<DbInterner<'db>>;
pub type RegionKind<'db> = rustc_type_ir::RegionKind<DbInterner<'db>>;
#[salsa::interned(constructor = new_, debug)]
pub struct Region<'db> {
@ -53,6 +53,10 @@ impl<'db> Region<'db> {
Region::new(interner, RegionKind::ReVar(v))
}
pub fn new_erased(interner: DbInterner<'db>) -> Region<'db> {
Region::new(interner, RegionKind::ReErased)
}
pub fn is_placeholder(&self) -> bool {
matches!(self.inner(), RegionKind::RePlaceholder(..))
}
@ -61,6 +65,10 @@ impl<'db> Region<'db> {
matches!(self.inner(), RegionKind::ReStatic)
}
pub fn is_var(&self) -> bool {
matches!(self.inner(), RegionKind::ReVar(_))
}
pub fn error(interner: DbInterner<'db>) -> Self {
Region::new(interner, RegionKind::ReError(ErrorGuaranteed))
}

View file

@ -156,16 +156,16 @@ impl<'db> SolverDelegate for SolverContext<'db> {
SolverDefId::TypeAliasId(id) => id,
_ => panic!("Unexpected SolverDefId"),
};
let trait_ref = self
let trait_ = self
.0
.interner
.db()
.impl_trait(impl_id.0)
// ImplIds for impls where the trait ref can't be resolved should never reach solver
.expect("invalid impl passed to next-solver")
.into_value_and_skipped_binders()
.skip_binder()
.def_id
.0;
let trait_ = trait_ref.hir_trait_id();
let trait_data = trait_.trait_items(self.0.interner.db());
let id =
impl_id.0.impl_items(self.0.interner.db()).items.iter().find_map(|item| -> Option<_> {

View file

@ -7,6 +7,7 @@ use hir_def::{GenericDefId, TypeOrConstParamId, TypeParamId};
use intern::{Interned, Symbol, sym};
use rustc_abi::{Float, Integer, Size};
use rustc_ast_ir::{Mutability, try_visit, visit::VisitorResult};
use rustc_type_ir::TyVid;
use rustc_type_ir::{
BoundVar, ClosureKind, CollectAndApply, FlagComputation, Flags, FloatTy, FloatVid, InferTy,
IntTy, IntVid, Interner, TypeFoldable, TypeSuperFoldable, TypeSuperVisitable, TypeVisitable,
@ -336,6 +337,14 @@ impl<'db> Ty<'db> {
matches!(self.kind(), TyKind::Tuple(tys) if tys.inner().is_empty())
}
#[inline]
pub fn ty_vid(self) -> Option<TyVid> {
match self.kind() {
TyKind::Infer(rustc_type_ir::TyVar(vid)) => Some(vid),
_ => None,
}
}
/// Given a `fn` type, returns an equivalent `unsafe fn` type;
/// that is, a `fn` type that is equivalent in every way for being
/// unsafe.

View file

@ -511,7 +511,6 @@ impl SomeStruct {
"struct_signature_shim",
"struct_signature_with_source_map_shim",
"attrs_shim",
"type_for_adt_tracked",
]
"#]],
);
@ -609,9 +608,6 @@ fn main() {
"trait_impls_in_crate_shim",
"impl_trait_with_diagnostics_shim",
"impl_self_ty_with_diagnostics_shim",
"type_for_adt_tracked",
"impl_trait_with_diagnostics_ns_shim",
"impl_self_ty_with_diagnostics_ns_shim",
"generic_predicates_ns_shim",
"value_ty_shim",
"generic_predicates_shim",
@ -700,8 +696,6 @@ fn main() {
"trait_impls_in_crate_shim",
"impl_trait_with_diagnostics_shim",
"impl_self_ty_with_diagnostics_shim",
"impl_trait_with_diagnostics_ns_shim",
"impl_self_ty_with_diagnostics_ns_shim",
"generic_predicates_ns_shim",
"generic_predicates_shim",
]

View file

@ -2050,10 +2050,10 @@ impl dyn Error + Send {
/// Attempts to downcast the box to a concrete type.
pub fn downcast<T: Error + 'static>(self: Box<Self>) -> Result<Box<T>, Box<dyn Error + Send>> {
let err: Box<dyn Error> = self;
// ^^^^ expected Box<dyn Error + '?>, got Box<dyn Error + Send + '?>
// ^^^^ expected Box<dyn Error + '?>, got Box<dyn Error + Send + 'static>
// FIXME, type mismatch should not occur
<dyn Error>::downcast(err).map_err(|_| loop {})
//^^^^^^^^^^^^^^^^^^^^^ type: fn downcast<{unknown}>(Box<dyn Error + '?>) -> Result<Box<{unknown}>, Box<dyn Error + '?>>
//^^^^^^^^^^^^^^^^^^^^^ type: fn downcast<{unknown}>(Box<dyn Error + 'static>) -> Result<Box<{unknown}>, Box<dyn Error + 'static>>
}
}
"#,

View file

@ -14,8 +14,6 @@ fn test() {
);
}
// FIXME(next-solver): The never type fallback implemented in r-a no longer works properly because of
// `Coerce` predicates. We should reimplement fallback like rustc.
#[test]
fn infer_never2() {
check_types(
@ -26,7 +24,7 @@ fn test() {
let a = gen();
if false { a } else { loop {} };
a;
} //^ {unknown}
} //^ !
"#,
);
}
@ -41,7 +39,7 @@ fn test() {
let a = gen();
if false { loop {} } else { a };
a;
//^ {unknown}
//^ !
}
"#,
);
@ -56,7 +54,7 @@ enum Option<T> { None, Some(T) }
fn test() {
let a = if true { Option::None } else { Option::Some(return) };
a;
} //^ Option<{unknown}>
} //^ Option<!>
"#,
);
}
@ -220,7 +218,7 @@ fn test(a: i32) {
_ => loop {},
};
i;
} //^ {unknown}
} //^ !
"#,
);
}

View file

@ -632,7 +632,7 @@ fn issue_4053_diesel_where_clauses() {
488..522 '{ ... }': <SelectStatement<F, S, D, W, O, LOf, {unknown}, {unknown}> as BoxedDsl<DB>>::Output
498..502 'self': SelectStatement<F, S, D, W, O, LOf, {unknown}, {unknown}>
498..508 'self.order': O
498..515 'self.o...into()': dyn QueryFragment<DB> + '?
498..515 'self.o...into()': dyn QueryFragment<DB> + 'static
"#]],
);
}
@ -1951,7 +1951,7 @@ fn main() {
Alias::Braced;
//^^^^^^^^^^^^^ {unknown}
let Alias::Braced = loop {};
//^^^^^^^^^^^^^ {unknown}
//^^^^^^^^^^^^^ !
let Alias::Braced(..) = loop {};
//^^^^^^^^^^^^^^^^^ Enum

View file

@ -1,6 +1,6 @@
use expect_test::expect;
use crate::tests::{check_infer, check_no_mismatches};
use crate::tests::{check_infer, check_no_mismatches, check_types};
#[test]
fn regression_20365() {
@ -418,3 +418,57 @@ fn foo() {
"#]],
);
}
#[test]
fn regression_19637() {
check_no_mismatches(
r#"
//- minicore: coerce_unsized
pub trait Any {}
impl<T: 'static> Any for T {}
pub trait Trait: Any {
type F;
}
pub struct TT {}
impl Trait for TT {
type F = f32;
}
pub fn coercion(x: &mut dyn Any) -> &mut dyn Any {
x
}
fn main() {
let mut t = TT {};
let tt = &mut t as &mut dyn Trait<F = f32>;
let st = coercion(tt);
}
"#,
);
}
#[test]
fn double_into_iter() {
check_types(
r#"
//- minicore: iterator
fn intoiter_issue<A, B>(foo: A)
where
A: IntoIterator<Item = B>,
B: IntoIterator<Item = usize>,
{
for x in foo {
// ^ B
for m in x {
// ^ usize
}
}
}
"#,
);
}

View file

@ -1487,8 +1487,8 @@ fn test(x: Box<dyn Trait<u64>>, y: &dyn Trait<u64>) {
268..269 'x': Box<dyn Trait<u64> + '?>
275..276 'y': &'? (dyn Trait<u64> + '?)
286..287 'z': Box<dyn Trait<u64> + '?>
290..293 'bar': fn bar() -> Box<dyn Trait<u64> + '?>
290..295 'bar()': Box<dyn Trait<u64> + '?>
290..293 'bar': fn bar() -> Box<dyn Trait<u64> + 'static>
290..295 'bar()': Box<dyn Trait<u64> + 'static>
301..302 'x': Box<dyn Trait<u64> + '?>
301..308 'x.foo()': u64
314..315 'y': &'? (dyn Trait<u64> + '?)
@ -1535,7 +1535,7 @@ fn test(s: S<u32, i32>) {
251..252 's': S<u32, i32>
267..289 '{ ...z(); }': ()
273..274 's': S<u32, i32>
273..280 's.bar()': &'? (dyn Trait<u32, i32> + '?)
273..280 's.bar()': &'? (dyn Trait<u32, i32> + 'static)
273..286 's.bar().baz()': (u32, i32)
"#]],
);
@ -1568,8 +1568,8 @@ fn test(x: Trait, y: &Trait) -> u64 {
106..107 'x': dyn Trait + '?
113..114 'y': &'? (dyn Trait + '?)
124..125 'z': dyn Trait + '?
128..131 'bar': fn bar() -> dyn Trait + '?
128..133 'bar()': dyn Trait + '?
128..131 'bar': fn bar() -> dyn Trait + 'static
128..133 'bar()': dyn Trait + 'static
139..140 'x': dyn Trait + '?
139..146 'x.foo()': u64
152..153 'y': &'? (dyn Trait + '?)
@ -1597,7 +1597,7 @@ fn main() {
47..48 '_': &'? (dyn Fn(S) + '?)
58..60 '{}': ()
71..105 '{ ...()); }': ()
77..78 'f': fn f(&'? (dyn Fn(S) + '?))
77..78 'f': fn f(&'? (dyn Fn(S) + 'static))
77..102 'f(&|nu...foo())': ()
79..101 '&|numb....foo()': &'? impl Fn(S)
80..101 '|numbe....foo()': impl Fn(S)
@ -2952,7 +2952,7 @@ fn test(x: &dyn Foo) {
34..36 '{}': ()
46..47 'x': &'? (dyn Foo + '?)
59..74 '{ foo(x); }': ()
65..68 'foo': fn foo(&'? (dyn Foo + '?))
65..68 'foo': fn foo(&'? (dyn Foo + 'static))
65..71 'foo(x)': ()
69..70 'x': &'? (dyn Foo + '?)
"#]],

View file

@ -1,4 +1,4 @@
//! Trait solving using Chalk.
//! Trait solving using next trait solver.
use core::fmt;
use std::hash::Hash;
@ -25,7 +25,7 @@ use crate::{
db::HirDatabase,
infer::unify::InferenceTable,
next_solver::{
DbInterner, GenericArg, Predicate, SolverContext, Span,
DbInterner, GenericArg, ParamEnv, Predicate, SolverContext, Span,
infer::{DbInternerInferExt, InferCtxt},
mapping::{ChalkToNextSolver, convert_canonical_args_for_result},
util::mini_canonicalize,
@ -39,21 +39,21 @@ use crate::{
/// ```
/// we assume that `T: Default`.
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub struct TraitEnvironment {
pub struct TraitEnvironment<'db> {
pub krate: Crate,
pub block: Option<BlockId>,
// FIXME make this a BTreeMap
traits_from_clauses: Box<[(Ty, TraitId)]>,
pub env: chalk_ir::Environment<Interner>,
pub env: ParamEnv<'db>,
}
impl TraitEnvironment {
impl<'db> TraitEnvironment<'db> {
pub fn empty(krate: Crate) -> Arc<Self> {
Arc::new(TraitEnvironment {
krate,
block: None,
traits_from_clauses: Box::default(),
env: chalk_ir::Environment::new(Interner),
env: ParamEnv::empty(),
})
}
@ -61,7 +61,7 @@ impl TraitEnvironment {
krate: Crate,
block: Option<BlockId>,
traits_from_clauses: Box<[(Ty, TraitId)]>,
env: chalk_ir::Environment<Interner>,
env: ParamEnv<'db>,
) -> Arc<Self> {
Arc::new(TraitEnvironment { krate, block, traits_from_clauses, env })
}
@ -78,10 +78,10 @@ impl TraitEnvironment {
}
}
pub(crate) fn normalize_projection_query(
db: &dyn HirDatabase,
pub(crate) fn normalize_projection_query<'db>(
db: &'db dyn HirDatabase,
projection: ProjectionTy,
env: Arc<TraitEnvironment>,
env: Arc<TraitEnvironment<'db>>,
) -> Ty {
if projection.substitution.iter(Interner).any(|arg| {
arg.ty(Interner)
@ -128,7 +128,7 @@ fn identity_subst(
chalk_ir::Canonical { binders, value: identity_subst }
}
/// Solve a trait goal using Chalk.
/// Solve a trait goal using next trait solver.
pub(crate) fn trait_solve_query(
db: &dyn HirDatabase,
krate: Crate,
@ -325,7 +325,7 @@ pub fn next_trait_solve_canonical_in_ctxt<'db>(
}
}
/// Solve a trait goal using Chalk.
/// Solve a trait goal using next trait solver.
pub fn next_trait_solve_in_ctxt<'db, 'a>(
infer_ctxt: &'a InferCtxt<'db>,
goal: crate::next_solver::Goal<'db, crate::next_solver::Predicate<'db>>,

View file

@ -15,6 +15,8 @@
use crate::db::HirDatabase;
use crate::generics::{Generics, generics};
use crate::next_solver::DbInterner;
use crate::next_solver::mapping::{ChalkToNextSolver, NextSolverToChalk};
use crate::{
AliasTy, Const, ConstScalar, DynTyExt, GenericArg, GenericArgData, Interner, Lifetime,
LifetimeData, Ty, TyKind,
@ -238,14 +240,15 @@ impl Context<'_> {
}
GenericDefId::FunctionId(f) => {
let subst = self.generics.placeholder_subst(self.db);
self.add_constraints_from_sig(
self.db
.callable_item_signature(f.into())
.substitute(Interner, &subst)
.params_and_return
.iter(),
Variance::Covariant,
);
let interner = DbInterner::new_with(self.db, None, None);
let args: crate::next_solver::GenericArgs<'_> = subst.to_nextsolver(interner);
let sig = self
.db
.callable_item_signature(f.into())
.instantiate(interner, args)
.skip_binder()
.to_chalk(interner);
self.add_constraints_from_sig(sig.params_and_return.iter(), Variance::Covariant);
}
_ => {}
}

View file

@ -24,7 +24,7 @@ use crate::{
Adt, AsAssocItem, AssocItem, AssocItemContainer, Const, ConstParam, Crate, Enum,
ExternCrateDecl, Field, Function, GenericParam, HasCrate, HasVisibility, Impl, LifetimeParam,
Macro, Module, SelfParam, Static, Struct, StructKind, Trait, TraitRef, TupleField, TyBuilder,
Type, TypeAlias, TypeOrConstParam, TypeParam, Union, Variant,
Type, TypeAlias, TypeNs, TypeOrConstParam, TypeParam, Union, Variant,
};
impl HirDisplay for Function {
@ -437,6 +437,12 @@ impl HirDisplay for Type<'_> {
}
}
impl HirDisplay for TypeNs<'_> {
fn hir_fmt(&self, f: &mut HirFormatter<'_>) -> Result<(), HirDisplayError> {
self.ty.hir_fmt(f)
}
}
impl HirDisplay for ExternCrateDecl {
fn hir_fmt(&self, f: &mut HirFormatter<'_>) -> Result<(), HirDisplayError> {
write_visibility(self.module(f.db).id, self.visibility(f.db), f)?;

View file

@ -82,7 +82,9 @@ use hir_ty::{
method_resolution,
mir::{MutBorrowKind, interpret_mir},
next_solver::{
ClauseKind, DbInterner, GenericArgs, infer::InferCtxt, mapping::ChalkToNextSolver,
ClauseKind, DbInterner, GenericArgs,
infer::InferCtxt,
mapping::{ChalkToNextSolver, NextSolverToChalk, convert_ty_for_result},
},
primitive::UintTy,
traits::FnTrait,
@ -863,10 +865,13 @@ impl Module {
.collect();
if !missing.is_empty() {
let self_ty = db.impl_self_ty(impl_def.id).substitute(
Interner,
&hir_ty::generics::generics(db, impl_def.id.into()).placeholder_subst(db),
);
let interner = DbInterner::new_with(db, None, None);
let args: crate::next_solver::GenericArgs<'_> =
hir_ty::generics::generics(db, impl_def.id.into())
.placeholder_subst(db)
.to_nextsolver(interner);
let self_ty =
db.impl_self_ty(impl_def.id).instantiate(interner, args).to_chalk(interner);
let self_ty = if let TyKind::Alias(AliasTy::Projection(projection)) =
self_ty.kind(Interner)
{
@ -1342,19 +1347,12 @@ impl Field {
u32::from(self.id.into_raw()) as usize
}
/// Returns the type as in the signature of the struct (i.e., with
/// placeholder types for type parameters). Only use this in the context of
/// the field definition.
pub fn ty<'db>(&self, db: &'db dyn HirDatabase) -> Type<'db> {
/// Returns the type as in the signature of the struct. Only use this in the
/// context of the field definition.
pub fn ty<'db>(&self, db: &'db dyn HirDatabase) -> TypeNs<'db> {
let var_id = self.parent.into();
let generic_def_id: GenericDefId = match self.parent {
VariantDef::Struct(it) => it.id.into(),
VariantDef::Union(it) => it.id.into(),
VariantDef::Variant(it) => it.id.lookup(db).parent.into(),
};
let substs = TyBuilder::placeholder_subst(db, generic_def_id);
let ty = db.field_types(var_id)[self.id].clone().substitute(Interner, &substs);
Type::new(db, var_id, ty)
let ty = db.field_types_ns(var_id)[self.id].skip_binder();
TypeNs::new(db, var_id, ty)
}
// FIXME: Find better API to also handle const generics
@ -1384,9 +1382,8 @@ impl Field {
}
pub fn layout(&self, db: &dyn HirDatabase) -> Result<Layout, LayoutError> {
let interner = DbInterner::new_with(db, None, None);
db.layout_of_ty(
self.ty(db).ty.to_nextsolver(interner),
self.ty(db).ty,
db.trait_environment(match hir_def::VariantId::from(self.parent) {
hir_def::VariantId::EnumVariantId(id) => {
GenericDefId::AdtId(id.lookup(db).parent.into())
@ -1506,7 +1503,7 @@ impl<'db> InstantiatedStruct<'db> {
let krate = self.inner.krate(db);
let interner = DbInterner::new_with(db, Some(krate.base()), None);
let ty = db.ty_ns(self.inner.id.into());
let ty = db.ty(self.inner.id.into());
TypeNs::new(db, self.inner.id, ty.instantiate(interner, self.args))
}
}
@ -1666,7 +1663,7 @@ impl<'db> InstantiatedEnum<'db> {
let krate = self.inner.krate(db);
let interner = DbInterner::new_with(db, Some(krate.base()), None);
let ty = db.ty_ns(self.inner.id.into());
let ty = db.ty(self.inner.id.into());
TypeNs::new(db, self.inner.id, ty.instantiate(interner, self.args))
}
}
@ -1853,7 +1850,8 @@ impl Adt {
ParamKind::Lifetime => error_lifetime().cast(Interner),
}
})
.build();
.build(DbInterner::conjure())
.to_chalk(DbInterner::conjure());
Type::new(db, id, ty)
}
@ -2288,7 +2286,13 @@ impl Function {
pub fn fn_ptr_type(self, db: &dyn HirDatabase) -> Type<'_> {
let resolver = self.id.resolver(db);
let substs = TyBuilder::placeholder_subst(db, self.id);
let callable_sig = db.callable_item_signature(self.id.into()).substitute(Interner, &substs);
let interner = DbInterner::new_with(db, None, None);
let args: crate::next_solver::GenericArgs<'_> = substs.to_nextsolver(interner);
let callable_sig = db
.callable_item_signature(self.id.into())
.instantiate(interner, args)
.skip_binder()
.to_chalk(interner);
let ty = TyKind::Function(callable_sig.to_fn_ptr()).intern(Interner);
Type::new_with_resolver_inner(db, &resolver, ty)
}
@ -2297,8 +2301,14 @@ impl Function {
pub fn ret_type(self, db: &dyn HirDatabase) -> Type<'_> {
let resolver = self.id.resolver(db);
let substs = TyBuilder::placeholder_subst(db, self.id);
let callable_sig = db.callable_item_signature(self.id.into()).substitute(Interner, &substs);
let ty = callable_sig.ret().clone();
let interner = DbInterner::new_with(db, None, None);
let args: crate::next_solver::GenericArgs<'_> = substs.to_nextsolver(interner);
let ty = db
.callable_item_signature(self.id.into())
.instantiate(interner, args)
.skip_binder()
.output()
.to_chalk(interner);
Type::new_with_resolver_inner(db, &resolver, ty)
}
@ -2327,8 +2337,14 @@ impl Function {
parent_id.map(|id| TyBuilder::subst_for_def(db, id, None).fill(&mut filler).build());
let substs = TyBuilder::subst_for_def(db, self.id, parent_substs).fill(&mut filler).build();
let callable_sig = db.callable_item_signature(self.id.into()).substitute(Interner, &substs);
let ty = callable_sig.ret().clone();
let interner = DbInterner::new_with(db, None, None);
let args: crate::next_solver::GenericArgs<'_> = substs.to_nextsolver(interner);
let ty = db
.callable_item_signature(self.id.into())
.instantiate(interner, args)
.skip_binder()
.output()
.to_chalk(interner);
Type::new_with_resolver_inner(db, &resolver, ty)
}
@ -2338,8 +2354,14 @@ impl Function {
}
let resolver = self.id.resolver(db);
let substs = TyBuilder::placeholder_subst(db, self.id);
let callable_sig = db.callable_item_signature(self.id.into()).substitute(Interner, &substs);
let ret_ty = callable_sig.ret().clone();
let interner = DbInterner::new_with(db, None, None);
let args: crate::next_solver::GenericArgs<'_> = substs.to_nextsolver(interner);
let ret_ty = db
.callable_item_signature(self.id.into())
.instantiate(interner, args)
.skip_binder()
.output()
.to_chalk(interner);
for pred in ret_ty.impl_trait_bounds(db).into_iter().flatten() {
if let WhereClause::AliasEq(output_eq) = pred.into_value_and_skipped_binders().0 {
return Type::new_with_resolver_inner(db, &resolver, output_eq.ty).into();
@ -2359,7 +2381,13 @@ impl Function {
pub fn assoc_fn_params(self, db: &dyn HirDatabase) -> Vec<Param<'_>> {
let environment = db.trait_environment(self.id.into());
let substs = TyBuilder::placeholder_subst(db, self.id);
let callable_sig = db.callable_item_signature(self.id.into()).substitute(Interner, &substs);
let interner = DbInterner::new_with(db, None, None);
let args: crate::next_solver::GenericArgs<'_> = substs.to_nextsolver(interner);
let callable_sig = db
.callable_item_signature(self.id.into())
.instantiate(interner, args)
.skip_binder()
.to_chalk(interner);
callable_sig
.params()
.iter()
@ -2387,7 +2415,13 @@ impl Function {
pub fn params_without_self(self, db: &dyn HirDatabase) -> Vec<Param<'_>> {
let environment = db.trait_environment(self.id.into());
let substs = TyBuilder::placeholder_subst(db, self.id);
let callable_sig = db.callable_item_signature(self.id.into()).substitute(Interner, &substs);
let interner = DbInterner::new_with(db, None, None);
let args: crate::next_solver::GenericArgs<'_> = substs.to_nextsolver(interner);
let callable_sig = db
.callable_item_signature(self.id.into())
.instantiate(interner, args)
.skip_binder()
.to_chalk(interner);
let skip = if db.function_signature(self.id).has_self_param() { 1 } else { 0 };
callable_sig
.params()
@ -2437,7 +2471,13 @@ impl Function {
GenericArg::new(Interner, GenericArgData::Ty(ty))
})
.build();
let callable_sig = db.callable_item_signature(self.id.into()).substitute(Interner, &substs);
let interner = DbInterner::new_with(db, None, None);
let args: crate::next_solver::GenericArgs<'_> = substs.to_nextsolver(interner);
let callable_sig = db
.callable_item_signature(self.id.into())
.instantiate(interner, args)
.skip_binder()
.to_chalk(interner);
let skip = if db.function_signature(self.id).has_self_param() { 1 } else { 0 };
callable_sig
.params()
@ -2732,8 +2772,13 @@ impl SelfParam {
pub fn ty<'db>(&self, db: &'db dyn HirDatabase) -> Type<'db> {
let substs = TyBuilder::placeholder_subst(db, self.func);
let callable_sig =
db.callable_item_signature(self.func.into()).substitute(Interner, &substs);
let interner = DbInterner::new_with(db, None, None);
let args: crate::next_solver::GenericArgs<'_> = substs.to_nextsolver(interner);
let callable_sig = db
.callable_item_signature(self.func.into())
.instantiate(interner, args)
.skip_binder()
.to_chalk(interner);
let environment = db.trait_environment(self.func.into());
let ty = callable_sig.params()[0].clone();
Type { env: environment, ty, _pd: PhantomCovariantLifetime::new() }
@ -2765,8 +2810,13 @@ impl SelfParam {
let parent_substs = TyBuilder::subst_for_def(db, parent_id, None).fill(&mut filler).build();
let substs =
TyBuilder::subst_for_def(db, self.func, Some(parent_substs)).fill(&mut filler).build();
let callable_sig =
db.callable_item_signature(self.func.into()).substitute(Interner, &substs);
let interner = DbInterner::new_with(db, None, None);
let args: crate::next_solver::GenericArgs<'_> = substs.to_nextsolver(interner);
let callable_sig = db
.callable_item_signature(self.func.into())
.instantiate(interner, args)
.skip_binder()
.to_chalk(interner);
let environment = db.trait_environment(self.func.into());
let ty = callable_sig.params()[0].clone();
Type { env: environment, ty, _pd: PhantomCovariantLifetime::new() }
@ -3770,7 +3820,7 @@ impl GenericDef {
push_ty_diagnostics(
db,
acc,
db.generic_predicates_without_parent_with_diagnostics_ns(def).1,
db.generic_predicates_without_parent_with_diagnostics(def).1,
&source_map,
);
for (param_id, param) in generics.iter_type_or_consts() {
@ -3810,12 +3860,12 @@ impl GenericDef {
pub struct GenericSubstitution<'db> {
def: GenericDefId,
subst: Substitution,
env: Arc<TraitEnvironment>,
env: Arc<TraitEnvironment<'db>>,
_pd: PhantomCovariantLifetime<'db>,
}
impl<'db> GenericSubstitution<'db> {
fn new(def: GenericDefId, subst: Substitution, env: Arc<TraitEnvironment>) -> Self {
fn new(def: GenericDefId, subst: Substitution, env: Arc<TraitEnvironment<'db>>) -> Self {
Self { def, subst, env, _pd: PhantomCovariantLifetime::new() }
}
@ -4495,21 +4545,23 @@ impl Impl {
}
pub fn trait_(self, db: &dyn HirDatabase) -> Option<Trait> {
let trait_ref = db.impl_trait_ns(self.id)?;
let trait_ref = db.impl_trait(self.id)?;
let id = trait_ref.skip_binder().def_id;
Some(Trait { id: id.0 })
}
pub fn trait_ref(self, db: &dyn HirDatabase) -> Option<TraitRef<'_>> {
let trait_ref = db.impl_trait_ns(self.id)?.instantiate_identity();
let trait_ref = db.impl_trait(self.id)?.instantiate_identity();
let resolver = self.id.resolver(db);
Some(TraitRef::new_with_resolver(db, &resolver, trait_ref))
}
pub fn self_ty(self, db: &dyn HirDatabase) -> Type<'_> {
let resolver = self.id.resolver(db);
let interner = DbInterner::new_with(db, Some(resolver.krate()), None);
let substs = TyBuilder::placeholder_subst(db, self.id);
let ty = db.impl_self_ty(self.id).substitute(Interner, &substs);
let args: crate::next_solver::GenericArgs<'_> = substs.to_nextsolver(interner);
let ty = db.impl_self_ty(self.id).instantiate(interner, args).to_chalk(interner);
Type::new_with_resolver_inner(db, &resolver, ty)
}
@ -4569,7 +4621,7 @@ impl Impl {
#[derive(Clone, PartialEq, Eq, Debug, Hash)]
pub struct TraitRef<'db> {
env: Arc<TraitEnvironment>,
env: Arc<TraitEnvironment<'db>>,
trait_ref: hir_ty::next_solver::TraitRef<'db>,
_pd: PhantomCovariantLifetime<'db>,
}
@ -4792,7 +4844,7 @@ impl CaptureUsageSource {
#[derive(Clone, PartialEq, Eq, Debug, Hash)]
pub struct Type<'db> {
env: Arc<TraitEnvironment>,
env: Arc<TraitEnvironment<'db>>,
ty: Ty,
_pd: PhantomCovariantLifetime<'db>,
}
@ -4830,32 +4882,40 @@ impl<'db> Type<'db> {
}
fn from_def(db: &'db dyn HirDatabase, def: impl Into<TyDefId> + HasResolver) -> Self {
let interner = DbInterner::new_with(db, None, None);
let ty = db.ty(def.into());
let substs = TyBuilder::unknown_subst(
db,
match def.into() {
TyDefId::AdtId(it) => GenericDefId::AdtId(it),
TyDefId::TypeAliasId(it) => GenericDefId::TypeAliasId(it),
TyDefId::BuiltinType(_) => return Type::new(db, def, ty.skip_binders().clone()),
TyDefId::BuiltinType(_) => {
return Type::new(db, def, ty.skip_binder().to_chalk(interner));
}
},
);
Type::new(db, def, ty.substitute(Interner, &substs))
let args: hir_ty::next_solver::GenericArgs<'_> = substs.to_nextsolver(interner);
Type::new(db, def, ty.instantiate(interner, args).to_chalk(interner))
}
fn from_def_placeholders(
db: &'db dyn HirDatabase,
def: impl Into<TyDefId> + HasResolver,
) -> Self {
let interner = DbInterner::new_with(db, None, None);
let ty = db.ty(def.into());
let substs = TyBuilder::placeholder_subst(
db,
match def.into() {
TyDefId::AdtId(it) => GenericDefId::AdtId(it),
TyDefId::TypeAliasId(it) => GenericDefId::TypeAliasId(it),
TyDefId::BuiltinType(_) => return Type::new(db, def, ty.skip_binders().clone()),
TyDefId::BuiltinType(_) => {
return Type::new(db, def, ty.skip_binder().to_chalk(interner));
}
},
);
Type::new(db, def, ty.substitute(Interner, &substs))
let args: hir_ty::next_solver::GenericArgs<'_> = substs.to_nextsolver(interner);
Type::new(db, def, ty.instantiate(interner, args).to_chalk(interner))
}
fn from_value_def(
@ -4865,6 +4925,7 @@ impl<'db> Type<'db> {
let Some(ty) = db.value_ty(def.into()) else {
return Type::new(db, def, TyKind::Error.intern(Interner));
};
let interner = DbInterner::new_with(db, None, None);
let substs = TyBuilder::unknown_subst(
db,
match def.into() {
@ -4875,10 +4936,13 @@ impl<'db> Type<'db> {
ValueTyDefId::EnumVariantId(it) => {
GenericDefId::AdtId(AdtId::EnumId(it.lookup(db).parent))
}
ValueTyDefId::StaticId(_) => return Type::new(db, def, ty.skip_binders().clone()),
ValueTyDefId::StaticId(_) => {
return Type::new(db, def, ty.skip_binder().to_chalk(interner));
}
},
);
Type::new(db, def, ty.substitute(Interner, &substs))
let args: crate::next_solver::GenericArgs<'_> = substs.to_nextsolver(interner);
Type::new(db, def, ty.instantiate(interner, args).to_chalk(interner))
}
pub fn new_slice(ty: Self) -> Self {
@ -5173,7 +5237,14 @@ impl<'db> Type<'db> {
.build();
let goal = Canonical {
value: hir_ty::InEnvironment::new(&self.env.env, trait_ref.cast(Interner)),
value: hir_ty::InEnvironment::new(
&self.env.env.to_chalk(DbInterner::new_with(
db,
Some(self.env.krate),
self.env.block,
)),
trait_ref.cast(Interner),
),
binders: CanonicalVarKinds::empty(Interner),
};
@ -5947,7 +6018,7 @@ impl<'db> Type<'db> {
#[derive(Clone, PartialEq, Eq, Debug, Hash)]
pub struct TypeNs<'db> {
env: Arc<TraitEnvironment>,
env: Arc<TraitEnvironment<'db>>,
ty: hir_ty::next_solver::Ty<'db>,
_pd: PhantomCovariantLifetime<'db>,
}
@ -5965,6 +6036,11 @@ impl<'db> TypeNs<'db> {
TypeNs { env: environment, ty, _pd: PhantomCovariantLifetime::new() }
}
/// Converts this next-solver-typed `TypeNs` into the [`Type`] wrapper,
/// carrying over the same trait environment.
pub fn to_type(&self, db: &'db dyn HirDatabase) -> Type<'db> {
// Interner scoped to this environment's crate/block so the ty conversion
// happens in the right resolution context.
let interner = DbInterner::new_with(db, Some(self.env.krate), self.env.block);
Type { env: self.env.clone(), ty: convert_ty_for_result(interner, self.ty), _pd: self._pd }
}
// FIXME: Find better API that also handles const generics
pub fn impls_trait(&self, infcx: InferCtxt<'db>, trait_: Trait, args: &[TypeNs<'db>]) -> bool {
let args = GenericArgs::new_from_iter(
@ -5988,6 +6064,10 @@ impl<'db> TypeNs<'db> {
let res = hir_ty::traits::next_trait_solve_in_ctxt(&infcx, goal);
res.map_or(false, |res| matches!(res.1, rustc_type_ir::solve::Certainty::Yes))
}
/// Returns `true` if this type is the primitive `bool`.
pub fn is_bool(&self) -> bool {
matches!(self.ty.kind(), rustc_type_ir::TyKind::Bool)
}
}
#[derive(Debug, PartialEq, Eq, Copy, Clone, Hash)]

View file

@ -46,6 +46,10 @@ use hir_ty::{
from_assoc_type_id,
lang_items::lang_items_for_bin_op,
method_resolution,
next_solver::{
DbInterner,
mapping::{ChalkToNextSolver, NextSolverToChalk},
},
};
use intern::sym;
use itertools::Itertools;
@ -219,7 +223,7 @@ impl<'db> SourceAnalyzer<'db> {
})
}
fn trait_environment(&self, db: &'db dyn HirDatabase) -> Arc<TraitEnvironment> {
fn trait_environment(&self, db: &'db dyn HirDatabase) -> Arc<TraitEnvironment<'db>> {
self.body_().map(|(def, ..)| def).map_or_else(
|| TraitEnvironment::empty(self.resolver.krate()),
|def| db.trait_environment_for_body(def),
@ -372,8 +376,10 @@ impl<'db> SourceAnalyzer<'db> {
) -> Option<Callable<'db>> {
let expr_id = self.expr_id(call.clone().into())?.as_expr()?;
let (func, substs) = self.infer()?.method_resolution(expr_id)?;
let ty = db.value_ty(func.into())?.substitute(Interner, &substs);
let ty = Type::new_with_resolver(db, &self.resolver, ty);
let interner = DbInterner::new_with(db, None, None);
let args: hir_ty::next_solver::GenericArgs<'_> = substs.to_nextsolver(interner);
let ty = db.value_ty(func.into())?.instantiate(interner, args);
let ty = Type::new_with_resolver(db, &self.resolver, ty.to_chalk(interner));
let mut res = ty.as_callable(db)?;
res.is_bound_method = true;
Some(res)

View file

@ -1,3 +1,4 @@
use either::Either;
use syntax::{
AstNode,
ast::{self, edit_in_place::Indent, syntax_factory::SyntaxFactory},
@ -59,7 +60,8 @@ enum ParentType {
}
fn get_replacement_node(ctx: &AssistContext<'_>) -> Option<(ParentType, ast::Expr)> {
if let Some(match_arm) = ctx.find_node_at_offset::<ast::MatchArm>() {
let node = ctx.find_node_at_offset::<Either<ast::MatchArm, ast::ClosureExpr>>()?;
if let Either::Left(match_arm) = &node {
let match_arm_expr = match_arm.expr()?;
if matches!(match_arm_expr, ast::Expr::BlockExpr(_)) {
@ -67,7 +69,7 @@ fn get_replacement_node(ctx: &AssistContext<'_>) -> Option<(ParentType, ast::Exp
}
return Some((ParentType::MatchArmExpr, match_arm_expr));
} else if let Some(closure_expr) = ctx.find_node_at_offset::<ast::ClosureExpr>() {
} else if let Either::Right(closure_expr) = &node {
let body = closure_expr.body()?;
if matches!(body, ast::Expr::BlockExpr(_)) {
@ -105,6 +107,33 @@ fn foo() {
);
}
// The assist should offer braces for a closure body even when the closure
// sits inside a match arm (the arm must not shadow the closure as the target).
#[test]
fn suggest_add_braces_for_closure_in_match() {
check_assist(
add_braces,
r#"
fn foo() {
match () {
() => {
t(|n|$0 n + 100);
}
}
}
"#,
r#"
fn foo() {
match () {
() => {
t(|n| {
n + 100
});
}
}
}
"#,
);
}
#[test]
fn no_assist_for_closures_with_braces() {
check_assist_not_applicable(

View file

@ -521,7 +521,7 @@ fn build_pat(
hir::StructKind::Tuple => {
let mut name_generator = suggest_name::NameGenerator::default();
let pats = fields.into_iter().map(|f| {
let name = name_generator.for_type(&f.ty(db), db, edition);
let name = name_generator.for_type(&f.ty(db).to_type(db), db, edition);
match name {
Some(name) => make::ext::simple_ident_pat(make.name(&name)).into(),
None => make.wildcard_pat().into(),

View file

@ -6,7 +6,7 @@ use ide_db::{
syntax_helpers::node_ext::{for_each_tail_expr, walk_expr},
};
use syntax::{
SyntaxKind, T,
NodeOrToken, SyntaxKind, T,
ast::{
self, AstNode,
Expr::BinExpr,
@ -38,15 +38,27 @@ use crate::{AssistContext, AssistId, Assists, utils::invert_boolean_expression};
// }
// ```
pub(crate) fn apply_demorgan(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> {
let mut bin_expr = ctx.find_node_at_offset::<ast::BinExpr>()?;
let mut bin_expr = if let Some(not) = ctx.find_token_syntax_at_offset(T![!])
&& let Some(NodeOrToken::Node(next)) = not.next_sibling_or_token()
&& let Some(paren) = ast::ParenExpr::cast(next)
&& let Some(ast::Expr::BinExpr(bin_expr)) = paren.expr()
{
bin_expr
} else {
let bin_expr = ctx.find_node_at_offset::<ast::BinExpr>()?;
let op_range = bin_expr.op_token()?.text_range();
// Is the cursor on the expression's logical operator?
if !op_range.contains_range(ctx.selection_trimmed()) {
return None;
}
bin_expr
};
let op = bin_expr.op_kind()?;
let op_range = bin_expr.op_token()?.text_range();
// Is the cursor on the expression's logical operator?
if !op_range.contains_range(ctx.selection_trimmed()) {
return None;
}
// Walk up the tree while we have the same binary operator
while let Some(parent_expr) = bin_expr.syntax().parent().and_then(ast::BinExpr::cast) {
match parent_expr.op_kind() {
@ -366,6 +378,15 @@ fn f() { !(S <= S || S < S) }
)
}
// Invoking the assist on the leading `!` of a parenthesized binary expression
// should apply De Morgan's law to the whole parenthesized operand chain.
#[test]
fn demorgan_on_not() {
check_assist(
apply_demorgan,
"fn f() { $0!(1 || 3 && 4 || 5) }",
"fn f() { !1 && !(3 && 4) && !5 }",
)
}
#[test]
fn demorgan_keep_pars_for_op_precedence() {
check_assist(

View file

@ -2,7 +2,7 @@ use crate::assist_context::{AssistContext, Assists};
use ide_db::{LineIndexDatabase, assists::AssistId, defs::Definition};
use syntax::{
AstNode,
ast::{self, edit_in_place::Indent},
ast::{self, HasName, edit_in_place::Indent},
};
// Assist: bind_unused_param
@ -22,6 +22,7 @@ pub(crate) fn bind_unused_param(acc: &mut Assists, ctx: &AssistContext<'_>) -> O
let param: ast::Param = ctx.find_node_at_offset()?;
let Some(ast::Pat::IdentPat(ident_pat)) = param.pat() else { return None };
let name = ident_pat.name().filter(|n| !n.text().starts_with('_'))?;
let param_def = {
let local = ctx.sema.to_def(&ident_pat)?;
@ -39,14 +40,14 @@ pub(crate) fn bind_unused_param(acc: &mut Assists, ctx: &AssistContext<'_>) -> O
acc.add(
AssistId::quick_fix("bind_unused_param"),
format!("Bind as `let _ = {ident_pat};`"),
format!("Bind as `let _ = {name};`"),
param.syntax().text_range(),
|builder| {
let line_index = ctx.db().line_index(ctx.vfs_file_id());
let indent = func.indent_level();
let text_indent = indent + 1;
let mut text = format!("\n{text_indent}let _ = {ident_pat};");
let mut text = format!("\n{text_indent}let _ = {name};");
let left_line = line_index.line_col(l_curly_range.end()).line;
let right_line = line_index.line_col(r_curly_range.start()).line;
@ -83,6 +84,22 @@ fn foo(y: i32) {
);
}
// A `ref`-qualified ident pattern must be bound by its bare name (`y`),
// not the full pattern text (`ref y`).
#[test]
fn bind_unused_ref_ident_pat() {
cov_mark::check!(single_line);
check_assist(
bind_unused_param,
r#"
fn foo(ref $0y: i32) {}
"#,
r#"
fn foo(ref y: i32) {
let _ = y;
}
"#,
);
}
#[test]
fn bind_unused_empty_block_with_newline() {
check_assist(
@ -149,6 +166,16 @@ impl Trait for () {
bind_unused_param,
r#"
fn foo(x: i32, $0y: i32) { y; }
"#,
);
}
// Parameters whose name starts with `_` are intentionally unused;
// the assist must not apply to them.
#[test]
fn keep_underscore_used() {
check_assist_not_applicable(
bind_unused_param,
r#"
fn foo($0_x: i32, y: i32) {}
"#,
);
}

View file

@ -1,13 +1,12 @@
use std::iter::once;
use ide_db::{
syntax_helpers::node_ext::{is_pattern_cond, single_let},
ty_filter::TryEnum,
};
use either::Either;
use hir::{Semantics, TypeInfo};
use ide_db::{RootDatabase, ty_filter::TryEnum};
use syntax::{
AstNode,
SyntaxKind::{FN, FOR_EXPR, LOOP_EXPR, WHILE_EXPR, WHITESPACE},
T,
SyntaxKind::{CLOSURE_EXPR, FN, FOR_EXPR, LOOP_EXPR, WHILE_EXPR, WHITESPACE},
SyntaxNode, T,
ast::{
self,
edit::{AstNodeEdit, IndentLevel},
@ -44,12 +43,9 @@ use crate::{
// }
// ```
pub(crate) fn convert_to_guarded_return(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> {
if let Some(let_stmt) = ctx.find_node_at_offset() {
let_stmt_to_guarded_return(let_stmt, acc, ctx)
} else if let Some(if_expr) = ctx.find_node_at_offset() {
if_expr_to_guarded_return(if_expr, acc, ctx)
} else {
None
match ctx.find_node_at_offset::<Either<ast::LetStmt, ast::IfExpr>>()? {
Either::Left(let_stmt) => let_stmt_to_guarded_return(let_stmt, acc, ctx),
Either::Right(if_expr) => if_expr_to_guarded_return(if_expr, acc, ctx),
}
}
@ -73,13 +69,7 @@ fn if_expr_to_guarded_return(
return None;
}
// Check if there is an IfLet that we can handle.
let (if_let_pat, cond_expr) = if is_pattern_cond(cond.clone()) {
let let_ = single_let(cond)?;
(Some(let_.pat()?), let_.expr()?)
} else {
(None, cond)
};
let let_chains = flat_let_chain(cond);
let then_block = if_expr.then_branch()?;
let then_block = then_block.stmt_list()?;
@ -106,11 +96,7 @@ fn if_expr_to_guarded_return(
let parent_container = parent_block.syntax().parent()?;
let early_expression: ast::Expr = match parent_container.kind() {
WHILE_EXPR | LOOP_EXPR | FOR_EXPR => make::expr_continue(None),
FN => make::expr_return(None),
_ => return None,
};
let early_expression: ast::Expr = early_expression(parent_container, &ctx.sema)?;
then_block.syntax().first_child_or_token().map(|t| t.kind() == T!['{'])?;
@ -132,32 +118,42 @@ fn if_expr_to_guarded_return(
target,
|edit| {
let if_indent_level = IndentLevel::from_node(if_expr.syntax());
let replacement = match if_let_pat {
None => {
// If.
let new_expr = {
let then_branch =
make::block_expr(once(make::expr_stmt(early_expression).into()), None);
let cond = invert_boolean_expression_legacy(cond_expr);
make::expr_if(cond, then_branch, None).indent(if_indent_level)
};
new_expr.syntax().clone()
}
Some(pat) => {
let replacement = let_chains.into_iter().map(|expr| {
if let ast::Expr::LetExpr(let_expr) = &expr
&& let (Some(pat), Some(expr)) = (let_expr.pat(), let_expr.expr())
{
// If-let.
let let_else_stmt = make::let_else_stmt(
pat,
None,
cond_expr,
ast::make::tail_only_block_expr(early_expression),
expr,
ast::make::tail_only_block_expr(early_expression.clone()),
);
let let_else_stmt = let_else_stmt.indent(if_indent_level);
let_else_stmt.syntax().clone()
} else {
// If.
let new_expr = {
let then_branch = make::block_expr(
once(make::expr_stmt(early_expression.clone()).into()),
None,
);
let cond = invert_boolean_expression_legacy(expr);
make::expr_if(cond, then_branch, None).indent(if_indent_level)
};
new_expr.syntax().clone()
}
};
});
let newline = &format!("\n{if_indent_level}");
let then_statements = replacement
.children_with_tokens()
.enumerate()
.flat_map(|(i, node)| {
(i != 0)
.then(|| make::tokens::whitespace(newline).into())
.into_iter()
.chain(node.children_with_tokens())
})
.chain(
then_block_items
.syntax()
@ -201,11 +197,7 @@ fn let_stmt_to_guarded_return(
let_stmt.syntax().parent()?.ancestors().find_map(ast::BlockExpr::cast)?;
let parent_container = parent_block.syntax().parent()?;
match parent_container.kind() {
WHILE_EXPR | LOOP_EXPR | FOR_EXPR => make::expr_continue(None),
FN => make::expr_return(None),
_ => return None,
}
early_expression(parent_container, &ctx.sema)?
};
acc.add(
@ -232,6 +224,54 @@ fn let_stmt_to_guarded_return(
)
}
/// Builds the "early exit" expression for a guarded return, based on the
/// construct enclosing the block: `continue` inside loops, `return` inside
/// functions and closures. When the enclosing fn/closure yields an `Option`,
/// a `return None` is produced instead of a bare `return` so the generated
/// code still type-checks. Returns `None` for any other container.
fn early_expression(
parent_container: SyntaxNode,
sema: &Semantics<'_, RootDatabase>,
) -> Option<ast::Expr> {
let return_none_expr = || {
let none_expr = make::expr_path(make::ext::ident_path("None"));
make::expr_return(Some(none_expr))
};
// Function whose declared return type is an `Option`.
if let Some(fn_) = ast::Fn::cast(parent_container.clone())
&& let Some(fn_def) = sema.to_def(&fn_)
&& let Some(TryEnum::Option) = TryEnum::from_ty(sema, &fn_def.ret_type(sema.db))
{
return Some(return_none_expr());
}
// Closure whose body's inferred type is an `Option`.
if let Some(body) = ast::ClosureExpr::cast(parent_container.clone()).and_then(|it| it.body())
&& let Some(ret_ty) = sema.type_of_expr(&body).map(TypeInfo::original)
&& let Some(TryEnum::Option) = TryEnum::from_ty(sema, &ret_ty)
{
return Some(return_none_expr());
}
Some(match parent_container.kind() {
WHILE_EXPR | LOOP_EXPR | FOR_EXPR => make::expr_continue(None),
FN | CLOSURE_EXPR => make::expr_return(None),
_ => return None,
})
}
/// Flattens a `&&`-joined condition into its top-level operands, in source
/// order. Each `let`-pattern operand stays its own element, while runs of
/// consecutive non-`let` operands are folded back into a single `&&`
/// expression so the caller can negate them as one guard.
fn flat_let_chain(mut expr: ast::Expr) -> Vec<ast::Expr> {
let mut chains = vec![];
// `&&` is left-associative, so we walk from the rightmost operand towards
// the left; `chains` is therefore built in reverse and flipped at the end.
while let ast::Expr::BinExpr(bin_expr) = &expr
&& bin_expr.op_kind() == Some(ast::BinaryOp::LogicOp(ast::LogicOp::And))
&& let (Some(lhs), Some(rhs)) = (bin_expr.lhs(), bin_expr.rhs())
{
if let Some(last) = chains.pop_if(|last| !matches!(last, ast::Expr::LetExpr(_))) {
// The previously collected (right-hand) operand was a plain boolean:
// merge it with this one, keeping source order (`rhs && last`).
chains.push(make::expr_bin_op(rhs, ast::BinaryOp::LogicOp(ast::LogicOp::And), last));
} else {
chains.push(rhs);
}
expr = lhs;
}
// The leftmost operand (no longer a `&&` expression) ends the walk and is
// pushed as its own element.
chains.push(expr);
chains.reverse();
chains
}
#[cfg(test)]
mod tests {
use crate::tests::{check_assist, check_assist_not_applicable};
@ -268,6 +308,71 @@ fn main() {
);
}
// Inside a function returning `Option`, the guard must exit with
// `return None` (a bare `return` would not type-check).
#[test]
fn convert_inside_fn_return_option() {
check_assist(
convert_to_guarded_return,
r#"
//- minicore: option
fn ret_option() -> Option<()> {
bar();
if$0 true {
foo();
// comment
bar();
}
}
"#,
r#"
fn ret_option() -> Option<()> {
bar();
if false {
return None;
}
foo();
// comment
bar();
}
"#,
);
}
// A closure body is a valid container for the guarded return: the guard
// exits with a plain `return`, comments in the body are preserved.
#[test]
fn convert_inside_closure() {
check_assist(
convert_to_guarded_return,
r#"
fn main() {
let _f = || {
bar();
if$0 true {
foo();
// comment
bar();
}
}
}
"#,
r#"
fn main() {
let _f = || {
bar();
if false {
return;
}
foo();
// comment
bar();
}
}
"#,
);
}
#[test]
fn convert_let_inside_fn() {
check_assist(
@ -316,6 +421,82 @@ fn main() {
);
}
// An `if let` inside a `loop` converts into a `let ... else { continue }`,
// since the enclosing container is a loop rather than the function.
#[test]
fn convert_if_let_result_inside_let() {
check_assist(
convert_to_guarded_return,
r#"
fn main() {
let _x = loop {
if$0 let Ok(x) = Err(92) {
foo(x);
}
};
}
"#,
r#"
fn main() {
let _x = loop {
let Ok(x) = Err(92) else { continue };
foo(x);
};
}
"#,
);
}
// A let-chain condition splits into one guard per segment: each `let`
// becomes a `let ... else`, while consecutive boolean operands are merged
// into a single inverted `if` guard.
#[test]
fn convert_if_let_chain_result() {
check_assist(
convert_to_guarded_return,
r#"
fn main() {
if$0 let Ok(x) = Err(92)
&& x < 30
&& let Some(y) = Some(8)
{
foo(x, y);
}
}
"#,
r#"
fn main() {
let Ok(x) = Err(92) else { return };
if x >= 30 {
return;
}
let Some(y) = Some(8) else { return };
foo(x, y);
}
"#,
);
check_assist(
convert_to_guarded_return,
r#"
fn main() {
if$0 let Ok(x) = Err(92)
&& x < 30
&& y < 20
&& let Some(y) = Some(8)
{
foo(x, y);
}
}
"#,
r#"
fn main() {
let Ok(x) = Err(92) else { return };
if !(x < 30 && y < 20) {
return;
}
let Some(y) = Some(8) else { return };
foo(x, y);
}
"#,
);
}
#[test]
fn convert_let_ok_inside_fn() {
check_assist(
@ -560,6 +741,32 @@ fn main() {
);
}
// `let` statements in an `Option`-returning function get a
// `let Some(..) = .. else { return None }` guard, mirroring the `if` case.
#[test]
fn convert_let_stmt_inside_fn_return_option() {
check_assist(
convert_to_guarded_return,
r#"
//- minicore: option
fn foo() -> Option<i32> {
None
}
fn ret_option() -> Option<i32> {
let x$0 = foo();
}
"#,
r#"
fn foo() -> Option<i32> {
None
}
fn ret_option() -> Option<i32> {
let Some(x) = foo() else { return None };
}
"#,
);
}
#[test]
fn convert_let_stmt_inside_loop() {
check_assist(

View file

@ -7,7 +7,7 @@ use ide_db::{
search::{FileReference, SearchScope},
};
use itertools::Itertools;
use syntax::ast::syntax_factory::SyntaxFactory;
use syntax::ast::{HasName, syntax_factory::SyntaxFactory};
use syntax::syntax_editor::SyntaxEditor;
use syntax::{AstNode, Edition, SmolStr, SyntaxNode, ToSmolStr, ast};
@ -71,13 +71,14 @@ fn destructure_struct_binding_impl(
struct StructEditData {
ident_pat: ast::IdentPat,
name: ast::Name,
kind: hir::StructKind,
struct_def_path: hir::ModPath,
visible_fields: Vec<hir::Field>,
usages: Vec<FileReference>,
names_in_scope: FxHashSet<SmolStr>,
has_private_members: bool,
is_nested: bool,
need_record_field_name: bool,
is_ref: bool,
edition: Edition,
}
@ -114,7 +115,11 @@ fn collect_data(ident_pat: ast::IdentPat, ctx: &AssistContext<'_>) -> Option<Str
}
let is_ref = ty.is_reference();
let is_nested = ident_pat.syntax().parent().and_then(ast::RecordPatField::cast).is_some();
let need_record_field_name = ident_pat
.syntax()
.parent()
.and_then(ast::RecordPatField::cast)
.is_some_and(|field| field.colon_token().is_none());
let usages = ctx
.sema
@ -133,6 +138,7 @@ fn collect_data(ident_pat: ast::IdentPat, ctx: &AssistContext<'_>) -> Option<Str
let names_in_scope = get_names_in_scope(ctx, &ident_pat, &usages).unwrap_or_default();
Some(StructEditData {
name: ident_pat.name()?,
ident_pat,
kind,
struct_def_path,
@ -140,7 +146,7 @@ fn collect_data(ident_pat: ast::IdentPat, ctx: &AssistContext<'_>) -> Option<Str
has_private_members,
visible_fields,
names_in_scope,
is_nested,
need_record_field_name,
is_ref,
edition: module.krate().edition(ctx.db()),
})
@ -177,6 +183,7 @@ fn destructure_pat(
field_names: &[(SmolStr, SmolStr)],
) {
let ident_pat = &data.ident_pat;
let name = &data.name;
let struct_path = mod_path_to_ast(&data.struct_def_path, data.edition);
let is_ref = ident_pat.ref_token().is_some();
@ -194,9 +201,9 @@ fn destructure_pat(
hir::StructKind::Record => {
let fields = field_names.iter().map(|(old_name, new_name)| {
// Use shorthand syntax if possible
if old_name == new_name && !is_mut {
if old_name == new_name {
make.record_pat_field_shorthand(
make.ident_pat(false, false, make.name(old_name)).into(),
make.ident_pat(is_ref, is_mut, make.name(old_name)).into(),
)
} else {
make.record_pat_field(
@ -215,8 +222,8 @@ fn destructure_pat(
// If the binding is nested inside a record, we need to wrap the new
// destructured pattern in a non-shorthand record field
let destructured_pat = if data.is_nested {
make.record_pat_field(make.name_ref(&ident_pat.to_string()), new_pat).syntax().clone()
let destructured_pat = if data.need_record_field_name {
make.record_pat_field(make.name_ref(&name.to_string()), new_pat).syntax().clone()
} else {
new_pat.syntax().clone()
};
@ -288,7 +295,7 @@ fn build_usage_edit(
Some(field_expr) => Some({
let field_name: SmolStr = field_expr.name_ref()?.to_string().into();
let new_field_name = field_names.get(&field_name)?;
let new_expr = make.expr_path(ast::make::ext::ident_path(new_field_name));
let new_expr = ast::make::expr_path(ast::make::ext::ident_path(new_field_name));
// If struct binding is a reference, we might need to deref field usages
if data.is_ref {
@ -298,7 +305,7 @@ fn build_usage_edit(
ref_data.wrap_expr(new_expr).syntax().clone_for_update(),
)
} else {
(field_expr.syntax().clone(), new_expr.syntax().clone())
(field_expr.syntax().clone(), new_expr.syntax().clone_for_update())
}
}),
None => Some((
@ -579,7 +586,7 @@ mod tests {
struct Foo { bar: i32, baz: i32 }
fn main() {
let Foo { bar: mut bar, baz: mut baz } = Foo { bar: 1, baz: 2 };
let Foo { mut bar, mut baz } = Foo { bar: 1, baz: 2 };
let bar2 = bar;
let baz2 = &baz;
}
@ -587,6 +594,86 @@ mod tests {
)
}
// A `mut` binding in a shorthand record field destructures with the
// full field syntax, propagating `mut` onto the inner bindings.
#[test]
fn mut_record_field() {
check_assist(
destructure_struct_binding,
r#"
struct Foo { x: () }
struct Bar { foo: Foo }
fn f(Bar { mut $0foo }: Bar) {}
"#,
r#"
struct Foo { x: () }
struct Bar { foo: Foo }
fn f(Bar { foo: Foo { mut x } }: Bar) {}
"#,
)
}
// `ref` on a shorthand record field propagates onto the inner bindings,
// and field-access usages are rewritten to deref the new `ref` binding.
#[test]
fn ref_record_field() {
check_assist(
destructure_struct_binding,
r#"
struct Foo { x: () }
struct Bar { foo: Foo }
fn f(Bar { ref $0foo }: Bar) {
let _ = foo.x;
}
"#,
r#"
struct Foo { x: () }
struct Bar { foo: Foo }
fn f(Bar { foo: Foo { ref x } }: Bar) {
let _ = *x;
}
"#,
)
}
// Combined `ref mut` on a shorthand record field is carried onto the
// inner bindings, with usages deref'd accordingly.
#[test]
fn ref_mut_record_field() {
check_assist(
destructure_struct_binding,
r#"
struct Foo { x: () }
struct Bar { foo: Foo }
fn f(Bar { ref mut $0foo }: Bar) {
let _ = foo.x;
}
"#,
r#"
struct Foo { x: () }
struct Bar { foo: Foo }
fn f(Bar { foo: Foo { ref mut x } }: Bar) {
let _ = *x;
}
"#,
)
}
// A renamed (non-shorthand, `field: pat`) record binding destructures
// without re-adding the field name wrapper, since the colon is present.
#[test]
fn ref_mut_record_renamed_field() {
check_assist(
destructure_struct_binding,
r#"
struct Foo { x: () }
struct Bar { foo: Foo }
fn f(Bar { foo: ref mut $0foo1 }: Bar) {
let _ = foo1.x;
}
"#,
r#"
struct Foo { x: () }
struct Bar { foo: Foo }
fn f(Bar { foo: Foo { ref mut x } }: Bar) {
let _ = *x;
}
"#,
)
}
#[test]
fn mut_ref() {
check_assist(
@ -610,6 +697,52 @@ mod tests {
)
}
// Destructuring a reference to a record struct: `&foo.bar` usages become
// the bare binding (no extra `&(*...)` wrapping is introduced).
#[test]
fn ref_not_add_parenthesis_and_deref_record() {
check_assist(
destructure_struct_binding,
r#"
struct Foo { bar: i32, baz: i32 }
fn main() {
let $0foo = &Foo { bar: 1, baz: 2 };
let _ = &foo.bar;
}
"#,
r#"
struct Foo { bar: i32, baz: i32 }
fn main() {
let Foo { bar, baz } = &Foo { bar: 1, baz: 2 };
let _ = bar;
}
"#,
)
}
// Same as the record case above, but for a tuple struct behind a reference:
// positional fields get `_N` names and usages lose the `&`.
#[test]
fn ref_not_add_parenthesis_and_deref_tuple() {
check_assist(
destructure_struct_binding,
r#"
struct Foo(i32, i32);
fn main() {
let $0foo = &Foo(1, 2);
let _ = &foo.0;
}
"#,
r#"
struct Foo(i32, i32);
fn main() {
let Foo(_0, _1) = &Foo(1, 2);
let _ = _0;
}
"#,
)
}
#[test]
fn record_struct_name_collision() {
check_assist(

View file

@ -24,7 +24,7 @@ use crate::{AssistContext, AssistId, Assists};
// struct Bar { y: Y, z: Z }
//
// fn foo(bar: Bar) {
// let Bar { y, z } = bar;
// let Bar { y, z } = bar;
// }
// ```
fn expand_record_rest_pattern(
@ -53,18 +53,17 @@ fn expand_record_rest_pattern(
|builder| {
let make = SyntaxFactory::with_mappings();
let mut editor = builder.make_editor(rest_pat.syntax());
let new_field_list = make.record_pat_field_list(old_field_list.fields(), None);
for (f, _) in missing_fields.iter() {
let field = make.record_pat_field_shorthand(
let new_fields = old_field_list.fields().chain(missing_fields.iter().map(|(f, _)| {
make.record_pat_field_shorthand(
make.ident_pat(
false,
false,
make.name(&f.name(ctx.sema.db).display_no_db(edition).to_smolstr()),
)
.into(),
);
new_field_list.add_field(field);
}
)
}));
let new_field_list = make.record_pat_field_list(new_fields, None);
editor.replace(old_field_list.syntax(), new_field_list.syntax());
@ -114,9 +113,7 @@ fn expand_tuple_struct_rest_pattern(
};
let rest_pat = rest_pat.into();
let mut pats = pat.fields();
let prefix_count = pats.by_ref().position(|p| p == rest_pat)?;
let suffix_count = pats.count();
let (prefix_count, suffix_count) = calculate_counts(&rest_pat, pat.fields())?;
if fields.len().saturating_sub(prefix_count).saturating_sub(suffix_count) == 0 {
cov_mark::hit!(no_missing_fields_tuple_struct);
@ -142,16 +139,13 @@ fn expand_tuple_struct_rest_pattern(
pat.fields()
.take(prefix_count)
.chain(fields[prefix_count..fields.len() - suffix_count].iter().map(|f| {
make.ident_pat(
false,
false,
match name_gen.for_type(&f.ty(ctx.sema.db), ctx.sema.db, ctx.edition())
{
Some(name) => make.name(&name),
None => make.name(&format!("_{}", f.index())),
},
gen_unnamed_pat(
ctx,
&make,
&mut name_gen,
&f.ty(ctx.db()).to_type(ctx.sema.db),
f.index(),
)
.into()
}))
.chain(pat.fields().skip(prefix_count + 1)),
);
@ -164,6 +158,134 @@ fn expand_tuple_struct_rest_pattern(
)
}
// Assist: expand_tuple_rest_pattern
//
// Fills fields by replacing rest pattern in tuple patterns.
//
// ```
// fn foo(bar: (char, i32, i32)) {
// let (ch, ..$0) = bar;
// }
// ```
// ->
// ```
// fn foo(bar: (char, i32, i32)) {
// let (ch, _1, _2) = bar;
// }
// ```
fn expand_tuple_rest_pattern(
acc: &mut Assists,
ctx: &AssistContext<'_>,
pat: ast::TuplePat,
rest_pat: ast::RestPat,
) -> Option<()> {
// Element types of the tuple, taken from the inferred type of the pattern.
let fields = ctx.sema.type_of_pat(&pat.clone().into())?.original.tuple_fields(ctx.db());
let len = fields.len();
let rest_pat = rest_pat.into();
// How many explicit patterns appear before and after the `..`.
let (prefix_count, suffix_count) = calculate_counts(&rest_pat, pat.fields())?;
if len.saturating_sub(prefix_count).saturating_sub(suffix_count) == 0 {
// The `..` elides nothing; offering the assist would be pointless.
cov_mark::hit!(no_missing_fields_tuple);
return None;
}
// Bail out if the pattern originates in a different file (e.g. through a macro).
let old_range = ctx.sema.original_range_opt(pat.syntax())?;
if old_range.file_id != ctx.file_id() {
return None;
}
acc.add(
AssistId::refactor_rewrite("expand_tuple_rest_pattern"),
"Fill tuple fields",
rest_pat.syntax().text_range(),
|builder| {
let make = SyntaxFactory::with_mappings();
let mut editor = builder.make_editor(rest_pat.syntax());
// Avoid generating names that collide with locals already in scope.
let mut name_gen = NameGenerator::new_from_scope_locals(ctx.sema.scope(pat.syntax()));
// Keep the existing prefix/suffix patterns; synthesize bindings for
// the elided middle elements (indices are offset by the prefix).
let new_pat = make.tuple_pat(
pat.fields()
.take(prefix_count)
.chain(fields[prefix_count..len - suffix_count].iter().enumerate().map(
|(index, ty)| {
gen_unnamed_pat(ctx, &make, &mut name_gen, ty, prefix_count + index)
},
))
.chain(pat.fields().skip(prefix_count + 1)),
);
editor.replace(pat.syntax(), new_pat.syntax());
editor.add_mappings(make.finish_with_mappings());
builder.add_file_edits(ctx.vfs_file_id(), editor);
},
)
}
// Assist: expand_slice_rest_pattern
//
// Fills fields by replacing rest pattern in slice patterns.
//
// ```
// fn foo(bar: [i32; 3]) {
// let [first, ..$0] = bar;
// }
// ```
// ->
// ```
// fn foo(bar: [i32; 3]) {
// let [first, _1, _2] = bar;
// }
// ```
fn expand_slice_rest_pattern(
acc: &mut Assists,
ctx: &AssistContext<'_>,
pat: ast::SlicePat,
rest_pat: ast::RestPat,
) -> Option<()> {
// Requires a fixed-length array type: element type plus a known length.
let (ty, len) = ctx.sema.type_of_pat(&pat.clone().into())?.original.as_array(ctx.db())?;
let rest_pat = rest_pat.into();
// How many explicit patterns appear before and after the `..`.
let (prefix_count, suffix_count) = calculate_counts(&rest_pat, pat.pats())?;
if len.saturating_sub(prefix_count).saturating_sub(suffix_count) == 0 {
// The `..` elides nothing; offering the assist would be pointless.
cov_mark::hit!(no_missing_fields_slice);
return None;
}
// Bail out if the pattern originates in a different file (e.g. through a macro).
let old_range = ctx.sema.original_range_opt(pat.syntax())?;
if old_range.file_id != ctx.file_id() {
return None;
}
acc.add(
AssistId::refactor_rewrite("expand_slice_rest_pattern"),
"Fill slice fields",
rest_pat.syntax().text_range(),
|builder| {
let make = SyntaxFactory::with_mappings();
let mut editor = builder.make_editor(rest_pat.syntax());
// Avoid generating names that collide with locals already in scope.
let mut name_gen = NameGenerator::new_from_scope_locals(ctx.sema.scope(pat.syntax()));
// Keep the existing prefix/suffix patterns; every elided element has
// the same type `ty`, so only the index varies.
let new_pat = make.slice_pat(
pat.pats()
.take(prefix_count)
.chain(
(prefix_count..len - suffix_count)
.map(|index| gen_unnamed_pat(ctx, &make, &mut name_gen, &ty, index)),
)
.chain(pat.pats().skip(prefix_count + 1)),
);
editor.replace(pat.syntax(), new_pat.syntax());
editor.add_mappings(make.finish_with_mappings());
builder.add_file_edits(ctx.vfs_file_id(), editor);
},
)
}
pub(crate) fn expand_rest_pattern(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> {
let rest_pat = ctx.find_node_at_offset::<ast::RestPat>()?;
let parent = rest_pat.syntax().parent()?;
@ -171,15 +293,40 @@ pub(crate) fn expand_rest_pattern(acc: &mut Assists, ctx: &AssistContext<'_>) ->
match parent {
ast::RecordPatFieldList(it) => expand_record_rest_pattern(acc, ctx, it.syntax().parent().and_then(ast::RecordPat::cast)?, rest_pat),
ast::TupleStructPat(it) => expand_tuple_struct_rest_pattern(acc, ctx, it, rest_pat),
// FIXME
// ast::TuplePat(it) => (),
// FIXME
// ast::SlicePat(it) => (),
ast::TuplePat(it) => expand_tuple_rest_pattern(acc, ctx, it, rest_pat),
ast::SlicePat(it) => expand_slice_rest_pattern(acc, ctx, it, rest_pat),
_ => None,
}
}
}
/// Creates an identifier pattern for an elided element: a type-derived name
/// from `name_gen` when one is available, otherwise the positional fallback
/// `_<index>`.
fn gen_unnamed_pat(
    ctx: &AssistContext<'_>,
    make: &SyntaxFactory,
    name_gen: &mut NameGenerator,
    ty: &hir::Type<'_>,
    index: usize,
) -> ast::Pat {
    let name = name_gen
        .for_type(ty, ctx.sema.db, ctx.edition())
        .map_or_else(|| make.name(&format!("_{index}")), |generated| make.name(&generated));
    make.ident_pat(false, false, name).into()
}
/// Splits `pats` around `rest_pat`, returning how many patterns come before
/// it and how many come after. Returns `None` when `rest_pat` does not occur
/// among `pats`.
fn calculate_counts(
    rest_pat: &ast::Pat,
    pats: ast::AstChildren<ast::Pat>,
) -> Option<(usize, usize)> {
    let mut prefix = None;
    let mut suffix = 0;
    for (idx, pat) in pats.enumerate() {
        match prefix {
            // Still scanning for the rest pattern itself.
            None if pat == *rest_pat => prefix = Some(idx),
            None => {}
            // Everything past the rest pattern counts as suffix.
            Some(_) => suffix += 1,
        }
    }
    Some((prefix?, suffix))
}
#[cfg(test)]
mod tests {
use super::*;
@ -211,7 +358,7 @@ enum Foo {
fn bar(foo: Foo) {
match foo {
Foo::A(_) => false,
Foo::B{ y, z } => true,
Foo::B{ y, z } => true,
};
}
"#,
@ -272,7 +419,7 @@ struct Bar {
}
fn foo(bar: Bar) {
let Bar { y, z } = bar;
let Bar { y, z } = bar;
}
"#,
);
@ -349,6 +496,79 @@ fn foo(bar: Bar) {
)
}
// Expanding `..` in a plain tuple pattern fills in `_N` bindings, both with
// and without trailing explicit patterns after the rest pattern.
#[test]
fn fill_tuple_with_fields() {
check_assist(
expand_rest_pattern,
r#"
fn foo(bar: (char, i32, i32)) {
let (ch, ..$0) = bar;
}
"#,
r#"
fn foo(bar: (char, i32, i32)) {
let (ch, _1, _2) = bar;
}
"#,
);
check_assist(
expand_rest_pattern,
r#"
fn foo(bar: (char, i32, i32)) {
let (ch, ..$0, end) = bar;
}
"#,
r#"
fn foo(bar: (char, i32, i32)) {
let (ch, _1, end) = bar;
}
"#,
);
}
// Expanding `..` in an array (slice) pattern fills `_N` bindings, with the
// generated indices continuing from the explicit prefix and respecting a
// trailing suffix pattern.
#[test]
fn fill_array_with_fields() {
check_assist(
expand_rest_pattern,
r#"
fn foo(bar: [i32; 4]) {
let [first, ..$0] = bar;
}
"#,
r#"
fn foo(bar: [i32; 4]) {
let [first, _1, _2, _3] = bar;
}
"#,
);
check_assist(
expand_rest_pattern,
r#"
fn foo(bar: [i32; 4]) {
let [first, second, ..$0] = bar;
}
"#,
r#"
fn foo(bar: [i32; 4]) {
let [first, second, _2, _3] = bar;
}
"#,
);
check_assist(
expand_rest_pattern,
r#"
fn foo(bar: [i32; 4]) {
let [first, second, ..$0, end] = bar;
}
"#,
r#"
fn foo(bar: [i32; 4]) {
let [first, second, _2, end] = bar;
}
"#,
);
}
#[test]
fn fill_fields_struct_generated_by_macro() {
check_assist(
@ -376,7 +596,7 @@ macro_rules! position {
position!(usize);
fn macro_call(pos: Pos) {
let Pos { x, y } = pos;
let Pos { x, y } = pos;
}
"#,
);
@ -420,7 +640,7 @@ enum_gen!(usize);
fn macro_call(foo: Foo) {
match foo {
Foo::A(_) => false,
Foo::B{ x, y } => true,
Foo::B{ x, y } => true,
}
}
"#,
@ -484,6 +704,8 @@ fn bar(foo: Foo) {
// This is still possible even though it's meaningless
cov_mark::check!(no_missing_fields);
cov_mark::check!(no_missing_fields_tuple_struct);
cov_mark::check!(no_missing_fields_tuple);
cov_mark::check!(no_missing_fields_slice);
check_assist_not_applicable(
expand_rest_pattern,
r#"
@ -521,6 +743,22 @@ struct Bar(Y, Z)
fn foo(bar: Bar) {
let Bar(y, ..$0, z) = bar;
}
"#,
);
check_assist_not_applicable(
expand_rest_pattern,
r#"
fn foo(bar: (i32, i32)) {
let (y, ..$0, z) = bar;
}
"#,
);
check_assist_not_applicable(
expand_rest_pattern,
r#"
fn foo(bar: [i32; 2]) {
let [y, ..$0, z] = bar;
}
"#,
);
}

View file

@ -285,7 +285,7 @@ fn peel_parens(mut expr: ast::Expr) -> ast::Expr {
/// In general that's true for any expression, but in some cases that would produce invalid code.
fn valid_target_expr(node: SyntaxNode) -> Option<ast::Expr> {
match node.kind() {
SyntaxKind::PATH_EXPR | SyntaxKind::LOOP_EXPR => None,
SyntaxKind::PATH_EXPR | SyntaxKind::LOOP_EXPR | SyntaxKind::LET_EXPR => None,
SyntaxKind::BREAK_EXPR => ast::BreakExpr::cast(node).and_then(|e| e.expr()),
SyntaxKind::RETURN_EXPR => ast::ReturnExpr::cast(node).and_then(|e| e.expr()),
SyntaxKind::BLOCK_EXPR => {
@ -1403,6 +1403,25 @@ fn main() {
);
}
// Selecting just the `let` keyword extracts the scrutinee expression,
// not the `let` expression itself (which is not a valid extraction target).
#[test]
fn extract_var_let_expr() {
check_assist_by_label(
extract_variable,
r#"
fn main() {
if $0let$0 Some(x) = Some(2+2) {}
}
"#,
r#"
fn main() {
let $0var_name = Some(2+2);
if let Some(x) = var_name {}
}
"#,
"Extract into variable",
);
}
#[test]
fn extract_var_for_cast() {
check_assist_by_label(
@ -1738,6 +1757,14 @@ fn main() {
check_assist_not_applicable(extract_variable, "fn main() { loop { $0break$0; }; }");
}
// A selection covering the whole `let` expression must not be extractable:
// LET_EXPR is rejected by valid_target_expr.
#[test]
fn extract_var_for_let_expr_not_applicable() {
check_assist_not_applicable(
extract_variable,
"fn main() { if $0let Some(x) = Some(2+2) {} }",
);
}
#[test]
fn extract_var_unit_expr_not_applicable() {
check_assist_not_applicable(

View file

@ -39,6 +39,9 @@ pub(crate) fn generate_default_from_enum_variant(
cov_mark::hit!(test_gen_default_on_non_unit_variant_not_implemented);
return None;
}
if !variant.syntax().text_range().contains_range(ctx.selection_trimmed()) {
return None;
}
if existing_default_impl(&ctx.sema, &variant).is_some() {
cov_mark::hit!(test_gen_default_impl_already_exists);
@ -114,6 +117,49 @@ impl Default for Variant {
);
}
// With a selection fully inside one variant, that variant (not the first one)
// becomes the `Default` implementation.
#[test]
fn test_generate_default_selected_variant() {
check_assist(
generate_default_from_enum_variant,
r#"
//- minicore: default
enum Variant {
Undefined,
$0Minor$0,
Major,
}
"#,
r#"
enum Variant {
Undefined,
Minor,
Major,
}
impl Default for Variant {
fn default() -> Self {
Self::Minor
}
}
"#,
);
}
#[test]
fn test_generate_default_not_applicable_with_multiple_variant_selection() {
check_assist_not_applicable(
generate_default_from_enum_variant,
r#"
//- minicore: default
enum Variant {
Undefined,
$0Minor,
M$0ajor,
}
"#,
);
}
#[test]
fn test_generate_default_already_implemented() {
cov_mark::check!(test_gen_default_impl_already_exists);

View file

@ -124,6 +124,18 @@ mod tests {
)
}
#[test]
fn invert_if_doesnt_apply_with_if_let_chain() {
check_assist_not_applicable(
invert_if,
"fn f() { i$0f x && let Some(_) = Some(1) { 1 } else { 0 } }",
);
check_assist_not_applicable(
invert_if,
"fn f() { i$0f let Some(_) = Some(1) && x { 1 } else { 0 } }",
);
}
#[test]
fn invert_if_option_case() {
check_assist(

View file

@ -53,6 +53,10 @@ pub(crate) fn pull_assignment_up(acc: &mut Assists, ctx: &AssistContext<'_>) ->
};
let tgt: ast::Expr = if let Some(if_expr) = ctx.find_node_at_offset::<ast::IfExpr>() {
let if_expr = std::iter::successors(Some(if_expr), |it| {
it.syntax().parent().and_then(ast::IfExpr::cast)
})
.last()?;
collector.collect_if(&if_expr)?;
if_expr.into()
} else if let Some(match_expr) = ctx.find_node_at_offset::<ast::MatchExpr>() {
@ -237,6 +241,37 @@ fn foo() {
);
}
#[test]
fn test_pull_assignment_up_inner_if() {
check_assist(
pull_assignment_up,
r#"
fn foo() {
let mut a = 1;
if true {
a = 2;
} else if true {
$0a = 3;
} else {
a = 4;
}
}"#,
r#"
fn foo() {
let mut a = 1;
a = if true {
2
} else if true {
3
} else {
4
};
}"#,
);
}
#[test]
fn test_pull_assignment_up_match() {
check_assist(

View file

@ -83,7 +83,9 @@ fn compute_dbg_replacement(
let input_expressions = input_expressions
.into_iter()
.filter_map(|(is_sep, group)| (!is_sep).then_some(group))
.map(|mut tokens| syntax::hacks::parse_expr_from_str(&tokens.join(""), Edition::CURRENT))
.map(|tokens| tokens.collect::<Vec<_>>())
.filter(|tokens| !tokens.iter().all(|it| it.kind().is_trivia()))
.map(|tokens| syntax::hacks::parse_expr_from_str(&tokens.iter().join(""), Edition::CURRENT))
.collect::<Option<Vec<ast::Expr>>>()?;
let parent = macro_expr.syntax().parent()?;
@ -268,6 +270,8 @@ fn foo() {
dbg!('x');
dbg!(&n);
dbg!(n);
dbg!(n,);
dbg!(n, );
// needless comment
dbg!("foo");$0
}
@ -281,6 +285,17 @@ fn foo() {
);
}
#[test]
fn test_remove_trailing_comma_dbg() {
check("$0dbg!(1 + 1,)", "1 + 1");
check("$0dbg!(1 + 1, )", "1 + 1");
check("$0dbg!(1 + 1,\n)", "1 + 1");
check("$0dbg!(1 + 1, 2 + 3)", "(1 + 1, 2 + 3)");
check("$0dbg!(1 + 1, 2 + 3 )", "(1 + 1, 2 + 3)");
check("$0dbg!(1 + 1, 2 + 3, )", "(1 + 1, 2 + 3)");
check("$0dbg!(1 + 1, 2 + 3 ,)", "(1 + 1, 2 + 3)");
}
#[test]
fn test_remove_dbg_not_applicable() {
check_assist_not_applicable(remove_dbg, "fn main() {$0vec![1, 2, 3]}");

View file

@ -1,7 +1,7 @@
use ide_db::assists::{AssistId, GroupLabel};
use syntax::{
AstNode, TextRange,
ast::{self, ArithOp, BinaryOp},
AstNode,
ast::{self, ArithOp, BinaryOp, syntax_factory::SyntaxFactory},
};
use crate::assist_context::{AssistContext, Assists};
@ -71,24 +71,31 @@ pub(crate) fn replace_arith_with_wrapping(
fn replace_arith(acc: &mut Assists, ctx: &AssistContext<'_>, kind: ArithKind) -> Option<()> {
let (lhs, op, rhs) = parse_binary_op(ctx)?;
let op_expr = lhs.syntax().parent()?;
if !is_primitive_int(ctx, &lhs) || !is_primitive_int(ctx, &rhs) {
return None;
}
let start = lhs.syntax().text_range().start();
let end = rhs.syntax().text_range().end();
let range = TextRange::new(start, end);
acc.add_group(
&GroupLabel("Replace arithmetic...".into()),
kind.assist_id(),
kind.label(),
range,
op_expr.text_range(),
|builder| {
let mut edit = builder.make_editor(rhs.syntax());
let make = SyntaxFactory::with_mappings();
let method_name = kind.method_name(op);
builder.replace(range, format!("{lhs}.{method_name}({rhs})"))
let needs_parentheses =
lhs.precedence().needs_parentheses_in(ast::prec::ExprPrecedence::Postfix);
let receiver = if needs_parentheses { make.expr_paren(lhs).into() } else { lhs };
let arith_expr =
make.expr_method_call(receiver, make.name_ref(&method_name), make.arg_list([rhs]));
edit.replace(op_expr, arith_expr.syntax());
edit.add_mappings(make.finish_with_mappings());
builder.add_file_edits(ctx.vfs_file_id(), edit);
},
)
}
@ -227,6 +234,23 @@ fn main() {
)
}
#[test]
fn replace_arith_with_wrapping_add_add_parenthesis() {
check_assist(
replace_arith_with_wrapping,
r#"
fn main() {
let x = 1*x $0+ 2;
}
"#,
r#"
fn main() {
let x = (1*x).wrapping_add(2);
}
"#,
)
}
#[test]
fn replace_arith_not_applicable_with_non_empty_selection() {
check_assist_not_applicable(

View file

@ -328,7 +328,14 @@ fn pick_pattern_and_expr_order(
(pat, pat2) => match (binds_name(sema, &pat), binds_name(sema, &pat2)) {
(true, true) => return None,
(true, false) => (pat, guard, expr, expr2),
(false, true) => (pat2, guard2, expr2, expr),
(false, true) => {
// This pattern triggers an invalid transformation.
// See issues #11373, #19443
if let ast::Pat::IdentPat(_) = pat2 {
return None;
}
(pat2, guard2, expr2, expr)
}
_ if is_sad_pat(sema, &pat) => (pat2, guard2, expr2, expr),
(false, false) => (pat, guard, expr, expr2),
},
@ -1892,4 +1899,19 @@ fn main() {
"#,
)
}
#[test]
fn test_replace_match_with_if_let_not_applicable_pat2_is_ident_pat() {
check_assist_not_applicable(
replace_match_with_if_let,
r"
fn test(a: i32) {
match$0 a {
1 => code(),
other => code(other),
}
}
",
)
}
}

View file

@ -31,6 +31,9 @@ pub(crate) fn replace_is_method_with_if_let_method(
ast::Expr::MethodCallExpr(call) => call,
_ => return None,
};
if ctx.offset() > if_expr.then_branch()?.stmt_list()?.l_curly_token()?.text_range().end() {
return None;
}
let name_ref = call_expr.name_ref()?;
match name_ref.text().as_str() {
@ -188,6 +191,21 @@ fn main() {
let x = Ok(1);
if x.is_e$0rr() {}
}
"#,
);
}
#[test]
fn replace_is_some_with_if_let_some_not_applicable_after_l_curly() {
check_assist_not_applicable(
replace_is_method_with_if_let_method,
r#"
fn main() {
let x = Some(1);
if x.is_some() {
()$0
}
}
"#,
);
}

View file

@ -1035,7 +1035,41 @@ fn foo(bar: Bar) {
struct Bar { y: Y, z: Z }
fn foo(bar: Bar) {
let Bar { y, z } = bar;
let Bar { y, z } = bar;
}
"#####,
)
}
#[test]
fn doctest_expand_slice_rest_pattern() {
check_doc_test(
"expand_slice_rest_pattern",
r#####"
fn foo(bar: [i32; 3]) {
let [first, ..$0] = bar;
}
"#####,
r#####"
fn foo(bar: [i32; 3]) {
let [first, _1, _2] = bar;
}
"#####,
)
}
#[test]
fn doctest_expand_tuple_rest_pattern() {
check_doc_test(
"expand_tuple_rest_pattern",
r#####"
fn foo(bar: (char, i32, i32)) {
let (ch, ..$0) = bar;
}
"#####,
r#####"
fn foo(bar: (char, i32, i32)) {
let (ch, _1, _2) = bar;
}
"#####,
)

View file

@ -691,6 +691,9 @@ pub(super) fn complete_name(
NameKind::RecordField => {
field::complete_field_list_record_variant(acc, ctx);
}
NameKind::TypeParam => {
acc.add_keyword_snippet(ctx, "const", "const $1: $0");
}
NameKind::ConstParam
| NameKind::Enum
| NameKind::MacroDef
@ -700,7 +703,6 @@ pub(super) fn complete_name(
| NameKind::Static
| NameKind::Struct
| NameKind::Trait
| NameKind::TypeParam
| NameKind::Union
| NameKind::Variant => (),
}

View file

@ -70,7 +70,7 @@ pub(crate) fn complete_known_attribute_input(
lint::complete_lint(acc, ctx, colon_prefix, &existing_lints, &lints);
}
["cfg"] => cfg::complete_cfg(acc, ctx),
["cfg"] | ["cfg_attr"] => cfg::complete_cfg(acc, ctx),
["macro_use"] => macro_use::complete_macro_use(
acc,
ctx,

View file

@ -53,15 +53,33 @@ pub(crate) fn complete_cfg(acc: &mut Completions, ctx: &CompletionContext<'_>) {
acc.add(item.build(ctx.db));
}),
},
None => ctx.krate.potential_cfg(ctx.db).get_cfg_keys().cloned().unique().for_each(|s| {
let s = s.as_str();
let item =
CompletionItem::new(SymbolKind::BuiltinAttr, ctx.source_range(), s, ctx.edition);
acc.add(item.build(ctx.db));
}),
None => ctx
.krate
.potential_cfg(ctx.db)
.get_cfg_keys()
.unique()
.map(|s| (s.as_str(), ""))
.chain(CFG_CONDITION.iter().copied())
.for_each(|(s, snippet)| {
let mut item = CompletionItem::new(
SymbolKind::BuiltinAttr,
ctx.source_range(),
s,
ctx.edition,
);
if let Some(cap) = ctx.config.snippet_cap
&& !snippet.is_empty()
{
item.insert_snippet(cap, snippet);
}
acc.add(item.build(ctx.db));
}),
}
}
const CFG_CONDITION: &[(&str, &str)] =
&[("all", "all($0)"), ("any", "any($0)"), ("not", "not($0)")];
const KNOWN_ARCH: [&str; 20] = [
"aarch64",
"arm",

View file

@ -59,6 +59,7 @@ pub(crate) fn complete_expr_path(
in_block_expr,
in_breakable,
after_if_expr,
before_else_kw,
in_condition,
incomplete_let,
after_incomplete_let,
@ -386,7 +387,7 @@ pub(crate) fn complete_expr_path(
add_keyword("let", "let $1 = $0;");
}
if after_if_expr || after_incomplete_let {
if !before_else_kw && (after_if_expr || after_incomplete_let) {
add_keyword("else", "else {\n $0\n}");
}

View file

@ -28,7 +28,11 @@ pub(crate) fn complete_record_pattern_fields(
record_pat.record_pat_field_list().and_then(|fl| fl.fields().next()).is_some();
match were_fields_specified {
false => un.fields(ctx.db).into_iter().map(|f| (f, f.ty(ctx.db))).collect(),
false => un
.fields(ctx.db)
.into_iter()
.map(|f| (f, f.ty(ctx.db).to_type(ctx.db)))
.collect(),
true => return,
}
}
@ -56,7 +60,11 @@ pub(crate) fn complete_record_expr_fields(
record_expr.record_expr_field_list().and_then(|fl| fl.fields().next()).is_some();
match were_fields_specified {
false => un.fields(ctx.db).into_iter().map(|f| (f, f.ty(ctx.db))).collect(),
false => un
.fields(ctx.db)
.into_iter()
.map(|f| (f, f.ty(ctx.db).to_type(ctx.db)))
.collect(),
true => return,
}
}

View file

@ -144,6 +144,7 @@ pub(crate) struct PathExprCtx<'db> {
pub(crate) in_block_expr: bool,
pub(crate) in_breakable: BreakableKind,
pub(crate) after_if_expr: bool,
pub(crate) before_else_kw: bool,
/// Whether this expression is the direct condition of an if or while expression
pub(crate) in_condition: bool,
pub(crate) incomplete_let: bool,

View file

@ -1,6 +1,7 @@
//! Module responsible for analyzing the code surrounding the cursor for completion.
use std::iter;
use base_db::salsa;
use hir::{ExpandResult, InFile, Semantics, Type, TypeInfo, Variant};
use ide_db::{RootDatabase, active_parameter::ActiveParameter};
use itertools::Either;
@ -85,9 +86,15 @@ pub(super) fn expand_and_analyze<'db>(
let original_offset = expansion.original_offset + relative_offset;
let token = expansion.original_file.token_at_offset(original_offset).left_biased()?;
analyze(sema, expansion, original_token, &token).map(|(analysis, expected, qualifier_ctx)| {
AnalysisResult { analysis, expected, qualifier_ctx, token, original_offset }
})
salsa::attach(sema.db, || analyze(sema, expansion, original_token, &token)).map(
|(analysis, expected, qualifier_ctx)| AnalysisResult {
analysis,
expected,
qualifier_ctx,
token,
original_offset,
},
)
}
fn token_at_offset_ignore_whitespace(file: &SyntaxNode, offset: TextSize) -> Option<SyntaxToken> {
@ -637,6 +644,9 @@ fn expected_type_and_name<'db>(
.or_else(|| it.rhs().and_then(|rhs| sema.type_of_expr(&rhs)))
.map(TypeInfo::original);
(ty, None)
} else if let Some(ast::BinaryOp::LogicOp(_)) = it.op_kind() {
let ty = sema.type_of_expr(&it.clone().into()).map(TypeInfo::original);
(ty, None)
} else {
(None, None)
}
@ -707,9 +717,13 @@ fn expected_type_and_name<'db>(
(ty, None)
},
ast::IfExpr(it) => {
let ty = it.condition()
.and_then(|e| sema.type_of_expr(&e))
.map(TypeInfo::original);
let ty = if let Some(body) = it.then_branch()
&& token.text_range().end() > body.syntax().text_range().start()
{
sema.type_of_expr(&body.into())
} else {
it.condition().and_then(|e| sema.type_of_expr(&e))
}.map(TypeInfo::original);
(ty, None)
},
ast::IdentPat(it) => {
@ -1282,11 +1296,12 @@ fn classify_name_ref<'db>(
let after_incomplete_let = after_incomplete_let(it.clone()).is_some();
let incomplete_expr_stmt =
it.parent().and_then(ast::ExprStmt::cast).map(|it| it.semicolon_token().is_none());
let before_else_kw = before_else_kw(it);
let incomplete_let = it
.parent()
.and_then(ast::LetStmt::cast)
.is_some_and(|it| it.semicolon_token().is_none())
|| after_incomplete_let && incomplete_expr_stmt.unwrap_or(true) && !before_else_kw(it);
|| after_incomplete_let && incomplete_expr_stmt.unwrap_or(true) && !before_else_kw;
let in_value = it.parent().and_then(Either::<ast::LetStmt, ast::ArgList>::cast).is_some();
let impl_ = fetch_immediate_impl(sema, original_file, expr.syntax());
@ -1302,6 +1317,7 @@ fn classify_name_ref<'db>(
in_block_expr,
in_breakable: in_loop_body,
after_if_expr,
before_else_kw,
in_condition,
ref_expr_parent,
after_amp,

View file

@ -278,6 +278,62 @@ fn foo() {
)
}
#[test]
fn expected_type_if_let_chain_bool() {
check_expected_type_and_name(
r#"
fn foo() {
let f = Foo::Quux;
if let c = f && $0 { }
}
"#,
expect![[r#"ty: bool, name: ?"#]],
);
}
#[test]
fn expected_type_if_condition() {
check_expected_type_and_name(
r#"
fn foo() {
if a$0 { }
}
"#,
expect![[r#"ty: bool, name: ?"#]],
);
}
#[test]
fn expected_type_if_body() {
check_expected_type_and_name(
r#"
enum Foo { Bar, Baz, Quux }
fn foo() {
let _: Foo = if true {
$0
};
}
"#,
expect![[r#"ty: Foo, name: ?"#]],
);
check_expected_type_and_name(
r#"
enum Foo { Bar, Baz, Quux }
fn foo() {
let _: Foo = if true {
Foo::Bar
} else {
$0
};
}
"#,
expect![[r#"ty: Foo, name: ?"#]],
);
}
#[test]
fn expected_type_fn_ret_without_leading_char() {
cov_mark::check!(expected_type_fn_ret_without_leading_char);
@ -526,3 +582,16 @@ fn foo() {
expect![[r#"ty: State, name: ?"#]],
);
}
#[test]
fn expected_type_logic_op() {
check_expected_type_and_name(
r#"
enum State { Stop }
fn foo() {
true && $0;
}
"#,
expect![[r#"ty: bool, name: ?"#]],
);
}

View file

@ -163,6 +163,7 @@ fn render_pat(
PatternContext {
param_ctx: Some(ParamContext { kind: ParamKind::Function(_), .. }),
has_type_ascription: false,
parent_pat: None,
..
}
);

View file

@ -26,6 +26,7 @@ mod visibility;
use base_db::{SourceDatabase, salsa};
use expect_test::Expect;
use hir::db::HirDatabase;
use hir::{PrefixKind, setup_tracing};
use ide_db::{
FilePosition, RootDatabase, SnippetCap,
@ -306,8 +307,11 @@ pub(crate) fn get_all_items(
trigger_character: Option<char>,
) -> Vec<CompletionItem> {
let (db, position) = position(code);
let res = salsa::attach(&db, || crate::completions(&db, &config, position, trigger_character))
.map_or_else(Vec::default, Into::into);
let res = salsa::attach(&db, || {
HirDatabase::zalsa_register_downcaster(&db);
crate::completions(&db, &config, position, trigger_character)
})
.map_or_else(Vec::default, Into::into);
// validate
res.iter().for_each(|it| {
let sr = it.source_range;

View file

@ -815,7 +815,10 @@ mod cfg {
#[cfg($0)]
"#,
expect![[r#"
ba all
ba any
ba dbg
ba not
ba opt_level
ba test
ba true
@ -827,7 +830,74 @@ mod cfg {
#[cfg(b$0)]
"#,
expect![[r#"
ba all
ba any
ba dbg
ba not
ba opt_level
ba test
ba true
"#]],
);
}
#[test]
fn inside_cfg_attr() {
check(
r#"
//- /main.rs cfg:test,dbg=false,opt_level=2
#[cfg_attr($0)]
"#,
expect![[r#"
ba all
ba any
ba dbg
ba not
ba opt_level
ba test
ba true
"#]],
);
check(
r#"
//- /main.rs cfg:test,dbg=false,opt_level=2
#[cfg_attr(b$0)]
"#,
expect![[r#"
ba all
ba any
ba dbg
ba not
ba opt_level
ba test
ba true
"#]],
);
check(
r#"
//- /main.rs cfg:test,dbg=false,opt_level=2
#[cfg_attr($0, allow(deprecated))]
"#,
expect![[r#"
ba all
ba any
ba dbg
ba not
ba opt_level
ba test
ba true
"#]],
);
check(
r#"
//- /main.rs cfg:test,dbg=false,opt_level=2
#[cfg_attr(b$0, allow(deprecated))]
"#,
expect![[r#"
ba all
ba any
ba dbg
ba not
ba opt_level
ba test
ba true
@ -852,6 +922,20 @@ mod cfg {
"#]],
);
}
#[test]
fn inside_conditional() {
check_edit(
"all",
r#"
//- /main.rs cfg:test,dbg=false,opt_level=2
#[cfg($0)]
"#,
r#"
#[cfg(all($0))]
"#,
);
}
}
mod derive {

View file

@ -271,8 +271,6 @@ fn complete_in_block() {
sn macro_rules
sn pd
sn ppd
ex false
ex true
"#]],
)
}
@ -1668,12 +1666,138 @@ fn foo() { let x = if foo {} $0; let y = 92; }
fn foo() { let x = if foo {} $0 else {}; }
"#,
expect![[r#"
fn foo fn()
fn foo() fn()
bt u32 u32
kw async
kw const
kw crate::
kw else if
kw enum
kw extern
kw false
kw fn
kw for
kw if
kw if let
kw impl
kw impl for
kw let
kw letm
kw loop
kw match
kw mod
kw return
kw self::
kw static
kw struct
kw trait
kw true
kw type
kw union
kw unsafe
kw use
kw while
kw while let
sn macro_rules
sn pd
sn ppd
"#]],
);
check(
r#"
fn foo() { let x = if foo {} $0 else if true {}; }
"#,
expect![[r#"
fn foo() fn()
bt u32 u32
kw async
kw const
kw crate::
kw else if
kw enum
kw extern
kw false
kw fn
kw for
kw if
kw if let
kw impl
kw impl for
kw let
kw letm
kw loop
kw match
kw mod
kw return
kw self::
kw static
kw struct
kw trait
kw true
kw type
kw union
kw unsafe
kw use
kw while
kw while let
sn macro_rules
sn pd
sn ppd
"#]],
);
check(
r#"
fn foo() { let x = if foo {} el$0 else if true {} else {}; }
"#,
expect![[r#"
fn foo() fn()
lc x ()
bt u32 u32
kw async
kw const
kw crate::
kw else if
kw enum
kw extern
kw false
kw fn
kw for
kw if
kw if let
kw impl
kw impl for
kw let
kw letm
kw loop
kw match
kw mod
kw return
kw self::
kw static
kw struct
kw trait
kw true
kw type
kw union
kw unsafe
kw use
kw while
kw while let
sn macro_rules
sn pd
sn ppd
"#]],
);
check(
r#"
fn foo() { let x = if foo {} $0 else if true {} else {}; }
"#,
expect![[r#"
fn foo() fn()
bt u32 u32
kw async
kw const
kw crate::
kw else
kw else if
kw enum
kw extern

View file

@ -398,6 +398,25 @@ fn foo($0) {}
)
}
#[test]
fn completes_in_fn_param_in_nested_pattern() {
check(
r#"
struct Foo { num: u32 }
struct Bar(Foo);
fn foo(Bar($0)) {}
"#,
expect![[r#"
st Bar
st Foo
bn Bar() Bar($1)$0
bn Foo {} Foo { num$1 }$0
kw mut
kw ref
"#]],
)
}
#[test]
fn completes_in_closure_param() {
check(

View file

@ -1510,3 +1510,28 @@ fn foo<T>() {
"#]],
);
}
#[test]
fn fn_generic_params_const_param_snippet() {
check_edit("const", "fn foo<c$0>() {}", "fn foo<const $1: $0>() {}");
check_edit("const", "fn foo<T, c$0>() {}", "fn foo<T, const $1: $0>() {}");
check(
r#"
fn foo<T: $0>() {}
"#,
expect![[r#"
kw crate::
kw self::
"#]],
);
check(
r#"
fn foo<const N: $0>() {}
"#,
expect![[r#"
bt u32 u32
kw crate::
kw self::
"#]],
);
}

View file

@ -429,18 +429,18 @@ trait Tr<T> {
impl Tr<$0
"#,
expect![[r#"
en Enum Enum
ma makro!() macro_rules! makro
en Enum Enum
ma makro!() macro_rules! makro
md module
sp Self dyn Tr<{unknown}>
st Record Record
st S S
st Tuple Tuple
st Unit Unit
sp Self dyn Tr<{unknown}> + 'static
st Record Record
st S S
st Tuple Tuple
st Unit Unit
tt Tr
tt Trait
un Union Union
bt u32 u32
un Union Union
bt u32 u32
kw crate::
kw self::
"#]],

View file

@ -265,10 +265,7 @@ pub fn is_pattern_cond(expr: ast::Expr) -> bool {
ast::Expr::BinExpr(expr)
if expr.op_kind() == Some(ast::BinaryOp::LogicOp(ast::LogicOp::And)) =>
{
expr.lhs()
.map(is_pattern_cond)
.or_else(|| expr.rhs().map(is_pattern_cond))
.unwrap_or(false)
expr.lhs().map_or(false, is_pattern_cond) || expr.rhs().map_or(false, is_pattern_cond)
}
ast::Expr::ParenExpr(expr) => expr.expr().is_some_and(is_pattern_cond),
ast::Expr::LetExpr(_) => true,

View file

@ -473,7 +473,7 @@ mod tests {
frange.range,
"selection is not an expression(yet contained in one)"
);
let name = NameGenerator::default().for_variable(&expr, &sema);
let name = salsa::attach(sema.db, || NameGenerator::default().for_variable(&expr, &sema));
assert_eq!(&name, expected);
}

View file

@ -88,4 +88,16 @@ fn bar<const F: Foo>() {}
"#,
);
}
#[test]
fn fn_traits() {
check_diagnostics(
r#"
//- minicore: fn
struct WithLifetime<'a>(&'a ());
fn foo<T: Fn(WithLifetime) -> WithLifetime>() {}
"#,
);
}
}

View file

@ -441,6 +441,49 @@ fn main() {
)
}
#[test]
fn raw_deref_on_union_field() {
check_diagnostics(
r#"
fn main() {
union U {
a: u8
}
let x = U { a: 3 };
let a = &raw mut x.a;
union U1 {
a: u8
}
let x = U1 { a: 3 };
let a = x.a;
// ^^^ 💡 error: access to union field is unsafe and requires an unsafe function or block
let b = &raw const x.a;
let tmp = Vec::from([1, 2, 3]);
let c = &raw const tmp[x.a];
// ^^^ 💡 error: access to union field is unsafe and requires an unsafe function or block
union URef {
p: &'static mut i32,
}
fn deref_union_field(u: URef) {
// Not an assignment but an access to the union field!
*(u.p) = 13;
// ^^^ 💡 error: access to union field is unsafe and requires an unsafe function or block
}
}
"#,
)
}
#[test]
fn unsafe_expr_as_an_argument_of_a_method_call() {
check_fix(

View file

@ -6,7 +6,7 @@ use ide_db::{
label::Label,
source_change::SourceChange,
};
use syntax::{Edition, TextRange};
use syntax::{AstNode, Edition, TextRange, ToSmolStr};
use crate::{Diagnostic, DiagnosticCode, DiagnosticsContext};
@ -24,15 +24,21 @@ pub(crate) fn unused_variables(
}
let diagnostic_range = ctx.sema.diagnostics_display_range(ast);
// The range for the Actual Name. We don't want to replace the entire declaration. Using the diagnostic range causes issues within in Array Destructuring.
let name_range = d
.local
.primary_source(ctx.sema.db)
let primary_source = d.local.primary_source(ctx.sema.db);
let name_range = primary_source
.name()
.map(|v| v.syntax().original_file_range_rooted(ctx.sema.db))
.filter(|it| {
Some(it.file_id) == ast.file_id.file_id()
&& diagnostic_range.range.contains_range(it.range)
});
let is_shorthand_field = primary_source
.source
.value
.left()
.and_then(|name| name.syntax().parent())
.and_then(syntax::ast::RecordPatField::cast)
.is_some_and(|field| field.colon_token().is_none());
let var_name = d.local.name(ctx.sema.db);
Some(
Diagnostic::new_with_syntax_node_ptr(
@ -48,6 +54,7 @@ pub(crate) fn unused_variables(
it.range,
diagnostic_range,
ast.file_id.is_macro(),
is_shorthand_field,
ctx.edition,
)
})),
@ -60,24 +67,24 @@ fn fixes(
name_range: TextRange,
diagnostic_range: FileRange,
is_in_marco: bool,
is_shorthand_field: bool,
edition: Edition,
) -> Option<Vec<Assist>> {
if is_in_marco {
return None;
}
let name = var_name.display(db, edition).to_smolstr();
let name = name.strip_prefix("r#").unwrap_or(&name);
let new_name = if is_shorthand_field { format!("{name}: _{name}") } else { format!("_{name}") };
Some(vec![Assist {
id: AssistId::quick_fix("unscore_unused_variable_name"),
label: Label::new(format!(
"Rename unused {} to _{}",
var_name.display(db, edition),
var_name.display(db, edition)
)),
label: Label::new(format!("Rename unused {name} to {new_name}")),
group: None,
target: diagnostic_range.range,
source_change: Some(SourceChange::from_text_edit(
diagnostic_range.file_id,
TextEdit::replace(name_range, format!("_{}", var_name.display(db, edition))),
TextEdit::replace(name_range, new_name),
)),
command: None,
}])
@ -220,11 +227,24 @@ struct Foo { f1: i32, f2: i64 }
fn main() {
let f = Foo { f1: 0, f2: 0 };
match f {
Foo { _f1, f2 } => {
Foo { f1: _f1, f2 } => {
_ = f2;
}
}
}
"#,
);
check_fix(
r#"
fn main() {
let $0r#type = 2;
}
"#,
r#"
fn main() {
let _type = 2;
}
"#,
);
}
@ -263,6 +283,46 @@ fn main() {
);
}
#[test]
fn unused_variable_in_record_field() {
check_fix(
r#"
struct S { field : u32 }
fn main() {
let s = S { field : 2 };
let S { field: $0x } = s
}
"#,
r#"
struct S { field : u32 }
fn main() {
let s = S { field : 2 };
let S { field: _x } = s
}
"#,
);
}
#[test]
fn unused_variable_in_shorthand_record_field() {
check_fix(
r#"
struct S { field : u32 }
fn main() {
let s = S { field : 2 };
let S { $0field } = s
}
"#,
r#"
struct S { field : u32 }
fn main() {
let s = S { field : 2 };
let S { field: _field } = s
}
"#,
);
}
// regression test as we used to panic in this scenario
#[test]
fn unknown_struct_pattern_param_type() {

View file

@ -88,7 +88,7 @@ pub(crate) fn goto_type_definition(
ast::Pat(it) => sema.type_of_pat(&it)?.original,
ast::SelfParam(it) => sema.type_of_self(&it)?,
ast::Type(it) => sema.resolve_type(&it)?,
ast::RecordField(it) => sema.to_def(&it)?.ty(db),
ast::RecordField(it) => sema.to_def(&it)?.ty(db).to_type(db),
// can't match on RecordExprField directly as `ast::Expr` will match an iteration too early otherwise
ast::NameRef(it) => {
if let Some(record_field) = ast::RecordExprField::for_name_ref(&it) {

View file

@ -440,7 +440,7 @@ pub(crate) fn hover_for_definition(
Definition::Local(it) => Some(it.ty(db)),
Definition::GenericParam(hir::GenericParam::ConstParam(it)) => Some(it.ty(db)),
Definition::GenericParam(hir::GenericParam::TypeParam(it)) => Some(it.ty(db)),
Definition::Field(field) => Some(field.ty(db)),
Definition::Field(field) => Some(field.ty(db).to_type(db)),
Definition::TupleField(it) => Some(it.ty(db)),
Definition::Function(it) => Some(it.ty(db)),
Definition::Adt(it) => Some(it.ty(db)),
@ -602,7 +602,7 @@ fn goto_type_action_for_def(
let ty = match def {
Definition::Local(it) => Some(it.ty(db)),
Definition::Field(field) => Some(field.ty(db)),
Definition::Field(field) => Some(field.ty(db).to_type(db)),
Definition::TupleField(field) => Some(field.ty(db)),
Definition::Const(it) => Some(it.ty(db)),
Definition::Static(it) => Some(it.ty(db)),

View file

@ -692,14 +692,14 @@ pub(super) fn definition(
}
let drop_info = match def {
Definition::Field(field) => {
DropInfo { drop_glue: field.ty(db).drop_glue(db), has_dtor: None }
DropInfo { drop_glue: field.ty(db).to_type(db).drop_glue(db), has_dtor: None }
}
Definition::Adt(Adt::Struct(strukt)) => {
let struct_drop_glue = strukt.ty_placeholders(db).drop_glue(db);
let mut fields_drop_glue = strukt
.fields(db)
.iter()
.map(|field| field.ty(db).drop_glue(db))
.map(|field| field.ty(db).to_type(db).drop_glue(db))
.max()
.unwrap_or(DropGlue::None);
let has_dtor = match (fields_drop_glue, struct_drop_glue) {
@ -727,7 +727,7 @@ pub(super) fn definition(
variant
.fields(db)
.iter()
.map(|field| field.ty(db).drop_glue(db))
.map(|field| field.ty(db).to_type(db).drop_glue(db))
.max()
.unwrap_or(DropGlue::None)
})
@ -742,7 +742,7 @@ pub(super) fn definition(
let fields_drop_glue = variant
.fields(db)
.iter()
.map(|field| field.ty(db).drop_glue(db))
.map(|field| field.ty(db).to_type(db).drop_glue(db))
.max()
.unwrap_or(DropGlue::None);
DropInfo { drop_glue: fields_drop_glue, has_dtor: None }

View file

@ -4796,6 +4796,48 @@ fn main() {
);
}
#[test]
fn const_generic_negative_literal_macro_expansion() {
// Test that negative literals work correctly in const generics
// when used through macro expansion. This ensures the transcriber
// doesn't wrap negative literals in parentheses, which would create
// invalid syntax like Foo::<(-1)> instead of Foo::<-1>.
check(
r#"
struct Foo<const I: i16> {
pub value: i16,
}
impl<const I: i16> Foo<I> {
pub fn new(value: i16) -> Self {
Self { value }
}
}
macro_rules! create_foo {
($val:expr) => {
Foo::<$val>::new($val)
};
}
fn main() {
let v$0alue = create_foo!(-1);
}
"#,
expect![[r#"
*value*
```rust
let value: Foo<-1>
```
---
size = 2, align = 2, no Drop
"#]],
);
}
#[test]
fn hover_self_param_shows_type() {
check(

View file

@ -380,7 +380,7 @@ fn main() {
let foo = foo4();
// ^^^ &dyn Fn(f64, f64) -> u32
let foo = foo5();
// ^^^ &dyn Fn(&dyn Fn(f64, f64) -> u32, f64) -> u32
// ^^^ &dyn Fn(&(dyn Fn(f64, f64) -> u32 + 'static), f64) -> u32
let foo = foo6();
// ^^^ impl Fn(f64, f64) -> u32
let foo = foo7();

View file

@ -191,7 +191,7 @@ impl Tr for () {
//^ impl Tr for ()
impl dyn Tr {
}
//^ impl dyn Tr
//^ impl dyn Tr + 'static
static S0: () = 0;
static S1: () = {};

View file

@ -526,7 +526,7 @@ fn signature_help_for_tuple_struct_pat(
pat.syntax(),
token,
pat.fields(),
fields.into_iter().map(|it| it.ty(db)),
fields.into_iter().map(|it| it.ty(db).to_type(db)),
display_target,
))
}

View file

@ -278,7 +278,7 @@ impl StaticIndex<'_> {
for token in tokens {
let range = token.text_range();
let node = token.parent().unwrap();
match get_definitions(&sema, token.clone()) {
match salsa::attach(self.db, || get_definitions(&sema, token.clone())) {
Some(it) => {
for i in it {
add_token(i, range, &node);

View file

@ -96,7 +96,7 @@ pre { color: #DCDCCC; background: #3F3F3F; font-size: 22px; padd
<span class="variable">u</span><span class="operator">.</span><span class="field unsafe">field</span><span class="semicolon">;</span>
<span class="operator">&</span><span class="variable">u</span><span class="operator">.</span><span class="field unsafe">field</span><span class="semicolon">;</span>
<span class="operator">&</span><span class="keyword">raw</span> <span class="keyword const">const</span> <span class="variable">u</span><span class="operator">.</span><span class="field unsafe">field</span><span class="semicolon">;</span>
<span class="operator">&</span><span class="keyword">raw</span> <span class="keyword const">const</span> <span class="variable">u</span><span class="operator">.</span><span class="field">field</span><span class="semicolon">;</span>
<span class="comment">// this should be safe!</span>
<span class="keyword">let</span> <span class="union">Union</span> <span class="brace">{</span> <span class="field">field</span><span class="colon">:</span> <span class="punctuation">_</span> <span class="brace">}</span><span class="semicolon">;</span>
<span class="comment">// but not these</span>

Some files were not shown because too many files have changed in this diff Show more