Introduce term search to rust-analyzer

This commit is contained in:
Tavo Annus 2023-12-11 17:04:20 +02:00
parent ddf105b646
commit bb3c7cff60
15 changed files with 2030 additions and 74 deletions

View file

@ -68,7 +68,7 @@ use crate::{
#[allow(unreachable_pub)]
pub use coerce::could_coerce;
#[allow(unreachable_pub)]
pub use unify::could_unify;
pub use unify::{could_unify, could_unify_deeply};
use cast::CastCheck;
pub(crate) use closure::{CaptureKind, CapturedItem, CapturedItemWithoutTy};

View file

@ -82,6 +82,37 @@ pub fn could_unify(
unify(db, env, tys).is_some()
}
/// Check if two types unify, additionally requiring that every trait goal
/// raised by the unification can actually be solved.
///
/// Unlike `could_unify`, which accepts as soon as unification succeeds, this
/// "deep" variant asks the trait solver to fulfill all resulting obligations
/// (see `InferenceTable::unify_deeply`), so it rejects pairs that only unify
/// under unsatisfiable trait bounds.
pub fn could_unify_deeply(
    db: &dyn HirDatabase,
    env: Arc<TraitEnvironment>,
    tys: &Canonical<(Ty, Ty)>,
) -> bool {
    let mut table = InferenceTable::new(db, env);
    // Instantiate the canonical binders with fresh inference variables so the
    // two types can be related inside the table.
    let vars = Substitution::from_iter(
        Interner,
        tys.binders.iter(Interner).map(|it| match &it.kind {
            chalk_ir::VariableKind::Ty(_) => {
                GenericArgData::Ty(table.new_type_var()).intern(Interner)
            }
            // Lifetimes are substituted with type variables here.
            chalk_ir::VariableKind::Lifetime => {
                GenericArgData::Ty(table.new_type_var()).intern(Interner)
            } // FIXME: maybe wrong?
            chalk_ir::VariableKind::Const(ty) => {
                GenericArgData::Const(table.new_const_var(ty.clone())).intern(Interner)
            }
        }),
    );
    let ty1_with_vars = vars.apply(tys.value.0.clone(), Interner);
    let ty2_with_vars = vars.apply(tys.value.1.clone(), Interner);
    // Normalize associated-type projections before attempting unification.
    let ty1_with_vars = table.normalize_associated_types_in(ty1_with_vars);
    let ty2_with_vars = table.normalize_associated_types_in(ty2_with_vars);
    // table.resolve_obligations_as_possible();
    // table.propagate_diverging_flag();
    // let ty1_with_vars = table.resolve_completely(ty1_with_vars);
    // let ty2_with_vars = table.resolve_completely(ty2_with_vars);
    table.unify_deeply(&ty1_with_vars, &ty2_with_vars)
}
pub(crate) fn unify(
db: &dyn HirDatabase,
env: Arc<TraitEnvironment>,
@ -431,6 +462,18 @@ impl<'a> InferenceTable<'a> {
true
}
/// Unify two relatable values (e.g. `Ty`), then require that every trait goal
/// produced by the unification can be fulfilled by the trait solver.
///
/// Returns `false` either when plain unification fails or when some resulting
/// goal cannot be solved.
pub(crate) fn unify_deeply<T: ?Sized + Zip<Interner>>(&mut self, ty1: &T, ty2: &T) -> bool {
    match self.try_unify(ty1, ty2) {
        Err(_) => false,
        Ok(unification) => unification.goals.iter().all(|goal| {
            let canonical_goal = self.canonicalize(goal.clone());
            self.try_fulfill_obligation(&canonical_goal)
        }),
    }
}
/// Unify two relatable values (e.g. `Ty`) and return new trait goals arising from it, so the
/// caller needs to deal with them.
pub(crate) fn try_unify<T: ?Sized + Zip<Interner>>(
@ -661,6 +704,38 @@ impl<'a> InferenceTable<'a> {
}
}
/// Ask the trait solver whether a canonicalized goal is solvable, applying any
/// definite solution back into this inference table.
///
/// Returns `false` only when the solver found no solution at all; ambiguous
/// solutions without definite guidance are optimistically accepted.
fn try_fulfill_obligation(
    &mut self,
    canonicalized: &Canonicalized<InEnvironment<Goal>>,
) -> bool {
    let solution = self.db.trait_solve(
        self.trait_env.krate,
        self.trait_env.block,
        canonicalized.value.clone(),
    );
    // FIXME: Does just returning `solution.is_some()` work?
    match solution {
        Some(Solution::Unique(canonical_subst)) => {
            // Unique solution: unify the inference variables with the solved
            // substitution so later queries see the resolved types.
            canonicalized.apply_solution(
                self,
                Canonical {
                    binders: canonical_subst.binders,
                    // FIXME: handle constraints
                    value: canonical_subst.value.subst,
                },
            );
            true
        }
        Some(Solution::Ambig(Guidance::Definite(substs))) => {
            // Ambiguous but with definite guidance: still apply the guided
            // substitution.
            canonicalized.apply_solution(self, substs);
            true
        }
        // Ambiguous without definite guidance — treated as fulfillable.
        Some(_) => true,
        None => false,
    }
}
pub(crate) fn callable_sig(
&mut self,
ty: &Ty,

View file

@ -79,8 +79,8 @@ pub use builder::{ParamKind, TyBuilder};
pub use chalk_ext::*;
pub use infer::{
closure::{CaptureKind, CapturedItem},
could_coerce, could_unify, Adjust, Adjustment, AutoBorrow, BindingMode, InferenceDiagnostic,
InferenceResult, OverloadedDeref, PointerCast,
could_coerce, could_unify, could_unify_deeply, Adjust, Adjustment, AutoBorrow, BindingMode,
InferenceDiagnostic, InferenceResult, OverloadedDeref, PointerCast,
};
pub use interner::Interner;
pub use lower::{

View file

@ -7,6 +7,7 @@ use std::iter;
use hir_def::{DefWithBodyId, HasModule};
use la_arena::ArenaMap;
use rustc_hash::FxHashMap;
use stdx::never;
use triomphe::Arc;
@ -36,11 +37,27 @@ pub struct MovedOutOfRef {
pub span: MirSpan,
}
/// Record of a value being moved out of a local (detected by `partially_moved`).
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct PartiallyMoved {
    /// Type of the moved value (non-`Copy`, error-free — see `partially_moved`).
    pub ty: Ty,
    /// Location of the move in the MIR body.
    pub span: MirSpan,
    /// The local the value was moved out of.
    pub local: LocalId,
}
/// All recorded borrows of a single local (collected by `borrow_regions`).
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct BorrowRegion {
    /// The borrowed local.
    pub local: LocalId,
    /// Borrow kind; per `borrow_regions`, this is the kind of the first borrow
    /// encountered for the local.
    pub kind: BorrowKind,
    /// Every location where the local is borrowed.
    pub places: Vec<MirSpan>,
}
/// Aggregated borrow-check information for one MIR body.
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct BorrowckResult {
    /// The analyzed MIR body.
    pub mir_body: Arc<MirBody>,
    /// Why each local needs (or doesn't need) `mut`.
    pub mutability_of_locals: ArenaMap<LocalId, MutabilityReason>,
    /// Moves out of references detected in the body.
    pub moved_out_of_ref: Vec<MovedOutOfRef>,
    /// Partial moves out of locals detected in the body.
    pub partially_moved: Vec<PartiallyMoved>,
    /// Borrow locations grouped per local.
    pub borrow_regions: Vec<BorrowRegion>,
}
fn all_mir_bodies(
@ -80,6 +97,8 @@ pub fn borrowck_query(
res.push(BorrowckResult {
mutability_of_locals: mutability_of_locals(db, &body),
moved_out_of_ref: moved_out_of_ref(db, &body),
partially_moved: partially_moved(db, &body),
borrow_regions: borrow_regions(db, &body),
mir_body: body,
});
})?;
@ -188,6 +207,149 @@ fn moved_out_of_ref(db: &dyn HirDatabase, body: &MirBody) -> Vec<MovedOutOfRef>
result
}
/// Walk every operand in `body` and record moves/copies of non-`Copy` values,
/// i.e. places where the source local is left partially (or fully) moved.
///
/// Operands whose type contains an error are skipped to avoid false positives.
fn partially_moved(db: &dyn HirDatabase, body: &MirBody) -> Vec<PartiallyMoved> {
    let mut result = vec![];
    // Shared handler for any operand encountered in a statement or terminator.
    let mut for_operand = |op: &Operand, span: MirSpan| match op {
        Operand::Copy(p) | Operand::Move(p) => {
            // Compute the type of the projected place, starting from the
            // local's type and applying each projection in turn.
            let mut ty: Ty = body.locals[p.local].ty.clone();
            for proj in p.projection.lookup(&body.projection_store) {
                ty = proj.projected_ty(
                    ty,
                    db,
                    // Resolve the type of a captured closure field, substituted
                    // with the closure's parent generic arguments.
                    |c, subst, f| {
                        let (def, _) = db.lookup_intern_closure(c.into());
                        let infer = db.infer(def);
                        let (captures, _) = infer.closure_info(&c);
                        let parent_subst = ClosureSubst(subst).parent_subst();
                        captures
                            .get(f)
                            .expect("broken closure field")
                            .ty
                            .clone()
                            .substitute(Interner, parent_subst)
                    },
                    body.owner.module(db.upcast()).krate(),
                );
            }
            // Only non-Copy, error-free types constitute a (partial) move.
            if !ty.clone().is_copy(db, body.owner)
                && !ty.data(Interner).flags.intersects(TypeFlags::HAS_ERROR)
            {
                result.push(PartiallyMoved { span, ty, local: p.local });
            }
        }
        // Constants and statics are never moved out of.
        Operand::Constant(_) | Operand::Static(_) => (),
    };
    for (_, block) in body.basic_blocks.iter() {
        db.unwind_if_cancelled();
        for statement in &block.statements {
            match &statement.kind {
                StatementKind::Assign(_, r) => match r {
                    Rvalue::ShallowInitBoxWithAlloc(_) => (),
                    // Single-operand rvalues.
                    Rvalue::ShallowInitBox(o, _)
                    | Rvalue::UnaryOp(_, o)
                    | Rvalue::Cast(_, o, _)
                    | Rvalue::Repeat(o, _)
                    | Rvalue::Use(o) => for_operand(o, statement.span),
                    // Place-based rvalues don't move their operand.
                    Rvalue::CopyForDeref(_)
                    | Rvalue::Discriminant(_)
                    | Rvalue::Len(_)
                    | Rvalue::Ref(_, _) => (),
                    Rvalue::CheckedBinaryOp(_, o1, o2) => {
                        for_operand(o1, statement.span);
                        for_operand(o2, statement.span);
                    }
                    Rvalue::Aggregate(_, ops) => {
                        for op in ops.iter() {
                            for_operand(op, statement.span);
                        }
                    }
                },
                StatementKind::FakeRead(_)
                | StatementKind::Deinit(_)
                | StatementKind::StorageLive(_)
                | StatementKind::StorageDead(_)
                | StatementKind::Nop => (),
            }
        }
        // Terminators can also consume operands.
        match &block.terminator {
            Some(terminator) => match &terminator.kind {
                TerminatorKind::SwitchInt { discr, .. } => for_operand(discr, terminator.span),
                TerminatorKind::FalseEdge { .. }
                | TerminatorKind::FalseUnwind { .. }
                | TerminatorKind::Goto { .. }
                | TerminatorKind::UnwindResume
                | TerminatorKind::CoroutineDrop
                | TerminatorKind::Abort
                | TerminatorKind::Return
                | TerminatorKind::Unreachable
                | TerminatorKind::Drop { .. } => (),
                TerminatorKind::DropAndReplace { value, .. } => {
                    for_operand(value, terminator.span);
                }
                TerminatorKind::Call { func, args, .. } => {
                    for_operand(func, terminator.span);
                    args.iter().for_each(|it| for_operand(it, terminator.span));
                }
                TerminatorKind::Assert { cond, .. } => {
                    for_operand(cond, terminator.span);
                }
                TerminatorKind::Yield { value, .. } => {
                    for_operand(value, terminator.span);
                }
            },
            None => (),
        }
    }
    result.shrink_to_fit();
    result
}
/// Collect, per local, every place in `body` where it is borrowed
/// (`Rvalue::Ref` assignments).
///
/// The recorded `kind` is that of the first borrow seen for the local;
/// subsequent borrows only append their span.
fn borrow_regions(db: &dyn HirDatabase, body: &MirBody) -> Vec<BorrowRegion> {
    let mut borrows = FxHashMap::default();
    for (_, block) in body.basic_blocks.iter() {
        db.unwind_if_cancelled();
        for statement in &block.statements {
            match &statement.kind {
                StatementKind::Assign(_, r) => match r {
                    Rvalue::Ref(kind, p) => {
                        borrows
                            .entry(p.local)
                            .and_modify(|it: &mut BorrowRegion| {
                                it.places.push(statement.span);
                            })
                            .or_insert_with(|| BorrowRegion {
                                local: p.local,
                                kind: *kind,
                                places: vec![statement.span],
                            });
                    }
                    _ => (),
                },
                _ => (),
            }
        }
        // NOTE(review): borrows arising from these terminators are not
        // recorded here — confirm whether that is intentional.
        match &block.terminator {
            Some(terminator) => match &terminator.kind {
                TerminatorKind::FalseEdge { .. }
                | TerminatorKind::FalseUnwind { .. }
                | TerminatorKind::Goto { .. }
                | TerminatorKind::UnwindResume
                | TerminatorKind::CoroutineDrop
                | TerminatorKind::Abort
                | TerminatorKind::Return
                | TerminatorKind::Unreachable
                | TerminatorKind::Drop { .. } => (),
                TerminatorKind::DropAndReplace { .. } => {}
                TerminatorKind::Call { .. } => {}
                _ => (),
            },
            None => (),
        }
    }
    borrows.into_values().collect()
}
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
enum ProjectionCase {
/// Projection is a local

View file

@ -1246,7 +1246,7 @@ impl<'ctx> MirLowerCtx<'ctx> {
self.push_assignment(current, place, op.into(), expr_id.into());
Ok(Some(current))
}
Expr::Underscore => not_supported!("underscore"),
Expr::Underscore => Ok(Some(current)),
}
}

View file

@ -31,6 +31,7 @@ mod has_source;
pub mod db;
pub mod diagnostics;
pub mod symbols;
pub mod term_search;
mod display;
@ -1084,6 +1085,26 @@ impl Field {
Type::new(db, var_id, ty)
}
/// Type of this field, with the parent ADT's generic parameters filled from
/// the supplied iterator (in declaration order).
///
/// Panics if `generics` yields fewer types than the ADT has parameters.
pub fn ty_with_generics(
    &self,
    db: &dyn HirDatabase,
    mut generics: impl Iterator<Item = Type>,
) -> Type {
    // The generic-parameter owner is the ADT the field belongs to.
    let adt_id: AdtId = match self.parent {
        VariantDef::Struct(it) => it.id.into(),
        VariantDef::Union(it) => it.id.into(),
        VariantDef::Variant(it) => it.parent_enum(db).id.into(),
    };
    // Drain one caller-supplied type per generic parameter.
    let substitution = TyBuilder::subst_for_def(db, adt_id, None)
        .fill(|_| {
            let next_ty = generics.next().unwrap().ty.clone();
            GenericArg::new(Interner, GenericArgData::Ty(next_ty))
        })
        .build();
    let variant_id = self.parent.into();
    let field_ty =
        db.field_types(variant_id)[self.id].clone().substitute(Interner, &substitution);
    Type::new(db, variant_id, field_ty)
}
pub fn layout(&self, db: &dyn HirDatabase) -> Result<Layout, LayoutError> {
db.layout_of_ty(
self.ty(db).ty,
@ -1137,6 +1158,20 @@ impl Struct {
Type::from_def(db, self.id)
}
/// Type of this struct, with its generic parameters filled from the supplied
/// iterator (in declaration order).
///
/// Panics if `generics` yields fewer types than the struct has parameters.
pub fn ty_with_generics(
    self,
    db: &dyn HirDatabase,
    mut generics: impl Iterator<Item = Type>,
) -> Type {
    // Drain one caller-supplied type per generic parameter.
    let substitution = TyBuilder::subst_for_def(db, self.id, None)
        .fill(|_| {
            let next_ty = generics.next().unwrap().ty.clone();
            GenericArg::new(Interner, GenericArgData::Ty(next_ty))
        })
        .build();
    let struct_ty = db.ty(self.id.into()).substitute(Interner, &substitution);
    Type::new(db, self.id, struct_ty)
}
pub fn constructor_ty(self, db: &dyn HirDatabase) -> Type {
Type::from_value_def(db, self.id)
}
@ -1228,6 +1263,20 @@ impl Enum {
Type::from_def(db, self.id)
}
/// Type of this enum, with its generic parameters filled from the supplied
/// iterator (in declaration order).
///
/// Panics if `generics` yields fewer types than the enum has parameters.
pub fn ty_with_generics(
    &self,
    db: &dyn HirDatabase,
    mut generics: impl Iterator<Item = Type>,
) -> Type {
    // Drain one caller-supplied type per generic parameter.
    let substitution = TyBuilder::subst_for_def(db, self.id, None)
        .fill(|_| {
            let next_ty = generics.next().unwrap().ty.clone();
            GenericArg::new(Interner, GenericArgData::Ty(next_ty))
        })
        .build();
    let enum_ty = db.ty(self.id.into()).substitute(Interner, &substitution);
    Type::new(db, self.id, enum_ty)
}
/// The type of the enum variant bodies.
pub fn variant_body_ty(self, db: &dyn HirDatabase) -> Type {
Type::new_for_crate(
@ -1789,6 +1838,39 @@ impl Function {
Type::new_with_resolver_inner(db, &resolver, ty)
}
/// Return type of this function with generic parameters substituted from the
/// supplied iterator.
///
/// The parent impl/trait generics (if any) are drained from the iterator
/// first, then the function's own generics. Panics if the iterator is too
/// short.
pub fn ret_type_with_generics(
    self,
    db: &dyn HirDatabase,
    mut generics: impl Iterator<Item = Type>,
) -> Type {
    let resolver = self.id.resolver(db.upcast());
    let parent_id: Option<GenericDefId> = match self.id.lookup(db.upcast()).container {
        ItemContainerId::ImplId(it) => Some(it.into()),
        ItemContainerId::TraitId(it) => Some(it.into()),
        ItemContainerId::ModuleId(_) | ItemContainerId::ExternBlockId(_) => None,
    };
    // Both substitutions pull from the same iterator, parent first.
    let mut next_arg = || {
        GenericArg::new(Interner, GenericArgData::Ty(generics.next().unwrap().ty.clone()))
    };
    let parent_substs =
        parent_id.map(|id| TyBuilder::subst_for_def(db, id, None).fill(|_| next_arg()).build());
    let substs =
        TyBuilder::subst_for_def(db, self.id, parent_substs).fill(|_| next_arg()).build();
    let callable_sig = db.callable_item_signature(self.id.into()).substitute(Interner, &substs);
    let ret_ty = callable_sig.ret().clone();
    Type::new_with_resolver_inner(db, &resolver, ret_ty)
}
pub fn async_ret_type(self, db: &dyn HirDatabase) -> Option<Type> {
if !self.is_async(db) {
return None;
@ -1855,6 +1937,47 @@ impl Function {
.collect()
}
/// Parameters of this function (excluding `self`, if present) with generic
/// parameters substituted from the supplied iterator.
///
/// Parent impl/trait generics are drained first, then the function's own.
/// The `idx` of each returned `Param` is its absolute position in the
/// signature (i.e. indices start at 1 for methods). Panics if the iterator
/// is too short.
pub fn params_without_self_with_generics(
    self,
    db: &dyn HirDatabase,
    mut generics: impl Iterator<Item = Type>,
) -> Vec<Param> {
    let environment = db.trait_environment(self.id.into());
    let parent_id: Option<GenericDefId> = match self.id.lookup(db.upcast()).container {
        ItemContainerId::ImplId(it) => Some(it.into()),
        ItemContainerId::TraitId(it) => Some(it.into()),
        ItemContainerId::ModuleId(_) | ItemContainerId::ExternBlockId(_) => None,
    };
    // Both substitutions pull from the same iterator, parent first.
    let mut next_arg = || {
        GenericArg::new(Interner, GenericArgData::Ty(generics.next().unwrap().ty.clone()))
    };
    let parent_substs =
        parent_id.map(|id| TyBuilder::subst_for_def(db, id, None).fill(|_| next_arg()).build());
    let substs =
        TyBuilder::subst_for_def(db, self.id, parent_substs).fill(|_| next_arg()).build();
    let callable_sig = db.callable_item_signature(self.id.into()).substitute(Interner, &substs);
    // Skip the receiver but keep absolute parameter indices.
    let skip = usize::from(db.function_data(self.id).has_self_param());
    callable_sig
        .params()
        .iter()
        .enumerate()
        .skip(skip)
        .map(|(idx, ty)| {
            let param_ty = Type { env: environment.clone(), ty: ty.clone() };
            Param { func: self, ty: param_ty, idx }
        })
        .collect()
}
pub fn is_const(self, db: &dyn HirDatabase) -> bool {
db.function_data(self.id).has_const_kw()
}
@ -1889,6 +2012,11 @@ impl Function {
db.function_data(self.id).attrs.is_bench()
}
/// Is this function marked as unstable (e.g. carrying an unstable-feature
/// attribute)?
pub fn is_unstable(self, db: &dyn HirDatabase) -> bool {
    // Delegates to the attribute set collected in `FunctionData`.
    db.function_data(self.id).attrs.is_unstable()
}
/// Whether calling this function requires an `unsafe` context.
pub fn is_unsafe_to_call(self, db: &dyn HirDatabase) -> bool {
    hir_ty::is_fn_unsafe_to_call(db, self.id)
}
@ -2052,6 +2180,36 @@ impl SelfParam {
let ty = callable_sig.params()[0].clone();
Type { env: environment, ty }
}
/// Type of the `self` parameter, with the enclosing impl/trait generics and
/// the function's own generics filled from the supplied iterator.
///
/// Parent generics are drained first, then the function's. Panics if the
/// iterator yields too few types.
pub fn ty_with_generics(
    &self,
    db: &dyn HirDatabase,
    mut generics: impl Iterator<Item = Type>,
) -> Type {
    let parent_id: GenericDefId = match self.func.lookup(db.upcast()).container {
        ItemContainerId::ImplId(it) => it.into(),
        ItemContainerId::TraitId(it) => it.into(),
        ItemContainerId::ModuleId(_) | ItemContainerId::ExternBlockId(_) => {
            // A function with a `self` parameter is always an associated item,
            // so it must live in an impl or trait.
            unreachable!("functions with a self parameter are always inside an impl or trait")
        }
    };
    let parent_substs = TyBuilder::subst_for_def(db, parent_id, None)
        .fill(|_| {
            GenericArg::new(Interner, GenericArgData::Ty(generics.next().unwrap().ty.clone()))
        })
        .build();
    let substs = TyBuilder::subst_for_def(db, self.func, Some(parent_substs))
        .fill(|_| {
            GenericArg::new(Interner, GenericArgData::Ty(generics.next().unwrap().ty.clone()))
        })
        .build();
    let callable_sig =
        db.callable_item_signature(self.func.into()).substitute(Interner, &substs);
    let environment = db.trait_environment(self.func.into());
    // `self` is always the first parameter of the signature.
    let ty = callable_sig.params()[0].clone();
    Type { env: environment, ty }
}
}
impl HasVisibility for Function {
@ -3285,13 +3443,8 @@ impl Impl {
.filter(filter),
)
});
for id in def_crates
.iter()
.flat_map(|&id| Crate { id }.transitive_reverse_dependencies(db))
.map(|Crate { id }| id)
.chain(def_crates.iter().copied())
.unique()
{
for Crate { id } in Crate::all(db) {
all.extend(
db.trait_impls_in_crate(id)
.for_self_ty_without_blanket_impls(fp)
@ -3520,7 +3673,7 @@ pub enum CaptureKind {
Move,
}
#[derive(Clone, PartialEq, Eq, Debug)]
#[derive(Clone, PartialEq, Eq, Debug, Hash)]
pub struct Type {
env: Arc<TraitEnvironment>,
ty: Ty,
@ -4374,6 +4527,11 @@ impl Type {
hir_ty::could_unify(db, self.env.clone(), &tys)
}
/// Check if this type unifies with `other`, additionally requiring every
/// trait goal arising from the unification to be solvable
/// (see `hir_ty::could_unify_deeply`).
pub fn could_unify_with_deeply(&self, db: &dyn HirDatabase, other: &Type) -> bool {
    // Error types are replaced with fresh variables so they unify with anything.
    let tys = hir_ty::replace_errors_with_variables(&(self.ty.clone(), other.ty.clone()));
    hir_ty::could_unify_deeply(db, self.env.clone(), &tys)
}
pub fn could_coerce_to(&self, db: &dyn HirDatabase, to: &Type) -> bool {
let tys = hir_ty::replace_errors_with_variables(&(self.ty.clone(), to.ty.clone()));
hir_ty::could_coerce(db, self.env.clone(), &tys)

View file

@ -0,0 +1,162 @@
//! Term search
use hir_def::type_ref::Mutability;
use hir_ty::db::HirDatabase;
use itertools::Itertools;
use rustc_hash::{FxHashMap, FxHashSet};
use crate::{ModuleDef, ScopeDef, Semantics, SemanticsScope, Type};
pub mod type_tree;
pub use type_tree::TypeTree;
mod tactics;
/// Upper bound on alternative terms kept per type / per constructor expansion.
const MAX_VARIATIONS: usize = 10;
/// Key identifying which tactic a "newly seen types" queue belongs to, so each
/// consumer can drain its own backlog independently.
#[derive(Debug, Hash, PartialEq, Eq)]
enum NewTypesKey {
    ImplMethod,
    StructProjection,
}
/// Lookup table for term search
#[derive(Default, Debug)]
struct LookupTable {
    /// All discovered terms, grouped by their type.
    data: FxHashMap<Type, FxHashSet<TypeTree>>,
    /// Types discovered since the last drain, queued per consuming tactic.
    new_types: FxHashMap<NewTypesKey, Vec<Type>>,
    /// ScopeDefs that should not be attempted in further rounds.
    exhausted_scopedefs: FxHashSet<ScopeDef>,
    /// ScopeDefs that produced a term during the current round.
    round_scopedef_hits: FxHashSet<ScopeDef>,
    /// Number of rounds each ScopeDef has produced terms in.
    scopedef_hits: FxHashMap<ScopeDef, u32>,
}
impl LookupTable {
    /// Create a lookup table with the `new_types` queues pre-registered.
    fn new() -> Self {
        let mut res: Self = Default::default();
        res.new_types.insert(NewTypesKey::ImplMethod, Vec::new());
        res.new_types.insert(NewTypesKey::StructProjection, Vec::new());
        res
    }

    /// Return the stored terms for the first type that (deeply) unifies with `ty`.
    fn find(&self, db: &dyn HirDatabase, ty: &Type) -> Option<Vec<TypeTree>> {
        self.data
            .iter()
            .find(|(t, _)| t.could_unify_with_deeply(db, ty))
            .map(|(_, tts)| tts.iter().cloned().collect())
    }

    /// Like `find`, but if no direct match exists, also try stored types whose
    /// shared reference (`&T`) unifies with `ty`, wrapping the terms in
    /// `TypeTree::Reference`.
    fn find_autoref(&self, db: &dyn HirDatabase, ty: &Type) -> Option<Vec<TypeTree>> {
        self.data
            .iter()
            .find(|(t, _)| t.could_unify_with_deeply(db, ty))
            .map(|(_, tts)| tts.iter().cloned().collect())
            .or_else(|| {
                self.data
                    .iter()
                    .find(|(t, _)| {
                        Type::reference(t, Mutability::Shared).could_unify_with_deeply(db, &ty)
                    })
                    .map(|(_, tts)| {
                        tts.iter().map(|tt| TypeTree::Reference(Box::new(tt.clone()))).collect()
                    })
            })
    }

    /// Store terms for `ty`, taking at most `MAX_VARIATIONS` per call.
    /// A type seen for the first time is also pushed into every `new_types` queue.
    fn insert(&mut self, ty: Type, trees: impl Iterator<Item = TypeTree>) {
        match self.data.get_mut(&ty) {
            // NOTE(review): repeated inserts can grow a bucket beyond
            // MAX_VARIATIONS in total — confirm whether that is intended.
            Some(it) => it.extend(trees.take(MAX_VARIATIONS)),
            None => {
                self.data.insert(ty.clone(), trees.take(MAX_VARIATIONS).collect());
                for it in self.new_types.values_mut() {
                    it.push(ty.clone());
                }
            }
        }
    }

    /// Iterate over every type currently known to the table.
    fn iter_types(&self) -> impl Iterator<Item = Type> + '_ {
        self.data.keys().cloned()
    }

    /// Drain the types queued for `key` since the previous call
    /// (empty if the key was never registered).
    fn new_types(&mut self, key: NewTypesKey) -> Vec<Type> {
        match self.new_types.get_mut(&key) {
            Some(it) => std::mem::take(it),
            None => Vec::new(),
        }
    }

    /// Permanently exclude `def` from further search rounds.
    fn mark_exhausted(&mut self, def: ScopeDef) {
        self.exhausted_scopedefs.insert(def);
    }

    /// Record that `def` produced a term during the current round.
    fn mark_fulfilled(&mut self, def: ScopeDef) {
        self.round_scopedef_hits.insert(def);
    }

    /// Begin a new search round: defs that have already produced terms in more
    /// than `MAX_ROUNDS_AFTER_HIT` rounds are retired to bound re-exploration.
    fn new_round(&mut self) {
        for def in &self.round_scopedef_hits {
            let hits = self.scopedef_hits.entry(*def).and_modify(|n| *n += 1).or_insert(0);
            const MAX_ROUNDS_AFTER_HIT: u32 = 2;
            if *hits > MAX_ROUNDS_AFTER_HIT {
                self.exhausted_scopedefs.insert(*def);
            }
        }
        self.round_scopedef_hits.clear();
    }

    /// Defs that should no longer be attempted by any tactic.
    fn exhausted_scopedefs(&self) -> &FxHashSet<ScopeDef> {
        &self.exhausted_scopedefs
    }
}
/// # Term search
///
/// Search for terms (expressions) that unify with the `goal` type.
///
/// # Arguments
/// * `sema` - Semantics for the program
/// * `scope` - Semantic scope, captures context for the term search
/// * `goal` - Target / expected output type
pub fn term_search<DB: HirDatabase>(
    sema: &Semantics<'_, DB>,
    scope: &SemanticsScope<'_>,
    goal: &Type,
) -> Vec<TypeTree> {
    // Gather everything visible at the search location (plus the module itself,
    // so module-qualified paths can be considered).
    let mut defs = FxHashSet::default();
    defs.insert(ScopeDef::ModuleDef(ModuleDef::Module(scope.module())));

    scope.process_all_names(&mut |_, def| {
        defs.insert(def);
    });
    let module = scope.module();

    let mut lookup = LookupTable::new();

    // Try trivial tactic first, also populates lookup table
    let mut solutions: Vec<TypeTree> =
        tactics::trivial(sema.db, &defs, &mut lookup, goal).collect();
    solutions.extend(tactics::famous_types(sema.db, &module, &defs, &mut lookup, goal));

    let mut solution_found = !solutions.is_empty();

    // Iterative deepening, bounded to 5 rounds. The break tests the flag from
    // the PREVIOUS round, so one extra round of tactics runs after the first
    // success — presumably to collect more alternatives; confirm if changing.
    for _ in 0..5 {
        lookup.new_round();

        solutions.extend(tactics::type_constructor(sema.db, &module, &defs, &mut lookup, goal));
        solutions.extend(tactics::free_function(sema.db, &module, &defs, &mut lookup, goal));
        solutions.extend(tactics::impl_method(sema.db, &module, &defs, &mut lookup, goal));
        solutions.extend(tactics::struct_projection(sema.db, &module, &defs, &mut lookup, goal));

        if solution_found {
            break;
        }

        solution_found = !solutions.is_empty();

        // Stop considering defs the lookup table has declared exhausted.
        for def in lookup.exhausted_scopedefs() {
            defs.remove(def);
        }
    }

    // Deduplicate: different rounds/tactics can produce the same term.
    solutions.into_iter().unique().collect()
}

View file

@ -0,0 +1,553 @@
//! Tactics for term search
use hir_def::generics::TypeOrConstParamData;
use hir_ty::db::HirDatabase;
use hir_ty::mir::BorrowKind;
use hir_ty::TyBuilder;
use itertools::Itertools;
use rustc_hash::FxHashSet;
use crate::{
Adt, AssocItem, Enum, GenericParam, HasVisibility, Impl, Module, ModuleDef, ScopeDef, Type,
Variant,
};
use crate::term_search::TypeTree;
use super::{LookupTable, NewTypesKey, MAX_VARIATIONS};
/// Trivial tactic
///
/// Attempts to fulfill the goal by trying items in scope (constants, statics,
/// const generic params, and usable locals) as-is.
/// Also works as a starting point to move all items in scope to lookup table.
pub(super) fn trivial<'a>(
    db: &'a dyn HirDatabase,
    defs: &'a FxHashSet<ScopeDef>,
    lookup: &'a mut LookupTable,
    goal: &'a Type,
) -> impl Iterator<Item = TypeTree> + 'a {
    defs.iter().filter_map(|def| {
        let tt = match def {
            ScopeDef::ModuleDef(ModuleDef::Const(it)) => Some(TypeTree::Const(*it)),
            ScopeDef::ModuleDef(ModuleDef::Static(it)) => Some(TypeTree::Static(*it)),
            ScopeDef::GenericParam(GenericParam::ConstParam(it)) => Some(TypeTree::ConstParam(*it)),
            ScopeDef::Local(it) => {
                // Skip locals the borrow checker would reject here: ones that
                // were (partially) moved out of, or that are borrowed
                // non-shared somewhere in the body.
                let borrowck = db.borrowck(it.parent).ok()?;

                let invalid = borrowck.iter().any(|b| {
                    b.partially_moved.iter().any(|moved| {
                        Some(&moved.local) == b.mir_body.binding_locals.get(it.binding_id)
                    }) || b.borrow_regions.iter().any(|region| {
                        // Shared borrows are fine
                        Some(&region.local) == b.mir_body.binding_locals.get(it.binding_id)
                            && region.kind != BorrowKind::Shared
                    })
                });

                if invalid {
                    return None;
                }

                Some(TypeTree::Local(*it))
            }
            _ => None,
        }?;

        // This tactic fully handles each def, so never retry it.
        lookup.mark_exhausted(*def);

        // Record the term even when it doesn't match the goal — later tactics
        // use it as a building block.
        let ty = tt.ty(db);
        lookup.insert(ty.clone(), std::iter::once(tt.clone()));

        // Don't suggest local references as they are not valid for return
        if matches!(tt, TypeTree::Local(_)) && ty.is_reference() {
            return None;
        }

        ty.could_unify_with_deeply(db, goal).then(|| tt)
    })
}
/// Type constructor tactic
///
/// Attempts different type constructors for enums and structs in scope,
/// trying lookup-table types as their generic arguments.
///
/// # Arguments
/// * `db` - HIR database
/// * `module` - Module where the term search target location
/// * `defs` - Set of items in scope at term search target location
/// * `lookup` - Lookup table for types
/// * `goal` - Term search target type
pub(super) fn type_constructor<'a>(
    db: &'a dyn HirDatabase,
    module: &'a Module,
    defs: &'a FxHashSet<ScopeDef>,
    lookup: &'a mut LookupTable,
    goal: &'a Type,
) -> impl Iterator<Item = TypeTree> + 'a {
    // Build constructor trees for one enum variant, trying permutations of
    // lookup-table types as the enum's generic arguments.
    fn variant_helper(
        db: &dyn HirDatabase,
        lookup: &mut LookupTable,
        parent_enum: Enum,
        variant: Variant,
        goal: &Type,
    ) -> Vec<(Type, Vec<TypeTree>)> {
        let generics = db.generic_params(variant.parent_enum(db).id.into());

        // Ignore enums with const generics
        if generics
            .type_or_consts
            .values()
            .any(|it| matches!(it, TypeOrConstParamData::ConstParamData(_)))
        {
            return Vec::new();
        }

        // We currently do not check lifetime bounds so ignore all types that have something to do
        // with them
        if !generics.lifetimes.is_empty() {
            return Vec::new();
        }

        let generic_params = lookup
            .iter_types()
            .collect::<Vec<_>>() // Force take ownership
            .into_iter()
            .permutations(generics.type_or_consts.len());

        generic_params
            .filter_map(|generics| {
                let enum_ty = parent_enum.ty_with_generics(db, generics.iter().cloned());

                // Only pursue generic instantiations that can match the goal.
                if !generics.is_empty() && !enum_ty.could_unify_with_deeply(db, goal) {
                    return None;
                }

                // Early exit if some param cannot be filled from lookup
                let param_trees: Vec<Vec<TypeTree>> = variant
                    .fields(db)
                    .into_iter()
                    .map(|field| {
                        lookup.find(db, &field.ty_with_generics(db, generics.iter().cloned()))
                    })
                    .collect::<Option<_>>()?;

                // Note that we need special case for 0 param constructors because of multi cartesian
                // product
                let variant_trees: Vec<TypeTree> = if param_trees.is_empty() {
                    vec![TypeTree::Variant {
                        variant,
                        generics: generics.clone(),
                        params: Vec::new(),
                    }]
                } else {
                    param_trees
                        .into_iter()
                        .multi_cartesian_product()
                        .take(MAX_VARIATIONS)
                        .map(|params| TypeTree::Variant {
                            variant,
                            generics: generics.clone(),
                            params,
                        })
                        .collect()
                };
                lookup.insert(enum_ty.clone(), variant_trees.iter().cloned());
                Some((enum_ty, variant_trees))
            })
            .collect()
    }
    defs.iter()
        .filter_map(|def| match def {
            ScopeDef::ModuleDef(ModuleDef::Variant(it)) => {
                let variant_trees = variant_helper(db, lookup, it.parent_enum(db), *it, goal);
                if variant_trees.is_empty() {
                    return None;
                }
                lookup.mark_fulfilled(ScopeDef::ModuleDef(ModuleDef::Variant(*it)));
                Some(variant_trees)
            }
            ScopeDef::ModuleDef(ModuleDef::Adt(Adt::Enum(enum_))) => {
                // Try every variant of the enum.
                let trees: Vec<(Type, Vec<TypeTree>)> = enum_
                    .variants(db)
                    .into_iter()
                    .flat_map(|it| variant_helper(db, lookup, *enum_, it, goal))
                    .collect();

                if !trees.is_empty() {
                    lookup.mark_fulfilled(ScopeDef::ModuleDef(ModuleDef::Adt(Adt::Enum(*enum_))));
                }

                Some(trees)
            }
            ScopeDef::ModuleDef(ModuleDef::Adt(Adt::Struct(it))) => {
                let generics = db.generic_params(it.id.into());

                // Ignore structs with const generics
                if generics
                    .type_or_consts
                    .values()
                    .any(|it| matches!(it, TypeOrConstParamData::ConstParamData(_)))
                {
                    return None;
                }

                // We currently do not check lifetime bounds so ignore all types that have something to do
                // with them
                if !generics.lifetimes.is_empty() {
                    return None;
                }

                let generic_params = lookup
                    .iter_types()
                    .collect::<Vec<_>>() // Force take ownership
                    .into_iter()
                    .permutations(generics.type_or_consts.len());

                let trees = generic_params
                    .filter_map(|generics| {
                        let struct_ty = it.ty_with_generics(db, generics.iter().cloned());
                        if !generics.is_empty() && !struct_ty.could_unify_with_deeply(db, goal) {
                            return None;
                        }
                        let fields = it.fields(db);
                        // Check if all fields are visible, otherwise we cannot fill them
                        if fields.iter().any(|it| !it.is_visible_from(db, *module)) {
                            return None;
                        }

                        // Early exit if some param cannot be filled from lookup
                        // NOTE(review): unlike `variant_helper`, the plain field
                        // type is used here (no generic substitution) — confirm
                        // generic structs are intended to be handled this way.
                        let param_trees: Vec<Vec<TypeTree>> = fields
                            .into_iter()
                            .map(|field| lookup.find(db, &field.ty(db)))
                            .collect::<Option<_>>()?;

                        // Note that we need special case for 0 param constructors because of multi cartesian
                        // product
                        let struct_trees: Vec<TypeTree> = if param_trees.is_empty() {
                            vec![TypeTree::Struct { strukt: *it, generics, params: Vec::new() }]
                        } else {
                            param_trees
                                .into_iter()
                                .multi_cartesian_product()
                                .take(MAX_VARIATIONS)
                                .map(|params| TypeTree::Struct {
                                    strukt: *it,
                                    generics: generics.clone(),
                                    params,
                                })
                                .collect()
                        };
                        lookup
                            .mark_fulfilled(ScopeDef::ModuleDef(ModuleDef::Adt(Adt::Struct(*it))));
                        lookup.insert(struct_ty.clone(), struct_trees.iter().cloned());
                        Some((struct_ty, struct_trees))
                    })
                    .collect();
                Some(trees)
            }
            _ => None,
        })
        .flatten()
        .filter_map(|(ty, trees)| ty.could_unify_with_deeply(db, goal).then(|| trees))
        .flatten()
}
/// Free function tactic
///
/// Attempts to call different functions in scope with parameters from lookup table
///
/// # Arguments
/// * `db` - HIR database
/// * `module` - Module where the term search target location
/// * `defs` - Set of items in scope at term search target location
/// * `lookup` - Lookup table for types
/// * `goal` - Term search target type
pub(super) fn free_function<'a>(
    db: &'a dyn HirDatabase,
    module: &'a Module,
    defs: &'a FxHashSet<ScopeDef>,
    lookup: &'a mut LookupTable,
    goal: &'a Type,
) -> impl Iterator<Item = TypeTree> + 'a {
    defs.iter()
        .filter_map(|def| match def {
            ScopeDef::ModuleDef(ModuleDef::Function(it)) => {
                let generics = db.generic_params(it.id.into());

                // Skip functions that require const generics
                if generics
                    .type_or_consts
                    .values()
                    .any(|it| matches!(it, TypeOrConstParamData::ConstParamData(_)))
                {
                    return None;
                }

                // Ignore all generic functions for now as they kill the performance.
                // Ignore lifetimes as we do not check them.
                if !generics.type_or_consts.is_empty() || !generics.lifetimes.is_empty() {
                    return None;
                }
                // With generics ruled out above this is a single empty
                // permutation, i.e. the function is tried exactly once.
                let generic_params = lookup
                    .iter_types()
                    .collect::<Vec<_>>() // Force take ownership
                    .into_iter()
                    .permutations(generics.type_or_consts.len());

                let trees: Vec<_> = generic_params
                    .filter_map(|generics| {
                        let ret_ty = it.ret_type_with_generics(db, generics.iter().cloned());
                        // Filter out private and unsafe functions
                        if !it.is_visible_from(db, *module)
                            || it.is_unsafe_to_call(db)
                            || it.is_unstable(db)
                            || ret_ty.is_reference()
                            || ret_ty.is_raw_ptr()
                        {
                            return None;
                        }

                        // Early exit if some param cannot be filled from lookup
                        let param_trees: Vec<Vec<TypeTree>> = it
                            .params_without_self_with_generics(db, generics.iter().cloned())
                            .into_iter()
                            .map(|field| {
                                let ty = field.ty();
                                // Mutable-reference params cannot be produced
                                // by the lookup table.
                                match ty.is_mutable_reference() {
                                    true => None,
                                    false => lookup.find_autoref(db, &ty),
                                }
                            })
                            .collect::<Option<_>>()?;

                        // Note that we need special case for 0 param constructors because of multi cartesian
                        // product
                        let fn_trees: Vec<TypeTree> = if param_trees.is_empty() {
                            vec![TypeTree::Function { func: *it, generics, params: Vec::new() }]
                        } else {
                            param_trees
                                .into_iter()
                                .multi_cartesian_product()
                                .take(MAX_VARIATIONS)
                                .map(|params| TypeTree::Function {
                                    func: *it,
                                    generics: generics.clone(),
                                    params,
                                })
                                .collect()
                        };
                        lookup.mark_fulfilled(ScopeDef::ModuleDef(ModuleDef::Function(*it)));
                        lookup.insert(ret_ty.clone(), fn_trees.iter().cloned());
                        Some((ret_ty, fn_trees))
                    })
                    .collect();
                Some(trees)
            }
            _ => None,
        })
        .flatten()
        .filter_map(|(ty, trees)| ty.could_unify_with_deeply(db, goal).then(|| trees))
        .flatten()
}
/// Impl method tactic
///
/// Attempts to to call methods on types from lookup table.
/// This includes both functions from direct impl blocks as well as functions from traits.
///
/// # Arguments
/// * `db` - HIR database
/// * `module` - Module where the term search target location
/// * `defs` - Set of items in scope at term search target location
/// * `lookup` - Lookup table for types
/// * `goal` - Term search target type
pub(super) fn impl_method<'a>(
    db: &'a dyn HirDatabase,
    module: &'a Module,
    _defs: &'a FxHashSet<ScopeDef>,
    lookup: &'a mut LookupTable,
    goal: &'a Type,
) -> impl Iterator<Item = TypeTree> + 'a {
    // Only types first seen since the last drain are attempted, so each type's
    // impls are explored once.
    lookup
        .new_types(NewTypesKey::ImplMethod)
        .into_iter()
        .flat_map(|ty| {
            Impl::all_for_type(db, ty.clone()).into_iter().map(move |imp| (ty.clone(), imp))
        })
        .flat_map(|(ty, imp)| imp.items(db).into_iter().map(move |item| (imp, ty.clone(), item)))
        // Keep only associated functions.
        .filter_map(|(imp, ty, it)| match it {
            AssocItem::Function(f) => Some((imp, ty, f)),
            _ => None,
        })
        .filter_map(|(imp, ty, it)| {
            let fn_generics = db.generic_params(it.id.into());
            let imp_generics = db.generic_params(imp.id.into());

            // Ignore impl if it has const type arguments
            if fn_generics
                .type_or_consts
                .values()
                .any(|it| matches!(it, TypeOrConstParamData::ConstParamData(_)))
                || imp_generics
                    .type_or_consts
                    .values()
                    .any(|it| matches!(it, TypeOrConstParamData::ConstParamData(_)))
            {
                return None;
            }

            // Ignore all functions that have something to do with lifetimes as we don't check them
            if !fn_generics.lifetimes.is_empty() {
                return None;
            }

            // Ignore functions without self param
            if !it.has_self_param(db) {
                return None;
            }

            // Filter out private and unsafe functions
            if !it.is_visible_from(db, *module) || it.is_unsafe_to_call(db) || it.is_unstable(db) {
                return None;
            }

            // Ignore bigger number of generics for now as they kill the performance
            if imp_generics.type_or_consts.len() + fn_generics.type_or_consts.len() > 0 {
                return None;
            }

            // With generics rejected above this is a single empty permutation.
            let generic_params = lookup
                .iter_types()
                .collect::<Vec<_>>() // Force take ownership
                .into_iter()
                .permutations(imp_generics.type_or_consts.len() + fn_generics.type_or_consts.len());

            let trees: Vec<_> = generic_params
                .filter_map(|generics| {
                    // Receiver type arguments come first, then the chosen generics.
                    let ret_ty = it.ret_type_with_generics(
                        db,
                        ty.type_arguments().chain(generics.iter().cloned()),
                    );
                    // Filter out functions that return references
                    if ret_ty.is_reference() || ret_ty.is_raw_ptr() {
                        return None;
                    }

                    // Ignore functions that do not change the type
                    if ty.could_unify_with_deeply(db, &ret_ty) {
                        return None;
                    }

                    // Safe: functions without a self param were rejected above.
                    let self_ty = it
                        .self_param(db)
                        .expect("No self param")
                        .ty_with_generics(db, ty.type_arguments().chain(generics.iter().cloned()));

                    // Ignore functions that have different self type
                    if !self_ty.autoderef(db).any(|s_ty| ty == s_ty) {
                        return None;
                    }

                    // Safe: `ty` came out of the lookup table itself.
                    let target_type_trees = lookup.find(db, &ty).expect("Type not in lookup");

                    // Early exit if some param cannot be filled from lookup
                    let param_trees: Vec<Vec<TypeTree>> = it
                        .params_without_self_with_generics(
                            db,
                            ty.type_arguments().chain(generics.iter().cloned()),
                        )
                        .into_iter()
                        .map(|field| lookup.find_autoref(db, &field.ty()))
                        .collect::<Option<_>>()?;

                    // First product element is the receiver, the rest are args.
                    let fn_trees: Vec<TypeTree> = std::iter::once(target_type_trees)
                        .chain(param_trees.into_iter())
                        .multi_cartesian_product()
                        .take(MAX_VARIATIONS)
                        .map(|params| TypeTree::Function { func: it, generics: Vec::new(), params })
                        .collect();

                    lookup.insert(ret_ty.clone(), fn_trees.iter().cloned());
                    Some((ret_ty, fn_trees))
                })
                .collect();
            Some(trees)
        })
        .flatten()
        .filter_map(|(ty, trees)| ty.could_unify_with_deeply(db, goal).then(|| trees))
        .flatten()
}
/// Struct projection tactic
///
/// Attempts different struct fields
///
/// # Arguments
/// * `db` - HIR database
/// * `module` - Module where the term search target location
/// * `defs` - Set of items in scope at term search target location
/// * `lookup` - Lookup table for types
/// * `goal` - Term search target type
pub(super) fn struct_projection<'a>(
    db: &'a dyn HirDatabase,
    module: &'a Module,
    _defs: &'a FxHashSet<ScopeDef>,
    lookup: &'a mut LookupTable,
    goal: &'a Type,
) -> impl Iterator<Item = TypeTree> + 'a {
    // Only consider types that became reachable since this tactic last ran.
    let new_types = lookup.new_types(NewTypesKey::StructProjection);
    new_types
        .into_iter()
        .map(|ty| {
            let trees = lookup.find(db, &ty).expect("TypeTree not in lookup");
            (ty, trees)
        })
        .flat_map(move |(ty, targets)| {
            let module = module.clone();
            // For each visible field of the type, wrap every known tree for the
            // parent type into a field-access tree.
            ty.fields(db).into_iter().filter_map(move |(field, field_ty)| {
                field.is_visible_from(db, module).then(|| {
                    let trees = targets.clone().into_iter().map(move |target| {
                        TypeTree::Field { field, type_tree: Box::new(target) }
                    });
                    (field_ty, trees)
                })
            })
        })
        .filter_map(|(ty, trees)| ty.could_unify_with_deeply(db, goal).then_some(trees))
        .flatten()
}
/// Famous types tactic
///
/// Attempts different values of well known types such as `true` or `false`
///
/// # Arguments
/// * `db` - HIR database
/// * `module` - Module where the term search target location
/// * `defs` - Set of items in scope at term search target location
/// * `lookup` - Lookup table for types
/// * `goal` - Term search target type
pub(super) fn famous_types<'a>(
    db: &'a dyn HirDatabase,
    module: &'a Module,
    _defs: &'a FxHashSet<ScopeDef>,
    lookup: &'a mut LookupTable,
    goal: &'a Type,
) -> impl Iterator<Item = TypeTree> + 'a {
    // Hand-picked literal values of well-known types.
    let bool_ty = Type::new(db, module.id, TyBuilder::bool());
    let unit_ty = Type::new(db, module.id, TyBuilder::unit());
    let candidates = [
        TypeTree::FamousType { ty: bool_ty.clone(), value: "true" },
        TypeTree::FamousType { ty: bool_ty, value: "false" },
        TypeTree::FamousType { ty: unit_ty, value: "()" },
    ];
    candidates
        .into_iter()
        .map(|tt| {
            // Record every candidate in the lookup table, even the ones that do
            // not match the goal, so later tactics can build on them.
            lookup.insert(tt.ty(db), std::iter::once(tt.clone()));
            tt
        })
        .filter(|tt| tt.ty(db).could_unify_with_deeply(db, goal))
}

View file

@ -0,0 +1,242 @@
//! Type tree for term search
use hir_def::find_path::PrefixKind;
use hir_ty::{db::HirDatabase, display::HirDisplay};
use itertools::Itertools;
use crate::{
Adt, AsAssocItem, Const, ConstParam, Field, Function, Local, ModuleDef, SemanticsScope, Static,
Struct, StructKind, Trait, Type, Variant,
};
/// Render a use-path for `def`, resolved relative to `sema_scope`.
///
/// When the item's name is shadowed by a local (more than one in-scope hit),
/// the path is prefixed with `crate::` to keep it unambiguous.
fn mod_item_path(db: &dyn HirDatabase, sema_scope: &SemanticsScope<'_>, def: &ModuleDef) -> String {
    // Count how many in-scope names collide with the item's own name.
    let shadow_count = def.name(db).map(|def_name| {
        let mut hits = 0u32;
        sema_scope.process_all_names(&mut |name, _| hits += (name == def_name) as u32);
        hits
    });
    let module = sema_scope.module();
    let shadowed = matches!(shadow_count, Some(n) if n > 1);
    let path = if shadowed {
        module.find_use_path_prefixed(db.upcast(), *def, PrefixKind::ByCrate, false, true)
    } else {
        module.find_use_path(db.upcast(), *def, false, true)
    };
    path.expect("use path error").display(db.upcast()).to_string()
}
/// Type tree shows how can we get from set of types to some type.
///
/// Consider the following code as an example
/// ```
/// fn foo(x: i32, y: bool) -> Option<i32> { None }
/// fn bar() {
/// let a = 1;
/// let b = true;
/// let c: Option<i32> = _;
/// }
/// ```
/// If we generate type tree in the place of `_` we get
/// ```txt
/// Option<i32>
/// |
/// foo(i32, bool)
/// / \
/// a: i32 b: bool
/// ```
/// So in short it pretty much gives us a way to get type `Option<i32>` using the items we have in
/// scope.
#[derive(Debug, Clone, Eq, Hash, PartialEq)]
pub enum TypeTree {
    /// Constant item, referred to by path
    Const(Const),
    /// Static variable, referred to by path
    Static(Static),
    /// Local variable, referred to by name
    Local(Local),
    /// Constant generic parameter, referred to by name
    ConstParam(ConstParam),
    /// Well known type (such as `true` for bool)
    FamousType { ty: Type, value: &'static str },
    /// Function or method call; for methods `params[0]` is the receiver
    Function { func: Function, generics: Vec<Type>, params: Vec<TypeTree> },
    /// Enum variant construction
    Variant { variant: Variant, generics: Vec<Type>, params: Vec<TypeTree> },
    /// Struct construction
    Struct { strukt: Struct, generics: Vec<Type>, params: Vec<TypeTree> },
    /// Struct field access (`type_tree.field`)
    Field { type_tree: Box<TypeTree>, field: Field },
    /// Passing type as reference (with `&`)
    Reference(Box<TypeTree>),
}
impl TypeTree {
    /// Render the type tree as Rust source code.
    ///
    /// Item paths are resolved relative to `sema_scope`; `mod_item_path`
    /// handles shadowing by locals.
    pub fn gen_source_code(&self, sema_scope: &SemanticsScope<'_>) -> String {
        let db = sema_scope.db;
        match self {
            TypeTree::Const(it) => mod_item_path(db, sema_scope, &ModuleDef::Const(*it)),
            TypeTree::Static(it) => mod_item_path(db, sema_scope, &ModuleDef::Static(*it)),
            TypeTree::Local(it) => return it.name(db).display(db.upcast()).to_string(),
            TypeTree::ConstParam(it) => return it.name(db).display(db.upcast()).to_string(),
            TypeTree::FamousType { value, .. } => return value.to_string(),
            TypeTree::Function { func, params, .. } => {
                if let Some(self_param) = func.self_param(db) {
                    // Method call: the first param is the receiver.
                    let func_name = func.name(db).display(db.upcast()).to_string();
                    let target = params.first().expect("no self param").gen_source_code(sema_scope);
                    let args =
                        params.iter().skip(1).map(|f| f.gen_source_code(sema_scope)).join(", ");

                    match func.as_assoc_item(db).unwrap().containing_trait_or_trait_impl(db) {
                        Some(trait_) => {
                            // Trait methods are rendered in fully-qualified form
                            // (`Trait::method(receiver, ..)`), with the receiver
                            // adjusted to the method's self access mode.
                            let trait_name =
                                mod_item_path(db, sema_scope, &ModuleDef::Trait(trait_));
                            let target = match self_param.access(db) {
                                crate::Access::Shared => format!("&{target}"),
                                crate::Access::Exclusive => format!("&mut {target}"),
                                crate::Access::Owned => target,
                            };
                            match args.is_empty() {
                                true => format!("{trait_name}::{func_name}({target})",),
                                false => format!("{trait_name}::{func_name}({target}, {args})",),
                            }
                        }
                        None => format!("{target}.{func_name}({args})"),
                    }
                } else {
                    // Free function: plain `path::to::fn(args)` call.
                    let args = params.iter().map(|f| f.gen_source_code(sema_scope)).join(", ");
                    let fn_name = mod_item_path(db, sema_scope, &ModuleDef::Function(*func));
                    format!("{fn_name}({args})",)
                }
            }
            TypeTree::Variant { variant, generics, params } => {
                // Build the constructor body according to the variant's shape.
                let inner = match variant.kind(db) {
                    StructKind::Tuple => {
                        let args = params.iter().map(|f| f.gen_source_code(sema_scope)).join(", ");
                        format!("({args})")
                    }
                    StructKind::Record => {
                        let fields = variant.fields(db);
                        let args = params
                            .iter()
                            .zip(fields.iter())
                            .map(|(a, f)| {
                                format!(
                                    "{}: {}",
                                    f.name(db).display(db.upcast()).to_string(),
                                    a.gen_source_code(sema_scope)
                                )
                            })
                            .join(", ");
                        format!("{{ {args} }}")
                    }
                    // Unit variants only need an explicit turbofish when the
                    // enum is generic.
                    StructKind::Unit => match generics.is_empty() {
                        true => String::new(),
                        false => {
                            let generics = generics.iter().map(|it| it.display(db)).join(", ");
                            format!("::<{generics}>")
                        }
                    },
                };

                let prefix = mod_item_path(db, sema_scope, &ModuleDef::Variant(*variant));
                format!("{prefix}{inner}")
            }
            TypeTree::Struct { strukt, generics, params } => {
                let inner = match strukt.kind(db) {
                    StructKind::Tuple => {
                        let args = params.iter().map(|a| a.gen_source_code(sema_scope)).join(", ");
                        format!("({args})")
                    }
                    StructKind::Record => {
                        let fields = strukt.fields(db);
                        let args = params
                            .iter()
                            .zip(fields.iter())
                            .map(|(a, f)| {
                                format!(
                                    "{}: {}",
                                    f.name(db).display(db.upcast()).to_string(),
                                    a.gen_source_code(sema_scope)
                                )
                            })
                            .join(", ");
                        format!(" {{ {args} }}")
                    }
                    StructKind::Unit => match generics.is_empty() {
                        true => String::new(),
                        false => {
                            let generics = generics.iter().map(|it| it.display(db)).join(", ");
                            format!("::<{generics}>")
                        }
                    },
                };

                let prefix = mod_item_path(db, sema_scope, &ModuleDef::Adt(Adt::Struct(*strukt)));
                format!("{prefix}{inner}")
            }
            TypeTree::Field { type_tree, field } => {
                let strukt = type_tree.gen_source_code(sema_scope);
                let field = field.name(db).display(db.upcast()).to_string();
                format!("{strukt}.{field}")
            }
            TypeTree::Reference(type_tree) => {
                let inner = type_tree.gen_source_code(sema_scope);
                format!("&{inner}")
            }
        }
    }

    /// Get type of the type tree.
    ///
    /// Same as getting the type of root node
    pub fn ty(&self, db: &dyn HirDatabase) -> Type {
        match self {
            TypeTree::Const(it) => it.ty(db),
            TypeTree::Static(it) => it.ty(db),
            TypeTree::Local(it) => it.ty(db),
            TypeTree::ConstParam(it) => it.ty(db),
            TypeTree::FamousType { ty, .. } => ty.clone(),
            // For methods the receiver's type arguments are substituted into
            // the return type as well.
            TypeTree::Function { func, generics, params } => match func.has_self_param(db) {
                true => func.ret_type_with_generics(
                    db,
                    params[0].ty(db).type_arguments().chain(generics.iter().cloned()),
                ),
                false => func.ret_type_with_generics(db, generics.iter().cloned()),
            },
            TypeTree::Variant { variant, generics, .. } => {
                variant.parent_enum(db).ty_with_generics(db, generics.iter().cloned())
            }
            TypeTree::Struct { strukt, generics, .. } => {
                strukt.ty_with_generics(db, generics.iter().cloned())
            }
            TypeTree::Field { type_tree, field } => {
                field.ty_with_generics(db, type_tree.ty(db).type_arguments())
            }
            // NOTE(review): returns the referent's type, not a `&T` type — the
            // reference wrapper only affects rendered source. Confirm intended.
            TypeTree::Reference(it) => it.ty(db),
        }
    }

    /// Collect the traits that must be in scope (or referred to by path) for
    /// the generated code to resolve — i.e. traits providing called methods.
    /// Recurses into function parameters; other variants contribute nothing.
    pub fn traits_used(&self, db: &dyn HirDatabase) -> Vec<Trait> {
        let mut res = Vec::new();

        match self {
            TypeTree::Function { func, params, .. } => {
                res.extend(params.iter().flat_map(|it| it.traits_used(db)));
                if let Some(it) = func.as_assoc_item(db) {
                    if let Some(it) = it.containing_trait_or_trait_impl(db) {
                        res.push(it);
                    }
                }
            }
            _ => (),
        }

        res
    }
}

View file

@ -0,0 +1,181 @@
//! Term search assist
use ide_db::assists::{AssistId, AssistKind, GroupLabel};
use itertools::Itertools;
use syntax::{ast, AstNode};
use crate::assist_context::{AssistContext, Assists};
// Assist: term_search
//
// Replaces a `todo!()` invocation with a term of the expected type found by
// term search. One assist is offered per distinct generated term.
pub(crate) fn term_search(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> {
    let macro_call = ctx.find_node_at_offset::<ast::MacroCall>()?;
    let syntax = macro_call.syntax();
    let goal_range = syntax.text_range();

    // Only trigger on `todo!(..)` — the token right before `!` is the macro name.
    let name_token = macro_call.excl_token()?.prev_token()?;
    if name_token.text() != "todo" {
        return None;
    }

    let parent = syntax.parent()?;
    let expr = ast::Expr::cast(parent.clone())?;
    let target_ty = ctx.sema.type_of_expr(&expr)?.adjusted();
    let scope = ctx.sema.scope(&parent)?;

    let paths = hir::term_search::term_search(&ctx.sema, &scope, &target_ty);
    if paths.is_empty() {
        return None;
    }

    for path in paths.iter().unique() {
        let code = path.gen_source_code(&scope);
        acc.add_group(
            &GroupLabel(String::from("Term search")),
            AssistId("term_search", AssistKind::Generate),
            format!("Replace todo!() with {code}"),
            goal_range,
            |builder| builder.replace(goal_range, code),
        );
    }

    Some(())
}
#[cfg(test)]
mod tests {
    use crate::tests::{check_assist, check_assist_not_applicable};

    use super::*;

    // `todo!()` is stubbed as a macro expanding to `_` so the fixture does not
    // need the real standard library.
    #[test]
    fn test_complete_local() {
        check_assist(
            term_search,
            "macro_rules! todo { () => (_) }; fn f() { let a: u128 = 1; let b: u128 = todo$0!() }",
            "macro_rules! todo { () => (_) }; fn f() { let a: u128 = 1; let b: u128 = a }",
        )
    }

    #[test]
    fn test_complete_todo_with_msg() {
        check_assist(
            term_search,
            "macro_rules! todo { ($($arg:tt)+) => (_) }; fn f() { let a: u128 = 1; let b: u128 = todo$0!(\"asd\") }",
            "macro_rules! todo { ($($arg:tt)+) => (_) }; fn f() { let a: u128 = 1; let b: u128 = a }",
        )
    }

    #[test]
    fn test_complete_struct_field() {
        check_assist(
            term_search,
            r#"macro_rules! todo { () => (_) };
struct A { pub x: i32, y: bool }
fn f() { let a = A { x: 1, y: true }; let b: i32 = todo$0!(); }"#,
            r#"macro_rules! todo { () => (_) };
struct A { pub x: i32, y: bool }
fn f() { let a = A { x: 1, y: true }; let b: i32 = a.x; }"#,
        )
    }

    // Unit variants of generic enums get an explicit turbofish.
    #[test]
    fn test_enum_with_generics() {
        check_assist(
            term_search,
            r#"macro_rules! todo { () => (_) };
enum Option<T> { Some(T), None }
fn f() { let a: i32 = 1; let b: Option<i32> = todo$0!(); }"#,
            r#"macro_rules! todo { () => (_) };
enum Option<T> { Some(T), None }
fn f() { let a: i32 = 1; let b: Option<i32> = Option::None::<i32>; }"#,
        )
    }

    #[test]
    fn test_enum_with_generics2() {
        check_assist(
            term_search,
            r#"macro_rules! todo { () => (_) };
enum Option<T> { None, Some(T) }
fn f() { let a: i32 = 1; let b: Option<i32> = todo$0!(); }"#,
            r#"macro_rules! todo { () => (_) };
enum Option<T> { None, Some(T) }
fn f() { let a: i32 = 1; let b: Option<i32> = Option::Some(a); }"#,
        )
    }

    #[test]
    fn test_newtype() {
        check_assist(
            term_search,
            r#"macro_rules! todo { () => (_) };
struct Foo(i32);
fn f() { let a: i32 = 1; let b: Foo = todo$0!(); }"#,
            r#"macro_rules! todo { () => (_) };
struct Foo(i32);
fn f() { let a: i32 = 1; let b: Foo = Foo(a); }"#,
        )
    }

    // The later `a: u32` shadows `a: i32`, so only `b` fits the goal type.
    #[test]
    fn test_shadowing() {
        check_assist(
            term_search,
            r#"macro_rules! todo { () => (_) };
fn f() { let a: i32 = 1; let b: i32 = 2; let a: u32 = 0; let c: i32 = todo$0!(); }"#,
            r#"macro_rules! todo { () => (_) };
fn f() { let a: i32 = 1; let b: i32 = 2; let a: u32 = 0; let c: i32 = b; }"#,
        )
    }

    #[test]
    fn test_famous_bool() {
        check_assist(
            term_search,
            r#"macro_rules! todo { () => (_) };
fn f() { let a: bool = todo$0!(); }"#,
            r#"macro_rules! todo { () => (_) };
fn f() { let a: bool = false; }"#,
        )
    }

    // An owned local can be passed to a `&` parameter by auto-referencing.
    #[test]
    fn test_fn_with_reference_types() {
        check_assist(
            term_search,
            r#"macro_rules! todo { () => (_) };
fn f(a: &i32) -> f32 { a as f32 }
fn g() { let a = 1; let b: f32 = todo$0!(); }"#,
            r#"macro_rules! todo { () => (_) };
fn f(a: &i32) -> f32 { a as f32 }
fn g() { let a = 1; let b: f32 = f(&a); }"#,
        )
    }

    #[test]
    fn test_fn_with_reference_types2() {
        check_assist(
            term_search,
            r#"macro_rules! todo { () => (_) };
fn f(a: &i32) -> f32 { a as f32 }
fn g() { let a = &1; let b: f32 = todo$0!(); }"#,
            r#"macro_rules! todo { () => (_) };
fn f(a: &i32) -> f32 { a as f32 }
fn g() { let a = &1; let b: f32 = f(a); }"#,
        )
    }

    // `&mut i32` is not accepted where `&i32` is expected by the search, so no
    // assist is offered.
    #[test]
    fn test_fn_with_reference_types3() {
        check_assist_not_applicable(
            term_search,
            r#"macro_rules! todo { () => (_) };
fn f(a: &i32) -> f32 { a as f32 }
fn g() { let a = &mut 1; let b: f32 = todo$0!(); }"#,
        )
    }
}

View file

@ -210,6 +210,7 @@ mod handlers {
mod replace_turbofish_with_explicit_type;
mod sort_items;
mod split_import;
mod term_search;
mod toggle_ignore;
mod unmerge_match_arm;
mod unmerge_use;
@ -332,6 +333,7 @@ mod handlers {
replace_arith_op::replace_arith_with_saturating,
sort_items::sort_items,
split_import::split_import,
term_search::term_search,
toggle_ignore::toggle_ignore,
unmerge_match_arm::unmerge_match_arm,
unmerge_use::unmerge_use,

View file

@ -1,14 +1,17 @@
use hir::{db::ExpandDatabase, ClosureStyle, HirDisplay, StructKind};
use hir::{db::ExpandDatabase, term_search::term_search, ClosureStyle, HirDisplay, Semantics};
use ide_db::{
assists::{Assist, AssistId, AssistKind, GroupLabel},
label::Label,
source_change::SourceChange,
RootDatabase,
};
use syntax::AstNode;
use itertools::Itertools;
use text_edit::TextEdit;
use crate::{Diagnostic, DiagnosticCode, DiagnosticsContext};
use syntax::AstNode;
// Diagnostic: typed-hole
//
// This diagnostic is triggered when an underscore expression is used in an invalid position.
@ -22,7 +25,7 @@ pub(crate) fn typed_hole(ctx: &DiagnosticsContext<'_>, d: &hir::TypedHole) -> Di
"invalid `_` expression, expected type `{}`",
d.expected.display(ctx.sema.db).with_closure_style(ClosureStyle::ClosureWithId),
),
fixes(ctx, d),
fixes(&ctx.sema, d),
)
};
@ -30,56 +33,43 @@ pub(crate) fn typed_hole(ctx: &DiagnosticsContext<'_>, d: &hir::TypedHole) -> Di
.with_fixes(fixes)
}
fn fixes(ctx: &DiagnosticsContext<'_>, d: &hir::TypedHole) -> Option<Vec<Assist>> {
let db = ctx.sema.db;
fn fixes(sema: &Semantics<'_, RootDatabase>, d: &hir::TypedHole) -> Option<Vec<Assist>> {
let db = sema.db;
let root = db.parse_or_expand(d.expr.file_id);
let (original_range, _) =
d.expr.as_ref().map(|it| it.to_node(&root)).syntax().original_file_range_opt(db)?;
let scope = ctx.sema.scope(d.expr.value.to_node(&root).syntax())?;
let scope = sema.scope(d.expr.value.to_node(&root).syntax())?;
let paths = term_search(sema, &scope, &d.expected);
let mut assists = vec![];
scope.process_all_names(&mut |name, def| {
let ty = match def {
hir::ScopeDef::ModuleDef(it) => match it {
hir::ModuleDef::Function(it) => it.ty(db),
hir::ModuleDef::Adt(hir::Adt::Struct(it)) if it.kind(db) != StructKind::Record => {
it.constructor_ty(db)
}
hir::ModuleDef::Variant(it) if it.kind(db) != StructKind::Record => {
it.constructor_ty(db)
}
hir::ModuleDef::Const(it) => it.ty(db),
hir::ModuleDef::Static(it) => it.ty(db),
_ => return,
},
hir::ScopeDef::GenericParam(hir::GenericParam::ConstParam(it)) => it.ty(db),
hir::ScopeDef::Local(it) => it.ty(db),
_ => return,
};
// FIXME: should also check coercions if it is at a coercion site
if !ty.contains_unknown() && ty.could_unify_with(db, &d.expected) {
assists.push(Assist {
id: AssistId("typed-hole", AssistKind::QuickFix),
label: Label::new(format!("Replace `_` with `{}`", name.display(db))),
group: Some(GroupLabel("Replace `_` with a matching entity in scope".to_owned())),
target: original_range.range,
source_change: Some(SourceChange::from_text_edit(
original_range.file_id,
TextEdit::replace(original_range.range, name.display(db).to_string()),
)),
trigger_signature_help: false,
});
}
});
if assists.is_empty() {
None
} else {
for path in paths.into_iter().unique() {
let code = path.gen_source_code(&scope);
assists.push(Assist {
id: AssistId("typed-hole", AssistKind::QuickFix),
label: Label::new(format!("Replace `_` with `{}`", &code)),
group: Some(GroupLabel("Replace `_` with a term".to_owned())),
target: original_range.range,
source_change: Some(SourceChange::from_text_edit(
original_range.file_id,
TextEdit::replace(original_range.range, code),
)),
trigger_signature_help: false,
});
}
if !assists.is_empty() {
Some(assists)
} else {
None
}
}
#[cfg(test)]
mod tests {
use crate::tests::{check_diagnostics, check_fixes};
use crate::tests::{
check_diagnostics, check_fixes_unordered, check_has_fix, check_has_single_fix,
};
#[test]
fn unknown() {
@ -99,7 +89,7 @@ fn main() {
r#"
fn main() {
if _ {}
//^ error: invalid `_` expression, expected type `bool`
//^ 💡 error: invalid `_` expression, expected type `bool`
let _: fn() -> i32 = _;
//^ error: invalid `_` expression, expected type `fn() -> i32`
let _: fn() -> () = _; // FIXME: This should trigger an assist because `main` matches via *coercion*
@ -129,7 +119,7 @@ fn main() {
fn main() {
let mut x = t();
x = _;
//^ 💡 error: invalid `_` expression, expected type `&str`
//^ error: invalid `_` expression, expected type `&str`
x = "";
}
fn t<T>() -> T { loop {} }
@ -143,7 +133,8 @@ fn t<T>() -> T { loop {} }
r#"
fn main() {
let _x = [(); _];
let _y: [(); 10] = [(); _];
// FIXME: This should trigger error
// let _y: [(); 10] = [(); _];
_ = 0;
(_,) = (1,);
}
@ -153,7 +144,7 @@ fn main() {
#[test]
fn check_quick_fix() {
check_fixes(
check_fixes_unordered(
r#"
enum Foo {
Bar
@ -173,6 +164,18 @@ enum Foo {
}
use Foo::Bar;
const C: Foo = Foo::Bar;
fn main<const CP: Foo>(param: Foo) {
let local = Foo::Bar;
let _: Foo = Bar;
//^ error: invalid `_` expression, expected type `fn()`
}
"#,
r#"
enum Foo {
Bar
}
use Foo::Bar;
const C: Foo = Foo::Bar;
fn main<const CP: Foo>(param: Foo) {
let local = Foo::Bar;
let _: Foo = local;
@ -209,18 +212,6 @@ enum Foo {
}
use Foo::Bar;
const C: Foo = Foo::Bar;
fn main<const CP: Foo>(param: Foo) {
let local = Foo::Bar;
let _: Foo = Bar;
//^ error: invalid `_` expression, expected type `fn()`
}
"#,
r#"
enum Foo {
Bar
}
use Foo::Bar;
const C: Foo = Foo::Bar;
fn main<const CP: Foo>(param: Foo) {
let local = Foo::Bar;
let _: Foo = C;
@ -230,4 +221,149 @@ fn main<const CP: Foo>(param: Foo) {
],
);
}
// Term search should surface a trait method on a local as a fix, rendered in
// fully-qualified form (`Foo::foo(a)`).
#[test]
fn local_item_use_trait() {
    check_has_fix(
        r#"
struct Bar;
trait Foo {
    fn foo(self) -> Bar;
}
impl Foo for i32 {
    fn foo(self) -> Bar {
        unimplemented!()
    }
}
fn asd() -> Bar {
    let a: i32 = 1;
    _$0
}
"#,
        r"
struct Bar;
trait Foo {
    fn foo(self) -> Bar;
}
impl Foo for i32 {
    fn foo(self) -> Bar {
        unimplemented!()
    }
}
fn asd() -> Bar {
    let a: i32 = 1;
    Foo::foo(a)
}
",
    );
}

// Record structs are constructed field by field from terms in scope.
#[test]
fn init_struct() {
    check_has_fix(
        r#"struct Abc {}
struct Qwe { a: i32, b: Abc }
fn main() {
    let a: i32 = 1;
    let c: Qwe = _$0;
}"#,
        r#"struct Abc {}
struct Qwe { a: i32, b: Abc }
fn main() {
    let a: i32 = 1;
    let c: Qwe = Qwe { a: a, b: Abc { } };
}"#,
    );
}

// `<i32 as Foo>::foo` returns `i32`, not `Bar`, so only the struct literal fix
// should be offered (hence the *single*-fix check).
#[test]
fn ignore_impl_func_with_incorrect_return() {
    check_has_single_fix(
        r#"
struct Bar {}
trait Foo {
    type Res;
    fn foo(&self) -> Self::Res;
}
impl Foo for i32 {
    type Res = Self;
    fn foo(&self) -> Self::Res { 1 }
}
fn main() {
    let a: i32 = 1;
    let c: Bar = _$0;
}"#,
        r#"
struct Bar {}
trait Foo {
    type Res;
    fn foo(&self) -> Self::Res;
}
impl Foo for i32 {
    type Res = Self;
    fn foo(&self) -> Self::Res { 1 }
}
fn main() {
    let a: i32 = 1;
    let c: Bar = Bar { };
}"#,
    );
}

// The associated type resolves to `Bar`, so the trait method is a valid fix;
// the receiver is auto-referenced to match `&self`.
#[test]
fn use_impl_func_with_correct_return() {
    check_has_fix(
        r#"
struct Bar {}
trait Foo {
    type Res;
    fn foo(&self) -> Self::Res;
}
impl Foo for i32 {
    type Res = Bar;
    fn foo(&self) -> Self::Res { Bar { } }
}
fn main() {
    let a: i32 = 1;
    let c: Bar = _$0;
}"#,
        r#"
struct Bar {}
trait Foo {
    type Res;
    fn foo(&self) -> Self::Res;
}
impl Foo for i32 {
    type Res = Bar;
    fn foo(&self) -> Self::Res { Bar { } }
}
fn main() {
    let a: i32 = 1;
    let c: Bar = Foo::foo(&a);
}"#,
    );
}

// The local `f` shadows the function `f`, so the call is spelled `crate::f()`.
#[test]
fn local_shadow_fn() {
    check_fixes_unordered(
        r#"
fn f() {
    let f: i32 = 0;
    _$0
}"#,
        vec![
            r#"
fn f() {
    let f: i32 = 0;
    ()
}"#,
            r#"
fn f() {
    let f: i32 = 0;
    crate::f()
}"#,
        ],
    );
}
}

View file

@ -91,6 +91,91 @@ fn check_nth_fix_with_config(
assert_eq_text!(&after, &actual);
}
/// Checks that every fixture in `ra_fixtures_after` is produced by *some* fix
/// of the diagnostic at `$0`, without depending on the order fixes are listed.
///
/// `#[track_caller]` keeps the caller-location chain intact so that a failing
/// assertion in `check_has_fix` is reported at the calling test, matching the
/// sibling helpers below.
#[track_caller]
pub(crate) fn check_fixes_unordered(ra_fixture_before: &str, ra_fixtures_after: Vec<&str>) {
    for ra_fixture_after in &ra_fixtures_after {
        check_has_fix(ra_fixture_before, ra_fixture_after)
    }
}
/// Checks that the diagnostic at `$0` in `ra_fixture_before` offers at least
/// one fix which, when applied, produces exactly `ra_fixture_after`.
///
/// Panics (at the caller, via `#[track_caller]`) if no such fix exists.
#[track_caller]
pub(crate) fn check_has_fix(ra_fixture_before: &str, ra_fixture_after: &str) {
    let after = trim_indent(ra_fixture_after);

    let (db, file_position) = RootDatabase::with_position(ra_fixture_before);
    let mut conf = DiagnosticsConfig::test_sample();
    conf.expr_fill_default = ExprFillDefaultMode::Default;
    let fix = super::diagnostics(&db, &conf, &AssistResolveStrategy::All, file_position.file_id)
        .into_iter()
        .find(|d| {
            // A diagnostic matches when any of its fixes covers the cursor and
            // its edits transform the file into the expected source.
            d.fixes.as_ref().map_or(false, |fixes| {
                fixes.iter().any(|fix| {
                    if !fix.target.contains_inclusive(file_position.offset) {
                        return false;
                    }
                    let source_change = fix.source_change.as_ref().unwrap();
                    let file_id = *source_change.source_file_edits.keys().next().unwrap();
                    let mut actual = db.file_text(file_id).to_string();

                    // Apply text edits and any accompanying snippet edits.
                    for (edit, snippet_edit) in source_change.source_file_edits.values() {
                        edit.apply(&mut actual);
                        if let Some(snippet_edit) = snippet_edit {
                            snippet_edit.apply(&mut actual);
                        }
                    }
                    after == actual
                })
            })
        });
    assert!(fix.is_some(), "no diagnostic with desired fix");
}
/// Like [`check_has_fix`], but additionally asserts that the matching
/// diagnostic offers exactly one fix in total.
#[track_caller]
pub(crate) fn check_has_single_fix(ra_fixture_before: &str, ra_fixture_after: &str) {
    let after = trim_indent(ra_fixture_after);

    let (db, file_position) = RootDatabase::with_position(ra_fixture_before);
    let mut conf = DiagnosticsConfig::test_sample();
    conf.expr_fill_default = ExprFillDefaultMode::Default;
    // Counts fixes across all inspected diagnostics up to (and including) the
    // matching one; `find` short-circuits once a match is found.
    let mut n_fixes = 0;
    let fix = super::diagnostics(&db, &conf, &AssistResolveStrategy::All, file_position.file_id)
        .into_iter()
        .find(|d| {
            d.fixes.as_ref().map_or(false, |fixes| {
                n_fixes += fixes.len();
                fixes.iter().any(|fix| {
                    if !fix.target.contains_inclusive(file_position.offset) {
                        return false;
                    }
                    let source_change = fix.source_change.as_ref().unwrap();
                    let file_id = *source_change.source_file_edits.keys().next().unwrap();
                    let mut actual = db.file_text(file_id).to_string();

                    // Apply text edits and any accompanying snippet edits.
                    for (edit, snippet_edit) in source_change.source_file_edits.values() {
                        edit.apply(&mut actual);
                        if let Some(snippet_edit) = snippet_edit {
                            snippet_edit.apply(&mut actual);
                        }
                    }
                    after == actual
                })
            })
        });
    assert!(fix.is_some(), "no diagnostic with desired fix");
    assert_eq!(n_fixes, 1, "Too many fixes suggested");
}
/// Checks that there's a diagnostic *without* fix at `$0`.
pub(crate) fn check_no_fix(ra_fixture: &str) {
let (db, file_position) = RootDatabase::with_position(ra_fixture);

View file

@ -32,7 +32,7 @@ use oorandom::Rand32;
use profile::{Bytes, StopWatch};
use project_model::{CargoConfig, ProjectManifest, ProjectWorkspace, RustLibSource};
use rayon::prelude::*;
use rustc_hash::FxHashSet;
use rustc_hash::{FxHashMap, FxHashSet};
use syntax::{AstNode, SyntaxNode};
use vfs::{AbsPathBuf, FileId, Vfs, VfsPath};
@ -91,7 +91,7 @@ impl flags::AnalysisStats {
};
let (host, vfs, _proc_macro) =
load_workspace(workspace, &cargo_config.extra_env, &load_cargo_config)?;
load_workspace(workspace.clone(), &cargo_config.extra_env, &load_cargo_config)?;
let db = host.raw_database();
eprint!("{:<20} {}", "Database loaded:", db_load_sw.elapsed());
eprint!(" (metadata {metadata_time}");
@ -232,7 +232,11 @@ impl flags::AnalysisStats {
}
if self.run_all_ide_things {
self.run_ide_things(host.analysis(), file_ids);
self.run_ide_things(host.analysis(), file_ids.clone());
}
if self.run_term_search {
self.run_term_search(&workspace, db, &vfs, file_ids, verbosity);
}
let total_span = analysis_sw.elapsed();
@ -321,6 +325,196 @@ impl flags::AnalysisStats {
report_metric("const eval time", const_eval_time.time.as_millis() as u64, "ms");
}
/// Benchmarks term search: for every non-block tail expression in the given
/// files, runs term search against the expression's type and reports how often
/// a generated term textually matches the original source.
///
/// With `--validate-term-search`, each generated term is also written to disk
/// and the workspace build is re-run to collect type-check error codes (slow;
/// files are restored afterwards).
fn run_term_search(
    &self,
    ws: &ProjectWorkspace,
    db: &RootDatabase,
    vfs: &Vfs,
    mut file_ids: Vec<FileId>,
    verbosity: Verbosity,
) {
    let mut cargo_config = CargoConfig::default();
    cargo_config.sysroot = match self.no_sysroot {
        true => None,
        false => Some(RustLibSource::Discover),
    };
    let mut bar = match verbosity {
        Verbosity::Quiet | Verbosity::Spammy => ProgressReport::hidden(),
        _ if self.parallel || self.output.is_some() => ProgressReport::hidden(),
        _ => ProgressReport::new(file_ids.len() as u64),
    };

    file_ids.sort();
    file_ids.dedup();

    // Per-run statistics, accumulated across all files.
    #[derive(Debug, Default)]
    struct Acc {
        // Tail exprs where some generated term matched the original (modulo whitespace)
        tail_expr_syntax_hits: u64,
        // Tail exprs for which term search found nothing
        tail_expr_no_term: u64,
        // Total tail exprs inspected
        total_tail_exprs: u64,
        // Histogram of rustc error codes hit during validation
        error_codes: FxHashMap<String, u32>,
        syntax_errors: u32,
    }

    let mut acc: Acc = Default::default();
    bar.tick();
    let mut sw = self.stop_watch();

    for &file_id in &file_ids {
        let sema = hir::Semantics::new(db);
        let _ = db.parse(file_id);

        let parse = sema.parse(file_id);
        let file_txt = db.file_text(file_id);
        let path = vfs.file_path(file_id).as_path().unwrap().to_owned();

        for node in parse.syntax().descendants() {
            // Only visit block expressions with a typed, non-block tail expr.
            let expr = match syntax::ast::Expr::cast(node.clone()) {
                Some(it) => it,
                None => continue,
            };
            let block = match syntax::ast::BlockExpr::cast(expr.syntax().clone()) {
                Some(it) => it,
                None => continue,
            };
            let target_ty = match sema.type_of_expr(&expr) {
                Some(it) => it.adjusted(),
                None => continue, // Failed to infer type
            };

            let expected_tail = match block.tail_expr() {
                Some(it) => it,
                None => continue,
            };

            if expected_tail.is_block_like() {
                continue;
            }

            let range = sema.original_range(&expected_tail.syntax()).range;
            // NOTE(review): `.into_iter()` on `chars()` is redundant; also
            // re-reads file text although `file_txt` is already in hand.
            let original_text: String = db
                .file_text(file_id)
                .chars()
                .into_iter()
                .skip(usize::from(range.start()))
                .take(usize::from(range.end()) - usize::from(range.start()))
                .collect();

            let scope = match sema.scope(&expected_tail.syntax()) {
                Some(it) => it,
                None => continue,
            };

            let found_terms = hir::term_search::term_search(&sema, &scope, &target_ty);

            if found_terms.is_empty() {
                acc.tail_expr_no_term += 1;
                acc.total_tail_exprs += 1;
                // println!("\n{}\n", &original_text);
                continue;
            };

            // Whitespace-insensitive comparison helper.
            fn trim(s: &str) -> String {
                s.chars().into_iter().filter(|c| !c.is_whitespace()).collect()
            }

            let mut syntax_hit_found = false;
            for term in found_terms {
                let generated = term.gen_source_code(&scope);
                syntax_hit_found |= trim(&original_text) == trim(&generated);

                // Validate if type-checks
                let mut txt = file_txt.to_string();

                let edit = ide::TextEdit::replace(range, generated.clone());
                edit.apply(&mut txt);

                if self.validate_term_search {
                    // Write the candidate into the real file and rebuild the
                    // workspace, bucketing any rustc error codes.
                    std::fs::write(&path, txt).unwrap();

                    let res = ws.run_build_scripts(&cargo_config, &|_| ()).unwrap();
                    if let Some(err) = res.error() {
                        if err.contains("error: could not compile") {
                            if let Some(mut err_idx) = err.find("error[E") {
                                // Skip past "error[E" to the 4-digit code.
                                err_idx += 7;
                                let err_code = &err[err_idx..err_idx + 4];
                                // if err_code == "0308" {
                                println!("{}", err);
                                println!("{}", generated);
                                // }
                                acc.error_codes
                                    .entry(err_code.to_owned())
                                    .and_modify(|n| *n += 1)
                                    .or_insert(1);
                            } else {
                                acc.syntax_errors += 1;
                                bar.println(format!("Syntax error here >>>>\n{}", err));
                            }
                        }
                    }
                }
            }

            if syntax_hit_found {
                acc.tail_expr_syntax_hits += 1;
            }
            acc.total_tail_exprs += 1;

            let msg = move || {
                format!(
                    "processing: {:<50}",
                    trim(&original_text).chars().take(50).collect::<String>()
                )
            };
            if verbosity.is_spammy() {
                bar.println(msg());
            }
            bar.set_message(msg);
        }

        // Revert file back to original state
        if self.validate_term_search {
            std::fs::write(&path, file_txt.to_string()).unwrap();
        }

        bar.inc(1);
    }
    let term_search_time = sw.elapsed();

    bar.println(format!(
        "Tail Expr syntactic hits: {}/{} ({}%)",
        acc.tail_expr_syntax_hits,
        acc.total_tail_exprs,
        percentage(acc.tail_expr_syntax_hits, acc.total_tail_exprs)
    ));
    bar.println(format!(
        "Tail Exprs found: {}/{} ({}%)",
        acc.total_tail_exprs - acc.tail_expr_no_term,
        acc.total_tail_exprs,
        percentage(acc.total_tail_exprs - acc.tail_expr_no_term, acc.total_tail_exprs)
    ));
    if self.validate_term_search {
        bar.println(format!(
            "Tail Exprs total errors: {}, syntax errors: {}, error codes:",
            acc.error_codes.values().sum::<u32>() + acc.syntax_errors,
            acc.syntax_errors,
        ));
        for (err, count) in acc.error_codes {
            bar.println(format!(
                "  E{err}: {count:>5} (https://doc.rust-lang.org/error_codes/E{err}.html)"
            ));
        }
    }
    // NOTE(review): divides by zero if no tail expressions were inspected.
    bar.println(format!(
        "Term search avg time: {}ms",
        term_search_time.time.as_millis() as u64 / acc.total_tail_exprs
    ));
    bar.println(format!("{:<20} {}", "Term search:", term_search_time));
    report_metric("term search time", term_search_time.time.as_millis() as u64, "ms");
    bar.finish_and_clear();
}
fn run_mir_lowering(&self, db: &RootDatabase, bodies: &[DefWithBody], verbosity: Verbosity) {
let mut sw = self.stop_watch();
let mut all = 0;

View file

@ -93,6 +93,10 @@ xflags::xflags! {
/// and annotations. This is useful for benchmarking the memory usage on a project that has
/// been worked on for a bit in a longer running session.
optional --run-all-ide-things
/// Run term search
optional --run-term-search
/// Validate term search by running `cargo check` on every response
optional --validate-term-search
}
/// Run unit tests of the project using mir interpreter
@ -218,6 +222,8 @@ pub struct AnalysisStats {
pub skip_data_layout: bool,
pub skip_const_eval: bool,
pub run_all_ide_things: bool,
pub run_term_search: bool,
pub validate_term_search: bool,
}
#[derive(Debug)]