Auto merge of #149499 - lnicola:sync-from-ra, r=lnicola

`rust-analyzer` subtree update

Subtree update of `rust-analyzer` to d690155841.

Created using https://github.com/rust-lang/josh-sync.

r? `@ghost`
This commit is contained in:
bors 2025-12-01 14:06:10 +00:00
commit 2fb805367d
236 changed files with 7584 additions and 5577 deletions

View file

@ -845,6 +845,7 @@ dependencies = [
name = "hir-expand"
version = "0.0.0"
dependencies = [
"arrayvec",
"base-db",
"cfg",
"cov-mark",
@ -863,6 +864,7 @@ dependencies = [
"stdx",
"syntax",
"syntax-bridge",
"thin-vec",
"tracing",
"triomphe",
"tt",
@ -905,6 +907,7 @@ dependencies = [
"syntax",
"test-fixture",
"test-utils",
"thin-vec",
"tracing",
"tracing-subscriber",
"tracing-tree",
@ -1475,6 +1478,7 @@ dependencies = [
"parser",
"ra-ap-rustc_lexer",
"rustc-hash 2.1.1",
"salsa",
"smallvec",
"span",
"stdx",
@ -2270,9 +2274,9 @@ checksum = "7a2d987857b319362043e95f5353c0535c1f58eec5336fdfcf626430af7def58"
[[package]]
name = "rowan"
version = "0.15.15"
version = "0.15.17"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "32a58fa8a7ccff2aec4f39cc45bf5f985cec7125ab271cf681c279fd00192b49"
checksum = "d4f1e4a001f863f41ea8d0e6a0c34b356d5b733db50dadab3efef640bafb779b"
dependencies = [
"countme",
"hashbrown 0.14.5",

View file

@ -4,7 +4,7 @@ exclude = ["crates/proc-macro-srv/proc-macro-test/imp"]
resolver = "2"
[workspace.package]
rust-version = "1.88"
rust-version = "1.91"
edition = "2024"
license = "MIT OR Apache-2.0"
authors = ["rust-analyzer team"]
@ -52,7 +52,7 @@ debug = 2
# local crates
macros = { path = "./crates/macros", version = "0.0.0" }
base-db = { path = "./crates/base-db", version = "0.0.0" }
cfg = { path = "./crates/cfg", version = "0.0.0", features = ["tt"] }
cfg = { path = "./crates/cfg", version = "0.0.0", features = ["tt", "syntax"] }
hir = { path = "./crates/hir", version = "0.0.0" }
hir-def = { path = "./crates/hir-def", version = "0.0.0" }
hir-expand = { path = "./crates/hir-expand", version = "0.0.0" }
@ -132,7 +132,7 @@ process-wrap = { version = "8.2.1", features = ["std"] }
pulldown-cmark-to-cmark = "10.0.4"
pulldown-cmark = { version = "0.9.6", default-features = false }
rayon = "1.10.0"
rowan = "=0.15.15"
rowan = "=0.15.17"
# Ideally we'd not enable the macros feature but unfortunately the `tracked` attribute does not work
# on impls without it
salsa = { version = "0.24.0", default-features = false, features = [
@ -170,6 +170,7 @@ tracing-subscriber = { version = "0.3.20", default-features = false, features =
triomphe = { version = "0.1.14", default-features = false, features = ["std"] }
url = "2.5.4"
xshell = "0.2.7"
thin-vec = "0.2.14"
petgraph = { version = "0.8.2", default-features = false }
# We need to freeze the version of the crate, as the raw-api feature is considered unstable

View file

@ -0,0 +1,302 @@
//! Defines [`EditionedFileId`], an interned wrapper around [`span::EditionedFileId`] that
//! is interned (so queries can take it) and remembers its crate.
use core::fmt;
use std::hash::{Hash, Hasher};
use span::Edition;
use vfs::FileId;
use crate::{Crate, RootQueryDb};
/// An interned pair of a [`span::EditionedFileId`] and its originating [`Crate`].
///
/// The salsa interning plumbing for this type is written out by hand (see the
/// `const _` block in this file) rather than generated by `#[salsa::interned]`.
#[derive(Copy, Clone, PartialEq, Eq, Hash, PartialOrd, Ord)]
pub struct EditionedFileId(
    // The interned salsa id; the actual (file id, crate) payload lives in the
    // interning ingredient.
    salsa::Id,
    // Zero-sized marker tying this handle to the interned value type.
    std::marker::PhantomData<&'static salsa::plumbing::interned::Value<EditionedFileId>>,
);
// Hand-rolled salsa interning plumbing for `EditionedFileId`. This mirrors what
// the `#[salsa::interned]` macro would generate, written out manually so that
// the `Hash`/`HashEqLike` impls can ignore the `krate` field (see the doc
// comment on `WithoutCrate` below for the rationale).
const _: () = {
    use salsa::plumbing as zalsa_;
    use zalsa_::interned as zalsa_struct_;
    type Configuration_ = EditionedFileId;

    // The interned payload: the span-level file id plus its origin crate.
    #[derive(Debug, Clone, PartialEq, Eq)]
    pub struct EditionedFileIdData {
        editioned_file_id: span::EditionedFileId,
        krate: Crate,
    }

    /// We like to include the origin crate in an `EditionedFileId` (for use in the item tree),
    /// but this poses us a problem.
    ///
    /// Spans contain `EditionedFileId`s, and we don't want to make them store the crate too
    /// because that will increase their size, which will increase memory usage significantly.
    /// Furthermore, things using spans do not generally need the crate: they are using the
    /// file id for queries like `ast_id_map` or `parse`, which do not care about the crate.
    ///
    /// To solve this, we hash **only the `span::EditionedFileId`**, but still compare
    /// the crate in the equality check. This preserves the invariant of `Hash` and `Eq` -
    /// equal values always have equal hashes; distinct values may share a hash, but the
    /// same file id being used by multiple crates is a rare thing, so collisions stay rare.
    /// Then, when we only have a `span::EditionedFileId`, we use the `intern()` method to
    /// reuse existing file ids, and create new one only if needed. See [`from_span_guess_origin`].
    ///
    /// See this for more info: https://rust-lang.zulipchat.com/#narrow/channel/185405-t-compiler.2Frust-analyzer/topic/Letting.20EditionedFileId.20know.20its.20crate/near/530189401
    ///
    /// [`from_span_guess_origin`]: EditionedFileId::from_span_guess_origin
    #[derive(Hash, PartialEq, Eq)]
    struct WithoutCrate {
        editioned_file_id: span::EditionedFileId,
    }

    impl Hash for EditionedFileIdData {
        #[inline]
        fn hash<H: Hasher>(&self, state: &mut H) {
            // Deliberately ignores `krate` so the hash agrees with `WithoutCrate`'s.
            let EditionedFileIdData { editioned_file_id, krate: _ } = *self;
            editioned_file_id.hash(state);
        }
    }

    // Lets a bare `WithoutCrate` be used as a lookup key against interned
    // `EditionedFileIdData` entries: equality ignores the crate here too.
    impl zalsa_struct_::HashEqLike<WithoutCrate> for EditionedFileIdData {
        #[inline]
        fn hash<H: Hasher>(&self, state: &mut H) {
            Hash::hash(self, state);
        }

        #[inline]
        fn eq(&self, data: &WithoutCrate) -> bool {
            let EditionedFileIdData { editioned_file_id, krate: _ } = *self;
            editioned_file_id == data.editioned_file_id
        }
    }

    impl zalsa_::HasJar for EditionedFileId {
        type Jar = zalsa_struct_::JarImpl<EditionedFileId>;
        const KIND: zalsa_::JarKind = zalsa_::JarKind::Struct;
    }

    zalsa_::register_jar! {
        zalsa_::ErasedJar::erase::<EditionedFileId>()
    }

    impl zalsa_struct_::Configuration for EditionedFileId {
        const LOCATION: salsa::plumbing::Location =
            salsa::plumbing::Location { file: file!(), line: line!() };
        const DEBUG_NAME: &'static str = "EditionedFileId";
        const REVISIONS: std::num::NonZeroUsize = std::num::NonZeroUsize::MAX;
        // Not persisted: the `serialize`/`deserialize` hooks below must never run.
        const PERSIST: bool = false;

        type Fields<'a> = EditionedFileIdData;
        type Struct<'db> = EditionedFileId;

        fn serialize<S>(_: &Self::Fields<'_>, _: S) -> Result<S::Ok, S::Error>
        where
            S: zalsa_::serde::Serializer,
        {
            unimplemented!("attempted to serialize value that set `PERSIST` to false")
        }

        fn deserialize<'de, D>(_: D) -> Result<Self::Fields<'static>, D::Error>
        where
            D: zalsa_::serde::Deserializer<'de>,
        {
            unimplemented!("attempted to deserialize value that cannot set `PERSIST` to false");
        }
    }

    impl Configuration_ {
        // Returns the interning ingredient for this struct, cached after the
        // first lookup.
        pub fn ingredient(zalsa: &zalsa_::Zalsa) -> &zalsa_struct_::IngredientImpl<Self> {
            static CACHE: zalsa_::IngredientCache<zalsa_struct_::IngredientImpl<EditionedFileId>> =
                zalsa_::IngredientCache::new();
            // SAFETY: `lookup_jar_by_type` returns a valid ingredient index, and the only
            // ingredient created by our jar is the struct ingredient.
            unsafe {
                CACHE.get_or_create(zalsa, || {
                    zalsa.lookup_jar_by_type::<zalsa_struct_::JarImpl<EditionedFileId>>()
                })
            }
        }
    }

    impl zalsa_::AsId for EditionedFileId {
        fn as_id(&self) -> salsa::Id {
            self.0.as_id()
        }
    }

    impl zalsa_::FromId for EditionedFileId {
        fn from_id(id: salsa::Id) -> Self {
            Self(<salsa::Id>::from_id(id), std::marker::PhantomData)
        }
    }

    // SAFETY: the struct is a plain `salsa::Id` plus a zero-sized marker;
    // presumably fine to move/share across threads — this mirrors what the
    // salsa macro generates for interned structs (TODO confirm against the
    // macro expansion).
    unsafe impl Send for EditionedFileId {}
    unsafe impl Sync for EditionedFileId {}

    impl std::fmt::Debug for EditionedFileId {
        fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
            Self::default_debug_fmt(*self, f)
        }
    }

    impl zalsa_::SalsaStructInDb for EditionedFileId {
        type MemoIngredientMap = salsa::plumbing::MemoIngredientSingletonIndex;

        fn lookup_ingredient_index(aux: &zalsa_::Zalsa) -> salsa::plumbing::IngredientIndices {
            aux.lookup_jar_by_type::<zalsa_struct_::JarImpl<EditionedFileId>>().into()
        }

        fn entries(zalsa: &zalsa_::Zalsa) -> impl Iterator<Item = zalsa_::DatabaseKeyIndex> + '_ {
            let _ingredient_index =
                zalsa.lookup_jar_by_type::<zalsa_struct_::JarImpl<EditionedFileId>>();
            <EditionedFileId>::ingredient(zalsa).entries(zalsa).map(|entry| entry.key())
        }

        #[inline]
        fn cast(id: salsa::Id, type_id: std::any::TypeId) -> Option<Self> {
            if type_id == std::any::TypeId::of::<EditionedFileId>() {
                Some(<Self as salsa::plumbing::FromId>::from_id(id))
            } else {
                None
            }
        }

        #[inline]
        unsafe fn memo_table(
            zalsa: &zalsa_::Zalsa,
            id: zalsa_::Id,
            current_revision: zalsa_::Revision,
        ) -> zalsa_::MemoTableWithTypes<'_> {
            // SAFETY: Guaranteed by caller.
            unsafe {
                zalsa.table().memos::<zalsa_struct_::Value<EditionedFileId>>(id, current_revision)
            }
        }
    }

    unsafe impl zalsa_::Update for EditionedFileId {
        unsafe fn maybe_update(old_pointer: *mut Self, new_value: Self) -> bool {
            // Write only when the value actually changed; the return value
            // reports whether an update happened.
            if unsafe { *old_pointer } != new_value {
                unsafe { *old_pointer = new_value };
                true
            } else {
                false
            }
        }
    }

    impl EditionedFileId {
        /// Interns `(editioned_file_id, krate)`, reusing an existing id when this
        /// exact pair was interned before.
        pub fn from_span(
            db: &(impl salsa::Database + ?Sized),
            editioned_file_id: span::EditionedFileId,
            krate: Crate,
        ) -> Self {
            let (zalsa, zalsa_local) = db.zalsas();
            Configuration_::ingredient(zalsa).intern(
                zalsa,
                zalsa_local,
                EditionedFileIdData { editioned_file_id, krate },
                |_, data| data,
            )
        }

        /// Guesses the crate for the file.
        ///
        /// Only use this if you cannot precisely determine the origin. This can happen in one of two cases:
        ///
        /// 1. The file is not in the module tree.
        /// 2. You are latency sensitive and cannot afford calling the def map to precisely compute the origin
        ///    (e.g. on enter feature, folding, etc.).
        pub fn from_span_guess_origin(
            db: &dyn RootQueryDb,
            editioned_file_id: span::EditionedFileId,
        ) -> Self {
            let (zalsa, zalsa_local) = db.zalsas();
            Configuration_::ingredient(zalsa).intern(
                zalsa,
                zalsa_local,
                // Lookup by the file id only (crate ignored, see `WithoutCrate`);
                // the closure below runs only when no entry for this file id
                // exists yet.
                WithoutCrate { editioned_file_id },
                |_, _| {
                    // FileId not in the database.
                    let krate = db
                        .relevant_crates(editioned_file_id.file_id())
                        .first()
                        .copied()
                        .or_else(|| db.all_crates().first().copied())
                        .unwrap_or_else(|| {
                            // What we're doing here is a bit fishy. We rely on the fact that we only need
                            // the crate in the item tree, and we should not create an `EditionedFileId`
                            // without a crate except in cases where it does not matter. The chances that
                            // `all_crates()` will be empty are also very slim, but it can occur during startup.
                            // In the very unlikely case that there is a bug and we'll use this crate, Salsa
                            // will panic.
                            // SAFETY: 0 is less than `Id::MAX_U32`.
                            salsa::plumbing::FromId::from_id(unsafe { salsa::Id::from_index(0) })
                        });
                    EditionedFileIdData { editioned_file_id, krate }
                },
            )
        }

        /// Returns the underlying `span::EditionedFileId` (file id + edition).
        pub fn editioned_file_id(self, db: &dyn salsa::Database) -> span::EditionedFileId {
            let zalsa = db.zalsa();
            let fields = Configuration_::ingredient(zalsa).fields(zalsa, self);
            fields.editioned_file_id
        }

        /// Returns the crate this file id was interned with.
        pub fn krate(self, db: &dyn salsa::Database) -> Crate {
            let zalsa = db.zalsa();
            let fields = Configuration_::ingredient(zalsa).fields(zalsa, self);
            fields.krate
        }

        /// Default debug formatting for this struct (may be useful if you define your own `Debug` impl)
        pub fn default_debug_fmt(this: Self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
            zalsa_::with_attached_database(|db| {
                let zalsa = db.zalsa();
                let fields = Configuration_::ingredient(zalsa).fields(zalsa, this);
                fmt::Debug::fmt(fields, f)
            })
            .unwrap_or_else(|| {
                // No database attached to this thread: fall back to the raw id.
                f.debug_tuple("EditionedFileId").field(&zalsa_::AsId::as_id(&this)).finish()
            })
        }
    }
};
impl EditionedFileId {
    /// Interns the (`file_id`, `edition`) pair together with its origin crate.
    #[inline]
    pub fn new(db: &dyn salsa::Database, file_id: FileId, edition: Edition, krate: Crate) -> Self {
        let editioned = span::EditionedFileId::new(file_id, edition);
        EditionedFileId::from_span(db, editioned, krate)
    }

    /// Attaches the current edition and guesses the crate for the file.
    ///
    /// Only use this if you cannot precisely determine the origin. This can happen in one of two cases:
    ///
    /// 1. The file is not in the module tree.
    /// 2. You are latency sensitive and cannot afford calling the def map to precisely compute the origin
    ///    (e.g. on enter feature, folding, etc.).
    #[inline]
    pub fn current_edition_guess_origin(db: &dyn RootQueryDb, file_id: FileId) -> Self {
        let editioned = span::EditionedFileId::current_edition(file_id);
        Self::from_span_guess_origin(db, editioned)
    }

    /// Returns the underlying file id, dropping the edition.
    #[inline]
    pub fn file_id(self, db: &dyn salsa::Database) -> vfs::FileId {
        self.editioned_file_id(db).file_id()
    }

    /// Splits this id back into its file id and edition.
    #[inline]
    pub fn unpack(self, db: &dyn salsa::Database) -> (vfs::FileId, span::Edition) {
        let editioned = self.editioned_file_id(db);
        (editioned.file_id(), editioned.edition())
    }

    /// Returns the edition this file id carries.
    #[inline]
    pub fn edition(self, db: &dyn salsa::Database) -> Edition {
        self.editioned_file_id(db).edition()
    }
}

View file

@ -857,9 +857,10 @@ impl CrateGraphBuilder {
}
}
impl BuiltCrateData {
pub fn root_file_id(&self, db: &dyn salsa::Database) -> EditionedFileId {
EditionedFileId::new(db, self.root_file_id, self.edition)
impl Crate {
pub fn root_file_id(self, db: &dyn salsa::Database) -> EditionedFileId {
let data = self.data(db);
EditionedFileId::new(db, data.root_file_id, data.edition, self)
}
}

View file

@ -5,6 +5,7 @@ pub use salsa_macros;
// FIXME: Rename this crate, base db is non descriptive
mod change;
mod editioned_file_id;
mod input;
pub mod target;
@ -17,6 +18,7 @@ use std::{
pub use crate::{
change::FileChange,
editioned_file_id::EditionedFileId,
input::{
BuiltCrateData, BuiltDependency, Crate, CrateBuilder, CrateBuilderId, CrateDataBuilder,
CrateDisplayName, CrateGraphBuilder, CrateName, CrateOrigin, CratesIdMap, CratesMap,
@ -29,7 +31,6 @@ pub use query_group::{self};
use rustc_hash::FxHasher;
use salsa::{Durability, Setter};
pub use semver::{BuildMetadata, Prerelease, Version, VersionReq};
use span::Edition;
use syntax::{Parse, SyntaxError, ast};
use triomphe::Arc;
pub use vfs::{AnchoredPath, AnchoredPathBuf, FileId, VfsPath, file_set::FileSet};
@ -175,42 +176,6 @@ impl Files {
}
}
#[salsa_macros::interned(no_lifetime, debug, constructor=from_span, revisions = usize::MAX)]
#[derive(PartialOrd, Ord)]
pub struct EditionedFileId {
pub editioned_file_id: span::EditionedFileId,
}
impl EditionedFileId {
// Salsa already uses the name `new`...
#[inline]
pub fn new(db: &dyn salsa::Database, file_id: FileId, edition: Edition) -> Self {
EditionedFileId::from_span(db, span::EditionedFileId::new(file_id, edition))
}
#[inline]
pub fn current_edition(db: &dyn salsa::Database, file_id: FileId) -> Self {
EditionedFileId::new(db, file_id, Edition::CURRENT)
}
#[inline]
pub fn file_id(self, db: &dyn salsa::Database) -> vfs::FileId {
let id = self.editioned_file_id(db);
id.file_id()
}
#[inline]
pub fn unpack(self, db: &dyn salsa::Database) -> (vfs::FileId, span::Edition) {
let id = self.editioned_file_id(db);
(id.file_id(), id.edition())
}
#[inline]
pub fn edition(self, db: &dyn SourceDatabase) -> Edition {
self.editioned_file_id(db).edition()
}
}
#[salsa_macros::input(debug)]
pub struct FileText {
#[returns(ref)]

View file

@ -18,6 +18,7 @@ tracing.workspace = true
# locals deps
tt = { workspace = true, optional = true }
syntax = { workspace = true, optional = true }
intern.workspace = true
[dev-dependencies]

View file

@ -63,6 +63,8 @@ impl From<CfgAtom> for CfgExpr {
}
impl CfgExpr {
// FIXME: Parsing from `tt` is only used in a handful of places, reconsider
// if we should switch them to AST.
#[cfg(feature = "tt")]
pub fn parse<S: Copy>(tt: &tt::TopSubtree<S>) -> CfgExpr {
next_cfg_expr(&mut tt.iter()).unwrap_or(CfgExpr::Invalid)
@ -73,6 +75,13 @@ impl CfgExpr {
next_cfg_expr(tt).unwrap_or(CfgExpr::Invalid)
}
#[cfg(feature = "syntax")]
pub fn parse_from_ast(
ast: &mut std::iter::Peekable<syntax::ast::TokenTreeChildren>,
) -> CfgExpr {
next_cfg_expr_from_ast(ast).unwrap_or(CfgExpr::Invalid)
}
/// Fold the cfg by querying all basic `Atom` and `KeyValue` predicates.
pub fn fold(&self, query: &dyn Fn(&CfgAtom) -> bool) -> Option<bool> {
match self {
@ -89,6 +98,56 @@ impl CfgExpr {
}
}
#[cfg(feature = "syntax")]
// Parses one cfg predicate (e.g. `unix`, `feature = "x"`, `all(...)`) from the
// AST token-tree children iterator, consuming the predicate and its trailing
// comma. Returns `None` when the iterator is exhausted and
// `Some(CfgExpr::Invalid)` on malformed input.
fn next_cfg_expr_from_ast(
    it: &mut std::iter::Peekable<syntax::ast::TokenTreeChildren>,
) -> Option<CfgExpr> {
    use intern::sym;
    use syntax::{NodeOrToken, SyntaxKind, T, ast};

    // A predicate must start with an identifier: `all`/`any`/`not`, a flag
    // name, or the key of a key-value pair.
    let name = match it.next() {
        None => return None,
        Some(NodeOrToken::Token(ident)) if ident.kind().is_any_identifier() => {
            Symbol::intern(ident.text())
        }
        Some(_) => return Some(CfgExpr::Invalid),
    };
    let ret = match it.peek() {
        Some(NodeOrToken::Token(eq)) if eq.kind() == T![=] => {
            // `name = "value"` — only string literals are accepted as values.
            it.next();
            if let Some(NodeOrToken::Token(literal)) = it.peek()
                && matches!(literal.kind(), SyntaxKind::STRING)
            {
                let literal = tt::token_to_literal(literal.text(), ()).symbol;
                it.next();
                CfgAtom::KeyValue { key: name, value: literal.clone() }.into()
            } else {
                return Some(CfgExpr::Invalid);
            }
        }
        Some(NodeOrToken::Node(subtree)) => {
            // `name(...)` — recurse into the parenthesized sub-predicates.
            let mut subtree_iter = ast::TokenTreeChildren::new(subtree).peekable();
            it.next();
            let mut subs = std::iter::from_fn(|| next_cfg_expr_from_ast(&mut subtree_iter));
            match name {
                s if s == sym::all => CfgExpr::All(subs.collect()),
                s if s == sym::any => CfgExpr::Any(subs.collect()),
                s if s == sym::not => {
                    CfgExpr::Not(Box::new(subs.next().unwrap_or(CfgExpr::Invalid)))
                }
                // Unknown combinator name.
                _ => CfgExpr::Invalid,
            }
        }
        // Bare identifier: a flag predicate such as `test` or `unix`.
        _ => CfgAtom::Flag(name).into(),
    };
    // Eat everything up to and including the comma separator.
    while it.next().is_some_and(|it| it.as_token().is_none_or(|it| it.kind() != T![,])) {}
    Some(ret)
}
#[cfg(feature = "tt")]
fn next_cfg_expr<S: Copy>(it: &mut tt::iter::TtIter<'_, S>) -> Option<CfgExpr> {
use intern::sym;

View file

@ -1,7 +1,10 @@
use arbitrary::{Arbitrary, Unstructured};
use expect_test::{Expect, expect};
use intern::Symbol;
use syntax::{AstNode, Edition, ast};
use syntax::{
AstNode, Edition,
ast::{self, TokenTreeChildren},
};
use syntax_bridge::{
DocCommentDesugarMode,
dummy_test_span_utils::{DUMMY, DummyTestSpanMap},
@ -10,24 +13,33 @@ use syntax_bridge::{
use crate::{CfgAtom, CfgExpr, CfgOptions, DnfExpr};
#[track_caller]
// Parses a `CfgExpr` straight from the AST token tree (the `syntax`-feature
// code path), as opposed to going through `syntax_node_to_token_tree` first.
fn parse_ast_cfg(tt: &ast::TokenTree) -> CfgExpr {
    CfgExpr::parse_from_ast(&mut TokenTreeChildren::new(tt).peekable())
}
#[track_caller]
fn assert_parse_result(input: &str, expected: CfgExpr) {
let source_file = ast::SourceFile::parse(input, Edition::CURRENT).ok().unwrap();
let tt = source_file.syntax().descendants().find_map(ast::TokenTree::cast).unwrap();
let tt_ast = source_file.syntax().descendants().find_map(ast::TokenTree::cast).unwrap();
let tt = syntax_node_to_token_tree(
tt.syntax(),
tt_ast.syntax(),
DummyTestSpanMap,
DUMMY,
DocCommentDesugarMode::ProcMacro,
);
let cfg = CfgExpr::parse(&tt);
assert_eq!(cfg, expected);
let cfg = parse_ast_cfg(&tt_ast);
assert_eq!(cfg, expected);
}
#[track_caller]
fn check_dnf(input: &str, expect: Expect) {
let source_file = ast::SourceFile::parse(input, Edition::CURRENT).ok().unwrap();
let tt = source_file.syntax().descendants().find_map(ast::TokenTree::cast).unwrap();
let tt_ast = source_file.syntax().descendants().find_map(ast::TokenTree::cast).unwrap();
let tt = syntax_node_to_token_tree(
tt.syntax(),
tt_ast.syntax(),
DummyTestSpanMap,
DUMMY,
DocCommentDesugarMode::ProcMacro,
@ -35,13 +47,17 @@ fn check_dnf(input: &str, expect: Expect) {
let cfg = CfgExpr::parse(&tt);
let actual = format!("#![cfg({})]", DnfExpr::new(&cfg));
expect.assert_eq(&actual);
let cfg = parse_ast_cfg(&tt_ast);
let actual = format!("#![cfg({})]", DnfExpr::new(&cfg));
expect.assert_eq(&actual);
}
#[track_caller]
fn check_why_inactive(input: &str, opts: &CfgOptions, expect: Expect) {
let source_file = ast::SourceFile::parse(input, Edition::CURRENT).ok().unwrap();
let tt = source_file.syntax().descendants().find_map(ast::TokenTree::cast).unwrap();
let tt_ast = source_file.syntax().descendants().find_map(ast::TokenTree::cast).unwrap();
let tt = syntax_node_to_token_tree(
tt.syntax(),
tt_ast.syntax(),
DummyTestSpanMap,
DUMMY,
DocCommentDesugarMode::ProcMacro,
@ -50,14 +66,18 @@ fn check_why_inactive(input: &str, opts: &CfgOptions, expect: Expect) {
let dnf = DnfExpr::new(&cfg);
let why_inactive = dnf.why_inactive(opts).unwrap().to_string();
expect.assert_eq(&why_inactive);
let cfg = parse_ast_cfg(&tt_ast);
let dnf = DnfExpr::new(&cfg);
let why_inactive = dnf.why_inactive(opts).unwrap().to_string();
expect.assert_eq(&why_inactive);
}
#[track_caller]
fn check_enable_hints(input: &str, opts: &CfgOptions, expected_hints: &[&str]) {
let source_file = ast::SourceFile::parse(input, Edition::CURRENT).ok().unwrap();
let tt = source_file.syntax().descendants().find_map(ast::TokenTree::cast).unwrap();
let tt_ast = source_file.syntax().descendants().find_map(ast::TokenTree::cast).unwrap();
let tt = syntax_node_to_token_tree(
tt.syntax(),
tt_ast.syntax(),
DummyTestSpanMap,
DUMMY,
DocCommentDesugarMode::ProcMacro,
@ -66,6 +86,10 @@ fn check_enable_hints(input: &str, opts: &CfgOptions, expected_hints: &[&str]) {
let dnf = DnfExpr::new(&cfg);
let hints = dnf.compute_enable_hints(opts).map(|diff| diff.to_string()).collect::<Vec<_>>();
assert_eq!(hints, expected_hints);
let cfg = parse_ast_cfg(&tt_ast);
let dnf = DnfExpr::new(&cfg);
let hints = dnf.compute_enable_hints(opts).map(|diff| diff.to_string()).collect::<Vec<_>>();
assert_eq!(hints, expected_hints);
}
#[test]

View file

@ -44,7 +44,8 @@ mbe.workspace = true
cfg.workspace = true
tt.workspace = true
span.workspace = true
thin-vec = "0.2.14"
thin-vec.workspace = true
syntax-bridge.workspace = true
[dev-dependencies]
expect-test.workspace = true
@ -52,7 +53,6 @@ expect-test.workspace = true
# local deps
test-utils.workspace = true
test-fixture.workspace = true
syntax-bridge.workspace = true
[features]
in-rust-tree = ["hir-expand/in-rust-tree"]

View file

@ -1,901 +0,0 @@
//! A higher level attributes based on TokenTree, with also some shortcuts.
use std::{borrow::Cow, convert::identity, hash::Hash, ops};
use base_db::Crate;
use cfg::{CfgExpr, CfgOptions};
use either::Either;
use hir_expand::{
HirFileId, InFile,
attrs::{Attr, AttrId, RawAttrs, collect_attrs},
span_map::SpanMapRef,
};
use intern::{Symbol, sym};
use la_arena::{ArenaMap, Idx, RawIdx};
use mbe::DelimiterKind;
use rustc_abi::ReprOptions;
use span::AstIdNode;
use syntax::{
AstPtr,
ast::{self, HasAttrs},
};
use triomphe::Arc;
use tt::iter::{TtElement, TtIter};
use crate::{
AdtId, AstIdLoc, AttrDefId, GenericParamId, HasModule, LocalFieldId, Lookup, MacroId,
VariantId,
db::DefDatabase,
item_tree::block_item_tree_query,
lang_item::LangItem,
nameres::{ModuleOrigin, ModuleSource},
src::{HasChildSource, HasSource},
};
/// Desugared attributes of an item post `cfg_attr` expansion.
#[derive(Default, Debug, Clone, PartialEq, Eq)]
pub struct Attrs(RawAttrs);

/// `Attrs` paired with the item they were collected from.
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct AttrsWithOwner {
    attrs: Attrs,
    owner: AttrDefId,
}
impl Attrs {
    /// Collects `owner`'s attributes with `cfg_attr`s already expanded against
    /// `cfg_options`.
    pub fn new(
        db: &dyn DefDatabase,
        owner: &dyn ast::HasAttrs,
        span_map: SpanMapRef<'_>,
        cfg_options: &CfgOptions,
    ) -> Self {
        Attrs(RawAttrs::new_expanded(db, owner, span_map, cfg_options))
    }

    /// Linear lookup of an attribute by its id.
    pub fn get(&self, id: AttrId) -> Option<&Attr> {
        (**self).iter().find(|attr| attr.id == id)
    }

    /// Wraps already-collected raw attributes after expanding their `cfg_attr`s.
    pub(crate) fn expand_cfg_attr(
        db: &dyn DefDatabase,
        krate: Crate,
        raw_attrs: RawAttrs,
    ) -> Attrs {
        Attrs(raw_attrs.expand_cfg_attr(db, krate))
    }

    /// Checks whether `owner` is enabled under `cfg_options`; on rejection,
    /// returns the first failing `cfg` expression.
    pub(crate) fn is_cfg_enabled_for(
        db: &dyn DefDatabase,
        owner: &dyn ast::HasAttrs,
        span_map: SpanMapRef<'_>,
        cfg_options: &CfgOptions,
    ) -> Result<(), CfgExpr> {
        RawAttrs::attrs_iter_expanded::<false>(db, owner, span_map, cfg_options)
            .filter_map(|attr| attr.cfg())
            // `check` returning `None` means "unknown", which counts as enabled.
            .find_map(|cfg| match cfg_options.check(&cfg).is_none_or(identity) {
                true => None,
                false => Some(cfg),
            })
            .map_or(Ok(()), Err)
    }
}
// `Attrs` transparently derefs to its attribute slice…
impl ops::Deref for Attrs {
    type Target = [Attr];

    fn deref(&self) -> &[Attr] {
        &self.0
    }
}

// …and `AttrsWithOwner` derefs to the plain `Attrs`.
impl ops::Deref for AttrsWithOwner {
    type Target = Attrs;

    fn deref(&self) -> &Attrs {
        &self.attrs
    }
}
impl Attrs {
    pub const EMPTY: Self = Self(RawAttrs::EMPTY);

    /// Per-field attributes of a variant/struct/union, indexed by field id.
    /// `cfg`-disabled fields are dropped and later fields shift down to fill
    /// the gap (note how `idx` only advances for enabled fields).
    pub(crate) fn fields_attrs_query(
        db: &dyn DefDatabase,
        v: VariantId,
    ) -> Arc<ArenaMap<LocalFieldId, Attrs>> {
        let _p = tracing::info_span!("fields_attrs_query").entered();
        let mut res = ArenaMap::default();
        // Resolve the field list, the file it lives in, and the owning crate.
        let (fields, file_id, krate) = match v {
            VariantId::EnumVariantId(it) => {
                let loc = it.lookup(db);
                let krate = loc.parent.lookup(db).container.krate;
                let source = loc.source(db);
                (source.value.field_list(), source.file_id, krate)
            }
            VariantId::StructId(it) => {
                let loc = it.lookup(db);
                let krate = loc.container.krate;
                let source = loc.source(db);
                (source.value.field_list(), source.file_id, krate)
            }
            VariantId::UnionId(it) => {
                let loc = it.lookup(db);
                let krate = loc.container.krate;
                let source = loc.source(db);
                (
                    // Unions only ever have record fields.
                    source.value.record_field_list().map(ast::FieldList::RecordFieldList),
                    source.file_id,
                    krate,
                )
            }
        };
        // Fieldless (e.g. unit struct): nothing to collect.
        let Some(fields) = fields else {
            return Arc::new(res);
        };

        let cfg_options = krate.cfg_options(db);
        let span_map = db.span_map(file_id);

        match fields {
            ast::FieldList::RecordFieldList(fields) => {
                // `idx` counts only cfg-enabled fields.
                let mut idx = 0;
                for field in fields.fields() {
                    let attrs =
                        Attrs(RawAttrs::new_expanded(db, &field, span_map.as_ref(), cfg_options));
                    if attrs.is_cfg_enabled(cfg_options).is_ok() {
                        res.insert(Idx::from_raw(RawIdx::from(idx)), attrs);
                        idx += 1;
                    }
                }
            }
            ast::FieldList::TupleFieldList(fields) => {
                // Same logic as above, for tuple fields.
                let mut idx = 0;
                for field in fields.fields() {
                    let attrs =
                        Attrs(RawAttrs::new_expanded(db, &field, span_map.as_ref(), cfg_options));
                    if attrs.is_cfg_enabled(cfg_options).is_ok() {
                        res.insert(Idx::from_raw(RawIdx::from(idx)), attrs);
                        idx += 1;
                    }
                }
            }
        }

        res.shrink_to_fit();
        Arc::new(res)
    }
}
impl Attrs {
    /// Starts a query over attributes whose path is exactly `key`.
    #[inline]
    pub fn by_key(&self, key: Symbol) -> AttrQuery<'_> {
        AttrQuery { attrs: self, key }
    }

    /// Iterates attributes in the `rust_analyzer` tool namespace
    /// (first path segment is `rust_analyzer`).
    #[inline]
    pub fn rust_analyzer_tool(&self) -> impl Iterator<Item = &Attr> {
        self.iter()
            .filter(|&attr| attr.path.segments().first().is_some_and(|s| *s == sym::rust_analyzer))
    }

    /// Combines every `#[cfg(...)]` attribute into one expression; multiple
    /// `cfg`s are conjoined with `all(...)`.
    #[inline]
    pub fn cfg(&self) -> Option<CfgExpr> {
        let mut cfgs = self.by_key(sym::cfg).tt_values().map(CfgExpr::parse);
        let first = cfgs.next()?;
        match cfgs.next() {
            Some(second) => {
                let cfgs = [first, second].into_iter().chain(cfgs);
                Some(CfgExpr::All(cfgs.collect()))
            }
            None => Some(first),
        }
    }

    /// Each `#[cfg(...)]` attribute as its own expression.
    #[inline]
    pub fn cfgs(&self) -> impl Iterator<Item = CfgExpr> + '_ {
        self.by_key(sym::cfg).tt_values().map(CfgExpr::parse)
    }

    /// `Err` carries the first `cfg` that evaluates to disabled; unknown cfgs
    /// (where `check` returns `None`) count as enabled.
    #[inline]
    pub(crate) fn is_cfg_enabled(&self, cfg_options: &CfgOptions) -> Result<(), CfgExpr> {
        self.cfgs().try_for_each(|cfg| {
            if cfg_options.check(&cfg) != Some(false) { Ok(()) } else { Err(cfg) }
        })
    }

    /// The string value of `#[lang = "..."]`, if present.
    #[inline]
    pub fn lang(&self) -> Option<&Symbol> {
        self.by_key(sym::lang).string_value()
    }

    /// `#[lang = "..."]`, resolved to a known `LangItem`.
    #[inline]
    pub fn lang_item(&self) -> Option<LangItem> {
        self.by_key(sym::lang).string_value().and_then(LangItem::from_symbol)
    }

    /// `#[doc(hidden)]`
    #[inline]
    pub fn has_doc_hidden(&self) -> bool {
        self.by_key(sym::doc).tt_values().any(|tt| {
            tt.top_subtree().delimiter.kind == DelimiterKind::Parenthesis &&
                matches!(tt.token_trees().flat_tokens(), [tt::TokenTree::Leaf(tt::Leaf::Ident(ident))] if ident.sym == sym::hidden)
        })
    }

    /// `#[doc(notable_trait)]`
    #[inline]
    pub fn has_doc_notable_trait(&self) -> bool {
        self.by_key(sym::doc).tt_values().any(|tt| {
            tt.top_subtree().delimiter.kind == DelimiterKind::Parenthesis &&
                matches!(tt.token_trees().flat_tokens(), [tt::TokenTree::Leaf(tt::Leaf::Ident(ident))] if ident.sym == sym::notable_trait)
        })
    }

    /// Every `#[doc(...)]` argument, parsed.
    #[inline]
    pub fn doc_exprs(&self) -> impl Iterator<Item = DocExpr> + '_ {
        self.by_key(sym::doc).tt_values().map(DocExpr::parse)
    }

    /// All alias names from `#[doc(alias = ...)]` / `#[doc(alias(...))]`.
    #[inline]
    pub fn doc_aliases(&self) -> impl Iterator<Item = Symbol> + '_ {
        self.doc_exprs().flat_map(|doc_expr| doc_expr.aliases().to_vec())
    }

    /// The string value of `#[export_name = "..."]`, if present.
    #[inline]
    pub fn export_name(&self) -> Option<&Symbol> {
        self.by_key(sym::export_name).string_value()
    }

    #[inline]
    pub fn is_proc_macro(&self) -> bool {
        self.by_key(sym::proc_macro).exists()
    }

    #[inline]
    pub fn is_proc_macro_attribute(&self) -> bool {
        self.by_key(sym::proc_macro_attribute).exists()
    }

    #[inline]
    pub fn is_proc_macro_derive(&self) -> bool {
        self.by_key(sym::proc_macro_derive).exists()
    }

    /// True when any attribute path is a suffix of `core::prelude::v1::test`
    /// (this matches plain `#[test]` too, since `zip` stops at the shorter
    /// sequence).
    #[inline]
    pub fn is_test(&self) -> bool {
        self.iter().any(|it| {
            it.path()
                .segments()
                .iter()
                .rev()
                .zip([sym::core, sym::prelude, sym::v1, sym::test].iter().rev())
                .all(|it| it.0 == it.1)
        })
    }

    #[inline]
    pub fn is_ignore(&self) -> bool {
        self.by_key(sym::ignore).exists()
    }

    #[inline]
    pub fn is_bench(&self) -> bool {
        self.by_key(sym::bench).exists()
    }

    #[inline]
    pub fn is_unstable(&self) -> bool {
        self.by_key(sym::unstable).exists()
    }

    /// Parsed `#[rustc_legacy_const_generics(...)]` indices (first such
    /// attribute only); `None` when absent or empty.
    #[inline]
    pub fn rustc_legacy_const_generics(&self) -> Option<Box<Box<[u32]>>> {
        self.by_key(sym::rustc_legacy_const_generics)
            .tt_values()
            .next()
            .map(parse_rustc_legacy_const_generics)
            .filter(|it| !it.is_empty())
            .map(Box::new)
    }

    /// Merges every `#[repr(...)]` attribute into a single `ReprOptions`.
    #[inline]
    pub fn repr(&self) -> Option<ReprOptions> {
        self.by_key(sym::repr).tt_values().filter_map(parse_repr_tt).fold(None, |acc, repr| {
            acc.map_or(Some(repr), |mut acc| {
                merge_repr(&mut acc, repr);
                Some(acc)
            })
        })
    }
}
// Parses `#[rustc_legacy_const_generics(...)]` arguments: a comma-separated
// list of integer literal indices. Parsing stops at the first token that is
// not an integer literal, or a non-comma token in separator position.
fn parse_rustc_legacy_const_generics(tt: &crate::tt::TopSubtree) -> Box<[u32]> {
    let mut indices = Vec::new();
    let mut iter = tt.iter();
    // Consume (literal, optional-comma) pairs.
    while let (Some(first), second) = (iter.next(), iter.next()) {
        match first {
            TtElement::Leaf(tt::Leaf::Literal(lit)) => match lit.symbol.as_str().parse() {
                Ok(index) => indices.push(index),
                Err(_) => break,
            },
            _ => break,
        }
        if let Some(comma) = second {
            match comma {
                TtElement::Leaf(tt::Leaf::Punct(punct)) if punct.char == ',' => {}
                _ => break,
            }
        }
    }

    indices.into_boxed_slice()
}
fn merge_repr(this: &mut ReprOptions, other: ReprOptions) {
let ReprOptions { int, align, pack, flags, field_shuffle_seed: _ } = this;
flags.insert(other.flags);
*align = (*align).max(other.align);
*pack = match (*pack, other.pack) {
(Some(pack), None) | (None, Some(pack)) => Some(pack),
_ => (*pack).min(other.pack),
};
if other.int.is_some() {
*int = other.int;
}
}
// Parses one `#[repr(...)]` token tree into `ReprOptions`. Unrecognized or
// malformed arguments are skipped; recognized ones are merged left-to-right.
fn parse_repr_tt(tt: &crate::tt::TopSubtree) -> Option<ReprOptions> {
    use crate::builtin_type::{BuiltinInt, BuiltinUint};
    use rustc_abi::{Align, Integer, IntegerType, ReprFlags, ReprOptions};

    // `repr` arguments must be parenthesized: `#[repr(...)]`.
    match tt.top_subtree().delimiter {
        tt::Delimiter { kind: DelimiterKind::Parenthesis, .. } => {}
        _ => return None,
    }

    let mut acc = ReprOptions::default();
    let mut tts = tt.iter();
    while let Some(tt) = tts.next() {
        // Only identifiers start a repr argument; anything else (commas etc.)
        // is skipped.
        let TtElement::Leaf(tt::Leaf::Ident(ident)) = tt else {
            continue;
        };
        let repr = match &ident.sym {
            s if *s == sym::packed => {
                // `packed` or `packed(N)`. A missing/unparsable N becomes 0;
                // `Align::from_bytes(0)` presumably fails, so the `unwrap_or`
                // below yields `Align::ONE` — TODO confirm against rustc_abi.
                let pack = if let Some(TtElement::Subtree(_, mut tt_iter)) = tts.peek() {
                    tts.next();
                    if let Some(TtElement::Leaf(tt::Leaf::Literal(lit))) = tt_iter.next() {
                        lit.symbol.as_str().parse().unwrap_or_default()
                    } else {
                        0
                    }
                } else {
                    0
                };
                let pack = Some(Align::from_bytes(pack).unwrap_or(Align::ONE));
                ReprOptions { pack, ..Default::default() }
            }
            s if *s == sym::align => {
                // `align(N)`; an invalid N leaves the alignment unset.
                let mut align = None;
                if let Some(TtElement::Subtree(_, mut tt_iter)) = tts.peek() {
                    tts.next();
                    if let Some(TtElement::Leaf(tt::Leaf::Literal(lit))) = tt_iter.next()
                        && let Ok(a) = lit.symbol.as_str().parse()
                    {
                        align = Align::from_bytes(a).ok();
                    }
                }
                ReprOptions { align, ..Default::default() }
            }
            s if *s == sym::C => ReprOptions { flags: ReprFlags::IS_C, ..Default::default() },
            s if *s == sym::transparent => {
                ReprOptions { flags: ReprFlags::IS_TRANSPARENT, ..Default::default() }
            }
            s if *s == sym::simd => ReprOptions { flags: ReprFlags::IS_SIMD, ..Default::default() },
            repr => {
                // Primitive representations: `u8`, `i32`, `usize`, ...
                let mut int = None;
                if let Some(builtin) = BuiltinInt::from_suffix_sym(repr)
                    .map(Either::Left)
                    .or_else(|| BuiltinUint::from_suffix_sym(repr).map(Either::Right))
                {
                    int = Some(match builtin {
                        Either::Left(bi) => match bi {
                            BuiltinInt::Isize => IntegerType::Pointer(true),
                            BuiltinInt::I8 => IntegerType::Fixed(Integer::I8, true),
                            BuiltinInt::I16 => IntegerType::Fixed(Integer::I16, true),
                            BuiltinInt::I32 => IntegerType::Fixed(Integer::I32, true),
                            BuiltinInt::I64 => IntegerType::Fixed(Integer::I64, true),
                            BuiltinInt::I128 => IntegerType::Fixed(Integer::I128, true),
                        },
                        Either::Right(bu) => match bu {
                            BuiltinUint::Usize => IntegerType::Pointer(false),
                            BuiltinUint::U8 => IntegerType::Fixed(Integer::I8, false),
                            BuiltinUint::U16 => IntegerType::Fixed(Integer::I16, false),
                            BuiltinUint::U32 => IntegerType::Fixed(Integer::I32, false),
                            BuiltinUint::U64 => IntegerType::Fixed(Integer::I64, false),
                            BuiltinUint::U128 => IntegerType::Fixed(Integer::I128, false),
                        },
                    });
                }
                ReprOptions { int, ..Default::default() }
            }
        };
        merge_repr(&mut acc, repr);
    }

    Some(acc)
}
/// A single atomic `#[doc(...)]` argument.
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub enum DocAtom {
    /// eg. `#[doc(hidden)]`
    Flag(Symbol),
    /// eg. `#[doc(alias = "it")]`
    ///
    /// Note that a key can have multiple values that are all considered "active" at the same time.
    /// For example, `#[doc(alias = "x")]` and `#[doc(alias = "y")]`.
    KeyValue { key: Symbol, value: Symbol },
}

/// A parsed `#[doc(...)]` argument.
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub enum DocExpr {
    Invalid,
    /// eg. `#[doc(hidden)]`, `#[doc(alias = "x")]`
    Atom(DocAtom),
    /// eg. `#[doc(alias("x", "y"))]`
    Alias(Vec<Symbol>),
}

impl From<DocAtom> for DocExpr {
    fn from(atom: DocAtom) -> Self {
        DocExpr::Atom(atom)
    }
}

impl DocExpr {
    /// Parses the first doc argument from a token tree; malformed input
    /// becomes `DocExpr::Invalid`.
    fn parse<S: Copy>(tt: &tt::TopSubtree<S>) -> DocExpr {
        next_doc_expr(tt.iter()).unwrap_or(DocExpr::Invalid)
    }

    /// The alias names carried by this expression, covering both the
    /// `alias = "x"` and the `alias("x", "y")` forms.
    pub fn aliases(&self) -> &[Symbol] {
        match self {
            DocExpr::Atom(DocAtom::KeyValue { key, value }) if *key == sym::alias => {
                std::slice::from_ref(value)
            }
            DocExpr::Alias(aliases) => aliases,
            _ => &[],
        }
    }
}
/// Reads one doc expression from the iterator.
///
/// Returns `None` when the iterator is exhausted, and `Some(DocExpr::Invalid)`
/// for token sequences that don't match any recognized shape.
fn next_doc_expr<S: Copy>(mut it: TtIter<'_, S>) -> Option<DocExpr> {
    // A doc expression always starts with an identifier (`hidden`, `alias`, ...).
    let name = match it.next()? {
        TtElement::Leaf(tt::Leaf::Ident(ident)) => ident.sym.clone(),
        _ => return Some(DocExpr::Invalid),
    };
    // The token after the identifier decides which shape we are looking at.
    let expr = match it.peek() {
        // `name = "value"`: only plain string literals are accepted.
        Some(TtElement::Leaf(tt::Leaf::Punct(punct))) if punct.char == '=' => {
            it.next();
            if let Some(TtElement::Leaf(tt::Leaf::Literal(tt::Literal {
                symbol: text,
                kind: tt::LitKind::Str,
                ..
            }))) = it.next()
            {
                DocAtom::KeyValue { key: name, value: text.clone() }.into()
            } else {
                return Some(DocExpr::Invalid);
            }
        }
        // `name(...)`: currently only `alias(...)` is understood.
        Some(TtElement::Subtree(_, subtree_iter)) => {
            it.next();
            let values = parse_comma_sep(subtree_iter);
            if name == sym::alias { DocExpr::Alias(values) } else { DocExpr::Invalid }
        }
        // Bare identifier, e.g. `hidden`.
        _ => DocAtom::Flag(name).into(),
    };
    Some(expr)
}
/// Collects every plain string literal from `iter`, ignoring all other tokens
/// (including the separating commas).
fn parse_comma_sep<S>(iter: TtIter<'_, S>) -> Vec<Symbol> {
    let mut symbols = Vec::new();
    for tt in iter {
        if let TtElement::Leaf(tt::Leaf::Literal(tt::Literal {
            kind: tt::LitKind::Str,
            symbol,
            ..
        })) = tt
        {
            symbols.push(symbol.clone());
        }
    }
    symbols
}
impl AttrsWithOwner {
    /// Bundles the computed attributes of `owner` together with the owner itself,
    /// so that they can later be mapped back to syntax via [`Self::source_map`].
    pub fn new(db: &dyn DefDatabase, owner: AttrDefId) -> Self {
        Self { attrs: db.attrs(owner), owner }
    }
    /// Query implementation backing `db.attrs(def)`: collects the expanded
    /// attributes for any attribute-bearing definition.
    pub(crate) fn attrs_query(db: &dyn DefDatabase, def: AttrDefId) -> Attrs {
        let _p = tracing::info_span!("attrs_query").entered();
        // FIXME: this should use `Trace` to avoid duplication in `source_map` below
        match def {
            AttrDefId::ModuleId(module) => {
                let def_map = module.def_map(db);
                let mod_data = &def_map[module.local_id];
                // Depending on the module's origin, its attributes come from the
                // `mod foo;` declaration, from the top of the module file, or both.
                let raw_attrs = match mod_data.origin {
                    ModuleOrigin::File { definition, declaration_tree_id, declaration, .. } => {
                        // Out-of-line module: merge attributes on `mod foo;` with
                        // the inner attributes at the top of `foo.rs`.
                        let decl_attrs = declaration_tree_id
                            .item_tree(db)
                            .raw_attrs(declaration.upcast())
                            .clone();
                        let tree = db.file_item_tree(definition.into());
                        let def_attrs = tree.top_level_raw_attrs().clone();
                        decl_attrs.merge(def_attrs)
                    }
                    ModuleOrigin::CrateRoot { definition } => {
                        let tree = db.file_item_tree(definition.into());
                        tree.top_level_raw_attrs().clone()
                    }
                    ModuleOrigin::Inline { definition_tree_id, definition } => {
                        definition_tree_id.item_tree(db).raw_attrs(definition.upcast()).clone()
                    }
                    ModuleOrigin::BlockExpr { id, .. } => {
                        let tree = block_item_tree_query(db, id);
                        tree.top_level_raw_attrs().clone()
                    }
                };
                Attrs::expand_cfg_attr(db, module.krate, raw_attrs)
            }
            // Field attributes are computed per-variant; pick out this field's entry.
            AttrDefId::FieldId(it) => db.fields_attrs(it.parent)[it.local_id].clone(),
            AttrDefId::EnumVariantId(it) => attrs_from_ast_id_loc(db, it),
            AttrDefId::AdtId(it) => match it {
                AdtId::StructId(it) => attrs_from_ast_id_loc(db, it),
                AdtId::EnumId(it) => attrs_from_ast_id_loc(db, it),
                AdtId::UnionId(it) => attrs_from_ast_id_loc(db, it),
            },
            AttrDefId::TraitId(it) => attrs_from_ast_id_loc(db, it),
            AttrDefId::MacroId(it) => match it {
                MacroId::Macro2Id(it) => attrs_from_ast_id_loc(db, it),
                MacroId::MacroRulesId(it) => attrs_from_ast_id_loc(db, it),
                MacroId::ProcMacroId(it) => attrs_from_ast_id_loc(db, it),
            },
            AttrDefId::ImplId(it) => attrs_from_ast_id_loc(db, it),
            AttrDefId::ConstId(it) => attrs_from_ast_id_loc(db, it),
            AttrDefId::StaticId(it) => attrs_from_ast_id_loc(db, it),
            AttrDefId::FunctionId(it) => attrs_from_ast_id_loc(db, it),
            AttrDefId::TypeAliasId(it) => attrs_from_ast_id_loc(db, it),
            // Generic params have no `AstId` of their own; look them up through
            // their parent's child source instead.
            AttrDefId::GenericParamId(it) => match it {
                GenericParamId::ConstParamId(it) => {
                    let src = it.parent().child_source(db);
                    // FIXME: We should be never getting `None` here.
                    Attrs(match src.value.get(it.local_id()) {
                        Some(val) => RawAttrs::new_expanded(
                            db,
                            val,
                            db.span_map(src.file_id).as_ref(),
                            def.krate(db).cfg_options(db),
                        ),
                        None => RawAttrs::EMPTY,
                    })
                }
                GenericParamId::TypeParamId(it) => {
                    let src = it.parent().child_source(db);
                    // FIXME: We should be never getting `None` here.
                    Attrs(match src.value.get(it.local_id()) {
                        Some(val) => RawAttrs::new_expanded(
                            db,
                            val,
                            db.span_map(src.file_id).as_ref(),
                            def.krate(db).cfg_options(db),
                        ),
                        None => RawAttrs::EMPTY,
                    })
                }
                GenericParamId::LifetimeParamId(it) => {
                    let src = it.parent.child_source(db);
                    // FIXME: We should be never getting `None` here.
                    Attrs(match src.value.get(it.local_id) {
                        Some(val) => RawAttrs::new_expanded(
                            db,
                            val,
                            db.span_map(src.file_id).as_ref(),
                            def.krate(db).cfg_options(db),
                        ),
                        None => RawAttrs::EMPTY,
                    })
                }
            },
            AttrDefId::ExternBlockId(it) => attrs_from_ast_id_loc(db, it),
            AttrDefId::ExternCrateId(it) => attrs_from_ast_id_loc(db, it),
            AttrDefId::UseId(it) => attrs_from_ast_id_loc(db, it),
        }
    }
    /// Builds an [`AttrSourceMap`] that maps each lowered attribute of the owner
    /// back to the syntax node it was lowered from.
    pub fn source_map(&self, db: &dyn DefDatabase) -> AttrSourceMap {
        let owner = match self.owner {
            AttrDefId::ModuleId(module) => {
                // Modules can have 2 attribute owners (the `mod x;` item, and the module file itself).
                let def_map = module.def_map(db);
                let mod_data = &def_map[module.local_id];
                match mod_data.declaration_source(db) {
                    Some(it) => {
                        let mut map = AttrSourceMap::new(InFile::new(it.file_id, &it.value));
                        if let InFile { file_id, value: ModuleSource::SourceFile(file) } =
                            mod_data.definition_source(db)
                        {
                            // Out-of-line module: also record the inner attributes
                            // from the module's own file.
                            map.append_module_inline_attrs(AttrSourceMap::new(InFile::new(
                                file_id, &file,
                            )));
                        }
                        return map;
                    }
                    None => {
                        let InFile { file_id, value } = mod_data.definition_source(db);
                        let attrs_owner = match &value {
                            ModuleSource::SourceFile(file) => file as &dyn ast::HasAttrs,
                            ModuleSource::Module(module) => module as &dyn ast::HasAttrs,
                            ModuleSource::BlockExpr(block) => block as &dyn ast::HasAttrs,
                        };
                        return AttrSourceMap::new(InFile::new(file_id, attrs_owner));
                    }
                }
            }
            AttrDefId::FieldId(id) => {
                let map = db.fields_attrs_source_map(id.parent);
                let file_id = id.parent.file_id(db);
                let root = db.parse_or_expand(file_id);
                let owner = ast::AnyHasAttrs::new(map[id.local_id].to_node(&root));
                InFile::new(file_id, owner)
            }
            AttrDefId::AdtId(adt) => match adt {
                AdtId::StructId(id) => any_has_attrs(db, id),
                AdtId::UnionId(id) => any_has_attrs(db, id),
                AdtId::EnumId(id) => any_has_attrs(db, id),
            },
            AttrDefId::FunctionId(id) => any_has_attrs(db, id),
            AttrDefId::EnumVariantId(id) => any_has_attrs(db, id),
            AttrDefId::StaticId(id) => any_has_attrs(db, id),
            AttrDefId::ConstId(id) => any_has_attrs(db, id),
            AttrDefId::TraitId(id) => any_has_attrs(db, id),
            AttrDefId::TypeAliasId(id) => any_has_attrs(db, id),
            AttrDefId::MacroId(id) => match id {
                MacroId::Macro2Id(id) => any_has_attrs(db, id),
                MacroId::MacroRulesId(id) => any_has_attrs(db, id),
                MacroId::ProcMacroId(id) => any_has_attrs(db, id),
            },
            AttrDefId::ImplId(id) => any_has_attrs(db, id),
            AttrDefId::GenericParamId(id) => match id {
                GenericParamId::ConstParamId(id) => id
                    .parent()
                    .child_source(db)
                    .map(|source| ast::AnyHasAttrs::new(source[id.local_id()].clone())),
                GenericParamId::TypeParamId(id) => id
                    .parent()
                    .child_source(db)
                    .map(|source| ast::AnyHasAttrs::new(source[id.local_id()].clone())),
                GenericParamId::LifetimeParamId(id) => id
                    .parent
                    .child_source(db)
                    .map(|source| ast::AnyHasAttrs::new(source[id.local_id].clone())),
            },
            AttrDefId::ExternBlockId(id) => any_has_attrs(db, id),
            AttrDefId::ExternCrateId(id) => any_has_attrs(db, id),
            AttrDefId::UseId(id) => any_has_attrs(db, id),
        };
        AttrSourceMap::new(owner.as_ref().map(|node| node as &dyn HasAttrs))
    }
}
/// Maps lowered attributes (by their [`AttrId`]) back to the syntax they were
/// lowered from.
#[derive(Debug)]
pub struct AttrSourceMap {
    // One entry per attribute or doc comment, indexed by `AttrId::ast_index()`.
    source: Vec<Either<ast::Attr, ast::Comment>>,
    file_id: HirFileId,
    /// If this map is for a module, this will be the [`HirFileId`] of the module's definition site,
    /// while `file_id` will be the one of the module declaration site.
    /// The usize is the index into `source` from which point on the entries reside in the def site
    /// file.
    mod_def_site_file_id: Option<(HirFileId, usize)>,
}
impl AttrSourceMap {
fn new(owner: InFile<&dyn ast::HasAttrs>) -> Self {
Self {
source: collect_attrs(owner.value).map(|(_, it)| it).collect(),
file_id: owner.file_id,
mod_def_site_file_id: None,
}
}
/// Append a second source map to this one, this is required for modules, whose outline and inline
/// attributes can reside in different files
fn append_module_inline_attrs(&mut self, other: Self) {
assert!(self.mod_def_site_file_id.is_none() && other.mod_def_site_file_id.is_none());
let len = self.source.len();
self.source.extend(other.source);
if other.file_id != self.file_id {
self.mod_def_site_file_id = Some((other.file_id, len));
}
}
/// Maps the lowered `Attr` back to its original syntax node.
///
/// `attr` must come from the `owner` used for AttrSourceMap
///
/// Note that the returned syntax node might be a `#[cfg_attr]`, or a doc comment, instead of
/// the attribute represented by `Attr`.
pub fn source_of(&self, attr: &Attr) -> InFile<&Either<ast::Attr, ast::Comment>> {
self.source_of_id(attr.id)
}
pub fn source_of_id(&self, id: AttrId) -> InFile<&Either<ast::Attr, ast::Comment>> {
let ast_idx = id.ast_index();
let file_id = match self.mod_def_site_file_id {
Some((file_id, def_site_cut)) if def_site_cut <= ast_idx => file_id,
_ => self.file_id,
};
self.source
.get(ast_idx)
.map(|it| InFile::new(file_id, it))
.unwrap_or_else(|| panic!("cannot find attr at index {id:?}"))
}
}
/// A lazy filter over an item's attributes, matching those whose path is a
/// single identifier equal to `key` (e.g. `#[doc(...)]` for `key == doc`).
#[derive(Debug, Clone)]
pub struct AttrQuery<'attr> {
    // The full attribute list being queried.
    attrs: &'attr Attrs,
    // The identifier the attribute path must equal.
    key: Symbol,
}
impl<'attr> AttrQuery<'attr> {
    /// The token-tree arguments (the `(...)` of `#[key(...)]`) of every matching attribute.
    #[inline]
    pub fn tt_values(self) -> impl Iterator<Item = &'attr crate::tt::TopSubtree> {
        self.attrs().filter_map(|attr| attr.token_tree_value())
    }
    /// The first `#[key = "value"]` string value, if any.
    #[inline]
    pub fn string_value(self) -> Option<&'attr Symbol> {
        self.attrs().find_map(|attr| attr.string_value())
    }
    /// Like [`Self::string_value`], but also returns the value's span.
    #[inline]
    pub fn string_value_with_span(self) -> Option<(&'attr Symbol, span::Span)> {
        self.attrs().find_map(|attr| attr.string_value_with_span())
    }
    /// Like [`Self::string_value`], but with string escapes processed.
    #[inline]
    pub fn string_value_unescape(self) -> Option<Cow<'attr, str>> {
        self.attrs().find_map(|attr| attr.string_value_unescape())
    }
    /// Whether at least one attribute matches the queried key.
    #[inline]
    pub fn exists(self) -> bool {
        self.attrs().next().is_some()
    }
    /// All attributes whose path is exactly the queried identifier.
    #[inline]
    pub fn attrs(self) -> impl Iterator<Item = &'attr Attr> + Clone {
        let key = self.key;
        self.attrs
            .iter()
            .filter(move |attr| matches!(attr.path.as_ident(), Some(ident) if *ident == key))
    }
    /// Find string value for a specific key inside token tree
    ///
    /// ```ignore
    /// #[doc(html_root_url = "url")]
    ///       ^^^^^^^^^^^^^ key
    /// ```
    #[inline]
    pub fn find_string_value_in_tt(self, key: Symbol) -> Option<&'attr str> {
        self.tt_values().find_map(|tt| {
            // Skip until the requested key identifier, then step over the
            // identifier and the `=` punct to land on the value token.
            let value = tt
                .iter()
                .skip_while(|tt| {
                    !matches!(
                        tt,
                        TtElement::Leaf(tt::Leaf::Ident(tt::Ident { sym, .. })) if *sym == key
                    )
                })
                .nth(2);
            match value {
                Some(TtElement::Leaf(tt::Leaf::Literal(tt::Literal {
                    symbol: text,
                    kind: tt::LitKind::Str | tt::LitKind::StrRaw(_),
                    ..
                }))) => Some(text.as_str()),
                _ => None,
            }
        })
    }
}
/// Resolves an interned id to its syntax node and erases the concrete node type
/// into a generic attribute owner.
fn any_has_attrs<'db>(
    db: &(dyn DefDatabase + 'db),
    id: impl Lookup<Database = dyn DefDatabase, Data = impl HasSource<Value = impl ast::HasAttrs>>,
) -> InFile<ast::AnyHasAttrs> {
    let source = id.lookup(db).source(db);
    source.map(ast::AnyHasAttrs::new)
}
/// Computes the expanded attributes for a definition identified by an
/// `AstId`-based location, using its owning crate's cfg options.
fn attrs_from_ast_id_loc<'db, N: AstIdNode + HasAttrs>(
    db: &(dyn DefDatabase + 'db),
    lookup: impl Lookup<Database = dyn DefDatabase, Data = impl AstIdLoc<Ast = N> + HasModule>,
) -> Attrs {
    let loc = lookup.lookup(db);
    let cfg_options = loc.krate(db).cfg_options(db);
    let source = loc.source(db);
    let span_map = db.span_map(source.file_id);
    Attrs(RawAttrs::new_expanded(db, &source.value, span_map.as_ref(), cfg_options))
}
pub(crate) fn fields_attrs_source_map(
db: &dyn DefDatabase,
def: VariantId,
) -> Arc<ArenaMap<LocalFieldId, AstPtr<Either<ast::TupleField, ast::RecordField>>>> {
let mut res = ArenaMap::default();
let child_source = def.child_source(db);
for (idx, variant) in child_source.value.iter() {
res.insert(
idx,
variant
.as_ref()
.either(|l| AstPtr::new(l).wrap_left(), |r| AstPtr::new(r).wrap_right()),
);
}
Arc::new(res)
}
#[cfg(test)]
mod tests {
    //! This module contains tests for doc-expression parsing.
    //! Currently, it tests `#[doc(hidden)]` and `#[doc(alias)]`.
    use intern::Symbol;
    use span::EditionedFileId;
    use triomphe::Arc;
    use hir_expand::span_map::{RealSpanMap, SpanMap};
    use span::FileId;
    use syntax::{AstNode, TextRange, ast};
    use syntax_bridge::{DocCommentDesugarMode, syntax_node_to_token_tree};
    use crate::attr::{DocAtom, DocExpr};
    /// Parses `input`, converts its first token tree into a `tt` subtree, runs
    /// `DocExpr::parse` on it, and asserts the result equals `expected`.
    fn assert_parse_result(input: &str, expected: DocExpr) {
        let source_file = ast::SourceFile::parse(input, span::Edition::CURRENT).ok().unwrap();
        let tt = source_file.syntax().descendants().find_map(ast::TokenTree::cast).unwrap();
        // A dummy span map suffices here; spans are irrelevant to doc parsing.
        let map = SpanMap::RealSpanMap(Arc::new(RealSpanMap::absolute(
            EditionedFileId::current_edition(FileId::from_raw(0)),
        )));
        let tt = syntax_node_to_token_tree(
            tt.syntax(),
            map.as_ref(),
            map.span_for_range(TextRange::empty(0.into())),
            DocCommentDesugarMode::ProcMacro,
        );
        let cfg = DocExpr::parse(&tt);
        assert_eq!(cfg, expected);
    }
    #[test]
    fn test_doc_expr_parser() {
        // Bare flag form.
        assert_parse_result("#![doc(hidden)]", DocAtom::Flag(Symbol::intern("hidden")).into());
        // Key/value form.
        assert_parse_result(
            r#"#![doc(alias = "foo")]"#,
            DocAtom::KeyValue { key: Symbol::intern("alias"), value: Symbol::intern("foo") }.into(),
        );
        // List form with a single entry.
        assert_parse_result(
            r#"#![doc(alias("foo"))]"#,
            DocExpr::Alias([Symbol::intern("foo")].into()),
        );
        // List form with multiple entries.
        assert_parse_result(
            r#"#![doc(alias("foo", "bar", "baz"))]"#,
            DocExpr::Alias(
                [Symbol::intern("foo"), Symbol::intern("bar"), Symbol::intern("baz")].into(),
            ),
        );
        // Outer attribute on an item also works.
        assert_parse_result(
            r#"
        #[doc(alias("Bar", "Qux"))]
        struct Foo;"#,
            DocExpr::Alias([Symbol::intern("Bar"), Symbol::intern("Qux")].into()),
        );
    }
}

File diff suppressed because it is too large Load diff

View file

@ -1,36 +1,32 @@
//! Defines database & queries for name resolution.
use base_db::{Crate, RootQueryDb, SourceDatabase};
use either::Either;
use hir_expand::{
EditionedFileId, HirFileId, InFile, Lookup, MacroCallId, MacroDefId, MacroDefKind,
db::ExpandDatabase,
};
use intern::sym;
use la_arena::ArenaMap;
use syntax::{AstPtr, ast};
use triomphe::Arc;
use crate::{
AssocItemId, AttrDefId, BlockId, BlockLoc, ConstId, ConstLoc, DefWithBodyId, EnumId, EnumLoc,
EnumVariantId, EnumVariantLoc, ExternBlockId, ExternBlockLoc, ExternCrateId, ExternCrateLoc,
FunctionId, FunctionLoc, GenericDefId, ImplId, ImplLoc, LocalFieldId, Macro2Id, Macro2Loc,
MacroExpander, MacroId, MacroRulesId, MacroRulesLoc, MacroRulesLocFlags, ProcMacroId,
ProcMacroLoc, StaticId, StaticLoc, StructId, StructLoc, TraitId, TraitLoc, TypeAliasId,
TypeAliasLoc, UnionId, UnionLoc, UseId, UseLoc, VariantId,
attr::{Attrs, AttrsWithOwner},
AssocItemId, AttrDefId, BlockId, BlockLoc, ConstId, ConstLoc, CrateRootModuleId, DefWithBodyId,
EnumId, EnumLoc, EnumVariantId, EnumVariantLoc, ExternBlockId, ExternBlockLoc, ExternCrateId,
ExternCrateLoc, FunctionId, FunctionLoc, GenericDefId, HasModule, ImplId, ImplLoc,
InternedModuleId, LocalFieldId, Macro2Id, Macro2Loc, MacroExpander, MacroId, MacroRulesId,
MacroRulesLoc, MacroRulesLocFlags, ProcMacroId, ProcMacroLoc, StaticId, StaticLoc, StructId,
StructLoc, TraitId, TraitLoc, TypeAliasId, TypeAliasLoc, UnionId, UnionLoc, UseId, UseLoc,
VariantId,
attrs::AttrFlags,
expr_store::{
Body, BodySourceMap, ExpressionStore, ExpressionStoreSourceMap, scope::ExprScopes,
},
hir::generics::GenericParams,
import_map::ImportMap,
item_tree::{ItemTree, file_item_tree_query},
lang_item::{self, LangItem},
nameres::crate_def_map,
signatures::{
ConstSignature, EnumSignature, FunctionSignature, ImplSignature, StaticSignature,
StructSignature, TraitSignature, TypeAliasSignature, UnionSignature,
},
tt,
visibility::{self, Visibility},
};
@ -238,28 +234,6 @@ pub trait DefDatabase: InternDatabase + ExpandDatabase + SourceDatabase {
def: GenericDefId,
) -> (Arc<GenericParams>, Arc<ExpressionStore>, Arc<ExpressionStoreSourceMap>);
// region:attrs
#[salsa::invoke(Attrs::fields_attrs_query)]
fn fields_attrs(&self, def: VariantId) -> Arc<ArenaMap<LocalFieldId, Attrs>>;
// should this really be a query?
#[salsa::invoke(crate::attr::fields_attrs_source_map)]
fn fields_attrs_source_map(
&self,
def: VariantId,
) -> Arc<ArenaMap<LocalFieldId, AstPtr<Either<ast::TupleField, ast::RecordField>>>>;
// FIXME: Make this a non-interned query.
#[salsa::invoke_interned(AttrsWithOwner::attrs_query)]
fn attrs(&self, def: AttrDefId) -> Attrs;
#[salsa::transparent]
#[salsa::invoke(lang_item::lang_attr)]
fn lang_attr(&self, def: AttrDefId) -> Option<LangItem>;
// endregion:attrs
#[salsa::invoke(ImportMap::import_map_query)]
fn import_map(&self, krate: Crate) -> Arc<ImportMap>;
@ -302,36 +276,9 @@ fn include_macro_invoc(
}
fn crate_supports_no_std(db: &dyn DefDatabase, crate_id: Crate) -> bool {
let file = crate_id.data(db).root_file_id(db);
let item_tree = db.file_item_tree(file.into());
let attrs = item_tree.top_level_raw_attrs();
for attr in &**attrs {
match attr.path().as_ident() {
Some(ident) if *ident == sym::no_std => return true,
Some(ident) if *ident == sym::cfg_attr => {}
_ => continue,
}
// This is a `cfg_attr`; check if it could possibly expand to `no_std`.
// Syntax is: `#[cfg_attr(condition(cfg, style), attr0, attr1, <...>)]`
let tt = match attr.token_tree_value() {
Some(tt) => tt.token_trees(),
None => continue,
};
let segments =
tt.split(|tt| matches!(tt, tt::TtElement::Leaf(tt::Leaf::Punct(p)) if p.char == ','));
for output in segments.skip(1) {
match output.flat_tokens() {
[tt::TokenTree::Leaf(tt::Leaf::Ident(ident))] if ident.sym == sym::no_std => {
return true;
}
_ => {}
}
}
}
false
let root_module = CrateRootModuleId::from(crate_id).module(db);
let attrs = AttrFlags::query(db, AttrDefId::ModuleId(InternedModuleId::new(db, root_module)));
attrs.contains(AttrFlags::IS_NO_STD)
}
fn macro_def(db: &dyn DefDatabase, id: MacroId) -> MacroDefId {

View file

@ -17,11 +17,10 @@ use syntax::{AstNode, Parse, ast};
use triomphe::Arc;
use tt::TextRange;
use crate::attr::Attrs;
use crate::expr_store::HygieneId;
use crate::macro_call_as_call_id;
use crate::nameres::DefMap;
use crate::{MacroId, UnresolvedMacro, db::DefDatabase};
use crate::{
MacroId, UnresolvedMacro, attrs::AttrFlags, db::DefDatabase, expr_store::HygieneId,
macro_call_as_call_id, nameres::DefMap,
};
#[derive(Debug)]
pub(super) struct Expander {
@ -70,11 +69,10 @@ impl Expander {
pub(super) fn is_cfg_enabled(
&self,
db: &dyn DefDatabase,
has_attrs: &dyn HasAttrs,
owner: &dyn HasAttrs,
cfg_options: &CfgOptions,
) -> Result<(), cfg::CfgExpr> {
Attrs::is_cfg_enabled_for(db, has_attrs, self.span_map.as_ref(), cfg_options)
AttrFlags::is_cfg_enabled_for(owner, cfg_options)
}
pub(super) fn call_syntax_ctx(&self) -> SyntaxContext {

View file

@ -5,14 +5,13 @@ mod asm;
mod generics;
mod path;
use std::mem;
use std::{cell::OnceCell, mem};
use base_db::FxIndexSet;
use cfg::CfgOptions;
use either::Either;
use hir_expand::{
HirFileId, InFile, MacroDefId,
mod_path::tool_path,
name::{AsName, Name},
span_map::SpanMapRef,
};
@ -34,6 +33,7 @@ use tt::TextRange;
use crate::{
AdtId, BlockId, BlockLoc, DefWithBodyId, FunctionId, GenericDefId, ImplId, MacroId,
ModuleDefId, ModuleId, TraitId, TypeAliasId, UnresolvedMacro,
attrs::AttrFlags,
builtin_type::BuiltinUint,
db::DefDatabase,
expr_store::{
@ -57,7 +57,7 @@ use crate::{
},
item_scope::BuiltinShadowMode,
item_tree::FieldsShape,
lang_item::LangItem,
lang_item::{LangItemTarget, LangItems},
nameres::{DefMap, LocalDefMap, MacroSubNs, block_def_map},
type_ref::{
ArrayType, ConstRef, FnType, LifetimeRef, LifetimeRefId, Mutability, PathId, Rawness,
@ -87,14 +87,16 @@ pub(super) fn lower_body(
let mut params = vec![];
let mut collector = ExprCollector::new(db, module, current_file_id);
let skip_body = match owner {
DefWithBodyId::FunctionId(it) => db.attrs(it.into()),
DefWithBodyId::StaticId(it) => db.attrs(it.into()),
DefWithBodyId::ConstId(it) => db.attrs(it.into()),
DefWithBodyId::VariantId(it) => db.attrs(it.into()),
}
.rust_analyzer_tool()
.any(|attr| *attr.path() == tool_path![skip]);
let skip_body = AttrFlags::query(
db,
match owner {
DefWithBodyId::FunctionId(it) => it.into(),
DefWithBodyId::StaticId(it) => it.into(),
DefWithBodyId::ConstId(it) => it.into(),
DefWithBodyId::VariantId(it) => it.into(),
},
)
.contains(AttrFlags::RUST_ANALYZER_SKIP);
// If #[rust_analyzer::skip] annotated, only construct enough information for the signature
// and skip the body.
if skip_body {
@ -416,6 +418,7 @@ pub struct ExprCollector<'db> {
def_map: &'db DefMap,
local_def_map: &'db LocalDefMap,
module: ModuleId,
lang_items: OnceCell<&'db LangItems>,
pub store: ExpressionStoreBuilder,
// state stuff
@ -513,7 +516,7 @@ impl BindingList {
}
}
impl ExprCollector<'_> {
impl<'db> ExprCollector<'db> {
pub fn new(
db: &dyn DefDatabase,
module: ModuleId,
@ -527,6 +530,7 @@ impl ExprCollector<'_> {
module,
def_map,
local_def_map,
lang_items: OnceCell::new(),
store: ExpressionStoreBuilder::default(),
expander,
current_try_block_label: None,
@ -539,6 +543,11 @@ impl ExprCollector<'_> {
}
}
#[inline]
pub(crate) fn lang_items(&self) -> &'db LangItems {
self.lang_items.get_or_init(|| crate::lang_item::lang_items(self.db, self.module.krate))
}
#[inline]
pub(crate) fn span_map(&self) -> SpanMapRef<'_> {
self.expander.span_map()
@ -1654,7 +1663,7 @@ impl ExprCollector<'_> {
/// `try { <stmts>; }` into `'<new_label>: { <stmts>; ::std::ops::Try::from_output(()) }`
/// and save the `<new_label>` to use it as a break target for desugaring of the `?` operator.
fn desugar_try_block(&mut self, e: BlockExpr) -> ExprId {
let try_from_output = self.lang_path(LangItem::TryTraitFromOutput);
let try_from_output = self.lang_path(self.lang_items().TryTraitFromOutput);
let label = self.alloc_label_desugared(Label {
name: Name::generate_new_name(self.store.labels.len()),
});
@ -1753,10 +1762,11 @@ impl ExprCollector<'_> {
/// }
/// ```
fn collect_for_loop(&mut self, syntax_ptr: AstPtr<ast::Expr>, e: ast::ForExpr) -> ExprId {
let into_iter_fn = self.lang_path(LangItem::IntoIterIntoIter);
let iter_next_fn = self.lang_path(LangItem::IteratorNext);
let option_some = self.lang_path(LangItem::OptionSome);
let option_none = self.lang_path(LangItem::OptionNone);
let lang_items = self.lang_items();
let into_iter_fn = self.lang_path(lang_items.IntoIterIntoIter);
let iter_next_fn = self.lang_path(lang_items.IteratorNext);
let option_some = self.lang_path(lang_items.OptionSome);
let option_none = self.lang_path(lang_items.OptionNone);
let head = self.collect_expr_opt(e.iterable());
let into_iter_fn_expr =
self.alloc_expr(into_iter_fn.map_or(Expr::Missing, Expr::Path), syntax_ptr);
@ -1836,10 +1846,11 @@ impl ExprCollector<'_> {
/// }
/// ```
fn collect_try_operator(&mut self, syntax_ptr: AstPtr<ast::Expr>, e: ast::TryExpr) -> ExprId {
let try_branch = self.lang_path(LangItem::TryTraitBranch);
let cf_continue = self.lang_path(LangItem::ControlFlowContinue);
let cf_break = self.lang_path(LangItem::ControlFlowBreak);
let try_from_residual = self.lang_path(LangItem::TryTraitFromResidual);
let lang_items = self.lang_items();
let try_branch = self.lang_path(lang_items.TryTraitBranch);
let cf_continue = self.lang_path(lang_items.ControlFlowContinue);
let cf_break = self.lang_path(lang_items.ControlFlowBreak);
let try_from_residual = self.lang_path(lang_items.TryTraitFromResidual);
let operand = self.collect_expr_opt(e.expr());
let try_branch = self.alloc_expr(try_branch.map_or(Expr::Missing, Expr::Path), syntax_ptr);
let expr = self
@ -2489,7 +2500,7 @@ impl ExprCollector<'_> {
/// Returns `None` (and emits diagnostics) when `owner` if `#[cfg]`d out, and `Some(())` when
/// not.
fn check_cfg(&mut self, owner: &dyn ast::HasAttrs) -> bool {
let enabled = self.expander.is_cfg_enabled(self.db, owner, self.cfg_options);
let enabled = self.expander.is_cfg_enabled(owner, self.cfg_options);
match enabled {
Ok(()) => true,
Err(cfg) => {
@ -2773,11 +2784,10 @@ impl ExprCollector<'_> {
// Assume that rustc version >= 1.89.0 iff lang item `format_arguments` exists
// but `format_unsafe_arg` does not
let fmt_args =
|| crate::lang_item::lang_item(self.db, self.module.krate(), LangItem::FormatArguments);
let fmt_unsafe_arg =
|| crate::lang_item::lang_item(self.db, self.module.krate(), LangItem::FormatUnsafeArg);
let use_format_args_since_1_89_0 = fmt_args().is_some() && fmt_unsafe_arg().is_none();
let lang_items = self.lang_items();
let fmt_args = lang_items.FormatArguments;
let fmt_unsafe_arg = lang_items.FormatUnsafeArg;
let use_format_args_since_1_89_0 = fmt_args.is_some() && fmt_unsafe_arg.is_none();
let idx = if use_format_args_since_1_89_0 {
self.collect_format_args_impl(syntax_ptr, fmt, argmap, lit_pieces, format_options)
@ -2856,16 +2866,13 @@ impl ExprCollector<'_> {
// unsafe { ::core::fmt::UnsafeArg::new() }
// )
let new_v1_formatted = LangItem::FormatArguments.ty_rel_path(
self.db,
self.module.krate(),
let lang_items = self.lang_items();
let new_v1_formatted = self.ty_rel_lang_path(
lang_items.FormatArguments,
Name::new_symbol_root(sym::new_v1_formatted),
);
let unsafe_arg_new = LangItem::FormatUnsafeArg.ty_rel_path(
self.db,
self.module.krate(),
Name::new_symbol_root(sym::new),
);
let unsafe_arg_new =
self.ty_rel_lang_path(lang_items.FormatUnsafeArg, Name::new_symbol_root(sym::new));
let new_v1_formatted =
self.alloc_expr_desugared(new_v1_formatted.map_or(Expr::Missing, Expr::Path));
@ -3044,9 +3051,8 @@ impl ExprCollector<'_> {
// )
// }
let new_v1_formatted = LangItem::FormatArguments.ty_rel_path(
self.db,
self.module.krate(),
let new_v1_formatted = self.ty_rel_lang_path(
self.lang_items().FormatArguments,
Name::new_symbol_root(sym::new_v1_formatted),
);
let new_v1_formatted =
@ -3099,6 +3105,7 @@ impl ExprCollector<'_> {
placeholder: &FormatPlaceholder,
argmap: &mut FxIndexSet<(usize, ArgumentType)>,
) -> ExprId {
let lang_items = self.lang_items();
let position = match placeholder.argument.index {
Ok(arg_index) => {
let (i, _) =
@ -3159,15 +3166,14 @@ impl ExprCollector<'_> {
let width =
RecordLitField { name: Name::new_symbol_root(sym::width), expr: width_expr };
self.alloc_expr_desugared(Expr::RecordLit {
path: LangItem::FormatPlaceholder.path(self.db, self.module.krate()).map(Box::new),
path: self.lang_path(lang_items.FormatPlaceholder).map(Box::new),
fields: Box::new([position, flags, precision, width]),
spread: None,
})
} else {
let format_placeholder_new = {
let format_placeholder_new = LangItem::FormatPlaceholder.ty_rel_path(
self.db,
self.module.krate(),
let format_placeholder_new = self.ty_rel_lang_path(
lang_items.FormatPlaceholder,
Name::new_symbol_root(sym::new),
);
match format_placeholder_new {
@ -3188,9 +3194,8 @@ impl ExprCollector<'_> {
)));
let fill = self.alloc_expr_desugared(Expr::Literal(Literal::Char(fill.unwrap_or(' '))));
let align = {
let align = LangItem::FormatAlignment.ty_rel_path(
self.db,
self.module.krate(),
let align = self.ty_rel_lang_path(
lang_items.FormatAlignment,
match alignment {
Some(FormatAlignment::Left) => Name::new_symbol_root(sym::Left),
Some(FormatAlignment::Right) => Name::new_symbol_root(sym::Right),
@ -3234,6 +3239,7 @@ impl ExprCollector<'_> {
count: &Option<FormatCount>,
argmap: &mut FxIndexSet<(usize, ArgumentType)>,
) -> ExprId {
let lang_items = self.lang_items();
match count {
Some(FormatCount::Literal(n)) => {
let args = self.alloc_expr_desugared(Expr::Literal(Literal::Uint(
@ -3241,11 +3247,9 @@ impl ExprCollector<'_> {
// FIXME: Change this to Some(BuiltinUint::U16) once we drop support for toolchains < 1.88
None,
)));
let count_is = match LangItem::FormatCount.ty_rel_path(
self.db,
self.module.krate(),
Name::new_symbol_root(sym::Is),
) {
let count_is = match self
.ty_rel_lang_path(lang_items.FormatCount, Name::new_symbol_root(sym::Is))
{
Some(count_is) => self.alloc_expr_desugared(Expr::Path(count_is)),
None => self.missing_expr(),
};
@ -3259,11 +3263,9 @@ impl ExprCollector<'_> {
i as u128,
Some(BuiltinUint::Usize),
)));
let count_param = match LangItem::FormatCount.ty_rel_path(
self.db,
self.module.krate(),
Name::new_symbol_root(sym::Param),
) {
let count_param = match self
.ty_rel_lang_path(lang_items.FormatCount, Name::new_symbol_root(sym::Param))
{
Some(count_param) => self.alloc_expr_desugared(Expr::Path(count_param)),
None => self.missing_expr(),
};
@ -3277,11 +3279,9 @@ impl ExprCollector<'_> {
self.missing_expr()
}
}
None => match LangItem::FormatCount.ty_rel_path(
self.db,
self.module.krate(),
Name::new_symbol_root(sym::Implied),
) {
None => match self
.ty_rel_lang_path(lang_items.FormatCount, Name::new_symbol_root(sym::Implied))
{
Some(count_param) => self.alloc_expr_desugared(Expr::Path(count_param)),
None => self.missing_expr(),
},
@ -3299,9 +3299,8 @@ impl ExprCollector<'_> {
use ArgumentType::*;
use FormatTrait::*;
let new_fn = match LangItem::FormatArgument.ty_rel_path(
self.db,
self.module.krate(),
let new_fn = match self.ty_rel_lang_path(
self.lang_items().FormatArgument,
Name::new_symbol_root(match ty {
Format(Display) => sym::new_display,
Format(Debug) => sym::new_debug,
@ -3323,8 +3322,16 @@ impl ExprCollector<'_> {
// endregion: format
fn lang_path(&self, lang: LangItem) -> Option<Path> {
lang.path(self.db, self.module.krate())
fn lang_path(&self, lang: Option<impl Into<LangItemTarget>>) -> Option<Path> {
Some(Path::LangItem(lang?.into(), None))
}
fn ty_rel_lang_path(
&self,
lang: Option<impl Into<LangItemTarget>>,
relative_name: Name,
) -> Option<Path> {
Some(Path::LangItem(lang?.into(), Some(relative_name)))
}
}

View file

@ -12,7 +12,8 @@ use span::Edition;
use syntax::ast::{HasName, RangeOp};
use crate::{
AdtId, DefWithBodyId, GenericDefId, TypeParamId, VariantId,
AdtId, DefWithBodyId, FunctionId, GenericDefId, StructId, TypeParamId, VariantId,
attrs::AttrFlags,
expr_store::path::{GenericArg, GenericArgs},
hir::{
Array, BindingAnnotation, CaptureBy, ClosureKind, Literal, Movability, Statement,
@ -167,7 +168,7 @@ pub fn print_signature(db: &dyn DefDatabase, owner: GenericDefId, edition: Editi
GenericDefId::AdtId(id) => match id {
AdtId::StructId(id) => {
let signature = db.struct_signature(id);
print_struct(db, &signature, edition)
print_struct(db, id, &signature, edition)
}
AdtId::UnionId(id) => {
format!("unimplemented {id:?}")
@ -179,7 +180,7 @@ pub fn print_signature(db: &dyn DefDatabase, owner: GenericDefId, edition: Editi
GenericDefId::ConstId(id) => format!("unimplemented {id:?}"),
GenericDefId::FunctionId(id) => {
let signature = db.function_signature(id);
print_function(db, &signature, edition)
print_function(db, id, &signature, edition)
}
GenericDefId::ImplId(id) => format!("unimplemented {id:?}"),
GenericDefId::StaticId(id) => format!("unimplemented {id:?}"),
@ -208,7 +209,8 @@ pub fn print_path(
pub fn print_struct(
db: &dyn DefDatabase,
StructSignature { name, generic_params, store, flags, shape, repr }: &StructSignature,
id: StructId,
StructSignature { name, generic_params, store, flags, shape }: &StructSignature,
edition: Edition,
) -> String {
let mut p = Printer {
@ -219,7 +221,7 @@ pub fn print_struct(
line_format: LineFormat::Newline,
edition,
};
if let Some(repr) = repr {
if let Some(repr) = AttrFlags::repr(db, id.into()) {
if repr.c() {
wln!(p, "#[repr(C)]");
}
@ -255,7 +257,8 @@ pub fn print_struct(
pub fn print_function(
db: &dyn DefDatabase,
FunctionSignature {
id: FunctionId,
signature @ FunctionSignature {
name,
generic_params,
store,
@ -263,10 +266,10 @@ pub fn print_function(
ret_type,
abi,
flags,
legacy_const_generics_indices,
}: &FunctionSignature,
edition: Edition,
) -> String {
let legacy_const_generics_indices = signature.legacy_const_generics_indices(db, id);
let mut p = Printer {
db,
store,
@ -298,7 +301,7 @@ pub fn print_function(
if i != 0 {
w!(p, ", ");
}
if legacy_const_generics_indices.as_ref().is_some_and(|idx| idx.contains(&(i as u32))) {
if legacy_const_generics_indices.is_some_and(|idx| idx.contains(&(i as u32))) {
w!(p, "const: ");
}
p.print_type_ref(*param);
@ -1091,15 +1094,15 @@ impl Printer<'_> {
}};
}
match *it {
LangItemTarget::ImplDef(it) => w!(self, "{it:?}"),
LangItemTarget::ImplId(it) => w!(self, "{it:?}"),
LangItemTarget::EnumId(it) => write_name!(it),
LangItemTarget::Function(it) => write_name!(it),
LangItemTarget::Static(it) => write_name!(it),
LangItemTarget::Struct(it) => write_name!(it),
LangItemTarget::Union(it) => write_name!(it),
LangItemTarget::TypeAlias(it) => write_name!(it),
LangItemTarget::Trait(it) => write_name!(it),
LangItemTarget::EnumVariant(it) => write_name!(it),
LangItemTarget::FunctionId(it) => write_name!(it),
LangItemTarget::StaticId(it) => write_name!(it),
LangItemTarget::StructId(it) => write_name!(it),
LangItemTarget::UnionId(it) => write_name!(it),
LangItemTarget::TypeAliasId(it) => write_name!(it),
LangItemTarget::TraitId(it) => write_name!(it),
LangItemTarget::EnumVariantId(it) => write_name!(it),
}
if let Some(s) = s {

View file

@ -189,8 +189,8 @@ fn f() {
}
"#,
expect![[r#"
BlockId(3c01) in BlockRelativeModuleId { block: Some(BlockId(3c00)), local_id: Idx::<ModuleData>(1) }
BlockId(3c00) in BlockRelativeModuleId { block: None, local_id: Idx::<ModuleData>(0) }
BlockId(3801) in BlockRelativeModuleId { block: Some(BlockId(3800)), local_id: Idx::<ModuleData>(1) }
BlockId(3800) in BlockRelativeModuleId { block: None, local_id: Idx::<ModuleData>(0) }
crate scope
"#]],
);

View file

@ -38,14 +38,24 @@ fn lower_and_print(#[rust_analyzer::rust_fixture] ra_fixture: &str, expect: Expe
match def {
GenericDefId::AdtId(adt_id) => match adt_id {
crate::AdtId::StructId(struct_id) => {
out += &print_struct(&db, &db.struct_signature(struct_id), Edition::CURRENT);
out += &print_struct(
&db,
struct_id,
&db.struct_signature(struct_id),
Edition::CURRENT,
);
}
crate::AdtId::UnionId(_id) => (),
crate::AdtId::EnumId(_id) => (),
},
GenericDefId::ConstId(_id) => (),
GenericDefId::FunctionId(function_id) => {
out += &print_function(&db, &db.function_signature(function_id), Edition::CURRENT)
out += &print_function(
&db,
function_id,
&db.function_signature(function_id),
Edition::CURRENT,
)
}
GenericDefId::ImplId(_id) => (),

View file

@ -45,7 +45,7 @@ pub type PatId = Idx<Pat>;
// FIXME: Encode this as a single u32, we won't ever reach all 32 bits especially given these counts
// are local to the body.
#[derive(Debug, Copy, Clone, Hash, PartialEq, Eq)]
#[derive(Debug, Copy, Clone, Hash, PartialEq, Eq, salsa::Update)]
pub enum ExprOrPatId {
ExprId(ExprId),
PatId(PatId),

View file

@ -13,7 +13,8 @@ use stdx::format_to;
use triomphe::Arc;
use crate::{
AssocItemId, AttrDefId, Complete, FxIndexMap, ModuleDefId, ModuleId, TraitId,
AssocItemId, AttrDefId, Complete, FxIndexMap, InternedModuleId, ModuleDefId, ModuleId, TraitId,
attrs::AttrFlags,
db::DefDatabase,
item_scope::{ImportOrExternCrate, ItemInNs},
nameres::{DefMap, assoc::TraitItems, crate_def_map},
@ -165,17 +166,34 @@ impl ImportMap {
}
} else {
match item {
ItemInNs::Types(id) | ItemInNs::Values(id) => id.try_into().ok(),
ItemInNs::Types(id) | ItemInNs::Values(id) => match id {
ModuleDefId::ModuleId(it) => {
Some(AttrDefId::ModuleId(InternedModuleId::new(db, it)))
}
ModuleDefId::FunctionId(it) => Some(it.into()),
ModuleDefId::AdtId(it) => Some(it.into()),
ModuleDefId::EnumVariantId(it) => Some(it.into()),
ModuleDefId::ConstId(it) => Some(it.into()),
ModuleDefId::StaticId(it) => Some(it.into()),
ModuleDefId::TraitId(it) => Some(it.into()),
ModuleDefId::TypeAliasId(it) => Some(it.into()),
ModuleDefId::MacroId(it) => Some(it.into()),
ModuleDefId::BuiltinType(_) => None,
},
ItemInNs::Macros(id) => Some(id.into()),
}
};
let (is_doc_hidden, is_unstable, do_not_complete) = match attr_id {
None => (false, false, Complete::Yes),
Some(attr_id) => {
let attrs = db.attrs(attr_id);
let attrs = AttrFlags::query(db, attr_id);
let do_not_complete =
Complete::extract(matches!(attr_id, AttrDefId::TraitId(_)), &attrs);
(attrs.has_doc_hidden(), attrs.is_unstable(), do_not_complete)
Complete::extract(matches!(attr_id, AttrDefId::TraitId(_)), attrs);
(
attrs.contains(AttrFlags::IS_DOC_HIDDEN),
attrs.contains(AttrFlags::IS_UNSTABLE),
do_not_complete,
)
}
};
@ -239,15 +257,15 @@ impl ImportMap {
};
let attr_id = item.into();
let attrs = &db.attrs(attr_id);
let attrs = AttrFlags::query(db, attr_id);
let item_do_not_complete = Complete::extract(false, attrs);
let do_not_complete =
Complete::for_trait_item(trait_import_info.complete, item_do_not_complete);
let assoc_item_info = ImportInfo {
container: trait_import_info.container,
name: assoc_item_name.clone(),
is_doc_hidden: attrs.has_doc_hidden(),
is_unstable: attrs.is_unstable(),
is_doc_hidden: attrs.contains(AttrFlags::IS_DOC_HIDDEN),
is_unstable: attrs.contains(AttrFlags::IS_UNSTABLE),
complete: do_not_complete,
};

View file

@ -30,6 +30,7 @@
//! In general, any item in the `ItemTree` stores its `AstId`, which allows mapping it back to its
//! surface syntax.
mod attrs;
mod lower;
mod pretty;
#[cfg(test)]
@ -43,10 +44,8 @@ use std::{
};
use ast::{AstNode, StructKind};
use base_db::Crate;
use hir_expand::{
ExpandTo, HirFileId,
attrs::RawAttrs,
mod_path::{ModPath, PathKind},
name::Name,
};
@ -59,9 +58,12 @@ use syntax::{SyntaxKind, ast, match_ast};
use thin_vec::ThinVec;
use triomphe::Arc;
use crate::{BlockId, Lookup, attr::Attrs, db::DefDatabase};
use crate::{BlockId, Lookup, db::DefDatabase};
pub(crate) use crate::item_tree::lower::{lower_use_tree, visibility_from_ast};
pub(crate) use crate::item_tree::{
attrs::*,
lower::{lower_use_tree, visibility_from_ast},
};
#[derive(Copy, Clone, Eq, PartialEq)]
pub(crate) struct RawVisibilityId(u32);
@ -96,7 +98,7 @@ pub(crate) fn file_item_tree_query(db: &dyn DefDatabase, file_id: HirFileId) ->
let mut item_tree = match_ast! {
match syntax {
ast::SourceFile(file) => {
let top_attrs = RawAttrs::new(db, &file, ctx.span_map());
let top_attrs = ctx.lower_attrs(&file);
let mut item_tree = ctx.lower_module_items(&file);
item_tree.top_attrs = top_attrs;
item_tree
@ -132,7 +134,7 @@ pub(crate) fn file_item_tree_query(db: &dyn DefDatabase, file_id: HirFileId) ->
attrs: FxHashMap::default(),
small_data: FxHashMap::default(),
big_data: FxHashMap::default(),
top_attrs: RawAttrs::EMPTY,
top_attrs: AttrsOrCfg::empty(),
vis: ItemVisibilities { arena: ThinVec::new() },
})
})
@ -168,7 +170,7 @@ pub(crate) fn block_item_tree_query(db: &dyn DefDatabase, block: BlockId) -> Arc
attrs: FxHashMap::default(),
small_data: FxHashMap::default(),
big_data: FxHashMap::default(),
top_attrs: RawAttrs::EMPTY,
top_attrs: AttrsOrCfg::empty(),
vis: ItemVisibilities { arena: ThinVec::new() },
})
})
@ -182,8 +184,8 @@ pub(crate) fn block_item_tree_query(db: &dyn DefDatabase, block: BlockId) -> Arc
#[derive(Debug, Default, Eq, PartialEq)]
pub struct ItemTree {
top_level: Box<[ModItemId]>,
top_attrs: RawAttrs,
attrs: FxHashMap<FileAstId<ast::Item>, RawAttrs>,
top_attrs: AttrsOrCfg,
attrs: FxHashMap<FileAstId<ast::Item>, AttrsOrCfg>,
vis: ItemVisibilities,
big_data: FxHashMap<FileAstId<ast::Item>, BigModItem>,
small_data: FxHashMap<FileAstId<ast::Item>, SmallModItem>,
@ -197,26 +199,12 @@ impl ItemTree {
}
/// Returns the inner attributes of the source file.
pub(crate) fn top_level_raw_attrs(&self) -> &RawAttrs {
pub(crate) fn top_level_attrs(&self) -> &AttrsOrCfg {
&self.top_attrs
}
/// Returns the inner attributes of the source file.
pub(crate) fn top_level_attrs(&self, db: &dyn DefDatabase, krate: Crate) -> Attrs {
Attrs::expand_cfg_attr(db, krate, self.top_attrs.clone())
}
pub(crate) fn raw_attrs(&self, of: FileAstId<ast::Item>) -> &RawAttrs {
self.attrs.get(&of).unwrap_or(&RawAttrs::EMPTY)
}
pub(crate) fn attrs(
&self,
db: &dyn DefDatabase,
krate: Crate,
of: FileAstId<ast::Item>,
) -> Attrs {
Attrs::expand_cfg_attr(db, krate, self.raw_attrs(of).clone())
pub(crate) fn attrs(&self, of: FileAstId<ast::Item>) -> Option<&AttrsOrCfg> {
self.attrs.get(&of)
}
/// Returns a count of a few, expensive items.

View file

@ -0,0 +1,220 @@
//! Defines attribute helpers for name resolution.
//!
//! Notice we don't preserve all attributes for name resolution, to save space:
//! for example, we skip doc comments (desugared to `#[doc = "..."]` attributes)
//! and `#[inline]`. The filtered attributes are listed in [`hir_expand::attrs`].
use std::{
borrow::Cow,
convert::Infallible,
ops::{self, ControlFlow},
};
use cfg::{CfgExpr, CfgOptions};
use either::Either;
use hir_expand::{
attrs::{Attr, AttrId, AttrInput, Meta, collect_item_tree_attrs},
mod_path::ModPath,
name::Name,
span_map::SpanMapRef,
};
use intern::{Interned, Symbol, sym};
use syntax::{AstNode, T, ast};
use syntax_bridge::DocCommentDesugarMode;
use tt::token_to_literal;
use crate::{db::DefDatabase, item_tree::lower::Ctx};
/// The lowered attributes of an item-tree item, or the `cfg` that disabled the item.
#[derive(Debug, PartialEq, Eq)]
pub(crate) enum AttrsOrCfg {
    /// The item is enabled; `attrs` holds its collected attributes.
    Enabled {
        attrs: AttrsOwned,
    },
    /// This only collects the attributes up to the disabled `cfg` (this is what is needed for crate-level attributes.)
    CfgDisabled(Box<(CfgExpr, AttrsOwned)>),
}
impl Default for AttrsOrCfg {
    /// Defaults to an enabled item with no attributes — the same value as
    /// [`AttrsOrCfg::empty`].
    #[inline]
    fn default() -> Self {
        // Delegate to `empty()` so the "no attributes" representation is
        // constructed in exactly one place and cannot drift.
        AttrsOrCfg::empty()
    }
}
impl AttrsOrCfg {
    /// Lowers all attributes of `owner` into [`Attr`]s, stopping early if a disabled
    /// `cfg` is encountered (in which case the attributes collected so far are kept
    /// alongside the failing [`CfgExpr`]).
    ///
    /// `cfg_options` is a lazy getter because most items never need cfg evaluation.
    pub(crate) fn lower<'a>(
        db: &dyn DefDatabase,
        owner: &dyn ast::HasAttrs,
        cfg_options: &dyn Fn() -> &'a CfgOptions,
        span_map: SpanMapRef<'_>,
    ) -> AttrsOrCfg {
        let mut attrs = Vec::new();
        let result =
            collect_item_tree_attrs::<Infallible>(owner, cfg_options, |meta, container, _, _| {
                // NOTE: We cannot early return from this function, *every* attribute must be pushed, otherwise we'll mess the `AttrId`
                // tracking.
                // Each `Meta` shape yields the attribute's span, the text range of its
                // path, and the optional input (`= literal` or a token tree).
                let (span, path_range, input) = match meta {
                    // `#[key = value]`: the value token becomes a literal input.
                    Meta::NamedKeyValue { path_range, name: _, value } => {
                        let span = span_map.span_for_range(path_range);
                        let input = value.map(|value| {
                            Box::new(AttrInput::Literal(token_to_literal(
                                value.text(),
                                span_map.span_for_range(value.text_range()),
                            )))
                        });
                        (span, path_range, input)
                    }
                    // `#[path(...)]`: the delimited group is lowered to a token tree.
                    Meta::TokenTree { path, tt } => {
                        let span = span_map.span_for_range(path.range);
                        let tt = syntax_bridge::syntax_node_to_token_tree(
                            tt.syntax(),
                            span_map,
                            span,
                            DocCommentDesugarMode::ProcMacro,
                        );
                        let input = Some(Box::new(AttrInput::TokenTree(tt)));
                        (span, path.range, input)
                    }
                    // `#[path]`: no input at all.
                    Meta::Path { path } => {
                        let span = span_map.span_for_range(path.range);
                        (span, path.range, None)
                    }
                };
                // Re-resolve the path from the raw tokens covering `path_range`:
                // walk successive tokens, keeping identifiers within the range.
                let path = container.token_at_offset(path_range.start()).right_biased().and_then(
                    |first_path_token| {
                        let is_abs = matches!(first_path_token.kind(), T![:] | T![::]);
                        let segments =
                            std::iter::successors(Some(first_path_token), |it| it.next_token())
                                .take_while(|it| it.text_range().end() <= path_range.end())
                                .filter(|it| it.kind().is_any_identifier());
                        ModPath::from_tokens(
                            db,
                            &mut |range| span_map.span_for_range(range).ctx,
                            is_abs,
                            segments,
                        )
                    },
                );
                // A malformed path is replaced by a "missing" name so the AttrId
                // numbering stays in sync (see NOTE above).
                let path = path.unwrap_or_else(|| Name::missing().into());
                attrs.push(Attr { path: Interned::new(path), input, ctxt: span.ctx });
                ControlFlow::Continue(())
            });
        let attrs = AttrsOwned(attrs.into_boxed_slice());
        match result {
            // A disabled `cfg` was reported: keep the cfg plus the attrs seen before it.
            Some(Either::Right(cfg)) => AttrsOrCfg::CfgDisabled(Box::new((cfg, attrs))),
            None => AttrsOrCfg::Enabled { attrs },
        }
    }
}
/// Owned storage for an item's filtered attributes; borrow a view via [`AttrsOwned::as_ref`].
#[derive(Debug, PartialEq, Eq)]
pub(crate) struct AttrsOwned(Box<[Attr]>);
/// Cheap, copyable borrowed view over a slice of attributes (derefs to `[Attr]`).
#[derive(Debug, Clone, Copy)]
pub(crate) struct Attrs<'a>(&'a [Attr]);
// Let slice methods (`len`, `is_empty`, indexing, …) be called directly on `Attrs`.
impl ops::Deref for Attrs<'_> {
    type Target = [Attr];

    #[inline]
    fn deref(&self) -> &Self::Target {
        self.0
    }
}
impl Ctx<'_> {
    /// Lowers `owner`'s attributes using this item-tree context's database,
    /// lazily-computed cfg options, and span map.
    #[inline]
    pub(super) fn lower_attrs(&self, owner: &dyn ast::HasAttrs) -> AttrsOrCfg {
        AttrsOrCfg::lower(self.db, owner, &|| self.cfg_options(), self.span_map())
    }
}
impl AttrsOwned {
    /// Borrows the owned attributes as a copyable [`Attrs`] view.
    #[inline]
    pub(crate) fn as_ref(&self) -> Attrs<'_> {
        Attrs(&self.0)
    }
}
impl<'a> Attrs<'a> {
    /// A view over no attributes at all.
    pub(crate) const EMPTY: Self = Attrs(&[]);

    /// Starts a query for attributes whose path is the single identifier `key`.
    #[inline]
    pub(crate) fn by_key(self, key: Symbol) -> AttrQuery<'a> {
        AttrQuery { attrs: self, key }
    }

    /// Iterates all attributes paired with their item-tree [`AttrId`]s
    /// (ids are assigned from the slice position).
    #[inline]
    pub(crate) fn iter(self) -> impl Iterator<Item = (AttrId, &'a Attr)> {
        self.0.iter().enumerate().map(|(id, attr)| (AttrId::from_item_tree_index(id as u32), attr))
    }

    /// Iterates the attributes strictly after `after` (or all of them when `None`),
    /// offsetting the enumeration so each attribute keeps its original [`AttrId`].
    #[inline]
    pub(crate) fn iter_after(
        self,
        after: Option<AttrId>,
    ) -> impl Iterator<Item = (AttrId, &'a Attr)> {
        let skip = after.map_or(0, |after| after.item_tree_index() + 1);
        self.0[skip as usize..]
            .iter()
            .enumerate()
            .map(move |(id, attr)| (AttrId::from_item_tree_index(id as u32 + skip), attr))
    }

    /// Whether a `#[proc_macro]` attribute is present.
    #[inline]
    pub(crate) fn is_proc_macro(&self) -> bool {
        self.by_key(sym::proc_macro).exists()
    }

    /// Whether a `#[proc_macro_attribute]` attribute is present.
    #[inline]
    pub(crate) fn is_proc_macro_attribute(&self) -> bool {
        self.by_key(sym::proc_macro_attribute).exists()
    }
}
/// A lazy filter over [`Attrs`] selecting attributes whose path equals `key`;
/// built via [`Attrs::by_key`].
#[derive(Debug, Clone)]
pub(crate) struct AttrQuery<'attr> {
    attrs: Attrs<'attr>,
    key: Symbol,
}
impl<'attr> AttrQuery<'attr> {
    /// Token trees of every matching `#[key(...)]` attribute.
    #[inline]
    pub(crate) fn tt_values(self) -> impl Iterator<Item = &'attr crate::tt::TopSubtree> {
        self.attrs().filter_map(|attr| attr.token_tree_value())
    }

    /// First matching `#[key = "…"]` string value, together with its span.
    #[inline]
    pub(crate) fn string_value_with_span(self) -> Option<(&'attr Symbol, span::Span)> {
        self.attrs().find_map(|attr| attr.string_value_with_span())
    }

    /// First matching string value with escapes resolved (may allocate, hence `Cow`).
    #[inline]
    pub(crate) fn string_value_unescape(self) -> Option<Cow<'attr, str>> {
        self.attrs().find_map(|attr| attr.string_value_unescape())
    }

    /// Whether at least one attribute matches the key.
    #[inline]
    pub(crate) fn exists(self) -> bool {
        self.attrs().next().is_some()
    }

    /// All attributes whose path is exactly the single-identifier `key`.
    #[inline]
    pub(crate) fn attrs(self) -> impl Iterator<Item = &'attr Attr> + Clone {
        let key = self.key;
        self.attrs.0.iter().filter(move |attr| attr.path.as_ident().is_some_and(|s| *s == key))
    }
}
impl AttrsOrCfg {
    /// The canonical "enabled, no attributes" value.
    #[inline]
    pub(super) fn empty() -> Self {
        Self::Enabled { attrs: AttrsOwned(Box::new([])) }
    }

    /// True only for an enabled item with zero attributes; a cfg-disabled item is
    /// never considered empty (its failing cfg must be preserved).
    #[inline]
    pub(super) fn is_empty(&self) -> bool {
        match self {
            Self::Enabled { attrs } => attrs.as_ref().is_empty(),
            Self::CfgDisabled(_) => false,
        }
    }
}

View file

@ -1,8 +1,9 @@
//! AST -> `ItemTree` lowering code.
use std::{cell::OnceCell, collections::hash_map::Entry};
use std::cell::OnceCell;
use base_db::FxIndexSet;
use cfg::CfgOptions;
use hir_expand::{
HirFileId,
mod_path::PathKind,
@ -22,18 +23,19 @@ use crate::{
item_tree::{
BigModItem, Const, Enum, ExternBlock, ExternCrate, FieldsShape, Function, Impl,
ImportAlias, Interned, ItemTree, ItemTreeAstId, Macro2, MacroCall, MacroRules, Mod,
ModItemId, ModKind, ModPath, RawAttrs, RawVisibility, RawVisibilityId, SmallModItem,
Static, Struct, StructKind, Trait, TypeAlias, Union, Use, UseTree, UseTreeKind,
VisibilityExplicitness,
ModItemId, ModKind, ModPath, RawVisibility, RawVisibilityId, SmallModItem, Static, Struct,
StructKind, Trait, TypeAlias, Union, Use, UseTree, UseTreeKind, VisibilityExplicitness,
attrs::AttrsOrCfg,
},
};
pub(super) struct Ctx<'a> {
db: &'a dyn DefDatabase,
pub(super) db: &'a dyn DefDatabase,
tree: ItemTree,
source_ast_id_map: Arc<AstIdMap>,
span_map: OnceCell<SpanMap>,
file: HirFileId,
cfg_options: OnceCell<&'a CfgOptions>,
top_level: Vec<ModItemId>,
visibilities: FxIndexSet<RawVisibility>,
}
@ -45,12 +47,18 @@ impl<'a> Ctx<'a> {
tree: ItemTree::default(),
source_ast_id_map: db.ast_id_map(file),
file,
cfg_options: OnceCell::new(),
span_map: OnceCell::new(),
visibilities: FxIndexSet::default(),
top_level: Vec::new(),
}
}
#[inline]
pub(super) fn cfg_options(&self) -> &'a CfgOptions {
self.cfg_options.get_or_init(|| self.file.krate(self.db).cfg_options(self.db))
}
pub(super) fn span_map(&self) -> SpanMapRef<'_> {
self.span_map.get_or_init(|| self.db.span_map(self.file)).as_ref()
}
@ -98,7 +106,7 @@ impl<'a> Ctx<'a> {
}
pub(super) fn lower_block(mut self, block: &ast::BlockExpr) -> ItemTree {
self.tree.top_attrs = RawAttrs::new(self.db, block, self.span_map());
self.tree.top_attrs = self.lower_attrs(block);
self.top_level = block
.statements()
.filter_map(|stmt| match stmt {
@ -144,22 +152,15 @@ impl<'a> Ctx<'a> {
// FIXME: Handle `global_asm!()`.
ast::Item::AsmExpr(_) => return None,
};
let attrs = RawAttrs::new(self.db, item, self.span_map());
let attrs = self.lower_attrs(item);
self.add_attrs(mod_item.ast_id(), attrs);
Some(mod_item)
}
fn add_attrs(&mut self, item: FileAstId<ast::Item>, attrs: RawAttrs) {
fn add_attrs(&mut self, item: FileAstId<ast::Item>, attrs: AttrsOrCfg) {
if !attrs.is_empty() {
match self.tree.attrs.entry(item) {
Entry::Occupied(mut entry) => {
*entry.get_mut() = entry.get().merge(attrs);
}
Entry::Vacant(entry) => {
entry.insert(attrs);
}
}
self.tree.attrs.insert(item, attrs);
}
}
@ -352,7 +353,7 @@ impl<'a> Ctx<'a> {
ast::ExternItem::TypeAlias(ty) => self.lower_type_alias(ty)?.into(),
ast::ExternItem::MacroCall(call) => self.lower_macro_call(call)?.into(),
};
let attrs = RawAttrs::new(self.db, &item, self.span_map());
let attrs = self.lower_attrs(&item);
self.add_attrs(mod_item.ast_id(), attrs);
Some(mod_item)
})

View file

@ -7,8 +7,8 @@ use span::{Edition, ErasedFileAstId};
use crate::{
item_tree::{
Const, DefDatabase, Enum, ExternBlock, ExternCrate, FieldsShape, Function, Impl, ItemTree,
Macro2, MacroCall, MacroRules, Mod, ModItemId, ModKind, RawAttrs, RawVisibilityId, Static,
Struct, Trait, TypeAlias, Union, Use, UseTree, UseTreeKind,
Macro2, MacroCall, MacroRules, Mod, ModItemId, ModKind, RawVisibilityId, Static, Struct,
Trait, TypeAlias, Union, Use, UseTree, UseTreeKind, attrs::AttrsOrCfg,
},
visibility::RawVisibility,
};
@ -85,9 +85,13 @@ impl Printer<'_> {
}
}
fn print_attrs(&mut self, attrs: &RawAttrs, inner: bool, separated_by: &str) {
fn print_attrs(&mut self, attrs: &AttrsOrCfg, inner: bool, separated_by: &str) {
let AttrsOrCfg::Enabled { attrs } = attrs else {
w!(self, "#[cfg(false)]{separated_by}");
return;
};
let inner = if inner { "!" } else { "" };
for attr in &**attrs {
for attr in &*attrs.as_ref() {
w!(
self,
"#{}[{}{}]{}",

View file

@ -30,10 +30,8 @@ use crate::{A, B};
use a::{c, d::{e}};
"#,
expect![[r##"
#![doc = " file comment"]
expect![[r#"
#![no_std]
#![doc = " another file comment"]
// AstId: ExternCrate[070B, 0]
pub(self) extern crate self as renamed;
@ -47,13 +45,12 @@ use a::{c, d::{e}};
// AstId: Use[0000, 1]
pub(self) use globs::*;
#[doc = " docs on import"]
// AstId: Use[0000, 2]
pub(self) use crate::{A, B};
// AstId: Use[0000, 3]
pub(self) use a::{c, d::{e}};
"##]],
"#]],
);
}
@ -195,8 +192,6 @@ mod inline {
mod outline;
"#,
expect![[r##"
#[doc = " outer"]
#[doc = " inner"]
// AstId: Module[03AE, 0]
pub(self) mod inline {
// AstId: Use[0000, 0]

View file

@ -2,100 +2,36 @@
//!
//! This attribute to tell the compiler about semi built-in std library
//! features, such as Fn family of traits.
use hir_expand::name::Name;
use intern::{Symbol, sym};
use rustc_hash::FxHashMap;
use stdx::impl_from;
use crate::{
AdtId, AssocItemId, AttrDefId, Crate, EnumId, EnumVariantId, FunctionId, ImplId, ModuleDefId,
StaticId, StructId, TraitId, TypeAliasId, UnionId,
attrs::AttrFlags,
db::DefDatabase,
expr_store::path::Path,
nameres::{assoc::TraitItems, crate_def_map, crate_local_def_map},
};
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub enum LangItemTarget {
EnumId(EnumId),
Function(FunctionId),
ImplDef(ImplId),
Static(StaticId),
Struct(StructId),
Union(UnionId),
TypeAlias(TypeAliasId),
Trait(TraitId),
EnumVariant(EnumVariantId),
FunctionId(FunctionId),
ImplId(ImplId),
StaticId(StaticId),
StructId(StructId),
UnionId(UnionId),
TypeAliasId(TypeAliasId),
TraitId(TraitId),
EnumVariantId(EnumVariantId),
}
impl LangItemTarget {
pub fn as_enum(self) -> Option<EnumId> {
match self {
LangItemTarget::EnumId(id) => Some(id),
_ => None,
}
}
pub fn as_function(self) -> Option<FunctionId> {
match self {
LangItemTarget::Function(id) => Some(id),
_ => None,
}
}
pub fn as_impl_def(self) -> Option<ImplId> {
match self {
LangItemTarget::ImplDef(id) => Some(id),
_ => None,
}
}
pub fn as_static(self) -> Option<StaticId> {
match self {
LangItemTarget::Static(id) => Some(id),
_ => None,
}
}
pub fn as_struct(self) -> Option<StructId> {
match self {
LangItemTarget::Struct(id) => Some(id),
_ => None,
}
}
pub fn as_trait(self) -> Option<TraitId> {
match self {
LangItemTarget::Trait(id) => Some(id),
_ => None,
}
}
pub fn as_enum_variant(self) -> Option<EnumVariantId> {
match self {
LangItemTarget::EnumVariant(id) => Some(id),
_ => None,
}
}
pub fn as_type_alias(self) -> Option<TypeAliasId> {
match self {
LangItemTarget::TypeAlias(id) => Some(id),
_ => None,
}
}
pub fn as_adt(self) -> Option<AdtId> {
match self {
LangItemTarget::Union(it) => Some(it.into()),
LangItemTarget::EnumId(it) => Some(it.into()),
LangItemTarget::Struct(it) => Some(it.into()),
_ => None,
}
}
}
impl_from!(
EnumId, FunctionId, ImplId, StaticId, StructId, UnionId, TypeAliasId, TraitId, EnumVariantId for LangItemTarget
);
/// Salsa query. This will look for lang items in a specific crate.
#[salsa_macros::tracked(returns(ref))]
#[salsa_macros::tracked(returns(as_deref))]
pub fn crate_lang_items(db: &dyn DefDatabase, krate: Crate) -> Option<Box<LangItems>> {
let _p = tracing::info_span!("crate_lang_items_query").entered();
@ -105,15 +41,11 @@ pub fn crate_lang_items(db: &dyn DefDatabase, krate: Crate) -> Option<Box<LangIt
for (_, module_data) in crate_def_map.modules() {
for impl_def in module_data.scope.impls() {
lang_items.collect_lang_item(db, impl_def, LangItemTarget::ImplDef);
lang_items.collect_lang_item(db, impl_def);
for &(_, assoc) in impl_def.impl_items(db).items.iter() {
match assoc {
AssocItemId::FunctionId(f) => {
lang_items.collect_lang_item(db, f, LangItemTarget::Function)
}
AssocItemId::TypeAliasId(t) => {
lang_items.collect_lang_item(db, t, LangItemTarget::TypeAlias)
}
AssocItemId::FunctionId(f) => lang_items.collect_lang_item(db, f),
AssocItemId::TypeAliasId(t) => lang_items.collect_lang_item(db, t),
AssocItemId::ConstId(_) => (),
}
}
@ -122,62 +54,55 @@ pub fn crate_lang_items(db: &dyn DefDatabase, krate: Crate) -> Option<Box<LangIt
for def in module_data.scope.declarations() {
match def {
ModuleDefId::TraitId(trait_) => {
lang_items.collect_lang_item(db, trait_, LangItemTarget::Trait);
lang_items.collect_lang_item(db, trait_);
TraitItems::query(db, trait_).items.iter().for_each(|&(_, assoc_id)| {
match assoc_id {
AssocItemId::FunctionId(f) => {
lang_items.collect_lang_item(db, f, LangItemTarget::Function);
lang_items.collect_lang_item(db, f);
}
AssocItemId::TypeAliasId(alias) => {
lang_items.collect_lang_item(db, alias, LangItemTarget::TypeAlias)
lang_items.collect_lang_item(db, alias)
}
AssocItemId::ConstId(_) => {}
}
});
}
ModuleDefId::AdtId(AdtId::EnumId(e)) => {
lang_items.collect_lang_item(db, e, LangItemTarget::EnumId);
lang_items.collect_lang_item(db, e);
e.enum_variants(db).variants.iter().for_each(|&(id, _, _)| {
lang_items.collect_lang_item(db, id, LangItemTarget::EnumVariant);
lang_items.collect_lang_item(db, id);
});
}
ModuleDefId::AdtId(AdtId::StructId(s)) => {
lang_items.collect_lang_item(db, s, LangItemTarget::Struct);
lang_items.collect_lang_item(db, s);
}
ModuleDefId::AdtId(AdtId::UnionId(u)) => {
lang_items.collect_lang_item(db, u, LangItemTarget::Union);
lang_items.collect_lang_item(db, u);
}
ModuleDefId::FunctionId(f) => {
lang_items.collect_lang_item(db, f, LangItemTarget::Function);
lang_items.collect_lang_item(db, f);
}
ModuleDefId::StaticId(s) => {
lang_items.collect_lang_item(db, s, LangItemTarget::Static);
lang_items.collect_lang_item(db, s);
}
ModuleDefId::TypeAliasId(t) => {
lang_items.collect_lang_item(db, t, LangItemTarget::TypeAlias);
lang_items.collect_lang_item(db, t);
}
_ => {}
}
}
}
if lang_items.items.is_empty() { None } else { Some(Box::new(lang_items)) }
if lang_items.is_empty() { None } else { Some(Box::new(lang_items)) }
}
/// Salsa query. Look for a lang item, starting from the specified crate and recursively
/// Salsa query. Look for lang items, starting from the specified crate and recursively
/// traversing its dependencies.
#[salsa_macros::tracked]
pub fn lang_item(
db: &dyn DefDatabase,
start_crate: Crate,
item: LangItem,
) -> Option<LangItemTarget> {
let _p = tracing::info_span!("lang_item_query").entered();
if let Some(target) =
crate_lang_items(db, start_crate).as_ref().and_then(|it| it.items.get(&item).copied())
{
return Some(target);
}
#[salsa_macros::tracked(returns(ref))]
pub fn lang_items(db: &dyn DefDatabase, start_crate: Crate) -> LangItems {
let _p = tracing::info_span!("lang_items_query").entered();
let mut result = crate_lang_items(db, start_crate).cloned().unwrap_or_default();
// Our `CrateGraph` eagerly inserts sysroot dependencies like `core` or `std` into dependencies
// even if the target crate has `#![no_std]`, `#![no_core]` or shadowed sysroot dependencies
@ -186,42 +111,29 @@ pub fn lang_item(
// while nameres.
//
// See https://github.com/rust-lang/rust-analyzer/pull/20475 for details.
crate_local_def_map(db, start_crate).local(db).extern_prelude().find_map(|(_, (krate, _))| {
for (_, (krate, _)) in crate_local_def_map(db, start_crate).local(db).extern_prelude() {
// Some crates declares themselves as extern crate like `extern crate self as core`.
// Ignore these to prevent cycles.
if krate.krate == start_crate { None } else { lang_item(db, krate.krate, item) }
})
}
if krate.krate != start_crate {
result.merge_prefer_self(lang_items(db, krate.krate));
}
}
#[derive(Default, Debug, Clone, PartialEq, Eq)]
pub struct LangItems {
items: FxHashMap<LangItem, LangItemTarget>,
result
}
impl LangItems {
pub fn target(&self, item: LangItem) -> Option<LangItemTarget> {
self.items.get(&item).copied()
}
fn collect_lang_item<T>(
&mut self,
db: &dyn DefDatabase,
item: T,
constructor: fn(T) -> LangItemTarget,
) where
T: Into<AttrDefId> + Copy,
fn collect_lang_item<T>(&mut self, db: &dyn DefDatabase, item: T)
where
T: Into<AttrDefId> + Into<LangItemTarget> + Copy,
{
let _p = tracing::info_span!("collect_lang_item").entered();
if let Some(lang_item) = lang_attr(db, item.into()) {
self.items.entry(lang_item).or_insert_with(|| constructor(item));
if let Some(lang_item) = AttrFlags::lang_item(db, item.into()) {
self.assign_lang_item(lang_item, item.into());
}
}
}
pub(crate) fn lang_attr(db: &dyn DefDatabase, item: AttrDefId) -> Option<LangItem> {
db.attrs(item).lang_item()
}
#[salsa::tracked(returns(as_deref))]
pub(crate) fn crate_notable_traits(db: &dyn DefDatabase, krate: Crate) -> Option<Box<[TraitId]>> {
let mut traits = Vec::new();
@ -231,7 +143,7 @@ pub(crate) fn crate_notable_traits(db: &dyn DefDatabase, krate: Crate) -> Option
for (_, module_data) in crate_def_map.modules() {
for def in module_data.scope.declarations() {
if let ModuleDefId::TraitId(trait_) = def
&& db.attrs(trait_.into()).has_doc_notable_trait()
&& AttrFlags::query(db, trait_.into()).contains(AttrFlags::IS_DOC_NOTABLE_TRAIT)
{
traits.push(trait_);
}
@ -249,30 +161,62 @@ pub enum GenericRequirement {
macro_rules! language_item_table {
(
$( $(#[$attr:meta])* $variant:ident, $module:ident :: $name:ident, $method:ident, $target:expr, $generics:expr; )*
$LangItems:ident =>
$( $(#[$attr:meta])* $lang_item:ident, $module:ident :: $name:ident, $method:ident, $target:ident, $generics:expr; )*
) => {
/// A representation of all the valid language items in Rust.
#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
pub enum LangItem {
#[allow(non_snake_case)] // FIXME: Should we remove this?
#[derive(Debug, Default, Clone, PartialEq, Eq, Hash)]
pub struct $LangItems {
$(
#[doc = concat!("The `", stringify!($name), "` lang item.")]
$(#[$attr])*
$variant,
pub $lang_item: Option<$target>,
)*
}
impl LangItem {
pub fn name(self) -> &'static str {
impl LangItems {
fn is_empty(&self) -> bool {
$( self.$lang_item.is_none() )&&*
}
/// Merges `self` with `other`, with preference to `self` items.
fn merge_prefer_self(&mut self, other: &Self) {
$( self.$lang_item = self.$lang_item.or(other.$lang_item); )*
}
fn assign_lang_item(&mut self, name: Symbol, target: LangItemTarget) {
match name {
$(
_ if name == $module::$name => {
if let LangItemTarget::$target(target) = target {
self.$lang_item = Some(target);
}
}
)*
_ => {}
}
}
}
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub enum LangItemEnum {
$(
$(#[$attr])*
$lang_item,
)*
}
impl LangItemEnum {
#[inline]
pub fn from_lang_items(self, lang_items: &LangItems) -> Option<LangItemTarget> {
match self {
$( LangItem::$variant => stringify!($name), )*
$( LangItemEnum::$lang_item => lang_items.$lang_item.map(Into::into), )*
}
}
/// Opposite of [`LangItem::name`]
pub fn from_symbol(sym: &Symbol) -> Option<Self> {
match sym {
$(sym if *sym == $module::$name => Some(LangItem::$variant), )*
#[inline]
pub fn from_symbol(symbol: &Symbol) -> Option<Self> {
match symbol {
$( _ if *symbol == $module::$name => Some(Self::$lang_item), )*
_ => None,
}
}
@ -280,142 +224,101 @@ macro_rules! language_item_table {
}
}
impl LangItem {
pub fn resolve_function(self, db: &dyn DefDatabase, start_crate: Crate) -> Option<FunctionId> {
lang_item(db, start_crate, self).and_then(|t| t.as_function())
}
pub fn resolve_trait(self, db: &dyn DefDatabase, start_crate: Crate) -> Option<TraitId> {
lang_item(db, start_crate, self).and_then(|t| t.as_trait())
}
pub fn resolve_adt(self, db: &dyn DefDatabase, start_crate: Crate) -> Option<AdtId> {
lang_item(db, start_crate, self).and_then(|t| t.as_adt())
}
pub fn resolve_enum(self, db: &dyn DefDatabase, start_crate: Crate) -> Option<EnumId> {
lang_item(db, start_crate, self).and_then(|t| t.as_enum())
}
pub fn resolve_type_alias(
self,
db: &dyn DefDatabase,
start_crate: Crate,
) -> Option<TypeAliasId> {
lang_item(db, start_crate, self).and_then(|t| t.as_type_alias())
}
/// Opposite of [`LangItem::name`]
pub fn from_name(name: &hir_expand::name::Name) -> Option<Self> {
Self::from_symbol(name.symbol())
}
pub fn path(&self, db: &dyn DefDatabase, start_crate: Crate) -> Option<Path> {
let t = lang_item(db, start_crate, *self)?;
Some(Path::LangItem(t, None))
}
pub fn ty_rel_path(&self, db: &dyn DefDatabase, start_crate: Crate, seg: Name) -> Option<Path> {
let t = lang_item(db, start_crate, *self)?;
Some(Path::LangItem(t, Some(seg)))
}
}
language_item_table! {
language_item_table! { LangItems =>
// Variant name, Name, Getter method name, Target Generic requirements;
Sized, sym::sized, sized_trait, Target::Trait, GenericRequirement::Exact(0);
MetaSized, sym::meta_sized, sized_trait, Target::Trait, GenericRequirement::Exact(0);
PointeeSized, sym::pointee_sized, sized_trait, Target::Trait, GenericRequirement::Exact(0);
Unsize, sym::unsize, unsize_trait, Target::Trait, GenericRequirement::Minimum(1);
Sized, sym::sized, sized_trait, TraitId, GenericRequirement::Exact(0);
MetaSized, sym::meta_sized, sized_trait, TraitId, GenericRequirement::Exact(0);
PointeeSized, sym::pointee_sized, sized_trait, TraitId, GenericRequirement::Exact(0);
Unsize, sym::unsize, unsize_trait, TraitId, GenericRequirement::Minimum(1);
/// Trait injected by `#[derive(PartialEq)]`, (i.e. "Partial EQ").
StructuralPeq, sym::structural_peq, structural_peq_trait, Target::Trait, GenericRequirement::None;
StructuralPeq, sym::structural_peq, structural_peq_trait, TraitId, GenericRequirement::None;
/// Trait injected by `#[derive(Eq)]`, (i.e. "Total EQ"; no, I will not apologize).
StructuralTeq, sym::structural_teq, structural_teq_trait, Target::Trait, GenericRequirement::None;
Copy, sym::copy, copy_trait, Target::Trait, GenericRequirement::Exact(0);
Clone, sym::clone, clone_trait, Target::Trait, GenericRequirement::None;
Sync, sym::sync, sync_trait, Target::Trait, GenericRequirement::Exact(0);
DiscriminantKind, sym::discriminant_kind, discriminant_kind_trait, Target::Trait, GenericRequirement::None;
StructuralTeq, sym::structural_teq, structural_teq_trait, TraitId, GenericRequirement::None;
Copy, sym::copy, copy_trait, TraitId, GenericRequirement::Exact(0);
Clone, sym::clone, clone_trait, TraitId, GenericRequirement::None;
Sync, sym::sync, sync_trait, TraitId, GenericRequirement::Exact(0);
DiscriminantKind, sym::discriminant_kind, discriminant_kind_trait, TraitId, GenericRequirement::None;
/// The associated item of the [`DiscriminantKind`] trait.
Discriminant, sym::discriminant_type, discriminant_type, Target::AssocTy, GenericRequirement::None;
Discriminant, sym::discriminant_type, discriminant_type, TypeAliasId, GenericRequirement::None;
PointeeTrait, sym::pointee_trait, pointee_trait, Target::Trait, GenericRequirement::None;
Metadata, sym::metadata_type, metadata_type, Target::AssocTy, GenericRequirement::None;
DynMetadata, sym::dyn_metadata, dyn_metadata, Target::Struct, GenericRequirement::None;
PointeeTrait, sym::pointee_trait, pointee_trait, TraitId, GenericRequirement::None;
Metadata, sym::metadata_type, metadata_type, TypeAliasId, GenericRequirement::None;
DynMetadata, sym::dyn_metadata, dyn_metadata, StructId, GenericRequirement::None;
Freeze, sym::freeze, freeze_trait, Target::Trait, GenericRequirement::Exact(0);
Freeze, sym::freeze, freeze_trait, TraitId, GenericRequirement::Exact(0);
FnPtrTrait, sym::fn_ptr_trait, fn_ptr_trait, Target::Trait, GenericRequirement::Exact(0);
FnPtrAddr, sym::fn_ptr_addr, fn_ptr_addr, Target::Method(MethodKind::Trait { body: false }), GenericRequirement::None;
FnPtrTrait, sym::fn_ptr_trait, fn_ptr_trait, TraitId, GenericRequirement::Exact(0);
FnPtrAddr, sym::fn_ptr_addr, fn_ptr_addr, FunctionId, GenericRequirement::None;
Drop, sym::drop, drop_trait, Target::Trait, GenericRequirement::None;
Destruct, sym::destruct, destruct_trait, Target::Trait, GenericRequirement::None;
Drop, sym::drop, drop_trait, TraitId, GenericRequirement::None;
Destruct, sym::destruct, destruct_trait, TraitId, GenericRequirement::None;
CoerceUnsized, sym::coerce_unsized, coerce_unsized_trait, Target::Trait, GenericRequirement::Minimum(1);
DispatchFromDyn, sym::dispatch_from_dyn, dispatch_from_dyn_trait, Target::Trait, GenericRequirement::Minimum(1);
CoerceUnsized, sym::coerce_unsized, coerce_unsized_trait, TraitId, GenericRequirement::Minimum(1);
DispatchFromDyn, sym::dispatch_from_dyn, dispatch_from_dyn_trait, TraitId, GenericRequirement::Minimum(1);
// language items relating to transmutability
TransmuteOpts, sym::transmute_opts, transmute_opts, Target::Struct, GenericRequirement::Exact(0);
TransmuteTrait, sym::transmute_trait, transmute_trait, Target::Trait, GenericRequirement::Exact(3);
TransmuteOpts, sym::transmute_opts, transmute_opts, StructId, GenericRequirement::Exact(0);
TransmuteTrait, sym::transmute_trait, transmute_trait, TraitId, GenericRequirement::Exact(3);
Add, sym::add, add_trait, Target::Trait, GenericRequirement::Exact(1);
Sub, sym::sub, sub_trait, Target::Trait, GenericRequirement::Exact(1);
Mul, sym::mul, mul_trait, Target::Trait, GenericRequirement::Exact(1);
Div, sym::div, div_trait, Target::Trait, GenericRequirement::Exact(1);
Rem, sym::rem, rem_trait, Target::Trait, GenericRequirement::Exact(1);
Neg, sym::neg, neg_trait, Target::Trait, GenericRequirement::Exact(0);
Not, sym::not, not_trait, Target::Trait, GenericRequirement::Exact(0);
BitXor, sym::bitxor, bitxor_trait, Target::Trait, GenericRequirement::Exact(1);
BitAnd, sym::bitand, bitand_trait, Target::Trait, GenericRequirement::Exact(1);
BitOr, sym::bitor, bitor_trait, Target::Trait, GenericRequirement::Exact(1);
Shl, sym::shl, shl_trait, Target::Trait, GenericRequirement::Exact(1);
Shr, sym::shr, shr_trait, Target::Trait, GenericRequirement::Exact(1);
AddAssign, sym::add_assign, add_assign_trait, Target::Trait, GenericRequirement::Exact(1);
SubAssign, sym::sub_assign, sub_assign_trait, Target::Trait, GenericRequirement::Exact(1);
MulAssign, sym::mul_assign, mul_assign_trait, Target::Trait, GenericRequirement::Exact(1);
DivAssign, sym::div_assign, div_assign_trait, Target::Trait, GenericRequirement::Exact(1);
RemAssign, sym::rem_assign, rem_assign_trait, Target::Trait, GenericRequirement::Exact(1);
BitXorAssign, sym::bitxor_assign, bitxor_assign_trait, Target::Trait, GenericRequirement::Exact(1);
BitAndAssign, sym::bitand_assign, bitand_assign_trait, Target::Trait, GenericRequirement::Exact(1);
BitOrAssign, sym::bitor_assign, bitor_assign_trait, Target::Trait, GenericRequirement::Exact(1);
ShlAssign, sym::shl_assign, shl_assign_trait, Target::Trait, GenericRequirement::Exact(1);
ShrAssign, sym::shr_assign, shr_assign_trait, Target::Trait, GenericRequirement::Exact(1);
Index, sym::index, index_trait, Target::Trait, GenericRequirement::Exact(1);
IndexMut, sym::index_mut, index_mut_trait, Target::Trait, GenericRequirement::Exact(1);
Add, sym::add, add_trait, TraitId, GenericRequirement::Exact(1);
Sub, sym::sub, sub_trait, TraitId, GenericRequirement::Exact(1);
Mul, sym::mul, mul_trait, TraitId, GenericRequirement::Exact(1);
Div, sym::div, div_trait, TraitId, GenericRequirement::Exact(1);
Rem, sym::rem, rem_trait, TraitId, GenericRequirement::Exact(1);
Neg, sym::neg, neg_trait, TraitId, GenericRequirement::Exact(0);
Not, sym::not, not_trait, TraitId, GenericRequirement::Exact(0);
BitXor, sym::bitxor, bitxor_trait, TraitId, GenericRequirement::Exact(1);
BitAnd, sym::bitand, bitand_trait, TraitId, GenericRequirement::Exact(1);
BitOr, sym::bitor, bitor_trait, TraitId, GenericRequirement::Exact(1);
Shl, sym::shl, shl_trait, TraitId, GenericRequirement::Exact(1);
Shr, sym::shr, shr_trait, TraitId, GenericRequirement::Exact(1);
AddAssign, sym::add_assign, add_assign_trait, TraitId, GenericRequirement::Exact(1);
SubAssign, sym::sub_assign, sub_assign_trait, TraitId, GenericRequirement::Exact(1);
MulAssign, sym::mul_assign, mul_assign_trait, TraitId, GenericRequirement::Exact(1);
DivAssign, sym::div_assign, div_assign_trait, TraitId, GenericRequirement::Exact(1);
RemAssign, sym::rem_assign, rem_assign_trait, TraitId, GenericRequirement::Exact(1);
BitXorAssign, sym::bitxor_assign, bitxor_assign_trait, TraitId, GenericRequirement::Exact(1);
BitAndAssign, sym::bitand_assign, bitand_assign_trait, TraitId, GenericRequirement::Exact(1);
BitOrAssign, sym::bitor_assign, bitor_assign_trait, TraitId, GenericRequirement::Exact(1);
ShlAssign, sym::shl_assign, shl_assign_trait, TraitId, GenericRequirement::Exact(1);
ShrAssign, sym::shr_assign, shr_assign_trait, TraitId, GenericRequirement::Exact(1);
Index, sym::index, index_trait, TraitId, GenericRequirement::Exact(1);
IndexMut, sym::index_mut, index_mut_trait, TraitId, GenericRequirement::Exact(1);
UnsafeCell, sym::unsafe_cell, unsafe_cell_type, Target::Struct, GenericRequirement::None;
UnsafePinned, sym::unsafe_pinned, unsafe_pinned_type, Target::Struct, GenericRequirement::None;
VaList, sym::va_list, va_list, Target::Struct, GenericRequirement::None;
UnsafeCell, sym::unsafe_cell, unsafe_cell_type, StructId, GenericRequirement::None;
UnsafePinned, sym::unsafe_pinned, unsafe_pinned_type, StructId, GenericRequirement::None;
VaList, sym::va_list, va_list, StructId, GenericRequirement::None;
Deref, sym::deref, deref_trait, Target::Trait, GenericRequirement::Exact(0);
DerefMut, sym::deref_mut, deref_mut_trait, Target::Trait, GenericRequirement::Exact(0);
DerefTarget, sym::deref_target, deref_target, Target::AssocTy, GenericRequirement::None;
Receiver, sym::receiver, receiver_trait, Target::Trait, GenericRequirement::None;
ReceiverTarget, sym::receiver_target, receiver_target, Target::AssocTy, GenericRequirement::None;
Deref, sym::deref, deref_trait, TraitId, GenericRequirement::Exact(0);
DerefMut, sym::deref_mut, deref_mut_trait, TraitId, GenericRequirement::Exact(0);
DerefTarget, sym::deref_target, deref_target, TypeAliasId, GenericRequirement::None;
Receiver, sym::receiver, receiver_trait, TraitId, GenericRequirement::None;
ReceiverTarget, sym::receiver_target, receiver_target, TypeAliasId, GenericRequirement::None;
Fn, sym::fn_, fn_trait, Target::Trait, GenericRequirement::Exact(1);
FnMut, sym::fn_mut, fn_mut_trait, Target::Trait, GenericRequirement::Exact(1);
FnOnce, sym::fn_once, fn_once_trait, Target::Trait, GenericRequirement::Exact(1);
AsyncFn, sym::async_fn, async_fn_trait, Target::Trait, GenericRequirement::Exact(1);
AsyncFnMut, sym::async_fn_mut, async_fn_mut_trait, Target::Trait, GenericRequirement::Exact(1);
AsyncFnOnce, sym::async_fn_once, async_fn_once_trait, Target::Trait, GenericRequirement::Exact(1);
Fn, sym::fn_, fn_trait, TraitId, GenericRequirement::Exact(1);
FnMut, sym::fn_mut, fn_mut_trait, TraitId, GenericRequirement::Exact(1);
FnOnce, sym::fn_once, fn_once_trait, TraitId, GenericRequirement::Exact(1);
AsyncFn, sym::async_fn, async_fn_trait, TraitId, GenericRequirement::Exact(1);
AsyncFnMut, sym::async_fn_mut, async_fn_mut_trait, TraitId, GenericRequirement::Exact(1);
AsyncFnOnce, sym::async_fn_once, async_fn_once_trait, TraitId, GenericRequirement::Exact(1);
CallRefFuture, sym::call_ref_future, call_ref_future_ty, Target::AssocTy, GenericRequirement::None;
CallOnceFuture, sym::call_once_future, call_once_future_ty, Target::AssocTy, GenericRequirement::None;
AsyncFnOnceOutput, sym::async_fn_once_output, async_fn_once_output_ty, Target::AssocTy, GenericRequirement::None;
CallRefFuture, sym::call_ref_future, call_ref_future_ty, TypeAliasId, GenericRequirement::None;
CallOnceFuture, sym::call_once_future, call_once_future_ty, TypeAliasId, GenericRequirement::None;
AsyncFnOnceOutput, sym::async_fn_once_output, async_fn_once_output_ty, TypeAliasId, GenericRequirement::None;
FnOnceOutput, sym::fn_once_output, fn_once_output, Target::AssocTy, GenericRequirement::None;
FnOnceOutput, sym::fn_once_output, fn_once_output, TypeAliasId, GenericRequirement::None;
Future, sym::future_trait, future_trait, Target::Trait, GenericRequirement::Exact(0);
CoroutineState, sym::coroutine_state, coroutine_state, Target::Enum, GenericRequirement::None;
Coroutine, sym::coroutine, coroutine_trait, Target::Trait, GenericRequirement::Minimum(1);
CoroutineReturn, sym::coroutine_return, coroutine_return_ty, Target::AssocTy, GenericRequirement::None;
CoroutineYield, sym::coroutine_yield, coroutine_yield_ty, Target::AssocTy, GenericRequirement::None;
Unpin, sym::unpin, unpin_trait, Target::Trait, GenericRequirement::None;
Pin, sym::pin, pin_type, Target::Struct, GenericRequirement::None;
Future, sym::future_trait, future_trait, TraitId, GenericRequirement::Exact(0);
CoroutineState, sym::coroutine_state, coroutine_state, EnumId, GenericRequirement::None;
Coroutine, sym::coroutine, coroutine_trait, TraitId, GenericRequirement::Minimum(1);
CoroutineReturn, sym::coroutine_return, coroutine_return_ty, TypeAliasId, GenericRequirement::None;
CoroutineYield, sym::coroutine_yield, coroutine_yield_ty, TypeAliasId, GenericRequirement::None;
Unpin, sym::unpin, unpin_trait, TraitId, GenericRequirement::None;
Pin, sym::pin, pin_type, StructId, GenericRequirement::None;
PartialEq, sym::eq, eq_trait, Target::Trait, GenericRequirement::Exact(1);
PartialOrd, sym::partial_ord, partial_ord_trait, Target::Trait, GenericRequirement::Exact(1);
CVoid, sym::c_void, c_void, Target::Enum, GenericRequirement::None;
PartialEq, sym::eq, eq_trait, TraitId, GenericRequirement::Exact(1);
PartialOrd, sym::partial_ord, partial_ord_trait, TraitId, GenericRequirement::Exact(1);
CVoid, sym::c_void, c_void, EnumId, GenericRequirement::None;
// A number of panic-related lang items. The `panic` item corresponds to divide-by-zero and
// various panic cases with `match`. The `panic_bounds_check` item is for indexing arrays.
@ -424,107 +327,107 @@ language_item_table! {
// in the sense that a crate is not required to have it defined to use it, but a final product
// is required to define it somewhere. Additionally, there are restrictions on crates that use
// a weak lang item, but do not have it defined.
Panic, sym::panic, panic_fn, Target::Fn, GenericRequirement::Exact(0);
PanicNounwind, sym::panic_nounwind, panic_nounwind, Target::Fn, GenericRequirement::Exact(0);
PanicFmt, sym::panic_fmt, panic_fmt, Target::Fn, GenericRequirement::None;
PanicDisplay, sym::panic_display, panic_display, Target::Fn, GenericRequirement::None;
ConstPanicFmt, sym::const_panic_fmt, const_panic_fmt, Target::Fn, GenericRequirement::None;
PanicBoundsCheck, sym::panic_bounds_check, panic_bounds_check_fn, Target::Fn, GenericRequirement::Exact(0);
PanicMisalignedPointerDereference, sym::panic_misaligned_pointer_dereference, panic_misaligned_pointer_dereference_fn, Target::Fn, GenericRequirement::Exact(0);
PanicInfo, sym::panic_info, panic_info, Target::Struct, GenericRequirement::None;
PanicLocation, sym::panic_location, panic_location, Target::Struct, GenericRequirement::None;
PanicImpl, sym::panic_impl, panic_impl, Target::Fn, GenericRequirement::None;
PanicCannotUnwind, sym::panic_cannot_unwind, panic_cannot_unwind, Target::Fn, GenericRequirement::Exact(0);
PanicNullPointerDereference, sym::panic_null_pointer_dereference, panic_null_pointer_dereference, Target::Fn, GenericRequirement::None;
Panic, sym::panic, panic_fn, FunctionId, GenericRequirement::Exact(0);
PanicNounwind, sym::panic_nounwind, panic_nounwind, FunctionId, GenericRequirement::Exact(0);
PanicFmt, sym::panic_fmt, panic_fmt, FunctionId, GenericRequirement::None;
PanicDisplay, sym::panic_display, panic_display, FunctionId, GenericRequirement::None;
ConstPanicFmt, sym::const_panic_fmt, const_panic_fmt, FunctionId, GenericRequirement::None;
PanicBoundsCheck, sym::panic_bounds_check, panic_bounds_check_fn, FunctionId, GenericRequirement::Exact(0);
PanicMisalignedPointerDereference, sym::panic_misaligned_pointer_dereference, panic_misaligned_pointer_dereference_fn, FunctionId, GenericRequirement::Exact(0);
PanicInfo, sym::panic_info, panic_info, StructId, GenericRequirement::None;
PanicLocation, sym::panic_location, panic_location, StructId, GenericRequirement::None;
PanicImpl, sym::panic_impl, panic_impl, FunctionId, GenericRequirement::None;
PanicCannotUnwind, sym::panic_cannot_unwind, panic_cannot_unwind, FunctionId, GenericRequirement::Exact(0);
PanicNullPointerDereference, sym::panic_null_pointer_dereference, panic_null_pointer_dereference, FunctionId, GenericRequirement::None;
/// libstd panic entry point. Necessary for const eval to be able to catch it
BeginPanic, sym::begin_panic, begin_panic_fn, Target::Fn, GenericRequirement::None;
BeginPanic, sym::begin_panic, begin_panic_fn, FunctionId, GenericRequirement::None;
// Lang items needed for `format_args!()`.
FormatAlignment, sym::format_alignment, format_alignment, Target::Enum, GenericRequirement::None;
FormatArgument, sym::format_argument, format_argument, Target::Struct, GenericRequirement::None;
FormatArguments, sym::format_arguments, format_arguments, Target::Struct, GenericRequirement::None;
FormatCount, sym::format_count, format_count, Target::Enum, GenericRequirement::None;
FormatPlaceholder, sym::format_placeholder, format_placeholder, Target::Struct, GenericRequirement::None;
FormatUnsafeArg, sym::format_unsafe_arg, format_unsafe_arg, Target::Struct, GenericRequirement::None;
FormatAlignment, sym::format_alignment, format_alignment, EnumId, GenericRequirement::None;
FormatArgument, sym::format_argument, format_argument, StructId, GenericRequirement::None;
FormatArguments, sym::format_arguments, format_arguments, StructId, GenericRequirement::None;
FormatCount, sym::format_count, format_count, EnumId, GenericRequirement::None;
FormatPlaceholder, sym::format_placeholder, format_placeholder, StructId, GenericRequirement::None;
FormatUnsafeArg, sym::format_unsafe_arg, format_unsafe_arg, StructId, GenericRequirement::None;
ExchangeMalloc, sym::exchange_malloc, exchange_malloc_fn, Target::Fn, GenericRequirement::None;
BoxFree, sym::box_free, box_free_fn, Target::Fn, GenericRequirement::Minimum(1);
DropInPlace, sym::drop_in_place, drop_in_place_fn, Target::Fn, GenericRequirement::Minimum(1);
AllocLayout, sym::alloc_layout, alloc_layout, Target::Struct, GenericRequirement::None;
ExchangeMalloc, sym::exchange_malloc, exchange_malloc_fn, FunctionId, GenericRequirement::None;
BoxFree, sym::box_free, box_free_fn, FunctionId, GenericRequirement::Minimum(1);
DropInPlace, sym::drop_in_place, drop_in_place_fn, FunctionId, GenericRequirement::Minimum(1);
AllocLayout, sym::alloc_layout, alloc_layout, StructId, GenericRequirement::None;
Start, sym::start, start_fn, Target::Fn, GenericRequirement::Exact(1);
Start, sym::start, start_fn, FunctionId, GenericRequirement::Exact(1);
EhPersonality, sym::eh_personality, eh_personality, Target::Fn, GenericRequirement::None;
EhCatchTypeinfo, sym::eh_catch_typeinfo, eh_catch_typeinfo, Target::Static, GenericRequirement::None;
EhPersonality, sym::eh_personality, eh_personality, FunctionId, GenericRequirement::None;
EhCatchTypeinfo, sym::eh_catch_typeinfo, eh_catch_typeinfo, StaticId, GenericRequirement::None;
OwnedBox, sym::owned_box, owned_box, Target::Struct, GenericRequirement::Minimum(1);
OwnedBox, sym::owned_box, owned_box, StructId, GenericRequirement::Minimum(1);
PhantomData, sym::phantom_data, phantom_data, Target::Struct, GenericRequirement::Exact(1);
PhantomData, sym::phantom_data, phantom_data, StructId, GenericRequirement::Exact(1);
ManuallyDrop, sym::manually_drop, manually_drop, Target::Struct, GenericRequirement::None;
ManuallyDrop, sym::manually_drop, manually_drop, StructId, GenericRequirement::None;
MaybeUninit, sym::maybe_uninit, maybe_uninit, Target::Union, GenericRequirement::None;
MaybeUninit, sym::maybe_uninit, maybe_uninit, UnionId, GenericRequirement::None;
/// Align offset for stride != 1; must not panic.
AlignOffset, sym::align_offset, align_offset_fn, Target::Fn, GenericRequirement::None;
AlignOffset, sym::align_offset, align_offset_fn, FunctionId, GenericRequirement::None;
Termination, sym::termination, termination, Target::Trait, GenericRequirement::None;
Termination, sym::termination, termination, TraitId, GenericRequirement::None;
Try, sym::Try, try_trait, Target::Trait, GenericRequirement::None;
Try, sym::Try, try_trait, TraitId, GenericRequirement::None;
Tuple, sym::tuple_trait, tuple_trait, Target::Trait, GenericRequirement::Exact(0);
Tuple, sym::tuple_trait, tuple_trait, TraitId, GenericRequirement::Exact(0);
SliceLen, sym::slice_len_fn, slice_len_fn, Target::Method(MethodKind::Inherent), GenericRequirement::None;
SliceLen, sym::slice_len_fn, slice_len_fn, FunctionId, GenericRequirement::None;
// Language items from AST lowering
TryTraitFromResidual, sym::from_residual, from_residual_fn, Target::Method(MethodKind::Trait { body: false }), GenericRequirement::None;
TryTraitFromOutput, sym::from_output, from_output_fn, Target::Method(MethodKind::Trait { body: false }), GenericRequirement::None;
TryTraitBranch, sym::branch, branch_fn, Target::Method(MethodKind::Trait { body: false }), GenericRequirement::None;
TryTraitFromYeet, sym::from_yeet, from_yeet_fn, Target::Fn, GenericRequirement::None;
TryTraitFromResidual, sym::from_residual, from_residual_fn, FunctionId, GenericRequirement::None;
TryTraitFromOutput, sym::from_output, from_output_fn, FunctionId, GenericRequirement::None;
TryTraitBranch, sym::branch, branch_fn, FunctionId, GenericRequirement::None;
TryTraitFromYeet, sym::from_yeet, from_yeet_fn, FunctionId, GenericRequirement::None;
PointerLike, sym::pointer_like, pointer_like, Target::Trait, GenericRequirement::Exact(0);
PointerLike, sym::pointer_like, pointer_like, TraitId, GenericRequirement::Exact(0);
ConstParamTy, sym::const_param_ty, const_param_ty_trait, Target::Trait, GenericRequirement::Exact(0);
ConstParamTy, sym::const_param_ty, const_param_ty_trait, TraitId, GenericRequirement::Exact(0);
Poll, sym::Poll, poll, Target::Enum, GenericRequirement::None;
PollReady, sym::Ready, poll_ready_variant, Target::Variant, GenericRequirement::None;
PollPending, sym::Pending, poll_pending_variant, Target::Variant, GenericRequirement::None;
Poll, sym::Poll, poll, EnumId, GenericRequirement::None;
PollReady, sym::Ready, poll_ready_variant, EnumVariantId, GenericRequirement::None;
PollPending, sym::Pending, poll_pending_variant, EnumVariantId, GenericRequirement::None;
// FIXME(swatinem): the following lang items are used for async lowering and
// should become obsolete eventually.
ResumeTy, sym::ResumeTy, resume_ty, Target::Struct, GenericRequirement::None;
GetContext, sym::get_context, get_context_fn, Target::Fn, GenericRequirement::None;
ResumeTy, sym::ResumeTy, resume_ty, StructId, GenericRequirement::None;
GetContext, sym::get_context, get_context_fn, FunctionId, GenericRequirement::None;
Context, sym::Context, context, Target::Struct, GenericRequirement::None;
FuturePoll, sym::poll, future_poll_fn, Target::Method(MethodKind::Trait { body: false }), GenericRequirement::None;
FutureOutput, sym::future_output, future_output, Target::TypeAlias, GenericRequirement::None;
Context, sym::Context, context, StructId, GenericRequirement::None;
FuturePoll, sym::poll, future_poll_fn, FunctionId, GenericRequirement::None;
FutureOutput, sym::future_output, future_output, TypeAliasId, GenericRequirement::None;
Option, sym::Option, option_type, Target::Enum, GenericRequirement::None;
OptionSome, sym::Some, option_some_variant, Target::Variant, GenericRequirement::None;
OptionNone, sym::None, option_none_variant, Target::Variant, GenericRequirement::None;
Option, sym::Option, option_type, EnumId, GenericRequirement::None;
OptionSome, sym::Some, option_some_variant, EnumVariantId, GenericRequirement::None;
OptionNone, sym::None, option_none_variant, EnumVariantId, GenericRequirement::None;
ResultOk, sym::Ok, result_ok_variant, Target::Variant, GenericRequirement::None;
ResultErr, sym::Err, result_err_variant, Target::Variant, GenericRequirement::None;
ResultOk, sym::Ok, result_ok_variant, EnumVariantId, GenericRequirement::None;
ResultErr, sym::Err, result_err_variant, EnumVariantId, GenericRequirement::None;
ControlFlowContinue, sym::Continue, cf_continue_variant, Target::Variant, GenericRequirement::None;
ControlFlowBreak, sym::Break, cf_break_variant, Target::Variant, GenericRequirement::None;
ControlFlowContinue, sym::Continue, cf_continue_variant, EnumVariantId, GenericRequirement::None;
ControlFlowBreak, sym::Break, cf_break_variant, EnumVariantId, GenericRequirement::None;
IntoFutureIntoFuture, sym::into_future, into_future_fn, Target::Method(MethodKind::Trait { body: false }), GenericRequirement::None;
IntoIterIntoIter, sym::into_iter, into_iter_fn, Target::Method(MethodKind::Trait { body: false }), GenericRequirement::None;
IteratorNext, sym::next, next_fn, Target::Method(MethodKind::Trait { body: false}), GenericRequirement::None;
Iterator, sym::iterator, iterator, Target::Trait, GenericRequirement::None;
IntoFutureIntoFuture, sym::into_future, into_future_fn, FunctionId, GenericRequirement::None;
IntoIterIntoIter, sym::into_iter, into_iter_fn, FunctionId, GenericRequirement::None;
IteratorNext, sym::next, next_fn, FunctionId, GenericRequirement::None;
Iterator, sym::iterator, iterator, TraitId, GenericRequirement::None;
PinNewUnchecked, sym::new_unchecked, new_unchecked_fn, Target::Method(MethodKind::Inherent), GenericRequirement::None;
PinNewUnchecked, sym::new_unchecked, new_unchecked_fn, FunctionId, GenericRequirement::None;
RangeFrom, sym::RangeFrom, range_from_struct, Target::Struct, GenericRequirement::None;
RangeFull, sym::RangeFull, range_full_struct, Target::Struct, GenericRequirement::None;
RangeInclusiveStruct, sym::RangeInclusive, range_inclusive_struct, Target::Struct, GenericRequirement::None;
RangeInclusiveNew, sym::range_inclusive_new, range_inclusive_new_method, Target::Method(MethodKind::Inherent), GenericRequirement::None;
Range, sym::Range, range_struct, Target::Struct, GenericRequirement::None;
RangeToInclusive, sym::RangeToInclusive, range_to_inclusive_struct, Target::Struct, GenericRequirement::None;
RangeTo, sym::RangeTo, range_to_struct, Target::Struct, GenericRequirement::None;
RangeFrom, sym::RangeFrom, range_from_struct, StructId, GenericRequirement::None;
RangeFull, sym::RangeFull, range_full_struct, StructId, GenericRequirement::None;
RangeInclusiveStruct, sym::RangeInclusive, range_inclusive_struct, StructId, GenericRequirement::None;
RangeInclusiveNew, sym::range_inclusive_new, range_inclusive_new_method, FunctionId, GenericRequirement::None;
Range, sym::Range, range_struct, StructId, GenericRequirement::None;
RangeToInclusive, sym::RangeToInclusive, range_to_inclusive_struct, StructId, GenericRequirement::None;
RangeTo, sym::RangeTo, range_to_struct, StructId, GenericRequirement::None;
String, sym::String, string, Target::Struct, GenericRequirement::None;
CStr, sym::CStr, c_str, Target::Struct, GenericRequirement::None;
Ordering, sym::Ordering, ordering, Target::Enum, GenericRequirement::None;
String, sym::String, string, StructId, GenericRequirement::None;
CStr, sym::CStr, c_str, StructId, GenericRequirement::None;
Ordering, sym::Ordering, ordering, EnumId, GenericRequirement::None;
}

View file

@ -19,7 +19,7 @@ extern crate ra_ap_rustc_abi as rustc_abi;
pub mod db;
pub mod attr;
pub mod attrs;
pub mod builtin_type;
pub mod item_scope;
pub mod per_ns;
@ -45,7 +45,7 @@ pub mod find_path;
pub mod import_map;
pub mod visibility;
use intern::{Interned, Symbol, sym};
use intern::{Interned, Symbol};
pub use rustc_abi as layout;
use thin_vec::ThinVec;
use triomphe::Arc;
@ -80,7 +80,7 @@ use syntax::{AstNode, ast};
pub use hir_expand::{Intern, Lookup, tt};
use crate::{
attr::Attrs,
attrs::AttrFlags,
builtin_type::BuiltinType,
db::DefDatabase,
expr_store::ExpressionStoreSourceMap,
@ -600,17 +600,17 @@ impl HasModule for ModuleId {
/// An ID of a module, **local** to a `DefMap`.
pub type LocalModuleId = Idx<nameres::ModuleData>;
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, salsa::Update)]
pub struct FieldId {
// FIXME: Store this as an erased `salsa::Id` to save space
pub parent: VariantId,
pub local_id: LocalFieldId,
}
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, salsa::Update)]
pub struct TupleId(pub u32);
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, salsa::Update)]
pub struct TupleFieldId {
pub tuple: TupleId,
pub index: u32,
@ -956,10 +956,16 @@ impl CallableDefId {
}
}
#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
// FIXME: We probably should use this in more places.
/// This is used to avoid interning the whole `AttrDefId`, so we intern just modules and not everything.
#[salsa_macros::interned(debug, no_lifetime)]
pub struct InternedModuleId {
pub loc: ModuleId,
}
#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, salsa_macros::Supertype)]
pub enum AttrDefId {
ModuleId(ModuleId),
FieldId(FieldId),
ModuleId(InternedModuleId),
AdtId(AdtId),
FunctionId(FunctionId),
EnumVariantId(EnumVariantId),
@ -969,15 +975,12 @@ pub enum AttrDefId {
TypeAliasId(TypeAliasId),
MacroId(MacroId),
ImplId(ImplId),
GenericParamId(GenericParamId),
ExternBlockId(ExternBlockId),
ExternCrateId(ExternCrateId),
UseId(UseId),
}
impl_from!(
ModuleId,
FieldId,
AdtId(StructId, EnumId, UnionId),
EnumVariantId,
StaticId,
@ -987,41 +990,11 @@ impl_from!(
TypeAliasId,
MacroId(Macro2Id, MacroRulesId, ProcMacroId),
ImplId,
GenericParamId,
ExternCrateId,
UseId
for AttrDefId
);
impl TryFrom<ModuleDefId> for AttrDefId {
type Error = ();
fn try_from(value: ModuleDefId) -> Result<Self, Self::Error> {
match value {
ModuleDefId::ModuleId(it) => Ok(it.into()),
ModuleDefId::FunctionId(it) => Ok(it.into()),
ModuleDefId::AdtId(it) => Ok(it.into()),
ModuleDefId::EnumVariantId(it) => Ok(it.into()),
ModuleDefId::ConstId(it) => Ok(it.into()),
ModuleDefId::StaticId(it) => Ok(it.into()),
ModuleDefId::TraitId(it) => Ok(it.into()),
ModuleDefId::TypeAliasId(it) => Ok(it.into()),
ModuleDefId::MacroId(id) => Ok(id.into()),
ModuleDefId::BuiltinType(_) => Err(()),
}
}
}
impl From<ItemContainerId> for AttrDefId {
fn from(acid: ItemContainerId) -> Self {
match acid {
ItemContainerId::ModuleId(mid) => AttrDefId::ModuleId(mid),
ItemContainerId::ImplId(iid) => AttrDefId::ImplId(iid),
ItemContainerId::TraitId(tid) => AttrDefId::TraitId(tid),
ItemContainerId::ExternBlockId(id) => AttrDefId::ExternBlockId(id),
}
}
}
impl From<AssocItemId> for AttrDefId {
fn from(assoc: AssocItemId) -> Self {
match assoc {
@ -1041,7 +1014,7 @@ impl From<VariantId> for AttrDefId {
}
}
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, salsa_macros::Supertype)]
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, salsa_macros::Supertype, salsa::Update)]
pub enum VariantId {
EnumVariantId(EnumVariantId),
StructId(StructId),
@ -1262,8 +1235,7 @@ impl HasModule for GenericDefId {
impl HasModule for AttrDefId {
fn module(&self, db: &dyn DefDatabase) -> ModuleId {
match self {
AttrDefId::ModuleId(it) => *it,
AttrDefId::FieldId(it) => it.parent.module(db),
AttrDefId::ModuleId(it) => it.loc(db),
AttrDefId::AdtId(it) => it.module(db),
AttrDefId::FunctionId(it) => it.module(db),
AttrDefId::EnumVariantId(it) => it.module(db),
@ -1273,12 +1245,6 @@ impl HasModule for AttrDefId {
AttrDefId::TypeAliasId(it) => it.module(db),
AttrDefId::ImplId(it) => it.module(db),
AttrDefId::ExternBlockId(it) => it.module(db),
AttrDefId::GenericParamId(it) => match it {
GenericParamId::TypeParamId(it) => it.parent(),
GenericParamId::ConstParamId(it) => it.parent(),
GenericParamId::LifetimeParamId(it) => it.parent,
}
.module(db),
AttrDefId::MacroId(it) => it.module(db),
AttrDefId::ExternCrateId(it) => it.module(db),
AttrDefId::UseId(it) => it.module(db),
@ -1402,32 +1368,18 @@ pub enum Complete {
}
impl Complete {
pub fn extract(is_trait: bool, attrs: &Attrs) -> Complete {
let mut do_not_complete = Complete::Yes;
for ra_attr in attrs.rust_analyzer_tool() {
let segments = ra_attr.path.segments();
if segments.len() != 2 {
continue;
}
let action = segments[1].symbol();
if *action == sym::completions {
match ra_attr.token_tree_value().map(|tt| tt.token_trees().flat_tokens()) {
Some([tt::TokenTree::Leaf(tt::Leaf::Ident(ident))]) => {
if ident.sym == sym::ignore_flyimport {
do_not_complete = Complete::IgnoreFlyimport;
} else if is_trait {
if ident.sym == sym::ignore_methods {
do_not_complete = Complete::IgnoreMethods;
} else if ident.sym == sym::ignore_flyimport_methods {
do_not_complete = Complete::IgnoreFlyimportMethods;
}
}
}
_ => {}
}
#[inline]
pub fn extract(is_trait: bool, attrs: AttrFlags) -> Complete {
if attrs.contains(AttrFlags::COMPLETE_IGNORE_FLYIMPORT) {
return Complete::IgnoreFlyimport;
} else if is_trait {
if attrs.contains(AttrFlags::COMPLETE_IGNORE_METHODS) {
return Complete::IgnoreMethods;
} else if attrs.contains(AttrFlags::COMPLETE_IGNORE_FLYIMPORT_METHODS) {
return Complete::IgnoreFlyimportMethods;
}
}
do_not_complete
Complete::Yes
}
#[inline]

View file

@ -300,21 +300,21 @@ fn match_by_first_token_literally() {
check(
r#"
macro_rules! m {
($i:ident) => ( mod $i {} );
($i:ident) => ( enum $i {} );
(= $i:ident) => ( fn $i() {} );
(+ $i:ident) => ( struct $i; )
}
m! { foo }
m! { Foo }
m! { = bar }
m! { + Baz }
"#,
expect![[r#"
macro_rules! m {
($i:ident) => ( mod $i {} );
($i:ident) => ( enum $i {} );
(= $i:ident) => ( fn $i() {} );
(+ $i:ident) => ( struct $i; )
}
mod foo {}
enum Foo {}
fn bar() {}
struct Baz;
"#]],
@ -326,21 +326,21 @@ fn match_by_last_token_literally() {
check(
r#"
macro_rules! m {
($i:ident) => ( mod $i {} );
($i:ident) => ( enum $i {} );
($i:ident =) => ( fn $i() {} );
($i:ident +) => ( struct $i; )
}
m! { foo }
m! { Foo }
m! { bar = }
m! { Baz + }
"#,
expect![[r#"
macro_rules! m {
($i:ident) => ( mod $i {} );
($i:ident) => ( enum $i {} );
($i:ident =) => ( fn $i() {} );
($i:ident +) => ( struct $i; )
}
mod foo {}
enum Foo {}
fn bar() {}
struct Baz;
"#]],
@ -352,21 +352,21 @@ fn match_by_ident() {
check(
r#"
macro_rules! m {
($i:ident) => ( mod $i {} );
($i:ident) => ( enum $i {} );
(spam $i:ident) => ( fn $i() {} );
(eggs $i:ident) => ( struct $i; )
}
m! { foo }
m! { Foo }
m! { spam bar }
m! { eggs Baz }
"#,
expect![[r#"
macro_rules! m {
($i:ident) => ( mod $i {} );
($i:ident) => ( enum $i {} );
(spam $i:ident) => ( fn $i() {} );
(eggs $i:ident) => ( struct $i; )
}
mod foo {}
enum Foo {}
fn bar() {}
struct Baz;
"#]],
@ -378,12 +378,12 @@ fn match_by_separator_token() {
check(
r#"
macro_rules! m {
($($i:ident),*) => ($(mod $i {} )*);
($($i:ident),*) => ($(enum $i {} )*);
($($i:ident)#*) => ($(fn $i() {} )*);
($i:ident ,# $ j:ident) => ( struct $i; struct $ j; )
}
m! { foo, bar }
m! { Baz, Qux }
m! { foo# bar }
@ -391,13 +391,13 @@ m! { Foo,# Bar }
"#,
expect![[r#"
macro_rules! m {
($($i:ident),*) => ($(mod $i {} )*);
($($i:ident),*) => ($(enum $i {} )*);
($($i:ident)#*) => ($(fn $i() {} )*);
($i:ident ,# $ j:ident) => ( struct $i; struct $ j; )
}
mod foo {}
mod bar {}
enum Baz {}
enum Qux {}
fn foo() {}
fn bar() {}
@ -1114,11 +1114,11 @@ fn test_single_item() {
check(
r#"
macro_rules! m { ($i:item) => ( $i ) }
m! { mod c {} }
m! { struct C {} }
"#,
expect![[r#"
macro_rules! m { ($i:item) => ( $i ) }
mod c {}
struct C {}
"#]],
)
}
@ -1144,6 +1144,7 @@ m! {
type T = u8;
}
"#,
// The modules are counted twice, once because of the module and once because of the macro call.
expect![[r#"
macro_rules! m { ($($i:item)*) => ($($i )*) }
extern crate a;
@ -1161,7 +1162,9 @@ trait J {}
fn h() {}
extern {}
type T = u8;
"#]],
mod b;
mod c {}"#]],
);
}
@ -1958,28 +1961,6 @@ fn f() {
);
}
#[test]
fn test_edition_handling_in() {
check(
r#"
//- /main.rs crate:main deps:old edition:2021
fn f() {
old::parse_try_old!(try!{});
}
//- /old.rs crate:old edition:2015
#[macro_export]
macro_rules! parse_try_old {
($it:expr) => {};
}
"#,
expect![[r#"
fn f() {
;
}
"#]],
);
}
#[test]
fn semicolon_does_not_glue() {
check(
@ -2051,3 +2032,33 @@ fn f() {
"#]],
);
}
#[test]
fn per_token_edition() {
check(
r#"
//- /foo.rs crate:foo edition:2024
#[macro_export]
macro_rules! m {
($e:expr) => {};
}
//- /bar.rs crate:bar deps:foo edition:2021
fn gen() -> usize {
0
}
fn foo() {
foo::m!(gen());
}
"#,
expect![[r#"
fn gen() -> usize {
0
}
fn foo() {
;
}
"#]],
);
}

View file

@ -245,6 +245,21 @@ pub fn identity_when_valid(_attr: TokenStream, item: TokenStream) -> TokenStream
}
}
for (_, module) in def_map.modules() {
let Some(src) = module.declaration_source(&db) else {
continue;
};
if let Some(macro_file) = src.file_id.macro_file() {
let pp = pretty_print_macro_expansion(
src.value.syntax().clone(),
db.span_map(macro_file.into()).as_ref(),
false,
false,
);
format_to!(expanded_text, "\n{}", pp)
}
}
for impl_id in def_map[local_id].scope.impls() {
let src = impl_id.lookup(&db).source(&db);
if let Some(macro_file) = src.file_id.macro_file()
@ -372,7 +387,6 @@ impl ProcMacroExpander for IdentityWhenValidProcMacroExpander {
subtree,
syntax_bridge::TopEntryPoint::MacroItems,
&mut |_| span::Edition::CURRENT,
span::Edition::CURRENT,
);
if parse.errors().is_empty() {
Ok(subtree.clone())
@ -413,10 +427,7 @@ fn regression_20171() {
#dollar_crate::panic::panic_2021!();
}}
};
token_tree_to_syntax_node(
&tt,
syntax_bridge::TopEntryPoint::MacroStmts,
&mut |_| Edition::CURRENT,
Edition::CURRENT,
);
token_tree_to_syntax_node(&tt, syntax_bridge::TopEntryPoint::MacroStmts, &mut |_| {
Edition::CURRENT
});
}

View file

@ -9,37 +9,65 @@ use crate::macro_expansion_tests::{check, check_errors};
#[test]
fn attribute_macro_attr_censoring() {
cov_mark::check!(attribute_macro_attr_censoring);
check(
r#"
//- proc_macros: identity
#[attr1] #[proc_macros::identity] #[attr2]
struct S;
"#,
expect![[r#"
#[attr1] #[proc_macros::identity] #[attr2]
//- minicore: derive
#[attr1] #[derive()] #[proc_macros::identity] #[attr2]
struct S;
/// Foo
#[cfg_attr(false, doc = "abc...", attr1)]
mod foo {
#![cfg_attr(true, cfg_attr(true, foo, cfg_attr(false, bar), proc_macros::identity))]
#![cfg_attr(true, doc = "123...", attr2)]
#![attr3]
#[cfg_attr(true, cfg(false))]
fn foo() {}
#[cfg(true)]
fn bar() {}
}
"#,
expect![[r##"
#[attr1] #[derive()] #[proc_macros::identity] #[attr2]
struct S;
/// Foo
#[cfg_attr(false, doc = "abc...", attr1)]
mod foo {
#![cfg_attr(true, cfg_attr(true, foo, cfg_attr(false, bar), proc_macros::identity))]
#![cfg_attr(true, doc = "123...", attr2)]
#![attr3]
#[cfg_attr(true, cfg(false))]
fn foo() {}
#[cfg(true)]
fn bar() {}
}
#[attr1]
#[attr2] struct S;"#]],
#[attr2] struct S;
#[doc = " Foo"] mod foo {
# ![foo]
# ![doc = "123..."]
# ![attr2]
# ![attr3]
#[cfg_attr(true , cfg(false ))] fn foo() {}
#[cfg(true )] fn bar() {}
}"##]],
);
}
#[test]
fn derive_censoring() {
cov_mark::check!(derive_censoring);
check(
r#"
//- proc_macros: derive_identity
//- minicore:derive
#[attr1]
#[derive(Foo)]
#[derive(proc_macros::DeriveIdentity)]
#[derive(Bar)]
#[attr2]
struct S;
"#,
expect![[r#"
use derive as my_cool_derive;
#[attr1]
#[derive(Foo)]
#[derive(proc_macros::DeriveIdentity)]
@ -47,6 +75,60 @@ struct S;
#[attr2]
struct S;
#[my_cool_derive()]
#[cfg_attr(true, derive(), attr1, derive(proc_macros::DeriveIdentity))]
#[my_cool_derive()]
struct Foo {
#[cfg_attr(false, cfg(false), attr2)]
v1: i32,
#[cfg_attr(true, cfg(false), attr2)]
v1: i32,
#[cfg_attr(true, attr3)]
v2: fn(#[cfg(false)] param: i32, #[cfg_attr(true, attr4)] param2: u32),
v3: Foo<{
#[cfg(false)]
let foo = 123;
456
}>,
#[cfg(false)]
v4: bool // No comma here
}
"#,
expect![[r#"
use derive as my_cool_derive;
#[attr1]
#[derive(Foo)]
#[derive(proc_macros::DeriveIdentity)]
#[derive(Bar)]
#[attr2]
struct S;
#[my_cool_derive()]
#[cfg_attr(true, derive(), attr1, derive(proc_macros::DeriveIdentity))]
#[my_cool_derive()]
struct Foo {
#[cfg_attr(false, cfg(false), attr2)]
v1: i32,
#[cfg_attr(true, cfg(false), attr2)]
v1: i32,
#[cfg_attr(true, attr3)]
v2: fn(#[cfg(false)] param: i32, #[cfg_attr(true, attr4)] param2: u32),
v3: Foo<{
#[cfg(false)]
let foo = 123;
456
}>,
#[cfg(false)]
v4: bool // No comma here
}
#[attr1]
#[my_cool_derive()] struct Foo {
v1: i32, #[attr3]v2: fn(#[attr4]param2: u32), v3: Foo< {
456
}
>,
}
#[attr1]
#[derive(Bar)]
#[attr2] struct S;"#]],
@ -87,7 +169,7 @@ fn foo() { bar.; blub }
fn foo() { bar.; blub }
fn foo() {
bar. ;
bar.;
blub
}"#]],
);
@ -234,3 +316,28 @@ use proc_macros::disallow_cfg;
expect![[r#""#]],
);
}
#[test]
fn derive_helpers_are_ignored() {
check(
r#"
//- proc_macros: identity, helper_should_be_ignored, helper_should_be_ignored_derive
//- minicore: derive
use proc_macros::{identity, helper_should_be_ignored, HelperShouldBeIgnoredDerive};
#[derive(HelperShouldBeIgnoredDerive)]
#[helper_should_be_ignored]
#[identity]
struct Foo;
"#,
expect![[r#"
use proc_macros::{identity, helper_should_be_ignored, HelperShouldBeIgnoredDerive};
#[derive(HelperShouldBeIgnoredDerive)]
#[helper_should_be_ignored]
#[identity]
struct Foo;
#[helper_should_be_ignored] struct Foo;"#]],
);
}

View file

@ -391,19 +391,14 @@ pub(crate) fn crate_local_def_map(db: &dyn DefDatabase, crate_id: Crate) -> DefM
)
.entered();
let module_data = ModuleData::new(
ModuleOrigin::CrateRoot { definition: krate.root_file_id(db) },
Visibility::Public,
);
let root_file_id = crate_id.root_file_id(db);
let module_data =
ModuleData::new(ModuleOrigin::CrateRoot { definition: root_file_id }, Visibility::Public);
let def_map =
DefMap::empty(crate_id, Arc::new(DefMapCrateData::new(krate.edition)), module_data, None);
let (def_map, local_def_map) = collector::collect_defs(
db,
def_map,
TreeId::new(krate.root_file_id(db).into(), None),
None,
);
let (def_map, local_def_map) =
collector::collect_defs(db, def_map, TreeId::new(root_file_id.into(), None), None);
DefMapPair::new(db, def_map, local_def_map)
}

View file

@ -4,7 +4,8 @@ use std::mem;
use cfg::CfgOptions;
use hir_expand::{
AstId, ExpandTo, HirFileId, InFile, Intern, Lookup, MacroCallKind, MacroDefKind,
AstId, AttrMacroAttrIds, ExpandTo, HirFileId, InFile, Intern, Lookup, MacroCallKind,
MacroDefKind,
mod_path::ModPath,
name::{AsName, Name},
span_map::SpanMap,
@ -21,8 +22,8 @@ use triomphe::Arc;
use crate::{
AssocItemId, AstIdWithPath, ConstLoc, FunctionId, FunctionLoc, ImplId, ItemContainerId,
ItemLoc, MacroCallId, ModuleId, TraitId, TypeAliasId, TypeAliasLoc,
attr::Attrs,
db::DefDatabase,
item_tree::AttrsOrCfg,
macro_call_as_call_id,
nameres::{
DefMap, LocalDefMap, MacroSubNs,
@ -191,19 +192,22 @@ impl<'a> AssocItemCollector<'a> {
fn collect_item(&mut self, item: ast::AssocItem) {
let ast_id = self.ast_id_map.ast_id(&item);
let attrs = Attrs::new(self.db, &item, self.span_map.as_ref(), self.cfg_options);
if let Err(cfg) = attrs.is_cfg_enabled(self.cfg_options) {
self.diagnostics.push(DefDiagnostic::unconfigured_code(
self.module_id.local_id,
InFile::new(self.file_id, ast_id.erase()),
cfg,
self.cfg_options.clone(),
));
return;
}
let attrs =
match AttrsOrCfg::lower(self.db, &item, &|| self.cfg_options, self.span_map.as_ref()) {
AttrsOrCfg::Enabled { attrs } => attrs,
AttrsOrCfg::CfgDisabled(cfg) => {
self.diagnostics.push(DefDiagnostic::unconfigured_code(
self.module_id.local_id,
InFile::new(self.file_id, ast_id.erase()),
cfg.0,
self.cfg_options.clone(),
));
return;
}
};
let ast_id = InFile::new(self.file_id, ast_id.upcast());
'attrs: for attr in &*attrs {
'attrs: for (attr_id, attr) in attrs.as_ref().iter() {
let ast_id_with_path = AstIdWithPath { path: attr.path.clone(), ast_id };
match self.def_map.resolve_attr_macro(
@ -212,6 +216,7 @@ impl<'a> AssocItemCollector<'a> {
self.module_id.local_id,
ast_id_with_path,
attr,
attr_id,
) {
Ok(ResolvedAttr::Macro(call_id)) => {
let loc = self.db.lookup_intern_macro_call(call_id);
@ -240,8 +245,12 @@ impl<'a> AssocItemCollector<'a> {
Err(_) => {
self.diagnostics.push(DefDiagnostic::unresolved_macro_call(
self.module_id.local_id,
MacroCallKind::Attr { ast_id, attr_args: None, invoc_attr_index: attr.id },
attr.path().clone(),
MacroCallKind::Attr {
ast_id,
attr_args: None,
censored_attr_ids: AttrMacroAttrIds::from_one(attr_id),
},
(*attr.path).clone(),
));
}
}

View file

@ -2,7 +2,7 @@
use base_db::Crate;
use hir_expand::{
MacroCallId, MacroCallKind, MacroDefId,
AttrMacroAttrIds, MacroCallId, MacroCallKind, MacroDefId,
attrs::{Attr, AttrId, AttrInput},
inert_attr_macro::find_builtin_attr_idx,
mod_path::{ModPath, PathKind},
@ -28,6 +28,7 @@ pub enum ResolvedAttr {
}
impl DefMap {
/// This cannot be used to resolve items that allow derives.
pub(crate) fn resolve_attr_macro(
&self,
local_def_map: &LocalDefMap,
@ -35,6 +36,7 @@ impl DefMap {
original_module: LocalModuleId,
ast_id: AstIdWithPath<ast::Item>,
attr: &Attr,
attr_id: AttrId,
) -> Result<ResolvedAttr, UnresolvedMacro> {
// NB: does not currently work for derive helpers as they aren't recorded in the `DefMap`
@ -68,6 +70,9 @@ impl DefMap {
db,
&ast_id,
attr,
// There aren't any active attributes before this one, because attribute macros
// replace their input, and derive macros are not allowed in this function.
AttrMacroAttrIds::from_one(attr_id),
self.krate,
db.macro_def(def),
)))
@ -102,6 +107,7 @@ pub(super) fn attr_macro_as_call_id(
db: &dyn DefDatabase,
item_attr: &AstIdWithPath<ast::Item>,
macro_attr: &Attr,
censored_attr_ids: AttrMacroAttrIds,
krate: Crate,
def: MacroDefId,
) -> MacroCallId {
@ -121,7 +127,7 @@ pub(super) fn attr_macro_as_call_id(
MacroCallKind::Attr {
ast_id: item_attr.ast_id,
attr_args: arg.map(Arc::new),
invoc_attr_index: macro_attr.id,
censored_attr_ids,
},
macro_attr.ctxt,
)

View file

@ -3,14 +3,14 @@
//! `DefCollector::collect` contains the fixed-point iteration loop which
//! resolves imports and expands macros.
use std::{cmp::Ordering, iter, mem, ops::Not};
use std::{cmp::Ordering, iter, mem};
use base_db::{BuiltDependency, Crate, CrateOrigin, LangCrateOrigin};
use cfg::{CfgAtom, CfgExpr, CfgOptions};
use either::Either;
use hir_expand::{
EditionedFileId, ErasedAstId, ExpandTo, HirFileId, InFile, MacroCallId, MacroCallKind,
MacroDefId, MacroDefKind,
AttrMacroAttrIds, EditionedFileId, ErasedAstId, ExpandTo, HirFileId, InFile, MacroCallId,
MacroCallKind, MacroDefId, MacroDefKind,
attrs::{Attr, AttrId},
builtin::{find_builtin_attr, find_builtin_derive, find_builtin_macro},
mod_path::{ModPath, PathKind},
@ -18,9 +18,10 @@ use hir_expand::{
proc_macro::CustomProcMacroExpander,
};
use intern::{Interned, sym};
use itertools::{Itertools, izip};
use itertools::izip;
use la_arena::Idx;
use rustc_hash::{FxHashMap, FxHashSet};
use smallvec::SmallVec;
use span::{Edition, FileAstId, SyntaxContext};
use syntax::ast;
use triomphe::Arc;
@ -32,12 +33,11 @@ use crate::{
MacroRulesId, MacroRulesLoc, MacroRulesLocFlags, ModuleDefId, ModuleId, ProcMacroId,
ProcMacroLoc, StaticLoc, StructLoc, TraitLoc, TypeAliasLoc, UnionLoc, UnresolvedMacro, UseId,
UseLoc,
attr::Attrs,
db::DefDatabase,
item_scope::{GlobId, ImportId, ImportOrExternCrate, PerNsGlobImports},
item_tree::{
self, FieldsShape, ImportAlias, ImportKind, ItemTree, ItemTreeAstId, Macro2, MacroCall,
MacroRules, Mod, ModItemId, ModKind, TreeId,
self, Attrs, AttrsOrCfg, FieldsShape, ImportAlias, ImportKind, ItemTree, ItemTreeAstId,
Macro2, MacroCall, MacroRules, Mod, ModItemId, ModKind, TreeId,
},
macro_call_as_call_id,
nameres::{
@ -102,6 +102,7 @@ pub(super) fn collect_defs(
proc_macros,
from_glob_import: Default::default(),
skip_attrs: Default::default(),
prev_active_attrs: Default::default(),
unresolved_extern_crates: Default::default(),
is_proc_macro: krate.is_proc_macro,
};
@ -206,6 +207,7 @@ enum MacroDirectiveKind<'db> {
},
Attr {
ast_id: AstIdWithPath<ast::Item>,
attr_id: AttrId,
attr: Attr,
mod_item: ModItemId,
/* is this needed? */ tree: TreeId,
@ -246,28 +248,27 @@ struct DefCollector<'db> {
/// This also stores the attributes to skip when we resolve derive helpers and non-macro
/// non-builtin attributes in general.
// FIXME: There has to be a better way to do this
skip_attrs: FxHashMap<InFile<FileAstId<ast::Item>>, AttrId>,
skip_attrs: FxHashMap<AstId<ast::Item>, AttrId>,
/// When we expand attributes, we need to censor all previous active attributes
/// on the same item. Therefore, this holds all active attributes that we already
/// expanded.
prev_active_attrs: FxHashMap<AstId<ast::Item>, SmallVec<[AttrId; 1]>>,
}
impl<'db> DefCollector<'db> {
fn seed_with_top_level(&mut self) {
let _p = tracing::info_span!("seed_with_top_level").entered();
let file_id = self.def_map.krate.data(self.db).root_file_id(self.db);
let file_id = self.def_map.krate.root_file_id(self.db);
let item_tree = self.db.file_item_tree(file_id.into());
let attrs = item_tree.top_level_attrs(self.db, self.def_map.krate);
let attrs = match item_tree.top_level_attrs() {
AttrsOrCfg::Enabled { attrs } => attrs.as_ref(),
AttrsOrCfg::CfgDisabled(it) => it.1.as_ref(),
};
let crate_data = Arc::get_mut(&mut self.def_map.data).unwrap();
let mut process = true;
// Process other crate-level attributes.
for attr in &*attrs {
if let Some(cfg) = attr.cfg()
&& self.cfg_options.check(&cfg) == Some(false)
{
process = false;
break;
}
let Some(attr_name) = attr.path.as_ident() else { continue };
match () {
@ -291,7 +292,7 @@ impl<'db> DefCollector<'db> {
() if *attr_name == sym::feature => {
let features =
attr.parse_path_comma_token_tree(self.db).into_iter().flatten().filter_map(
|(feat, _)| match feat.segments() {
|(feat, _, _)| match feat.segments() {
[name] => Some(name.symbol().clone()),
_ => None,
},
@ -344,7 +345,7 @@ impl<'db> DefCollector<'db> {
self.inject_prelude();
if !process {
if matches!(item_tree.top_level_attrs(), AttrsOrCfg::CfgDisabled(_)) {
return;
}
@ -362,10 +363,7 @@ impl<'db> DefCollector<'db> {
fn seed_with_inner(&mut self, tree_id: TreeId) {
let item_tree = tree_id.item_tree(self.db);
let is_cfg_enabled = item_tree
.top_level_attrs(self.db, self.def_map.krate)
.cfg()
.is_none_or(|cfg| self.cfg_options.check(&cfg) != Some(false));
let is_cfg_enabled = matches!(item_tree.top_level_attrs(), AttrsOrCfg::Enabled { .. });
if is_cfg_enabled {
self.inject_prelude();
@ -456,18 +454,18 @@ impl<'db> DefCollector<'db> {
self.unresolved_macros.iter().enumerate().find_map(|(idx, directive)| match &directive
.kind
{
MacroDirectiveKind::Attr { ast_id, mod_item, attr, tree, item_tree } => {
MacroDirectiveKind::Attr { ast_id, mod_item, attr_id, attr, tree, item_tree } => {
self.def_map.diagnostics.push(DefDiagnostic::unresolved_macro_call(
directive.module_id,
MacroCallKind::Attr {
ast_id: ast_id.ast_id,
attr_args: None,
invoc_attr_index: attr.id,
censored_attr_ids: AttrMacroAttrIds::from_one(*attr_id),
},
attr.path().clone(),
(*attr.path).clone(),
));
self.skip_attrs.insert(ast_id.ast_id.with_value(mod_item.ast_id()), attr.id);
self.skip_attrs.insert(ast_id.ast_id.with_value(mod_item.ast_id()), *attr_id);
Some((idx, directive, *mod_item, *tree, *item_tree))
}
@ -1240,7 +1238,17 @@ impl<'db> DefCollector<'db> {
let mut macros = mem::take(&mut self.unresolved_macros);
let mut resolved = Vec::new();
let mut push_resolved = |directive: &MacroDirective<'_>, call_id| {
resolved.push((directive.module_id, directive.depth, directive.container, call_id));
let attr_macro_item = match &directive.kind {
MacroDirectiveKind::Attr { ast_id, .. } => Some(ast_id.ast_id),
MacroDirectiveKind::FnLike { .. } | MacroDirectiveKind::Derive { .. } => None,
};
resolved.push((
directive.module_id,
directive.depth,
directive.container,
call_id,
attr_macro_item,
));
};
#[derive(PartialEq, Eq)]
@ -1350,6 +1358,7 @@ impl<'db> DefCollector<'db> {
MacroDirectiveKind::Attr {
ast_id: file_ast_id,
mod_item,
attr_id,
attr,
tree,
item_tree,
@ -1362,7 +1371,7 @@ impl<'db> DefCollector<'db> {
let mod_dir = collector.mod_dirs[&directive.module_id].clone();
collector
.skip_attrs
.insert(InFile::new(file_id, mod_item.ast_id()), attr.id);
.insert(InFile::new(file_id, mod_item.ast_id()), *attr_id);
ModCollector {
def_collector: collector,
@ -1398,7 +1407,6 @@ impl<'db> DefCollector<'db> {
// being cfg'ed out).
// Ideally we will just expand them to nothing here. But we are only collecting macro calls,
// not expanding them, so we have no way to do that.
// If you add an ignored attribute here, also add it to `Semantics::might_be_inside_macro_call()`.
if matches!(
def.kind,
MacroDefKind::BuiltInAttr(_, expander)
@ -1410,8 +1418,18 @@ impl<'db> DefCollector<'db> {
}
}
let call_id = || {
attr_macro_as_call_id(self.db, file_ast_id, attr, self.def_map.krate, def)
let mut call_id = || {
let active_attrs = self.prev_active_attrs.entry(ast_id).or_default();
active_attrs.push(*attr_id);
attr_macro_as_call_id(
self.db,
file_ast_id,
attr,
AttrMacroAttrIds::from_many(active_attrs),
self.def_map.krate,
def,
)
};
if matches!(def,
MacroDefId { kind: MacroDefKind::BuiltInAttr(_, exp), .. }
@ -1429,7 +1447,7 @@ impl<'db> DefCollector<'db> {
let diag = DefDiagnostic::invalid_derive_target(
directive.module_id,
ast_id,
attr.id,
*attr_id,
);
self.def_map.diagnostics.push(diag);
return recollect_without(self);
@ -1442,7 +1460,7 @@ impl<'db> DefCollector<'db> {
Some(derive_macros) => {
let call_id = call_id();
let mut len = 0;
for (idx, (path, call_site)) in derive_macros.enumerate() {
for (idx, (path, call_site, _)) in derive_macros.enumerate() {
let ast_id = AstIdWithPath::new(
file_id,
ast_id.value,
@ -1453,7 +1471,7 @@ impl<'db> DefCollector<'db> {
depth: directive.depth + 1,
kind: MacroDirectiveKind::Derive {
ast_id,
derive_attr: attr.id,
derive_attr: *attr_id,
derive_pos: idx,
ctxt: call_site.ctx,
derive_macro_id: call_id,
@ -1469,13 +1487,13 @@ impl<'db> DefCollector<'db> {
// Check the comment in [`builtin_attr_macro`].
self.def_map.modules[directive.module_id]
.scope
.init_derive_attribute(ast_id, attr.id, call_id, len + 1);
.init_derive_attribute(ast_id, *attr_id, call_id, len + 1);
}
None => {
let diag = DefDiagnostic::malformed_derive(
directive.module_id,
ast_id,
attr.id,
*attr_id,
);
self.def_map.diagnostics.push(diag);
}
@ -1522,8 +1540,14 @@ impl<'db> DefCollector<'db> {
self.def_map.modules[module_id].scope.add_macro_invoc(ptr.map(|(_, it)| it), call_id);
}
for (module_id, depth, container, macro_call_id) in resolved {
self.collect_macro_expansion(module_id, macro_call_id, depth, container);
for (module_id, depth, container, macro_call_id, attr_macro_item) in resolved {
self.collect_macro_expansion(
module_id,
macro_call_id,
depth,
container,
attr_macro_item,
);
}
res
@ -1535,6 +1559,7 @@ impl<'db> DefCollector<'db> {
macro_call_id: MacroCallId,
depth: usize,
container: ItemContainerId,
attr_macro_item: Option<AstId<ast::Item>>,
) {
if depth > self.def_map.recursion_limit() as usize {
cov_mark::hit!(macro_expansion_overflow);
@ -1545,6 +1570,34 @@ impl<'db> DefCollector<'db> {
let item_tree = self.db.file_item_tree(file_id);
// Derive helpers that are in scope for an item are also in scope for attribute macro expansions
// of that item (but not derive or fn like macros).
// FIXME: This is a hack. The proper way to do this is by having a chain of derive helpers scope,
// where the next scope in the chain is the parent hygiene context of the span. Unfortunately
// it's difficult to implement with our current name resolution and hygiene system.
// This hack is also incorrect since it ignores item in blocks. But the main reason to bring derive
// helpers into scope in this case is to help with:
// ```
// #[derive(DeriveWithHelper)]
// #[helper]
// #[attr_macro]
// struct Foo;
// ```
// Where `attr_macro`'s input will include `#[helper]` but not the derive, and it will likely therefore
// also include it in its output. Therefore I hope not supporting blocks is fine at least for now.
if let Some(attr_macro_item) = attr_macro_item
&& let Some(derive_helpers) = self.def_map.derive_helpers_in_scope.get(&attr_macro_item)
{
let derive_helpers = derive_helpers.clone();
for item in item_tree.top_level_items() {
self.def_map
.derive_helpers_in_scope
.entry(InFile::new(file_id, item.ast_id()))
.or_default()
.extend(derive_helpers.iter().cloned());
}
}
let mod_dir = if macro_call_id.is_include_macro(self.db) {
ModDir::root()
} else {
@ -1712,16 +1765,17 @@ impl ModCollector<'_, '_> {
};
let mut process_mod_item = |item: ModItemId| {
let attrs = self.item_tree.attrs(db, krate, item.ast_id());
if let Some(cfg) = attrs.cfg()
&& !self.is_cfg_enabled(&cfg)
{
let ast_id = item.ast_id().erase();
self.emit_unconfigured_diagnostic(InFile::new(self.file_id(), ast_id), &cfg);
return;
}
let attrs = match self.item_tree.attrs(item.ast_id()) {
Some(AttrsOrCfg::Enabled { attrs }) => attrs.as_ref(),
None => Attrs::EMPTY,
Some(AttrsOrCfg::CfgDisabled(cfg)) => {
let ast_id = item.ast_id().erase();
self.emit_unconfigured_diagnostic(InFile::new(self.file_id(), ast_id), &cfg.0);
return;
}
};
if let Err(()) = self.resolve_attributes(&attrs, item, container) {
if let Err(()) = self.resolve_attributes(attrs, item, container) {
// Do not process the item. It has at least one non-builtin attribute, so the
// fixed-point algorithm is required to resolve the rest of them.
return;
@ -1733,7 +1787,7 @@ impl ModCollector<'_, '_> {
self.def_collector.crate_local_def_map.unwrap_or(&self.def_collector.local_def_map);
match item {
ModItemId::Mod(m) => self.collect_module(m, &attrs),
ModItemId::Mod(m) => self.collect_module(m, attrs),
ModItemId::Use(item_tree_id) => {
let id =
UseLoc { container: module, id: InFile::new(self.file_id(), item_tree_id) }
@ -2006,7 +2060,7 @@ impl ModCollector<'_, '_> {
);
return;
};
for (path, _) in paths {
for (path, _, _) in paths {
if let Some(name) = path.as_ident() {
single_imports.push(name.clone());
}
@ -2020,7 +2074,7 @@ impl ModCollector<'_, '_> {
);
}
fn collect_module(&mut self, module_ast_id: ItemTreeAstId<Mod>, attrs: &Attrs) {
fn collect_module(&mut self, module_ast_id: ItemTreeAstId<Mod>, attrs: Attrs<'_>) {
let path_attr = attrs.by_key(sym::path).string_value_unescape();
let is_macro_use = attrs.by_key(sym::macro_use).exists();
let module = &self.item_tree[module_ast_id];
@ -2061,23 +2115,18 @@ impl ModCollector<'_, '_> {
self.file_id(),
&module.name,
path_attr.as_deref(),
self.def_collector.def_map.krate,
) {
Ok((file_id, is_mod_rs, mod_dir)) => {
let item_tree = db.file_item_tree(file_id.into());
let krate = self.def_collector.def_map.krate;
let is_enabled = item_tree
.top_level_attrs(db, krate)
.cfg()
.and_then(|cfg| self.is_cfg_enabled(&cfg).not().then_some(cfg))
.map_or(Ok(()), Err);
match is_enabled {
Err(cfg) => {
match item_tree.top_level_attrs() {
AttrsOrCfg::CfgDisabled(cfg) => {
self.emit_unconfigured_diagnostic(
InFile::new(self.file_id(), module_ast_id.erase()),
&cfg,
&cfg.0,
);
}
Ok(()) => {
AttrsOrCfg::Enabled { attrs } => {
let module_id = self.push_child_module(
module.name.clone(),
ast_id.value,
@ -2093,11 +2142,8 @@ impl ModCollector<'_, '_> {
mod_dir,
}
.collect_in_top_module(item_tree.top_level_items());
let is_macro_use = is_macro_use
|| item_tree
.top_level_attrs(db, krate)
.by_key(sym::macro_use)
.exists();
let is_macro_use =
is_macro_use || attrs.as_ref().by_key(sym::macro_use).exists();
if is_macro_use {
self.import_all_legacy_macros(module_id);
}
@ -2185,36 +2231,16 @@ impl ModCollector<'_, '_> {
/// assumed to be resolved already.
fn resolve_attributes(
&mut self,
attrs: &Attrs,
attrs: Attrs<'_>,
mod_item: ModItemId,
container: ItemContainerId,
) -> Result<(), ()> {
let mut ignore_up_to = self
let ignore_up_to = self
.def_collector
.skip_attrs
.get(&InFile::new(self.file_id(), mod_item.ast_id()))
.copied();
let iter = attrs
.iter()
.dedup_by(|a, b| {
// FIXME: this should not be required, all attributes on an item should have a
// unique ID!
// Still, this occurs because `#[cfg_attr]` can "expand" to multiple attributes:
// #[cfg_attr(not(off), unresolved, unresolved)]
// struct S;
// We should come up with a different way to ID attributes.
a.id == b.id
})
.skip_while(|attr| match ignore_up_to {
Some(id) if attr.id == id => {
ignore_up_to = None;
true
}
Some(_) => true,
None => false,
});
for attr in iter {
for (attr_id, attr) in attrs.iter_after(ignore_up_to) {
if self.def_collector.def_map.is_builtin_or_registered_attr(&attr.path) {
continue;
}
@ -2229,6 +2255,7 @@ impl ModCollector<'_, '_> {
depth: self.macro_depth + 1,
kind: MacroDirectiveKind::Attr {
ast_id,
attr_id,
attr: attr.clone(),
mod_item,
tree: self.tree_id,
@ -2246,7 +2273,13 @@ impl ModCollector<'_, '_> {
fn collect_macro_rules(&mut self, ast_id: ItemTreeAstId<MacroRules>, module: ModuleId) {
let krate = self.def_collector.def_map.krate;
let mac = &self.item_tree[ast_id];
let attrs = self.item_tree.attrs(self.def_collector.db, krate, ast_id.upcast());
let attrs = match self.item_tree.attrs(ast_id.upcast()) {
Some(AttrsOrCfg::Enabled { attrs }) => attrs.as_ref(),
None => Attrs::EMPTY,
Some(AttrsOrCfg::CfgDisabled(_)) => {
unreachable!("we only get here if the macro is not cfg'ed out")
}
};
let f_ast_id = InFile::new(self.file_id(), ast_id.upcast());
let export_attr = || attrs.by_key(sym::macro_export);
@ -2331,7 +2364,13 @@ impl ModCollector<'_, '_> {
fn collect_macro_def(&mut self, ast_id: ItemTreeAstId<Macro2>, module: ModuleId) {
let krate = self.def_collector.def_map.krate;
let mac = &self.item_tree[ast_id];
let attrs = self.item_tree.attrs(self.def_collector.db, krate, ast_id.upcast());
let attrs = match self.item_tree.attrs(ast_id.upcast()) {
Some(AttrsOrCfg::Enabled { attrs }) => attrs.as_ref(),
None => Attrs::EMPTY,
Some(AttrsOrCfg::CfgDisabled(_)) => {
unreachable!("we only get here if the macro is not cfg'ed out")
}
};
let f_ast_id = InFile::new(self.file_id(), ast_id.upcast());
// Case 1: builtin macros
@ -2460,6 +2499,7 @@ impl ModCollector<'_, '_> {
call_id,
self.macro_depth + 1,
container,
None,
);
}
@ -2515,10 +2555,6 @@ impl ModCollector<'_, '_> {
Some((a, b))
}
fn is_cfg_enabled(&self, cfg: &CfgExpr) -> bool {
self.def_collector.cfg_options.check(cfg) != Some(false)
}
fn emit_unconfigured_diagnostic(&mut self, ast_id: ErasedAstId, cfg: &CfgExpr) {
self.def_collector.def_map.diagnostics.push(DefDiagnostic::unconfigured_code(
self.module_id,
@ -2558,6 +2594,7 @@ mod tests {
proc_macros: Default::default(),
from_glob_import: Default::default(),
skip_attrs: Default::default(),
prev_active_attrs: Default::default(),
is_proc_macro: false,
unresolved_extern_crates: Default::default(),
};

View file

@ -17,8 +17,8 @@ pub enum DefDiagnosticKind {
UnconfiguredCode { ast_id: ErasedAstId, cfg: CfgExpr, opts: CfgOptions },
UnresolvedMacroCall { ast: MacroCallKind, path: ModPath },
UnimplementedBuiltinMacro { ast: AstId<ast::Macro> },
InvalidDeriveTarget { ast: AstId<ast::Item>, id: usize },
MalformedDerive { ast: AstId<ast::Adt>, id: usize },
InvalidDeriveTarget { ast: AstId<ast::Item>, id: AttrId },
MalformedDerive { ast: AstId<ast::Adt>, id: AttrId },
MacroDefError { ast: AstId<ast::Macro>, message: String },
MacroError { ast: AstId<ast::Item>, path: ModPath, err: ExpandErrorKind },
}
@ -119,10 +119,7 @@ impl DefDiagnostic {
ast: AstId<ast::Item>,
id: AttrId,
) -> Self {
Self {
in_module: container,
kind: DefDiagnosticKind::InvalidDeriveTarget { ast, id: id.ast_index() },
}
Self { in_module: container, kind: DefDiagnosticKind::InvalidDeriveTarget { ast, id } }
}
pub(super) fn malformed_derive(
@ -130,9 +127,6 @@ impl DefDiagnostic {
ast: AstId<ast::Adt>,
id: AttrId,
) -> Self {
Self {
in_module: container,
kind: DefDiagnosticKind::MalformedDerive { ast, id: id.ast_index() },
}
Self { in_module: container, kind: DefDiagnosticKind::MalformedDerive { ast, id } }
}
}

View file

@ -1,6 +1,6 @@
//! This module resolves `mod foo;` declaration to file.
use arrayvec::ArrayVec;
use base_db::AnchoredPath;
use base_db::{AnchoredPath, Crate};
use hir_expand::{EditionedFileId, name::Name};
use crate::{HirFileId, db::DefDatabase};
@ -62,6 +62,7 @@ impl ModDir {
file_id: HirFileId,
name: &Name,
attr_path: Option<&str>,
krate: Crate,
) -> Result<(EditionedFileId, bool, ModDir), Box<[String]>> {
let name = name.as_str();
@ -91,7 +92,7 @@ impl ModDir {
if let Some(mod_dir) = self.child(dir_path, !root_dir_owner) {
return Ok((
// FIXME: Edition, is this rightr?
EditionedFileId::new(db, file_id, orig_file_id.edition(db)),
EditionedFileId::new(db, file_id, orig_file_id.edition(db), krate),
is_mod_rs,
mod_dir,
));

View file

@ -3,8 +3,10 @@
use hir_expand::name::{AsName, Name};
use intern::sym;
use crate::attr::Attrs;
use crate::tt::{Leaf, TokenTree, TopSubtree, TtElement};
use crate::{
item_tree::Attrs,
tt::{Leaf, TokenTree, TopSubtree, TtElement},
};
#[derive(Debug, PartialEq, Eq)]
pub struct ProcMacroDef {
@ -29,8 +31,8 @@ impl ProcMacroKind {
}
}
impl Attrs {
pub fn parse_proc_macro_decl(&self, func_name: &Name) -> Option<ProcMacroDef> {
impl Attrs<'_> {
pub(crate) fn parse_proc_macro_decl(&self, func_name: &Name) -> Option<ProcMacroDef> {
if self.is_proc_macro() {
Some(ProcMacroDef { name: func_name.clone(), kind: ProcMacroKind::Bang })
} else if self.is_proc_macro_attribute() {
@ -51,15 +53,10 @@ impl Attrs {
}
}
pub fn parse_proc_macro_derive(&self) -> Option<(Name, Box<[Name]>)> {
pub(crate) fn parse_proc_macro_derive(&self) -> Option<(Name, Box<[Name]>)> {
let derive = self.by_key(sym::proc_macro_derive).tt_values().next()?;
parse_macro_name_and_helper_attrs(derive)
}
pub fn parse_rustc_builtin_macro(&self) -> Option<(Name, Box<[Name]>)> {
let derive = self.by_key(sym::rustc_builtin_macro).tt_values().next()?;
parse_macro_name_and_helper_attrs(derive)
}
}
// This fn is intended for `#[proc_macro_derive(..)]` and `#[rustc_builtin_macro(..)]`, which have
@ -84,14 +81,11 @@ pub(crate) fn parse_macro_name_and_helper_attrs(tt: &TopSubtree) -> Option<(Name
let helpers = tt::TokenTreesView::new(&tt.token_trees().flat_tokens()[3..]).try_into_subtree()?;
let helpers = helpers
.iter()
.filter(
|tt| !matches!(tt, TtElement::Leaf(Leaf::Punct(comma)) if comma.char == ','),
)
.map(|tt| match tt {
.filter_map(|tt| match tt {
TtElement::Leaf(Leaf::Ident(helper)) => Some(helper.as_name()),
_ => None,
})
.collect::<Option<Box<[_]>>>()?;
.collect::<Box<[_]>>();
Some((trait_name.as_name(), helpers))
}

View file

@ -186,15 +186,15 @@ impl<'db> Resolver<'db> {
Path::Normal(it) => &it.mod_path,
Path::LangItem(l, seg) => {
let type_ns = match *l {
LangItemTarget::Union(it) => TypeNs::AdtId(it.into()),
LangItemTarget::TypeAlias(it) => TypeNs::TypeAliasId(it),
LangItemTarget::Struct(it) => TypeNs::AdtId(it.into()),
LangItemTarget::EnumVariant(it) => TypeNs::EnumVariantId(it),
LangItemTarget::UnionId(it) => TypeNs::AdtId(it.into()),
LangItemTarget::TypeAliasId(it) => TypeNs::TypeAliasId(it),
LangItemTarget::StructId(it) => TypeNs::AdtId(it.into()),
LangItemTarget::EnumVariantId(it) => TypeNs::EnumVariantId(it),
LangItemTarget::EnumId(it) => TypeNs::AdtId(it.into()),
LangItemTarget::Trait(it) => TypeNs::TraitId(it),
LangItemTarget::Function(_)
| LangItemTarget::ImplDef(_)
| LangItemTarget::Static(_) => return None,
LangItemTarget::TraitId(it) => TypeNs::TraitId(it),
LangItemTarget::FunctionId(_)
| LangItemTarget::ImplId(_)
| LangItemTarget::StaticId(_) => return None,
};
return Some((
type_ns,
@ -334,14 +334,14 @@ impl<'db> Resolver<'db> {
return Some((
ResolveValueResult::ValueNs(
match *l {
LangItemTarget::Function(it) => ValueNs::FunctionId(it),
LangItemTarget::Static(it) => ValueNs::StaticId(it),
LangItemTarget::Struct(it) => ValueNs::StructId(it),
LangItemTarget::EnumVariant(it) => ValueNs::EnumVariantId(it),
LangItemTarget::Union(_)
| LangItemTarget::ImplDef(_)
| LangItemTarget::TypeAlias(_)
| LangItemTarget::Trait(_)
LangItemTarget::FunctionId(it) => ValueNs::FunctionId(it),
LangItemTarget::StaticId(it) => ValueNs::StaticId(it),
LangItemTarget::StructId(it) => ValueNs::StructId(it),
LangItemTarget::EnumVariantId(it) => ValueNs::EnumVariantId(it),
LangItemTarget::UnionId(_)
| LangItemTarget::ImplId(_)
| LangItemTarget::TypeAliasId(_)
| LangItemTarget::TraitId(_)
| LangItemTarget::EnumId(_) => return None,
},
None,
@ -351,15 +351,15 @@ impl<'db> Resolver<'db> {
}
Path::LangItem(l, Some(_)) => {
let type_ns = match *l {
LangItemTarget::Union(it) => TypeNs::AdtId(it.into()),
LangItemTarget::TypeAlias(it) => TypeNs::TypeAliasId(it),
LangItemTarget::Struct(it) => TypeNs::AdtId(it.into()),
LangItemTarget::EnumVariant(it) => TypeNs::EnumVariantId(it),
LangItemTarget::UnionId(it) => TypeNs::AdtId(it.into()),
LangItemTarget::TypeAliasId(it) => TypeNs::TypeAliasId(it),
LangItemTarget::StructId(it) => TypeNs::AdtId(it.into()),
LangItemTarget::EnumVariantId(it) => TypeNs::EnumVariantId(it),
LangItemTarget::EnumId(it) => TypeNs::AdtId(it.into()),
LangItemTarget::Trait(it) => TypeNs::TraitId(it),
LangItemTarget::Function(_)
| LangItemTarget::ImplDef(_)
| LangItemTarget::Static(_) => return None,
LangItemTarget::TraitId(it) => TypeNs::TraitId(it),
LangItemTarget::FunctionId(_)
| LangItemTarget::ImplId(_)
| LangItemTarget::StaticId(_) => return None,
};
// Remaining segments start from 0 because lang paths have no segments other than the remaining.
return Some((

View file

@ -21,7 +21,7 @@ use triomphe::Arc;
use crate::{
ConstId, EnumId, EnumVariantId, EnumVariantLoc, ExternBlockId, FunctionId, HasModule, ImplId,
ItemContainerId, ModuleId, StaticId, StructId, TraitId, TypeAliasId, UnionId, VariantId,
attr::Attrs,
attrs::AttrFlags,
db::DefDatabase,
expr_store::{
ExpressionStore, ExpressionStoreSourceMap,
@ -31,7 +31,6 @@ use crate::{
},
hir::{ExprId, PatId, generics::GenericParams},
item_tree::{FieldsShape, RawVisibility, visibility_from_ast},
lang_item::LangItem,
src::HasSource,
type_ref::{TraitRef, TypeBound, TypeRefId},
};
@ -48,12 +47,13 @@ pub struct StructSignature {
pub store: Arc<ExpressionStore>,
pub flags: StructFlags,
pub shape: FieldsShape,
pub repr: Option<ReprOptions>,
}
bitflags! {
#[derive(Debug, Copy, Clone, PartialEq, Eq)]
pub struct StructFlags: u8 {
/// Indicates whether this struct has `#[repr]`.
const HAS_REPR = 1 << 0;
/// Indicates whether the struct has a `#[rustc_has_incoherent_inherent_impls]` attribute.
const RUSTC_HAS_INCOHERENT_INHERENT_IMPLS = 1 << 1;
/// Indicates whether the struct has a `#[fundamental]` attribute.
@ -75,26 +75,28 @@ impl StructSignature {
pub fn query(db: &dyn DefDatabase, id: StructId) -> (Arc<Self>, Arc<ExpressionStoreSourceMap>) {
let loc = id.lookup(db);
let InFile { file_id, value: source } = loc.source(db);
let attrs = db.attrs(id.into());
let attrs = AttrFlags::query(db, id.into());
let mut flags = StructFlags::empty();
if attrs.by_key(sym::rustc_has_incoherent_inherent_impls).exists() {
if attrs.contains(AttrFlags::RUSTC_HAS_INCOHERENT_INHERENT_IMPLS) {
flags |= StructFlags::RUSTC_HAS_INCOHERENT_INHERENT_IMPLS;
}
if attrs.by_key(sym::fundamental).exists() {
if attrs.contains(AttrFlags::FUNDAMENTAL) {
flags |= StructFlags::FUNDAMENTAL;
}
if let Some(lang) = attrs.lang_item() {
if attrs.contains(AttrFlags::HAS_REPR) {
flags |= StructFlags::HAS_REPR;
}
if let Some(lang) = attrs.lang_item_with_attrs(db, id.into()) {
match lang {
LangItem::PhantomData => flags |= StructFlags::IS_PHANTOM_DATA,
LangItem::OwnedBox => flags |= StructFlags::IS_BOX,
LangItem::ManuallyDrop => flags |= StructFlags::IS_MANUALLY_DROP,
LangItem::UnsafeCell => flags |= StructFlags::IS_UNSAFE_CELL,
LangItem::UnsafePinned => flags |= StructFlags::IS_UNSAFE_PINNED,
_ if lang == sym::phantom_data => flags |= StructFlags::IS_PHANTOM_DATA,
_ if lang == sym::owned_box => flags |= StructFlags::IS_BOX,
_ if lang == sym::manually_drop => flags |= StructFlags::IS_MANUALLY_DROP,
_ if lang == sym::unsafe_cell => flags |= StructFlags::IS_UNSAFE_CELL,
_ if lang == sym::unsafe_pinned => flags |= StructFlags::IS_UNSAFE_PINNED,
_ => (),
}
}
let repr = attrs.repr();
let shape = adt_shape(source.kind());
let (store, generic_params, source_map) = lower_generic_params(
@ -112,11 +114,19 @@ impl StructSignature {
flags,
shape,
name: as_name_opt(source.name()),
repr,
}),
Arc::new(source_map),
)
}
#[inline]
pub fn repr(&self, db: &dyn DefDatabase, id: StructId) -> Option<ReprOptions> {
if self.flags.contains(StructFlags::HAS_REPR) {
AttrFlags::repr(db, id.into())
} else {
None
}
}
}
#[inline]
@ -134,22 +144,22 @@ pub struct UnionSignature {
pub generic_params: Arc<GenericParams>,
pub store: Arc<ExpressionStore>,
pub flags: StructFlags,
pub repr: Option<ReprOptions>,
}
impl UnionSignature {
pub fn query(db: &dyn DefDatabase, id: UnionId) -> (Arc<Self>, Arc<ExpressionStoreSourceMap>) {
let loc = id.lookup(db);
let attrs = db.attrs(id.into());
let attrs = AttrFlags::query(db, id.into());
let mut flags = StructFlags::empty();
if attrs.by_key(sym::rustc_has_incoherent_inherent_impls).exists() {
if attrs.contains(AttrFlags::RUSTC_HAS_INCOHERENT_INHERENT_IMPLS) {
flags |= StructFlags::RUSTC_HAS_INCOHERENT_INHERENT_IMPLS;
}
if attrs.by_key(sym::fundamental).exists() {
if attrs.contains(AttrFlags::FUNDAMENTAL) {
flags |= StructFlags::FUNDAMENTAL;
}
let repr = attrs.repr();
if attrs.contains(AttrFlags::HAS_REPR) {
flags |= StructFlags::HAS_REPR;
}
let InFile { file_id, value: source } = loc.source(db);
let (store, generic_params, source_map) = lower_generic_params(
@ -165,7 +175,6 @@ impl UnionSignature {
generic_params,
store,
flags,
repr,
name: as_name_opt(source.name()),
}),
Arc::new(source_map),
@ -186,20 +195,17 @@ pub struct EnumSignature {
pub generic_params: Arc<GenericParams>,
pub store: Arc<ExpressionStore>,
pub flags: EnumFlags,
pub repr: Option<ReprOptions>,
}
impl EnumSignature {
pub fn query(db: &dyn DefDatabase, id: EnumId) -> (Arc<Self>, Arc<ExpressionStoreSourceMap>) {
let loc = id.lookup(db);
let attrs = db.attrs(id.into());
let attrs = AttrFlags::query(db, id.into());
let mut flags = EnumFlags::empty();
if attrs.by_key(sym::rustc_has_incoherent_inherent_impls).exists() {
if attrs.contains(AttrFlags::RUSTC_HAS_INCOHERENT_INHERENT_IMPLS) {
flags |= EnumFlags::RUSTC_HAS_INCOHERENT_INHERENT_IMPLS;
}
let repr = attrs.repr();
let InFile { file_id, value: source } = loc.source(db);
let (store, generic_params, source_map) = lower_generic_params(
db,
@ -215,15 +221,14 @@ impl EnumSignature {
generic_params,
store,
flags,
repr,
name: as_name_opt(source.name()),
}),
Arc::new(source_map),
)
}
pub fn variant_body_type(&self) -> IntegerType {
match self.repr {
pub fn variant_body_type(db: &dyn DefDatabase, id: EnumId) -> IntegerType {
match AttrFlags::repr(db, id.into()) {
Some(ReprOptions { int: Some(builtin), .. }) => builtin,
_ => IntegerType::Pointer(true),
}
@ -251,9 +256,9 @@ impl ConstSignature {
let loc = id.lookup(db);
let module = loc.container.module(db);
let attrs = db.attrs(id.into());
let attrs = AttrFlags::query(db, id.into());
let mut flags = ConstFlags::empty();
if attrs.by_key(sym::rustc_allow_incoherent_impl).exists() {
if attrs.contains(AttrFlags::RUSTC_ALLOW_INCOHERENT_IMPL) {
flags |= ConstFlags::RUSTC_ALLOW_INCOHERENT_IMPL;
}
let source = loc.source(db);
@ -306,9 +311,9 @@ impl StaticSignature {
let loc = id.lookup(db);
let module = loc.container.module(db);
let attrs = db.attrs(id.into());
let attrs = AttrFlags::query(db, id.into());
let mut flags = StaticFlags::empty();
if attrs.by_key(sym::rustc_allow_incoherent_impl).exists() {
if attrs.contains(AttrFlags::RUSTC_ALLOW_INCOHERENT_IMPL) {
flags |= StaticFlags::RUSTC_ALLOW_INCOHERENT_IMPL;
}
@ -433,7 +438,7 @@ impl TraitSignature {
let loc = id.lookup(db);
let mut flags = TraitFlags::empty();
let attrs = db.attrs(id.into());
let attrs = AttrFlags::query(db, id.into());
let source = loc.source(db);
if source.value.auto_token().is_some() {
flags.insert(TraitFlags::AUTO);
@ -444,34 +449,23 @@ impl TraitSignature {
if source.value.eq_token().is_some() {
flags.insert(TraitFlags::ALIAS);
}
if attrs.by_key(sym::fundamental).exists() {
if attrs.contains(AttrFlags::FUNDAMENTAL) {
flags |= TraitFlags::FUNDAMENTAL;
}
if attrs.by_key(sym::rustc_has_incoherent_inherent_impls).exists() {
if attrs.contains(AttrFlags::RUSTC_HAS_INCOHERENT_INHERENT_IMPLS) {
flags |= TraitFlags::RUSTC_HAS_INCOHERENT_INHERENT_IMPLS;
}
if attrs.by_key(sym::rustc_paren_sugar).exists() {
if attrs.contains(AttrFlags::RUSTC_PAREN_SUGAR) {
flags |= TraitFlags::RUSTC_PAREN_SUGAR;
}
if attrs.by_key(sym::rustc_coinductive).exists() {
if attrs.contains(AttrFlags::RUSTC_COINDUCTIVE) {
flags |= TraitFlags::COINDUCTIVE;
}
let mut skip_array_during_method_dispatch =
attrs.by_key(sym::rustc_skip_array_during_method_dispatch).exists();
let mut skip_boxed_slice_during_method_dispatch = false;
for tt in attrs.by_key(sym::rustc_skip_during_method_dispatch).tt_values() {
for tt in tt.iter() {
if let tt::iter::TtElement::Leaf(tt::Leaf::Ident(ident)) = tt {
skip_array_during_method_dispatch |= ident.sym == sym::array;
skip_boxed_slice_during_method_dispatch |= ident.sym == sym::boxed_slice;
}
}
}
if skip_array_during_method_dispatch {
if attrs.contains(AttrFlags::RUSTC_SKIP_ARRAY_DURING_METHOD_DISPATCH) {
flags |= TraitFlags::SKIP_ARRAY_DURING_METHOD_DISPATCH;
}
if skip_boxed_slice_during_method_dispatch {
if attrs.contains(AttrFlags::RUSTC_SKIP_BOXED_SLICE_DURING_METHOD_DISPATCH) {
flags |= TraitFlags::SKIP_BOXED_SLICE_DURING_METHOD_DISPATCH;
}
@ -503,7 +497,8 @@ bitflags! {
const HAS_TARGET_FEATURE = 1 << 9;
const DEPRECATED_SAFE_2024 = 1 << 10;
const EXPLICIT_SAFE = 1 << 11;
const RUSTC_INTRINSIC = 1 << 12;
const HAS_LEGACY_CONST_GENERICS = 1 << 12;
const RUSTC_INTRINSIC = 1 << 13;
}
}
@ -516,8 +511,6 @@ pub struct FunctionSignature {
pub ret_type: Option<TypeRefId>,
pub abi: Option<Symbol>,
pub flags: FnFlags,
// FIXME: we should put this behind a fn flags + query to avoid bloating the struct
pub legacy_const_generics_indices: Option<Box<Box<[u32]>>>,
}
impl FunctionSignature {
@ -529,23 +522,26 @@ impl FunctionSignature {
let module = loc.container.module(db);
let mut flags = FnFlags::empty();
let attrs = db.attrs(id.into());
if attrs.by_key(sym::rustc_allow_incoherent_impl).exists() {
let attrs = AttrFlags::query(db, id.into());
if attrs.contains(AttrFlags::RUSTC_ALLOW_INCOHERENT_IMPL) {
flags.insert(FnFlags::RUSTC_ALLOW_INCOHERENT_IMPL);
}
if attrs.by_key(sym::target_feature).exists() {
if attrs.contains(AttrFlags::HAS_TARGET_FEATURE) {
flags.insert(FnFlags::HAS_TARGET_FEATURE);
}
if attrs.by_key(sym::rustc_intrinsic).exists() {
if attrs.contains(AttrFlags::RUSTC_INTRINSIC) {
flags.insert(FnFlags::RUSTC_INTRINSIC);
}
let legacy_const_generics_indices = attrs.rustc_legacy_const_generics();
if attrs.contains(AttrFlags::HAS_LEGACY_CONST_GENERICS) {
flags.insert(FnFlags::HAS_LEGACY_CONST_GENERICS);
}
let source = loc.source(db);
if source.value.unsafe_token().is_some() {
if attrs.by_key(sym::rustc_deprecated_safe_2024).exists() {
if attrs.contains(AttrFlags::RUSTC_DEPRECATED_SAFE_2024) {
flags.insert(FnFlags::DEPRECATED_SAFE_2024);
} else {
flags.insert(FnFlags::UNSAFE);
@ -587,7 +583,6 @@ impl FunctionSignature {
ret_type,
abi,
flags,
legacy_const_generics_indices,
name,
}),
Arc::new(source_map),
@ -636,6 +631,19 @@ impl FunctionSignature {
self.flags.contains(FnFlags::HAS_TARGET_FEATURE)
}
#[inline]
pub fn legacy_const_generics_indices<'db>(
&self,
db: &'db dyn DefDatabase,
id: FunctionId,
) -> Option<&'db [u32]> {
if !self.flags.contains(FnFlags::HAS_LEGACY_CONST_GENERICS) {
return None;
}
AttrFlags::legacy_const_generic_indices(db, id).as_deref()
}
pub fn is_intrinsic(db: &dyn DefDatabase, id: FunctionId) -> bool {
let data = db.function_signature(id);
data.flags.contains(FnFlags::RUSTC_INTRINSIC)
@ -679,11 +687,11 @@ impl TypeAliasSignature {
let loc = id.lookup(db);
let mut flags = TypeAliasFlags::empty();
let attrs = db.attrs(id.into());
if attrs.by_key(sym::rustc_has_incoherent_inherent_impls).exists() {
let attrs = AttrFlags::query(db, id.into());
if attrs.contains(AttrFlags::RUSTC_HAS_INCOHERENT_INHERENT_IMPLS) {
flags.insert(TypeAliasFlags::RUSTC_HAS_INCOHERENT_INHERENT_IMPL);
}
if attrs.by_key(sym::rustc_allow_incoherent_impl).exists() {
if attrs.contains(AttrFlags::RUSTC_ALLOW_INCOHERENT_IMPL) {
flags.insert(TypeAliasFlags::RUSTC_ALLOW_INCOHERENT_IMPL);
}
if matches!(loc.container, ItemContainerId::ExternBlockId(_)) {
@ -866,7 +874,7 @@ fn lower_fields<Field: ast::HasAttrs + ast::HasVisibility>(
let mut has_fields = false;
for (ty, field) in fields.value {
has_fields = true;
match Attrs::is_cfg_enabled_for(db, &field, col.span_map(), cfg_options) {
match AttrFlags::is_cfg_enabled_for(&field, cfg_options) {
Ok(()) => {
let type_ref =
col.lower_type_ref_opt(ty, &mut ExprCollector::impl_trait_error_allocator);
@ -928,7 +936,6 @@ impl EnumVariants {
let loc = e.lookup(db);
let source = loc.source(db);
let ast_id_map = db.ast_id_map(source.file_id);
let span_map = db.span_map(source.file_id);
let mut diagnostics = ThinVec::new();
let cfg_options = loc.container.krate.cfg_options(db);
@ -940,7 +947,7 @@ impl EnumVariants {
.variants()
.filter_map(|variant| {
let ast_id = ast_id_map.ast_id(&variant);
match Attrs::is_cfg_enabled_for(db, &variant, span_map.as_ref(), cfg_options) {
match AttrFlags::is_cfg_enabled_for(&variant, cfg_options) {
Ok(()) => {
let enum_variant =
EnumVariantLoc { id: source.with_value(ast_id), parent: e, index }

View file

@ -7,7 +7,7 @@ use syntax::{AstNode, AstPtr, ast};
use crate::{
AstIdLoc, GenericDefId, LocalFieldId, LocalLifetimeParamId, LocalTypeOrConstParamId, Lookup,
UseId, VariantId, attr::Attrs, db::DefDatabase,
UseId, VariantId, attrs::AttrFlags, db::DefDatabase,
};
pub trait HasSource {
@ -145,15 +145,13 @@ impl HasChildSource<LocalFieldId> for VariantId {
(lookup.source(db).map(|it| it.kind()), lookup.container)
}
};
let span_map = db.span_map(src.file_id);
let mut map = ArenaMap::new();
match &src.value {
ast::StructKind::Tuple(fl) => {
let cfg_options = container.krate.cfg_options(db);
let mut idx = 0;
for fd in fl.fields() {
let enabled =
Attrs::is_cfg_enabled_for(db, &fd, span_map.as_ref(), cfg_options).is_ok();
let enabled = AttrFlags::is_cfg_enabled_for(&fd, cfg_options).is_ok();
if !enabled {
continue;
}
@ -168,8 +166,7 @@ impl HasChildSource<LocalFieldId> for VariantId {
let cfg_options = container.krate.cfg_options(db);
let mut idx = 0;
for fd in fl.fields() {
let enabled =
Attrs::is_cfg_enabled_for(db, &fd, span_map.as_ref(), cfg_options).is_ok();
let enabled = AttrFlags::is_cfg_enabled_for(&fd, cfg_options).is_ok();
if !enabled {
continue;
}

View file

@ -190,7 +190,15 @@ impl TestDB {
let mut res = DefMap::ROOT;
for (module, data) in def_map.modules() {
let src = data.definition_source(self);
if src.file_id != position.file_id {
// We're not comparing the `base_db::EditionedFileId`, but rather the VFS `FileId`, because
// `position.file_id` is created before the def map, causing it to have to wrong crate
// attached often, which means it won't compare equal. This should not be a problem in real
// r-a session, only in tests, because in real r-a we only guess the crate on syntactic-only
// (e.g. on-enter) handlers. The rest pick the `EditionedFileId` from the def map.
let Some(file_id) = src.file_id.file_id() else {
continue;
};
if file_id.file_id(self) != position.file_id.file_id(self) {
continue;
}
@ -230,7 +238,15 @@ impl TestDB {
let mut fn_def = None;
for (_, module) in def_map.modules() {
let file_id = module.definition_source(self).file_id;
if file_id != position.file_id {
// We're not comparing the `base_db::EditionedFileId`, but rather the VFS `FileId`, because
// `position.file_id` is created before the def map, causing it to have to wrong crate
// attached often, which means it won't compare equal. This should not be a problem in real
// r-a session, only in tests, because in real r-a we only guess the crate on syntactic-only
// (e.g. on-enter) handlers. The rest pick the `EditionedFileId` from the def map.
let Some(file_id) = file_id.file_id() else {
continue;
};
if file_id.file_id(self) != position.file_id.file_id(self) {
continue;
}
for decl in module.scope.declarations() {
@ -253,26 +269,25 @@ impl TestDB {
};
if size != Some(new_size) {
size = Some(new_size);
fn_def = Some(it);
fn_def = Some((it, file_id));
}
}
}
}
// Find the innermost block expression that has a `DefMap`.
let def_with_body = fn_def?.into();
let (def_with_body, file_id) = fn_def?;
let def_with_body = def_with_body.into();
let source_map = self.body_with_source_map(def_with_body).1;
let scopes = self.expr_scopes(def_with_body);
let root_syntax_node = self.parse(position.file_id).syntax_node();
let root_syntax_node = self.parse(file_id).syntax_node();
let scope_iter =
algo::ancestors_at_offset(&root_syntax_node, position.offset).filter_map(|node| {
let block = ast::BlockExpr::cast(node)?;
let expr = ast::Expr::from(block);
let expr_id = source_map
.node_expr(InFile::new(position.file_id.into(), &expr))?
.as_expr()
.unwrap();
let expr_id =
source_map.node_expr(InFile::new(file_id.into(), &expr))?.as_expr().unwrap();
let scope = scopes.scope_for(expr_id).unwrap();
Some(scope)
});

View file

@ -23,6 +23,8 @@ triomphe.workspace = true
query-group.workspace = true
salsa.workspace = true
salsa-macros.workspace = true
arrayvec.workspace = true
thin-vec.workspace = true
# local deps
stdx.workspace = true

View file

@ -1,200 +1,397 @@
//! A higher level attributes based on TokenTree, with also some shortcuts.
use std::iter;
use std::{borrow::Cow, fmt, ops};
//! Defines the basics of attributes lowering.
//!
//! The heart and soul of this module is [`expand_cfg_attr()`], alongside its sibling
//! [`expand_cfg_attr_with_doc_comments()`]. It is used to implement all attribute lowering
//! in r-a. Its basic job is to list attributes; however, attributes do not necessarily map
//! into [`ast::Attr`], because `cfg_attr` can map to zero, one, or more attributes
//! (`#[cfg_attr(predicate, attr1, attr2, ...)]`). To bridge this gap, this module defines
//! [`Meta`], which represents a desugared attribute. Various bits of r-a need different
//! things from [`Meta`], therefore it contains many parts. The basic idea is:
//!
//! - There are three kinds of attributes, `path = value`, `path`, and `path(token_tree)`.
//! - Most bits of rust-analyzer only need to deal with some paths. Therefore, we keep
//! the path only if it has up to 2 segments, or one segment for `path = value`.
//! We also only keep the value in `path = value` if it is a literal. However, we always
//! save the all relevant ranges of attributes (the path range, and the full attribute range)
//! for parts of r-a (e.g. name resolution) that need a faithful representation of the
//! attribute.
//!
//! [`expand_cfg_attr()`] expands `cfg_attr`s as it goes (as its name implies), to list
//! all attributes.
//!
//! Another thing to note is that we need to be able to map an attribute back to a range
//! (for diagnostic purposes etc.). This is only ever needed for attributes that participate
//! in name resolution. An attribute is mapped back by its [`AttrId`], which is just an
//! index into the item tree attributes list. To minimize the risk of bugs, we have one
//! place (here) and one function ([`is_item_tree_filtered_attr()`]) that decides whether
//! an attribute participate in name resolution.
use std::{
borrow::Cow, cell::OnceCell, convert::Infallible, fmt, iter::Peekable, ops::ControlFlow,
};
use ::tt::{TextRange, TextSize};
use arrayvec::ArrayVec;
use base_db::Crate;
use cfg::{CfgExpr, CfgOptions};
use either::Either;
use intern::{Interned, Symbol, sym};
use intern::{Interned, Symbol};
use mbe::{DelimiterKind, Punct};
use smallvec::{SmallVec, smallvec};
use span::{Span, SyntaxContext};
use syntax::unescape;
use syntax::{AstNode, AstToken, SyntaxNode, ast, match_ast};
use syntax_bridge::{DocCommentDesugarMode, desugar_doc_comment_text, syntax_node_to_token_tree};
use triomphe::ThinArc;
use parser::T;
use smallvec::SmallVec;
use span::{RealSpanMap, Span, SyntaxContext};
use syntax::{
AstNode, NodeOrToken, SyntaxNode, SyntaxToken,
ast::{self, TokenTreeChildren},
unescape,
};
use syntax_bridge::DocCommentDesugarMode;
use crate::{
AstId,
db::ExpandDatabase,
mod_path::ModPath,
name::Name,
span_map::SpanMapRef,
tt::{self, TopSubtree, token_to_literal},
tt::{self, TopSubtree},
};
/// Syntactical attributes, without filtering of `cfg_attr`s.
#[derive(Default, Debug, Clone, PartialEq, Eq)]
pub struct RawAttrs {
// FIXME: This can become `Box<[Attr]>` if https://internals.rust-lang.org/t/layout-of-dst-box/21728?u=chrefr is accepted.
entries: Option<ThinArc<(), Attr>>,
#[derive(Debug)]
pub struct AttrPath {
/// This can be empty if the path is not of 1 or 2 segments exactly.
pub segments: ArrayVec<SyntaxToken, 2>,
pub range: TextRange,
// FIXME: This shouldn't be textual, `#[test]` needs name resolution.
// And if textual, it shouldn't be here, it should be in hir-def/src/attrs.rs. But some macros
// fully qualify `test` as `core::prelude::vX::test`, and this is more than 2 segments, so hir-def
// attrs can't find it. But this will mean we have to push every up-to-4-segments path, which
// may impact perf. So it was easier to just hack it here.
pub is_test: bool,
}
impl ops::Deref for RawAttrs {
type Target = [Attr];
fn deref(&self) -> &[Attr] {
match &self.entries {
Some(it) => &it.slice,
None => &[],
}
}
}
impl RawAttrs {
pub const EMPTY: Self = Self { entries: None };
pub fn new(
db: &dyn ExpandDatabase,
owner: &dyn ast::HasAttrs,
span_map: SpanMapRef<'_>,
) -> Self {
let entries: Vec<_> = Self::attrs_iter::<true>(db, owner, span_map).collect();
let entries = if entries.is_empty() {
None
} else {
Some(ThinArc::from_header_and_iter((), entries.into_iter()))
};
RawAttrs { entries }
}
/// A [`RawAttrs`] that has its `#[cfg_attr(...)]` attributes expanded.
pub fn new_expanded(
db: &dyn ExpandDatabase,
owner: &dyn ast::HasAttrs,
span_map: SpanMapRef<'_>,
cfg_options: &CfgOptions,
) -> Self {
let entries: Vec<_> =
Self::attrs_iter_expanded::<true>(db, owner, span_map, cfg_options).collect();
let entries = if entries.is_empty() {
None
} else {
Some(ThinArc::from_header_and_iter((), entries.into_iter()))
};
RawAttrs { entries }
}
pub fn attrs_iter<const DESUGAR_COMMENTS: bool>(
db: &dyn ExpandDatabase,
owner: &dyn ast::HasAttrs,
span_map: SpanMapRef<'_>,
) -> impl Iterator<Item = Attr> {
collect_attrs(owner).filter_map(move |(id, attr)| match attr {
Either::Left(attr) => {
attr.meta().and_then(|meta| Attr::from_src(db, meta, span_map, id))
impl AttrPath {
#[inline]
fn extract(path: &ast::Path) -> Self {
let mut is_test = false;
let segments = (|| {
let mut segments = ArrayVec::new();
let segment2 = path.segment()?.name_ref()?.syntax().first_token()?;
if segment2.text() == "test" {
// `#[test]` or `#[core::prelude::vX::test]`.
is_test = true;
}
Either::Right(comment) if DESUGAR_COMMENTS => comment.doc_comment().map(|doc| {
let span = span_map.span_for_range(comment.syntax().text_range());
let (text, kind) = desugar_doc_comment_text(doc, DocCommentDesugarMode::ProcMacro);
Attr {
id,
input: Some(Box::new(AttrInput::Literal(tt::Literal {
symbol: text,
span,
kind,
suffix: None,
}))),
path: Interned::new(ModPath::from(Name::new_symbol(sym::doc, span.ctx))),
ctxt: span.ctx,
let segment1 = path.qualifier();
if let Some(segment1) = segment1 {
if segment1.qualifier().is_some() {
None
} else {
let segment1 = segment1.segment()?.name_ref()?.syntax().first_token()?;
segments.push(segment1);
segments.push(segment2);
Some(segments)
}
}),
Either::Right(_) => None,
})
}
pub fn attrs_iter_expanded<const DESUGAR_COMMENTS: bool>(
db: &dyn ExpandDatabase,
owner: &dyn ast::HasAttrs,
span_map: SpanMapRef<'_>,
cfg_options: &CfgOptions,
) -> impl Iterator<Item = Attr> {
Self::attrs_iter::<DESUGAR_COMMENTS>(db, owner, span_map)
.flat_map(|attr| attr.expand_cfg_attr(db, cfg_options))
}
pub fn merge(&self, other: Self) -> Self {
match (&self.entries, other.entries) {
(None, None) => Self::EMPTY,
(None, entries @ Some(_)) => Self { entries },
(Some(entries), None) => Self { entries: Some(entries.clone()) },
(Some(a), Some(b)) => {
let last_ast_index = a.slice.last().map_or(0, |it| it.id.ast_index() + 1);
let items = a
.slice
.iter()
.cloned()
.chain(b.slice.iter().map(|it| {
let mut it = it.clone();
let id = it.id.ast_index() + last_ast_index;
it.id = AttrId::new(id, it.id.is_inner_attr());
it
}))
.collect::<Vec<_>>();
Self { entries: Some(ThinArc::from_header_and_iter((), items.into_iter())) }
} else {
segments.push(segment2);
Some(segments)
}
})();
AttrPath {
segments: segments.unwrap_or(ArrayVec::new()),
range: path.syntax().text_range(),
is_test,
}
}
/// Processes `cfg_attr`s
pub fn expand_cfg_attr(self, db: &dyn ExpandDatabase, krate: Crate) -> RawAttrs {
let has_cfg_attrs =
self.iter().any(|attr| attr.path.as_ident().is_some_and(|name| *name == sym::cfg_attr));
if !has_cfg_attrs {
return self;
#[inline]
pub fn is1(&self, segment: &str) -> bool {
self.segments.len() == 1 && self.segments[0].text() == segment
}
}
#[derive(Debug)]
pub enum Meta {
/// `name` is `None` if not a single token. `value` is a literal or `None`.
NamedKeyValue {
path_range: TextRange,
name: Option<SyntaxToken>,
value: Option<SyntaxToken>,
},
TokenTree {
path: AttrPath,
tt: ast::TokenTree,
},
Path {
path: AttrPath,
},
}
impl Meta {
#[inline]
pub fn path_range(&self) -> TextRange {
match self {
Meta::NamedKeyValue { path_range, .. } => *path_range,
Meta::TokenTree { path, .. } | Meta::Path { path } => path.range,
}
}
fn extract(iter: &mut Peekable<TokenTreeChildren>) -> Option<(Self, TextSize)> {
let mut start_offset = None;
if let Some(NodeOrToken::Token(colon1)) = iter.peek()
&& colon1.kind() == T![:]
{
start_offset = Some(colon1.text_range().start());
iter.next();
iter.next_if(|it| it.as_token().is_some_and(|it| it.kind() == T![:]));
}
let first_segment = iter
.next_if(|it| it.as_token().is_some_and(|it| it.kind().is_any_identifier()))?
.into_token()?;
let mut is_test = first_segment.text() == "test";
let start_offset = start_offset.unwrap_or_else(|| first_segment.text_range().start());
let mut segments_len = 1;
let mut second_segment = None;
let mut path_range = first_segment.text_range();
while iter.peek().and_then(NodeOrToken::as_token).is_some_and(|it| it.kind() == T![:])
&& let _ = iter.next()
&& iter.peek().and_then(NodeOrToken::as_token).is_some_and(|it| it.kind() == T![:])
&& let _ = iter.next()
&& let Some(NodeOrToken::Token(segment)) = iter.peek()
&& segment.kind().is_any_identifier()
{
segments_len += 1;
is_test = segment.text() == "test";
second_segment = Some(segment.clone());
path_range = TextRange::new(path_range.start(), segment.text_range().end());
iter.next();
}
let cfg_options = krate.cfg_options(db);
let new_attrs = self
.iter()
.cloned()
.flat_map(|attr| attr.expand_cfg_attr(db, cfg_options))
.collect::<Vec<_>>();
let entries = if new_attrs.is_empty() {
None
} else {
Some(ThinArc::from_header_and_iter((), new_attrs.into_iter()))
let segments = |first, second| {
let mut segments = ArrayVec::new();
if segments_len <= 2 {
segments.push(first);
if let Some(second) = second {
segments.push(second);
}
}
segments
};
RawAttrs { entries }
let meta = match iter.peek() {
Some(NodeOrToken::Token(eq)) if eq.kind() == T![=] => {
iter.next();
let value = match iter.peek() {
Some(NodeOrToken::Token(token)) if token.kind().is_literal() => {
// No need to consume it, it will be consumed by `extract_and_eat_comma()`.
Some(token.clone())
}
_ => None,
};
let name = if second_segment.is_none() { Some(first_segment) } else { None };
Meta::NamedKeyValue { path_range, name, value }
}
Some(NodeOrToken::Node(tt)) => Meta::TokenTree {
path: AttrPath {
segments: segments(first_segment, second_segment),
range: path_range,
is_test,
},
tt: tt.clone(),
},
_ => Meta::Path {
path: AttrPath {
segments: segments(first_segment, second_segment),
range: path_range,
is_test,
},
},
};
Some((meta, start_offset))
}
pub fn is_empty(&self) -> bool {
self.entries.is_none()
fn extract_possibly_unsafe(
iter: &mut Peekable<TokenTreeChildren>,
container: &ast::TokenTree,
) -> Option<(Self, TextRange)> {
if iter.peek().is_some_and(|it| it.as_token().is_some_and(|it| it.kind() == T![unsafe])) {
iter.next();
let tt = iter.next()?.into_node()?;
let result = Self::extract(&mut TokenTreeChildren::new(&tt).peekable()).map(
|(meta, start_offset)| (meta, TextRange::new(start_offset, tt_end_offset(&tt))),
);
while iter.next().is_some_and(|it| it.as_token().is_none_or(|it| it.kind() != T![,])) {}
result
} else {
Self::extract(iter).map(|(meta, start_offset)| {
let end_offset = 'find_end_offset: {
for it in iter {
if let NodeOrToken::Token(it) = it
&& it.kind() == T![,]
{
break 'find_end_offset it.text_range().start();
}
}
tt_end_offset(container)
};
(meta, TextRange::new(start_offset, end_offset))
})
}
}
}
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub struct AttrId {
id: u32,
fn tt_end_offset(tt: &ast::TokenTree) -> TextSize {
tt.syntax().last_token().unwrap().text_range().start()
}
// FIXME: This only handles a single level of cfg_attr nesting
// that is `#[cfg_attr(all(), cfg_attr(all(), cfg(any())))]` breaks again
impl AttrId {
const INNER_ATTR_SET_BIT: u32 = 1 << 31;
/// The callback is passed a desugared form of the attribute ([`Meta`]), a [`SyntaxNode`] fully containing it
/// (note: it may not be the direct parent), the range within the [`SyntaxNode`] bounding the attribute,
/// and the outermost `ast::Attr`. Note that one node may map to multiple [`Meta`]s due to `cfg_attr`.
#[inline]
pub fn expand_cfg_attr<'a, BreakValue>(
attrs: impl Iterator<Item = ast::Attr>,
cfg_options: impl FnMut() -> &'a CfgOptions,
mut callback: impl FnMut(Meta, &SyntaxNode, TextRange, &ast::Attr) -> ControlFlow<BreakValue>,
) -> Option<BreakValue> {
expand_cfg_attr_with_doc_comments::<Infallible, _>(
attrs.map(Either::Left),
cfg_options,
move |Either::Left((meta, container, range, top_attr))| {
callback(meta, container, range, top_attr)
},
)
}
pub fn new(id: usize, is_inner: bool) -> Self {
assert!(id <= !Self::INNER_ATTR_SET_BIT as usize);
let id = id as u32;
Self { id: if is_inner { id | Self::INNER_ATTR_SET_BIT } else { id } }
}
/// Like `expand_cfg_attr`, but the attribute stream may also contain doc comments
/// (`Either::Right`), which are forwarded to the callback untouched.
///
/// `#[cfg_attr(cond, ...)]` attributes whose condition is not known-false are
/// flattened: each nested attribute is yielded individually. One extra level of
/// nested `cfg_attr` is handled via an explicit stack (deeper nesting may still
/// break — see the FIXME on this module's cfg_attr handling).
#[inline]
pub fn expand_cfg_attr_with_doc_comments<'a, DocComment, BreakValue>(
    mut attrs: impl Iterator<Item = Either<ast::Attr, DocComment>>,
    mut cfg_options: impl FnMut() -> &'a CfgOptions,
    mut callback: impl FnMut(
        Either<(Meta, &SyntaxNode, TextRange, &ast::Attr), DocComment>,
    ) -> ControlFlow<BreakValue>,
) -> Option<BreakValue> {
    // Stack of `(token iterator, token tree)` pairs, one per active `cfg_attr`.
    let mut stack = SmallVec::<[_; 1]>::new();
    let result = attrs.try_for_each(|top_attr| {
        let top_attr = match top_attr {
            Either::Left(it) => it,
            // Doc comments are passed through verbatim.
            Either::Right(comment) => return callback(Either::Right(comment)),
        };
        if let Some((attr_name, tt)) = top_attr.as_simple_call()
            && attr_name == "cfg_attr"
        {
            // `#[cfg_attr(cond, attr1, attr2, ...)]`: parse `cond` first.
            let mut tt_iter = TokenTreeChildren::new(&tt).peekable();
            let cfg = cfg::CfgExpr::parse_from_ast(&mut tt_iter);
            // `!= Some(false)` means "enabled or unknown": only a condition that
            // is definitely disabled suppresses the nested attributes.
            if cfg_options().check(&cfg) != Some(false) {
                stack.push((tt_iter, tt));
                while let Some((tt_iter, tt)) = stack.last_mut() {
                    let Some((attr, range)) = Meta::extract_possibly_unsafe(tt_iter, tt) else {
                        // Exhausted this `cfg_attr`'s token tree; pop back out.
                        stack.pop();
                        continue;
                    };
                    if let Meta::TokenTree { path, tt: nested_tt } = &attr
                        && path.is1("cfg_attr")
                    {
                        // Nested `cfg_attr`: push its tokens onto the stack
                        // (if its condition is not known-false).
                        let mut nested_tt_iter = TokenTreeChildren::new(nested_tt).peekable();
                        let cfg = cfg::CfgExpr::parse_from_ast(&mut nested_tt_iter);
                        if cfg_options().check(&cfg) != Some(false) {
                            stack.push((nested_tt_iter, nested_tt.clone()));
                        }
                    } else {
                        callback(Either::Left((attr, tt.syntax(), range, &top_attr)))?;
                    }
                }
            }
        } else if let Some(ast_meta) = top_attr.meta()
            && let Some(path) = ast_meta.path()
        {
            // Plain (non-`cfg_attr`) attribute: desugar it into a `Meta`.
            let path = AttrPath::extract(&path);
            let meta = if let Some(tt) = ast_meta.token_tree() {
                Meta::TokenTree { path, tt }
            } else if let Some(value) = ast_meta.expr() {
                // `#[name = value]`: only literal values are preserved.
                let value =
                    if let ast::Expr::Literal(value) = value { Some(value.token()) } else { None };
                let name =
                    if path.segments.len() == 1 { Some(path.segments[0].clone()) } else { None };
                Meta::NamedKeyValue { name, value, path_range: path.range }
            } else {
                Meta::Path { path }
            };
            callback(Either::Left((
                meta,
                ast_meta.syntax(),
                ast_meta.syntax().text_range(),
                &top_attr,
            )))?;
        }
        ControlFlow::Continue(())
    });
    // `try_for_each` short-circuits with the callback's break value, if any.
    result.break_value()
}
/// The attribute's plain index, with the inner-attribute flag bit masked off.
pub fn ast_index(&self) -> usize {
    let without_flag = self.id & !Self::INNER_ATTR_SET_BIT;
    without_flag as usize
}
/// Whether a builtin attribute named `name` is filtered out of the item tree.
/// These attributes are not needed for name resolution, so dropping them saves
/// memory; an attribute may only appear here if it can never be shadowed by a macro.
#[inline]
pub(crate) fn is_item_tree_filtered_attr(name: &str) -> bool {
    const FILTERED: &[&str] = &[
        "doc",
        "stable",
        "unstable",
        "target_feature",
        "allow",
        "expect",
        "warn",
        "deny",
        "forbid",
        "repr",
        "inline",
        "track_caller",
        "must_use",
    ];
    FILTERED.contains(&name)
}
/// Whether this id refers to an inner (`#![...]`) attribute.
pub fn is_inner_attr(&self) -> bool {
    let flag = self.id & Self::INNER_ATTR_SET_BIT;
    flag != 0
}
/// This collects attributes exactly as the item tree needs them. This is used for the item tree,
/// as well as for resolving [`AttrId`]s.
///
/// Returns `Some(Either::Left(v))` when `on_attr` breaks with `v`, and
/// `Some(Either::Right(cfg))` when an active `#[cfg(...)]` disables the whole item.
/// `None` means every attribute was visited without interruption.
pub fn collect_item_tree_attrs<'a, BreakValue>(
    owner: &dyn ast::HasAttrs,
    cfg_options: impl Fn() -> &'a CfgOptions,
    mut on_attr: impl FnMut(Meta, &SyntaxNode, &ast::Attr, TextRange) -> ControlFlow<BreakValue>,
) -> Option<Either<BreakValue, CfgExpr>> {
    let attrs = ast::attrs_including_inner(owner);
    expand_cfg_attr(
        attrs,
        || cfg_options(),
        |attr, container, range, top_attr| {
            // We filter builtin attributes that we don't need for nameres, because this saves memory.
            // I only put the most common attributes, but if some attribute becomes common feel free to add it.
            // Notice, however: for an attribute to be filtered out, it *must* not be shadowable with a macro!
            let filter = match &attr {
                Meta::NamedKeyValue { name: Some(name), .. } => {
                    is_item_tree_filtered_attr(name.text())
                }
                Meta::TokenTree { path, tt } if path.segments.len() == 1 => {
                    let name = path.segments[0].text();
                    if name == "cfg" {
                        // A `#[cfg(...)]` that evaluates to false disables the
                        // whole item; report the parsed condition to the caller.
                        let cfg =
                            CfgExpr::parse_from_ast(&mut TokenTreeChildren::new(tt).peekable());
                        if cfg_options().check(&cfg) == Some(false) {
                            return ControlFlow::Break(Either::Right(cfg));
                        }
                        true
                    } else {
                        is_item_tree_filtered_attr(name)
                    }
                }
                Meta::Path { path } => {
                    path.segments.len() == 1 && is_item_tree_filtered_attr(path.segments[0].text())
                }
                _ => false,
            };
            if !filter && let ControlFlow::Break(v) = on_attr(attr, container, top_attr, range) {
                return ControlFlow::Break(Either::Left(v));
            }
            ControlFlow::Continue(())
        },
    )
}
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct Attr {
pub id: AttrId,
pub path: Interned<ModPath>,
pub input: Option<Box<AttrInput>>,
pub ctxt: SyntaxContext,
@ -217,131 +414,6 @@ impl fmt::Display for AttrInput {
}
}
impl Attr {
/// Lowers an `ast::Meta` into an [`Attr`], resolving spans through `span_map`.
///
/// Returns `None` if the attribute has no path or the path fails to lower.
fn from_src(
    db: &dyn ExpandDatabase,
    ast: ast::Meta,
    span_map: SpanMapRef<'_>,
    id: AttrId,
) -> Option<Attr> {
    let path = ast.path()?;
    let range = path.syntax().text_range();
    let path = Interned::new(ModPath::from_src(db, path, &mut |range| {
        span_map.span_for_range(range).ctx
    })?);
    let span = span_map.span_for_range(range);
    // `#[attr = lit]` becomes a literal input, `#[attr(...)]` a token tree,
    // and a bare `#[attr]` has no input.
    let input = if let Some(ast::Expr::Literal(lit)) = ast.expr() {
        let token = lit.token();
        Some(Box::new(AttrInput::Literal(token_to_literal(token.text(), span))))
    } else if let Some(tt) = ast.token_tree() {
        let tree = syntax_node_to_token_tree(
            tt.syntax(),
            span_map,
            span,
            DocCommentDesugarMode::ProcMacro,
        );
        Some(Box::new(AttrInput::TokenTree(tree)))
    } else {
        None
    };
    Some(Attr { id, path, input, ctxt: span.ctx })
}
/// Lowers an attribute from its token-tree form (e.g. as produced by `cfg_attr`
/// expansion) into an [`Attr`].
///
/// Accepts an optional leading `unsafe(...)` wrapper, then splits the tokens
/// into a path prefix and the remaining input.
fn from_tt(
    db: &dyn ExpandDatabase,
    mut tt: tt::TokenTreesView<'_>,
    id: AttrId,
) -> Option<Attr> {
    // `unsafe(attr)`: unwrap to the contents of the inner subtree.
    if matches!(tt.flat_tokens(),
        [tt::TokenTree::Leaf(tt::Leaf::Ident(tt::Ident { sym, .. })), ..]
        if *sym == sym::unsafe_
    ) {
        match tt.iter().nth(1) {
            Some(tt::TtElement::Subtree(_, iter)) => tt = iter.remaining(),
            _ => return None,
        }
    }
    let first = tt.flat_tokens().first()?;
    // The attribute's hygiene context is taken from its first token.
    let ctxt = first.first_span().ctx;
    let (path, input) = {
        let mut iter = tt.iter();
        let start = iter.savepoint();
        let mut input = tt::TokenTreesView::new(&[]);
        let mut path = iter.from_savepoint(start);
        let mut path_split_savepoint = iter.savepoint();
        // Walk the longest prefix of `:`/`$` puncts and idents as the path;
        // `input` begins at the first token that doesn't fit (the savepoint was
        // taken before that token was consumed).
        while let Some(tt) = iter.next() {
            path = iter.from_savepoint(start);
            if !matches!(
                tt,
                tt::TtElement::Leaf(
                    tt::Leaf::Punct(tt::Punct { char: ':' | '$', .. }) | tt::Leaf::Ident(_),
                )
            ) {
                input = path_split_savepoint.remaining();
                break;
            }
            path_split_savepoint = iter.savepoint();
        }
        (path, input)
    };
    let path = Interned::new(ModPath::from_tt(db, path)?);
    // The input is either a `(...)` subtree or a `= literal` pair;
    // anything else is dropped.
    let input = match (input.flat_tokens().first(), input.try_into_subtree()) {
        (_, Some(tree)) => {
            Some(Box::new(AttrInput::TokenTree(tt::TopSubtree::from_subtree(tree))))
        }
        (Some(tt::TokenTree::Leaf(tt::Leaf::Punct(tt::Punct { char: '=', .. }))), _) => {
            match input.flat_tokens().get(1) {
                Some(tt::TokenTree::Leaf(tt::Leaf::Literal(lit))) => {
                    Some(Box::new(AttrInput::Literal(lit.clone())))
                }
                _ => None,
            }
        }
        _ => None,
    };
    Some(Attr { id, path, input, ctxt })
}
/// The attribute's path (e.g. `inline` in `#[inline]`).
pub fn path(&self) -> &ModPath {
    &self.path
}
/// If this is a `#[cfg_attr(cond, ...)]` attribute, expands it: yields the nested
/// attributes when `cond` is not known-false, or nothing when it is disabled.
/// Any other (or malformed) attribute is yielded unchanged.
pub fn expand_cfg_attr(
    self,
    db: &dyn ExpandDatabase,
    cfg_options: &CfgOptions,
) -> impl IntoIterator<Item = Self> {
    let is_cfg_attr = self.path.as_ident().is_some_and(|name| *name == sym::cfg_attr);
    if !is_cfg_attr {
        return smallvec![self];
    }
    // A `cfg_attr` without a token tree, or with unparseable input,
    // is returned as-is.
    let subtree = match self.token_tree_value() {
        Some(it) => it,
        _ => return smallvec![self.clone()],
    };
    let (cfg, parts) = match parse_cfg_attr_input(subtree) {
        Some(it) => it,
        None => return smallvec![self.clone()],
    };
    // The expanded attributes all inherit this attribute's id.
    let index = self.id;
    let attrs = parts.filter_map(|attr| Attr::from_tt(db, attr, index));
    let cfg = TopSubtree::from_token_trees(subtree.top_subtree().delimiter, cfg);
    let cfg = CfgExpr::parse(&cfg);
    // `== Some(false)`: only a definitely-disabled condition drops the attributes;
    // unknown conditions keep them.
    if cfg_options.check(&cfg) == Some(false) {
        smallvec![]
    } else {
        cov_mark::hit!(cfg_attr_active);
        attrs.collect::<SmallVec<[_; 1]>>()
    }
}
}
impl Attr {
/// #[path = "string"]
pub fn string_value(&self) -> Option<&Symbol> {
@ -403,30 +475,26 @@ impl Attr {
pub fn parse_path_comma_token_tree<'a>(
&'a self,
db: &'a dyn ExpandDatabase,
) -> Option<impl Iterator<Item = (ModPath, Span)> + 'a> {
) -> Option<impl Iterator<Item = (ModPath, Span, tt::TokenTreesView<'a>)> + 'a> {
let args = self.token_tree_value()?;
if args.top_subtree().delimiter.kind != DelimiterKind::Parenthesis {
return None;
}
let paths = args
.token_trees()
.split(|tt| matches!(tt, tt::TtElement::Leaf(tt::Leaf::Punct(Punct { char: ',', .. }))))
.filter_map(move |tts| {
let span = tts.flat_tokens().first()?.first_span();
Some((ModPath::from_tt(db, tts)?, span))
});
Some(paths)
Some(parse_path_comma_token_tree(db, args))
}
}
/// If this is a `#[cfg(...)]` attribute, parses and returns its condition.
pub fn cfg(&self) -> Option<CfgExpr> {
    // Only single-ident `cfg` paths qualify; anything else yields `None`.
    let name = self.path.as_ident()?;
    if *name != sym::cfg {
        return None;
    }
    self.token_tree_value().map(CfgExpr::parse)
}
/// Splits the token tree `args` on top-level commas and parses each piece as a
/// `ModPath`, yielding the path, the span of the piece's first token, and the
/// raw tokens it was parsed from. Empty or unparseable pieces are skipped.
fn parse_path_comma_token_tree<'a>(
    db: &'a dyn ExpandDatabase,
    args: &'a tt::TopSubtree,
) -> impl Iterator<Item = (ModPath, Span, tt::TokenTreesView<'a>)> {
    args.token_trees()
        .split(|tt| matches!(tt, tt::TtElement::Leaf(tt::Leaf::Punct(Punct { char: ',', .. }))))
        .filter_map(move |tts| {
            let first = tts.flat_tokens().first()?;
            let path = ModPath::from_tt(db, tts)?;
            Some((path, first.first_span(), tts))
        })
}
fn unescape(s: &str) -> Option<Cow<'_, str>> {
@ -455,58 +523,104 @@ fn unescape(s: &str) -> Option<Cow<'_, str>> {
}
}
pub fn collect_attrs(
owner: &dyn ast::HasAttrs,
) -> impl Iterator<Item = (AttrId, Either<ast::Attr, ast::Comment>)> {
let inner_attrs =
inner_attributes(owner.syntax()).into_iter().flatten().zip(iter::repeat(true));
let outer_attrs = ast::AttrDocCommentIter::from_syntax_node(owner.syntax())
.filter(|el| match el {
Either::Left(attr) => attr.kind().is_outer(),
Either::Right(comment) => comment.is_outer(),
})
.zip(iter::repeat(false));
outer_attrs
.chain(inner_attrs)
.enumerate()
.map(|(id, (attr, is_inner))| (AttrId::new(id, is_inner), attr))
/// This is an index of an attribute *that always points to the item tree attributes*.
///
/// Outer attributes are counted first, then inner attributes. This does not support
/// out-of-line modules, which may have attributes spread across 2 files!
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub struct AttrId {
    // Position within the item tree's attribute list
    // (outer attributes first, then inner — see the type docs).
    id: u32,
}
fn inner_attributes(
syntax: &SyntaxNode,
) -> Option<impl Iterator<Item = Either<ast::Attr, ast::Comment>>> {
let node = match_ast! {
match syntax {
ast::SourceFile(_) => syntax.clone(),
ast::ExternBlock(it) => it.extern_item_list()?.syntax().clone(),
ast::Fn(it) => it.body()?.stmt_list()?.syntax().clone(),
ast::Impl(it) => it.assoc_item_list()?.syntax().clone(),
ast::Module(it) => it.item_list()?.syntax().clone(),
ast::BlockExpr(it) => {
if !it.may_carry_attributes() {
return None
impl AttrId {
/// Creates an `AttrId` from a raw item tree attribute index.
#[inline]
pub fn from_item_tree_index(id: u32) -> Self {
    Self { id }
}
/// The raw index of this attribute within the item tree's attribute list.
#[inline]
pub fn item_tree_index(self) -> u32 {
    self.id
}
/// Returns the containing `ast::Attr` (which may hold further attributes due to
/// `cfg_attr`), a `SyntaxNode` guaranteed to contain the attribute, the full
/// range of the attribute, and its desugared [`Meta`].
pub fn find_attr_range<N: ast::HasAttrs>(
    self,
    db: &dyn ExpandDatabase,
    krate: Crate,
    owner: AstId<N>,
) -> (ast::Attr, SyntaxNode, TextRange, Meta) {
    // Resolve the `AstId` to its AST node, then reuse the source-based lookup.
    let source = owner.to_node(db);
    self.find_attr_range_with_source(db, krate, &source)
}
/// Returns the containing `ast::Attr` (note that it may contain other attributes as well due
/// to `cfg_attr`), a `SyntaxNode` guaranteed to contain the attribute, the full range of the
/// attribute, and its desugared [`Meta`].
pub fn find_attr_range_with_source(
self,
db: &dyn ExpandDatabase,
krate: Crate,
owner: &dyn ast::HasAttrs,
) -> (ast::Attr, SyntaxNode, TextRange, Meta) {
let cfg_options = OnceCell::new();
let mut index = 0;
let result = collect_item_tree_attrs(
owner,
|| cfg_options.get_or_init(|| krate.cfg_options(db)),
|meta, container, top_attr, range| {
if index == self.id {
return ControlFlow::Break((top_attr.clone(), container.clone(), range, meta));
}
syntax.clone()
index += 1;
ControlFlow::Continue(())
},
_ => return None,
);
match result {
Some(Either::Left(it)) => it,
_ => {
panic!("used an incorrect `AttrId`; crate={krate:?}, attr_id={self:?}");
}
}
};
}
let attrs = ast::AttrDocCommentIter::from_syntax_node(&node).filter(|el| match el {
Either::Left(attr) => attr.kind().is_inner(),
Either::Right(comment) => comment.is_inner(),
});
Some(attrs)
}
// Input subtree is: `(cfg, $(attr),+)`
// Split it up into a `cfg` subtree and the `attr` subtrees.
fn parse_cfg_attr_input(
subtree: &TopSubtree,
) -> Option<(tt::TokenTreesView<'_>, impl Iterator<Item = tt::TokenTreesView<'_>>)> {
let mut parts = subtree
.token_trees()
.split(|tt| matches!(tt, tt::TtElement::Leaf(tt::Leaf::Punct(Punct { char: ',', .. }))));
let cfg = parts.next()?;
Some((cfg, parts.filter(|it| !it.is_empty())))
/// Computes the text range of the `derive_index`-th path inside this
/// `#[derive(...)]` attribute. Falls back to the whole attribute's range
/// whenever the derive list cannot be picked apart.
pub fn find_derive_range(
    self,
    db: &dyn ExpandDatabase,
    krate: Crate,
    owner: AstId<ast::Adt>,
    derive_index: u32,
) -> TextRange {
    let (_, _, derive_attr_range, derive_attr) = self.find_attr_range(db, krate, owner);
    let Meta::TokenTree { tt, .. } = derive_attr else {
        return derive_attr_range;
    };
    // Fake the span map, as we don't really need spans here, just the offsets of the node in the file.
    let span_map = RealSpanMap::absolute(span::EditionedFileId::current_edition(
        span::FileId::from_raw(0),
    ));
    let tt = syntax_bridge::syntax_node_to_token_tree(
        tt.syntax(),
        SpanMapRef::RealSpanMap(&span_map),
        span_map.span_for_range(tt.syntax().text_range()),
        DocCommentDesugarMode::ProcMacro,
    );
    // Split the derive list on commas and pick the requested entry.
    let Some((_, _, derive_tts)) =
        parse_path_comma_token_tree(db, &tt).nth(derive_index as usize)
    else {
        return derive_attr_range;
    };
    let (Some(first_tt), Some(last_tt)) =
        (derive_tts.flat_tokens().first(), derive_tts.flat_tokens().last())
    else {
        return derive_attr_range;
    };
    // Span from the first token's start to the last token's end
    // (a trailing subtree ends at its closing delimiter).
    let start = first_tt.first_span().range.start();
    let end = match last_tt {
        tt::TokenTree::Leaf(it) => it.span().range.end(),
        tt::TokenTree::Subtree(it) => it.delimiter.close.range.end(),
    };
    TextRange::new(start, end)
}
}

View file

@ -392,12 +392,7 @@ fn to_adt_syntax(
tt: &tt::TopSubtree,
call_site: Span,
) -> Result<(ast::Adt, span::SpanMap<SyntaxContext>), ExpandError> {
let (parsed, tm) = crate::db::token_tree_to_syntax_node(
db,
tt,
crate::ExpandTo::Items,
parser::Edition::CURRENT_FIXME,
);
let (parsed, tm) = crate::db::token_tree_to_syntax_node(db, tt, crate::ExpandTo::Items);
let macro_items = ast::MacroItems::cast(parsed.syntax_node())
.ok_or_else(|| ExpandError::other(call_site, "invalid item definition"))?;
let item =

View file

@ -18,7 +18,7 @@ use syntax::{
use syntax_bridge::syntax_node_to_token_tree;
use crate::{
EditionedFileId, ExpandError, ExpandResult, Lookup as _, MacroCallId,
EditionedFileId, ExpandError, ExpandResult, MacroCallId,
builtin::quote::{WithDelimiter, dollar_crate},
db::ExpandDatabase,
hygiene::{span_with_call_site_ctxt, span_with_def_site_ctxt},
@ -230,9 +230,9 @@ fn assert_expand(
let mut iter = tt.iter();
let cond = expect_fragment(
db,
&mut iter,
parser::PrefixEntryPoint::Expr,
id.lookup(db).krate.data(db).edition,
tt.top_subtree().delimiter.delim_span(),
);
_ = iter.expect_char(',');
@ -772,7 +772,7 @@ fn relative_file(
if res == call_site && !allow_recursion {
Err(ExpandError::other(err_span, format!("recursive inclusion of `{path_str}`")))
} else {
Ok(EditionedFileId::new(db, res, lookup.krate.data(db).edition))
Ok(EditionedFileId::new(db, res, lookup.krate.data(db).edition, lookup.krate))
}
}

View file

@ -1,373 +1,346 @@
//! Processes out #[cfg] and #[cfg_attr] attributes from the input for the derive macro
use std::iter::Peekable;
use std::{cell::OnceCell, ops::ControlFlow};
use ::tt::TextRange;
use base_db::Crate;
use cfg::{CfgAtom, CfgExpr};
use intern::{Symbol, sym};
use rustc_hash::FxHashSet;
use cfg::CfgExpr;
use parser::T;
use smallvec::SmallVec;
use syntax::{
AstNode, NodeOrToken, SyntaxElement, SyntaxKind, SyntaxNode, T,
ast::{self, Attr, HasAttrs, Meta, TokenTree, VariantList},
AstNode, PreorderWithTokens, SyntaxElement, SyntaxNode, SyntaxToken, WalkEvent,
ast::{self, HasAttrs, TokenTreeChildren},
};
use tracing::{debug, warn};
use syntax_bridge::DocCommentDesugarMode;
use crate::{MacroCallLoc, MacroDefKind, db::ExpandDatabase, proc_macro::ProcMacroKind};
use crate::{
attrs::{AttrId, Meta, expand_cfg_attr, is_item_tree_filtered_attr},
db::ExpandDatabase,
fixup::{self, SyntaxFixupUndoInfo},
span_map::SpanMapRef,
tt::{self, DelimSpan, Span},
};
fn check_cfg(db: &dyn ExpandDatabase, attr: &Attr, krate: Crate) -> Option<bool> {
if !attr.simple_name().as_deref().map(|v| v == "cfg")? {
return None;
}
let cfg = parse_from_attr_token_tree(&attr.meta()?.token_tree()?)?;
let enabled = krate.cfg_options(db).check(&cfg) != Some(false);
Some(enabled)
struct ItemIsCfgedOut;
/// A single attribute, produced by `cfg_attr` expansion, that should be kept
/// in the re-emitted macro input.
#[derive(Debug)]
struct ExpandedAttrToProcess {
    // Text range of the expanded attribute's tokens.
    // NOTE(review): presumably within the original `ast::Attr` — confirm.
    range: TextRange,
}
fn check_cfg_attr(db: &dyn ExpandDatabase, attr: &Attr, krate: Crate) -> Option<bool> {
if !attr.simple_name().as_deref().map(|v| v == "cfg_attr")? {
return None;
/// Tracks whether token emission is currently inside the next expanded attribute.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
enum NextExpandedAttrState {
    /// Not yet reached the next expanded attribute's token range.
    NotStarted,
    /// Between the attribute's start and end offsets.
    InTheMiddle,
}
/// Bookkeeping for one `ast::Attr` while re-emitting macro input with
/// `cfg_attr`s expanded.
#[derive(Debug)]
struct AstAttrToProcess {
    // Full text range of the original `ast::Attr`.
    range: TextRange,
    // The attributes this one expands to, in source order.
    expanded_attrs: SmallVec<[ExpandedAttrToProcess; 1]>,
    // Index of the expanded attribute currently being emitted.
    expanded_attrs_idx: usize,
    // Emission state relative to the current expanded attribute.
    next_expanded_attr: NextExpandedAttrState,
    // Spans used to synthesize the `#`, `[`/`]` (and `!`) tokens wrapped
    // around each expanded attribute.
    pound_span: Span,
    brackets_span: DelimSpan,
    /// If `Some`, this is an inner attribute.
    excl_span: Option<Span>,
}
fn macro_input_callback(
db: &dyn ExpandDatabase,
is_derive: bool,
censor_item_tree_attr_ids: &[AttrId],
krate: Crate,
default_span: Span,
span_map: SpanMapRef<'_>,
) -> impl FnMut(&mut PreorderWithTokens, &WalkEvent<SyntaxElement>) -> (bool, Vec<tt::Leaf>) {
let cfg_options = OnceCell::new();
let cfg_options = move || *cfg_options.get_or_init(|| krate.cfg_options(db));
let mut should_strip_attr = {
let mut item_tree_attr_id = 0;
let mut censor_item_tree_attr_ids_index = 0;
move || {
let mut result = false;
if let Some(&next_censor_attr_id) =
censor_item_tree_attr_ids.get(censor_item_tree_attr_ids_index)
&& next_censor_attr_id.item_tree_index() == item_tree_attr_id
{
censor_item_tree_attr_ids_index += 1;
result = true;
}
item_tree_attr_id += 1;
result
}
};
let mut attrs = Vec::new();
let mut attrs_idx = 0;
let mut has_inner_attrs_owner = false;
let mut in_attr = false;
let mut done_with_attrs = false;
let mut did_top_attrs = false;
move |preorder, event| {
match event {
WalkEvent::Enter(SyntaxElement::Node(node)) => {
if done_with_attrs {
return (true, Vec::new());
}
if ast::Attr::can_cast(node.kind()) {
in_attr = true;
let node_range = node.text_range();
while attrs
.get(attrs_idx)
.is_some_and(|it: &AstAttrToProcess| it.range != node_range)
{
attrs_idx += 1;
}
} else if !in_attr && let Some(has_attrs) = ast::AnyHasAttrs::cast(node.clone()) {
// Attributes of the form `key = value` have `ast::Expr` in them, which returns `Some` for
// `AnyHasAttrs::cast()`, so we also need to check `in_attr`.
if has_inner_attrs_owner {
has_inner_attrs_owner = false;
return (true, Vec::new());
}
if did_top_attrs && !is_derive {
// Derives need all attributes handled, but attribute macros need only the top attributes handled.
done_with_attrs = true;
return (true, Vec::new());
}
did_top_attrs = true;
if let Some(inner_attrs_node) = has_attrs.inner_attributes_node()
&& inner_attrs_node != *node
{
has_inner_attrs_owner = true;
}
let node_attrs = ast::attrs_including_inner(&has_attrs);
attrs.clear();
node_attrs.clone().for_each(|attr| {
let span_for = |token: Option<SyntaxToken>| {
token
.map(|token| span_map.span_for_range(token.text_range()))
.unwrap_or(default_span)
};
attrs.push(AstAttrToProcess {
range: attr.syntax().text_range(),
pound_span: span_for(attr.pound_token()),
brackets_span: DelimSpan {
open: span_for(attr.l_brack_token()),
close: span_for(attr.r_brack_token()),
},
excl_span: attr
.excl_token()
.map(|token| span_map.span_for_range(token.text_range())),
expanded_attrs: SmallVec::new(),
expanded_attrs_idx: 0,
next_expanded_attr: NextExpandedAttrState::NotStarted,
});
});
attrs_idx = 0;
let strip_current_item = expand_cfg_attr(
node_attrs,
&cfg_options,
|attr, _container, range, top_attr| {
// Find the attr.
while attrs[attrs_idx].range != top_attr.syntax().text_range() {
attrs_idx += 1;
}
let mut strip_current_attr = false;
match attr {
Meta::NamedKeyValue { name, .. } => {
if name
.is_none_or(|name| !is_item_tree_filtered_attr(name.text()))
{
strip_current_attr = should_strip_attr();
}
}
Meta::TokenTree { path, tt } => {
if path.segments.len() != 1
|| !is_item_tree_filtered_attr(path.segments[0].text())
{
strip_current_attr = should_strip_attr();
}
if path.segments.len() == 1 {
let name = path.segments[0].text();
if name == "cfg" {
let cfg_expr = CfgExpr::parse_from_ast(
&mut TokenTreeChildren::new(&tt).peekable(),
);
if cfg_options().check(&cfg_expr) == Some(false) {
return ControlFlow::Break(ItemIsCfgedOut);
}
strip_current_attr = true;
}
}
}
Meta::Path { path } => {
if path.segments.len() != 1
|| !is_item_tree_filtered_attr(path.segments[0].text())
{
strip_current_attr = should_strip_attr();
}
}
}
if !strip_current_attr {
attrs[attrs_idx]
.expanded_attrs
.push(ExpandedAttrToProcess { range });
}
ControlFlow::Continue(())
},
);
attrs_idx = 0;
if strip_current_item.is_some() {
preorder.skip_subtree();
attrs.clear();
'eat_comma: {
// If there is a comma after this node, eat it too.
let mut events_until_comma = 0;
for event in preorder.clone() {
match event {
WalkEvent::Enter(SyntaxElement::Node(_))
| WalkEvent::Leave(_) => {}
WalkEvent::Enter(SyntaxElement::Token(token)) => {
let kind = token.kind();
if kind == T![,] {
break;
} else if !kind.is_trivia() {
break 'eat_comma;
}
}
}
events_until_comma += 1;
}
preorder.nth(events_until_comma);
}
return (false, Vec::new());
}
}
}
WalkEvent::Leave(SyntaxElement::Node(node)) => {
if ast::Attr::can_cast(node.kind()) {
in_attr = false;
attrs_idx += 1;
}
}
WalkEvent::Enter(SyntaxElement::Token(token)) => {
if !in_attr {
return (true, Vec::new());
}
let Some(ast_attr) = attrs.get_mut(attrs_idx) else {
return (true, Vec::new());
};
let token_range = token.text_range();
let Some(expanded_attr) = ast_attr.expanded_attrs.get(ast_attr.expanded_attrs_idx)
else {
// No expanded attributes in this `ast::Attr`, or we finished them all already, either way
// the remaining tokens should be discarded.
return (false, Vec::new());
};
match ast_attr.next_expanded_attr {
NextExpandedAttrState::NotStarted => {
if token_range.start() >= expanded_attr.range.start() {
// We started the next attribute.
let mut insert_tokens = Vec::with_capacity(3);
insert_tokens.push(tt::Leaf::Punct(tt::Punct {
char: '#',
spacing: tt::Spacing::Alone,
span: ast_attr.pound_span,
}));
if let Some(span) = ast_attr.excl_span {
insert_tokens.push(tt::Leaf::Punct(tt::Punct {
char: '!',
spacing: tt::Spacing::Alone,
span,
}));
}
insert_tokens.push(tt::Leaf::Punct(tt::Punct {
char: '[',
spacing: tt::Spacing::Alone,
span: ast_attr.brackets_span.open,
}));
ast_attr.next_expanded_attr = NextExpandedAttrState::InTheMiddle;
return (true, insert_tokens);
} else {
// Before any attribute or between the attributes.
return (false, Vec::new());
}
}
NextExpandedAttrState::InTheMiddle => {
if token_range.start() >= expanded_attr.range.end() {
// Finished the current attribute.
let insert_tokens = vec![tt::Leaf::Punct(tt::Punct {
char: ']',
spacing: tt::Spacing::Alone,
span: ast_attr.brackets_span.close,
})];
ast_attr.next_expanded_attr = NextExpandedAttrState::NotStarted;
ast_attr.expanded_attrs_idx += 1;
// It's safe to ignore the current token because between attributes
// there is always at least one token we skip - either the closing bracket
// in `#[]` or the comma in case of multiple attrs in `cfg_attr` expansion.
return (false, insert_tokens);
} else {
// Still in the middle.
return (true, Vec::new());
}
}
}
}
WalkEvent::Leave(SyntaxElement::Token(_)) => {}
}
(true, Vec::new())
}
check_cfg_attr_value(db, &attr.token_tree()?, krate)
}
/// Converts an attribute/derive macro's input syntax node into a token tree,
/// applying syntax fixups and filtering attributes along the way (via
/// `macro_input_callback`). Returns the token tree together with the
/// information needed to undo the fixups after expansion.
pub(crate) fn attr_macro_input_to_token_tree(
    db: &dyn ExpandDatabase,
    node: &SyntaxNode,
    span_map: SpanMapRef<'_>,
    span: Span,
    is_derive: bool,
    censor_item_tree_attr_ids: &[AttrId],
    krate: Crate,
) -> (tt::TopSubtree, SyntaxFixupUndoInfo) {
    // Compute the fixups (error-recovery token insertions/removals) first.
    let fixups = fixup::fixup_syntax(span_map, node, span, DocCommentDesugarMode::ProcMacro);
    (
        syntax_bridge::syntax_node_to_token_tree_modified(
            node,
            span_map,
            fixups.append,
            fixups.remove,
            span,
            DocCommentDesugarMode::ProcMacro,
            macro_input_callback(db, is_derive, censor_item_tree_attr_ids, krate, span, span_map),
        ),
        fixups.undo_info,
    )
}
pub fn check_cfg_attr_value(
db: &dyn ExpandDatabase,
attr: &TokenTree,
attr: &ast::TokenTree,
krate: Crate,
) -> Option<bool> {
let cfg_expr = parse_from_attr_token_tree(attr)?;
let enabled = krate.cfg_options(db).check(&cfg_expr) != Some(false);
Some(enabled)
}
/// Processes `#[cfg]`/`#[cfg_attr]` attributes on a sequence of comma-separated
/// items (e.g. fields), recording syntax elements to strip in `remove`.
///
/// An enabled `cfg` removes just the attribute; a disabled one removes the whole
/// item plus its trailing comma. Returns `None` on malformed attributes.
fn process_has_attrs_with_possible_comma<I: HasAttrs>(
    db: &dyn ExpandDatabase,
    items: impl Iterator<Item = I>,
    krate: Crate,
    remove: &mut FxHashSet<SyntaxElement>,
) -> Option<()> {
    for item in items {
        let field_attrs = item.attrs();
        'attrs: for attr in field_attrs {
            if let Some(enabled) = check_cfg(db, &attr, krate) {
                if enabled {
                    debug!("censoring {:?}", attr.syntax());
                    remove.insert(attr.syntax().clone().into());
                } else {
                    debug!("censoring {:?}", item.syntax());
                    remove.insert(item.syntax().clone().into());
                    // We need to remove the , as well
                    remove_possible_comma(&item, remove);
                    // Item is gone entirely; no point processing its other attrs.
                    break 'attrs;
                }
            }

            if let Some(enabled) = check_cfg_attr(db, &attr, krate) {
                if enabled {
                    // Keep the payload but strip the `cfg_attr(...)` wrapper tokens.
                    debug!("Removing cfg_attr tokens {:?}", attr);
                    let meta = attr.meta()?;
                    let removes_from_cfg_attr = remove_tokens_within_cfg_attr(meta)?;
                    remove.extend(removes_from_cfg_attr);
                } else {
                    debug!("censoring type cfg_attr {:?}", item.syntax());
                    remove.insert(attr.syntax().clone().into());
                }
            }
        }
    }
    Some(())
}
/// Parser state while stripping the wrapper tokens of an enabled `cfg_attr`.
#[derive(Debug, PartialEq, Eq, Clone, Copy)]
enum CfgExprStage {
    /// Stripping the CFGExpr part of the attribute
    // (renamed from the misspelled `StrippigCfgExpr`)
    StrippingCfgExpr,
    /// Found the comma after the CFGExpr. Will keep all tokens until the next comma or the end of the attribute
    FoundComma,
    /// Everything following the attribute. This could be another attribute or the end of the attribute.
    // FIXME: cfg_attr with multiple attributes will not be handled correctly. We will only keep the first attribute
    // Related Issue: https://github.com/rust-lang/rust-analyzer/issues/10110
    EverythingElse,
}

/// This function creates its own set of tokens to remove. To help prevent malformed syntax as input.
///
/// Given an enabled `cfg_attr(cond, attr)`, collects the `cfg_attr` path, the
/// condition tokens, and the delimiters/commas around the kept attribute, so
/// only the attribute payload survives. Returns `None` for malformed input.
fn remove_tokens_within_cfg_attr(meta: Meta) -> Option<FxHashSet<SyntaxElement>> {
    let mut remove: FxHashSet<SyntaxElement> = FxHashSet::default();
    debug!("Enabling attribute {}", meta);
    let meta_path = meta.path()?;
    debug!("Removing {:?}", meta_path.syntax());
    remove.insert(meta_path.syntax().clone().into());

    let meta_tt = meta.token_tree()?;
    debug!("meta_tt {}", meta_tt);
    let mut stage = CfgExprStage::StrippingCfgExpr;
    for tt in meta_tt.token_trees_and_tokens() {
        debug!("Checking {:?}. Stage: {:?}", tt, stage);
        match (stage, tt) {
            // Everything up to and including the first comma is the condition:
            // remove it all.
            (CfgExprStage::StrippingCfgExpr, syntax::NodeOrToken::Node(node)) => {
                remove.insert(node.syntax().clone().into());
            }
            (CfgExprStage::StrippingCfgExpr, syntax::NodeOrToken::Token(token)) => {
                if token.kind() == T![,] {
                    stage = CfgExprStage::FoundComma;
                }
                remove.insert(token.into());
            }
            (CfgExprStage::FoundComma, syntax::NodeOrToken::Token(token))
                if (token.kind() == T![,] || token.kind() == T![')']) =>
            {
                // The end of the attribute or separator for the next attribute
                stage = CfgExprStage::EverythingElse;
                remove.insert(token.into());
            }
            (CfgExprStage::EverythingElse, syntax::NodeOrToken::Node(node)) => {
                remove.insert(node.syntax().clone().into());
            }
            (CfgExprStage::EverythingElse, syntax::NodeOrToken::Token(token)) => {
                remove.insert(token.into());
            }
            // This is an actual attribute
            _ => {}
        }
    }
    if stage != CfgExprStage::EverythingElse {
        warn!("Invalid cfg_attr attribute. {:?}", meta_tt);
        return None;
    }
    Some(remove)
}
/// Removes a possible comma after the [AstNode]
fn remove_possible_comma(item: &impl AstNode, res: &mut FxHashSet<SyntaxElement>) {
    let next = item.syntax().next_sibling_or_token();
    if let Some(comma) = next.filter(|it| it.kind() == T![,]) {
        res.insert(comma);
    }
}
/// Enum counterpart of `process_has_attrs_with_possible_comma`: processes
/// `#[cfg]`/`#[cfg_attr]` on each variant, then recurses into the variant's
/// fields. Returns `None` on malformed attributes.
fn process_enum(
    db: &dyn ExpandDatabase,
    variants: VariantList,
    krate: Crate,
    remove: &mut FxHashSet<SyntaxElement>,
) -> Option<()> {
    'variant: for variant in variants.variants() {
        for attr in variant.attrs() {
            if let Some(enabled) = check_cfg(db, &attr, krate) {
                if enabled {
                    debug!("censoring {:?}", attr.syntax());
                    remove.insert(attr.syntax().clone().into());
                } else {
                    // Rustc does not strip the attribute if it is enabled. So we will leave it
                    debug!("censoring type {:?}", variant.syntax());
                    remove.insert(variant.syntax().clone().into());
                    // We need to remove the , as well
                    remove_possible_comma(&variant, remove);
                    continue 'variant;
                }
            }

            if let Some(enabled) = check_cfg_attr(db, &attr, krate) {
                if enabled {
                    // Keep the payload but strip the `cfg_attr(...)` wrapper tokens.
                    debug!("Removing cfg_attr tokens {:?}", attr);
                    let meta = attr.meta()?;
                    let removes_from_cfg_attr = remove_tokens_within_cfg_attr(meta)?;
                    remove.extend(removes_from_cfg_attr);
                } else {
                    debug!("censoring type cfg_attr {:?}", variant.syntax());
                    remove.insert(attr.syntax().clone().into());
                }
            }
        }

        // Recurse into the variant's record or tuple fields.
        if let Some(fields) = variant.field_list() {
            match fields {
                ast::FieldList::RecordFieldList(fields) => {
                    process_has_attrs_with_possible_comma(db, fields.fields(), krate, remove)?;
                }
                ast::FieldList::TupleFieldList(fields) => {
                    process_has_attrs_with_possible_comma(db, fields.fields(), krate, remove)?;
                }
            }
        }
    }
    Some(())
}
/// Top-level `cfg`/`cfg_attr` processing for a macro input item: collects the
/// syntax elements that should be stripped before expansion.
///
/// `cfg_attr` on the item itself is always processed; the item's *interior*
/// (fields/variants) is only cfg-cleaned for derives.
pub(crate) fn process_cfg_attrs(
    db: &dyn ExpandDatabase,
    node: &SyntaxNode,
    loc: &MacroCallLoc,
) -> Option<FxHashSet<SyntaxElement>> {
    // FIXME: #[cfg_eval] is not implemented. But it is not stable yet
    let is_derive = match loc.def.kind {
        MacroDefKind::BuiltInDerive(..)
        | MacroDefKind::ProcMacro(_, _, ProcMacroKind::CustomDerive) => true,
        MacroDefKind::BuiltInAttr(_, expander) => expander.is_derive(),
        _ => false,
    };
    let mut remove = FxHashSet::default();

    let item = ast::Item::cast(node.clone())?;
    // Process `cfg_attr` on the item's own attributes.
    for attr in item.attrs() {
        if let Some(enabled) = check_cfg_attr(db, &attr, loc.krate) {
            if enabled {
                debug!("Removing cfg_attr tokens {:?}", attr);
                let meta = attr.meta()?;
                let removes_from_cfg_attr = remove_tokens_within_cfg_attr(meta)?;
                remove.extend(removes_from_cfg_attr);
            } else {
                debug!("Removing type cfg_attr {:?}", item.syntax());
                remove.insert(attr.syntax().clone().into());
            }
        }
    }
    if is_derive {
        // Only derives get their code cfg-clean, normal attribute macros process only the cfg at their level
        // (cfg_attr is handled above, cfg is handled in the def map).
        match item {
            ast::Item::Struct(it) => match it.field_list()? {
                ast::FieldList::RecordFieldList(fields) => {
                    process_has_attrs_with_possible_comma(
                        db,
                        fields.fields(),
                        loc.krate,
                        &mut remove,
                    )?;
                }
                ast::FieldList::TupleFieldList(fields) => {
                    process_has_attrs_with_possible_comma(
                        db,
                        fields.fields(),
                        loc.krate,
                        &mut remove,
                    )?;
                }
            },
            ast::Item::Enum(it) => {
                process_enum(db, it.variant_list()?, loc.krate, &mut remove)?;
            }
            ast::Item::Union(it) => {
                process_has_attrs_with_possible_comma(
                    db,
                    it.record_field_list()?.fields(),
                    loc.krate,
                    &mut remove,
                )?;
            }
            // FIXME: Implement for other items if necessary. As we do not support #[cfg_eval] yet, we do not need to implement it for now
            _ => {}
        }
    }
    Some(remove)
}
/// Parses a `cfg` attribute from the meta
fn parse_from_attr_token_tree(tt: &TokenTree) -> Option<CfgExpr> {
    // `skip(1)` drops the opening paren and `take_while` stops at the closing
    // one, leaving only the tokens of the cfg expression itself.
    let mut iter = tt
        .token_trees_and_tokens()
        .filter(is_not_whitespace)
        .skip(1)
        .take_while(is_not_closing_paren)
        .peekable();
    next_cfg_expr_from_syntax(&mut iter)
}
/// `true` for any element that is not a `)` token.
fn is_not_closing_paren(element: &NodeOrToken<ast::TokenTree, syntax::SyntaxToken>) -> bool {
    match element {
        NodeOrToken::Token(token) => token.kind() != syntax::T![')'],
        NodeOrToken::Node(_) => true,
    }
}
/// `true` for any element that is not a whitespace token.
fn is_not_whitespace(element: &NodeOrToken<ast::TokenTree, syntax::SyntaxToken>) -> bool {
    match element {
        NodeOrToken::Token(token) => token.kind() != SyntaxKind::WHITESPACE,
        NodeOrToken::Node(_) => true,
    }
}
/// Parses a single cfg predicate (plus a trailing comma, if any) from a flat
/// token stream: an `all(...)`/`any(...)`/`not(...)` group, a `key = "value"`
/// pair, or a bare flag. Returns `None` when the stream is exhausted and
/// `Some(CfgExpr::Invalid)` on malformed input.
fn next_cfg_expr_from_syntax<I>(iter: &mut Peekable<I>) -> Option<CfgExpr>
where
    I: Iterator<Item = NodeOrToken<ast::TokenTree, syntax::SyntaxToken>>,
{
    // A predicate must start with an identifier.
    let name = match iter.next() {
        None => return None,
        Some(NodeOrToken::Token(element)) => match element.kind() {
            syntax::T![ident] => Symbol::intern(element.text()),
            _ => return Some(CfgExpr::Invalid),
        },
        Some(_) => return Some(CfgExpr::Invalid),
    };
    let result = match &name {
        s if [&sym::all, &sym::any, &sym::not].contains(&s) => {
            // Group operator: recursively parse the parenthesized predicate list.
            let mut preds = Vec::new();
            let Some(NodeOrToken::Node(tree)) = iter.next() else {
                return Some(CfgExpr::Invalid);
            };
            // `skip(1)` drops the opening paren, `take_while` stops at the closing one.
            let mut tree_iter = tree
                .token_trees_and_tokens()
                .filter(is_not_whitespace)
                .skip(1)
                .take_while(is_not_closing_paren)
                .peekable();
            while tree_iter.peek().is_some() {
                let pred = next_cfg_expr_from_syntax(&mut tree_iter);
                if let Some(pred) = pred {
                    preds.push(pred);
                }
            }
            let group = match &name {
                s if *s == sym::all => CfgExpr::All(preds.into_boxed_slice()),
                s if *s == sym::any => CfgExpr::Any(preds.into_boxed_slice()),
                s if *s == sym::not => {
                    // `not` takes a single predicate; a missing one is invalid.
                    CfgExpr::Not(Box::new(preds.pop().unwrap_or(CfgExpr::Invalid)))
                }
                _ => unreachable!(),
            };
            Some(group)
        }
        _ => match iter.peek() {
            Some(NodeOrToken::Token(element)) if (element.kind() == syntax::T![=]) => {
                // `key = "value"`: only string literal values are accepted.
                iter.next();
                match iter.next() {
                    Some(NodeOrToken::Token(value_token))
                        if (value_token.kind() == syntax::SyntaxKind::STRING) =>
                    {
                        let value = value_token.text();
                        Some(CfgExpr::Atom(CfgAtom::KeyValue {
                            key: name,
                            value: Symbol::intern(value.trim_matches('"')),
                        }))
                    }
                    _ => None,
                }
            }
            _ => Some(CfgExpr::Atom(CfgAtom::Flag(name))),
        },
    };
    // Consume the comma separating this predicate from the next one.
    if let Some(NodeOrToken::Token(element)) = iter.peek()
        && element.kind() == syntax::T![,]
    {
        iter.next();
    }
    result
}
#[cfg(test)]
mod tests {
use cfg::DnfExpr;
use expect_test::{Expect, expect};
use syntax::{AstNode, SourceFile, ast::Attr};
use crate::cfg_process::parse_from_attr_token_tree;
/// Parses `input`, extracts the first `Attr` node, runs the syntax-based cfg
/// parser on its token tree, and compares the DNF rendering against `expect`.
fn check_dnf_from_syntax(input: &str, expect: Expect) {
    let parse = SourceFile::parse(input, span::Edition::CURRENT);
    let Some(attr) = parse.tree().syntax().descendants().find_map(Attr::cast) else {
        let node = std::any::type_name::<Attr>();
        panic!("Failed to make ast node `{node}` from text {input}")
    };
    let attr = attr.clone_subtree();
    assert_eq!(attr.syntax().text_range().start(), 0.into());
    let tt = attr.meta().unwrap().token_tree().unwrap();
    let cfg = parse_from_attr_token_tree(&tt).unwrap();
    expect.assert_eq(&format!("#![cfg({})]", DnfExpr::new(&cfg)));
}
#[test]
fn cfg_from_attr() {
    // Round-trips a bare flag and a `not(...)` group through the syntax-based parser.
    check_dnf_from_syntax(r#"#[cfg(test)]"#, expect![[r#"#![cfg(test)]"#]]);
    check_dnf_from_syntax(r#"#[cfg(not(never))]"#, expect![[r#"#![cfg(not(never))]"#]]);
}
let cfg_expr = CfgExpr::parse_from_ast(&mut TokenTreeChildren::new(attr).peekable());
krate.cfg_options(db).check(&cfg_expr)
}

View file

@ -1,11 +1,9 @@
//! Defines database & queries for macro expansion.
use base_db::{Crate, RootQueryDb};
use either::Either;
use mbe::MatchedArmIndex;
use rustc_hash::FxHashSet;
use span::{AstIdMap, Edition, Span, SyntaxContext};
use syntax::{AstNode, Parse, SyntaxElement, SyntaxError, SyntaxNode, SyntaxToken, T, ast};
use syntax::{AstNode, Parse, SyntaxError, SyntaxNode, SyntaxToken, T, ast};
use syntax_bridge::{DocCommentDesugarMode, syntax_node_to_token_tree};
use triomphe::Arc;
@ -13,9 +11,9 @@ use crate::{
AstId, BuiltinAttrExpander, BuiltinDeriveExpander, BuiltinFnLikeExpander, EagerCallInfo,
EagerExpander, EditionedFileId, ExpandError, ExpandResult, ExpandTo, HirFileId, MacroCallId,
MacroCallKind, MacroCallLoc, MacroDefId, MacroDefKind,
attrs::{AttrId, AttrInput, RawAttrs, collect_attrs},
attrs::Meta,
builtin::pseudo_derive_attr_expansion,
cfg_process,
cfg_process::attr_macro_input_to_token_tree,
declarative::DeclarativeMacroExpander,
fixup::{self, SyntaxFixupUndoInfo},
hygiene::{span_with_call_site_ctxt, span_with_def_site_ctxt, span_with_mixed_site_ctxt},
@ -177,7 +175,7 @@ pub fn expand_speculative(
let span_map = SpanMapRef::RealSpanMap(&span_map);
// Build the subtree and token mapping for the speculative args
let (mut tt, undo_info) = match loc.kind {
let (mut tt, undo_info) = match &loc.kind {
MacroCallKind::FnLike { .. } => (
syntax_bridge::syntax_node_to_token_tree(
speculative_args,
@ -200,48 +198,35 @@ pub fn expand_speculative(
),
SyntaxFixupUndoInfo::NONE,
),
MacroCallKind::Derive { derive_attr_index: index, .. }
| MacroCallKind::Attr { invoc_attr_index: index, .. } => {
let censor = if let MacroCallKind::Derive { .. } = loc.kind {
censor_derive_input(index, &ast::Adt::cast(speculative_args.clone())?)
} else {
attr_source(index, &ast::Item::cast(speculative_args.clone())?)
.into_iter()
.map(|it| it.syntax().clone().into())
.collect()
MacroCallKind::Derive { derive_macro_id, .. } => {
let MacroCallKind::Attr { censored_attr_ids: attr_ids, .. } =
&derive_macro_id.loc(db).kind
else {
unreachable!("`derive_macro_id` should be `MacroCallKind::Attr`");
};
let censor_cfg =
cfg_process::process_cfg_attrs(db, speculative_args, &loc).unwrap_or_default();
let mut fixups = fixup::fixup_syntax(
span_map,
attr_macro_input_to_token_tree(
db,
speculative_args,
span_map,
span,
DocCommentDesugarMode::ProcMacro,
);
fixups.append.retain(|it, _| match it {
syntax::NodeOrToken::Token(_) => true,
it => !censor.contains(it) && !censor_cfg.contains(it),
});
fixups.remove.extend(censor);
fixups.remove.extend(censor_cfg);
(
syntax_bridge::syntax_node_to_token_tree_modified(
speculative_args,
span_map,
fixups.append,
fixups.remove,
span,
DocCommentDesugarMode::ProcMacro,
),
fixups.undo_info,
true,
attr_ids,
loc.krate,
)
}
MacroCallKind::Attr { censored_attr_ids: attr_ids, .. } => attr_macro_input_to_token_tree(
db,
speculative_args,
span_map,
span,
false,
attr_ids,
loc.krate,
),
};
let attr_arg = match loc.kind {
MacroCallKind::Attr { invoc_attr_index, .. } => {
let attr_arg = match &loc.kind {
MacroCallKind::Attr { censored_attr_ids: attr_ids, .. } => {
if loc.def.is_attribute_derive() {
// for pseudo-derive expansion we actually pass the attribute itself only
ast::Attr::cast(speculative_args.clone()).and_then(|attr| attr.token_tree()).map(
@ -260,18 +245,21 @@ pub fn expand_speculative(
// Attributes may have an input token tree, build the subtree and map for this as well
// then try finding a token id for our token if it is inside this input subtree.
let item = ast::Item::cast(speculative_args.clone())?;
let attrs = RawAttrs::new_expanded(db, &item, span_map, loc.krate.cfg_options(db));
attrs.iter().find(|attr| attr.id == invoc_attr_index).and_then(|attr| {
match attr.input.as_deref()? {
AttrInput::TokenTree(tt) => {
let mut attr_arg = tt.clone();
attr_arg.top_subtree_delimiter_mut().kind =
tt::DelimiterKind::Invisible;
Some(attr_arg)
}
AttrInput::Literal(_) => None,
let (_, _, _, meta) =
attr_ids.invoc_attr().find_attr_range_with_source(db, loc.krate, &item);
match meta {
Meta::TokenTree { tt, .. } => {
let mut attr_arg = syntax_bridge::syntax_node_to_token_tree(
tt.syntax(),
span_map,
span,
DocCommentDesugarMode::ProcMacro,
);
attr_arg.top_subtree_delimiter_mut().kind = tt::DelimiterKind::Invisible;
Some(attr_arg)
}
})
_ => None,
}
}
}
_ => None,
@ -299,7 +287,7 @@ pub fn expand_speculative(
}
MacroDefKind::Declarative(it, _) => db
.decl_macro_expander(loc.krate, it)
.expand_unhygienic(tt, loc.kind.call_style(), span, loc.def.edition),
.expand_unhygienic(db, tt, loc.kind.call_style(), span),
MacroDefKind::BuiltIn(_, it) => {
it.expand(db, actual_macro_call, &tt, span).map_err(Into::into)
}
@ -315,8 +303,7 @@ pub fn expand_speculative(
let expand_to = loc.expand_to();
fixup::reverse_fixups(&mut speculative_expansion.value, &undo_info);
let (node, rev_tmap) =
token_tree_to_syntax_node(db, &speculative_expansion.value, expand_to, loc.def.edition);
let (node, rev_tmap) = token_tree_to_syntax_node(db, &speculative_expansion.value, expand_to);
let syntax_node = node.syntax_node();
let token = rev_tmap
@ -358,7 +345,6 @@ fn parse_macro_expansion(
) -> ExpandResult<(Parse<SyntaxNode>, Arc<ExpansionSpanMap>)> {
let _p = tracing::info_span!("parse_macro_expansion").entered();
let loc = db.lookup_intern_macro_call(macro_file);
let def_edition = loc.def.edition;
let expand_to = loc.expand_to();
let mbe::ValueResult { value: (tt, matched_arm), err } = macro_expand(db, macro_file, loc);
@ -369,7 +355,6 @@ fn parse_macro_expansion(
CowArc::Owned(it) => it,
},
expand_to,
def_edition,
);
rev_token_map.matched_arm = matched_arm;
@ -433,7 +418,7 @@ fn macro_arg(db: &dyn ExpandDatabase, id: MacroCallId) -> MacroArgResult {
let (parse, map) = parse_with_map(db, loc.kind.file_id());
let root = parse.syntax_node();
let (censor, item_node, span) = match loc.kind {
let (is_derive, censor_item_tree_attr_ids, item_node, span) = match &loc.kind {
MacroCallKind::FnLike { ast_id, .. } => {
let node = &ast_id.to_ptr(db).to_node(&root);
let path_range = node
@ -501,53 +486,29 @@ fn macro_arg(db: &dyn ExpandDatabase, id: MacroCallId) -> MacroArgResult {
MacroCallKind::Derive { .. } => {
unreachable!("`ExpandDatabase::macro_arg` called with `MacroCallKind::Derive`")
}
MacroCallKind::Attr { ast_id, invoc_attr_index, .. } => {
MacroCallKind::Attr { ast_id, censored_attr_ids: attr_ids, .. } => {
let node = ast_id.to_ptr(db).to_node(&root);
let attr_source = attr_source(invoc_attr_index, &node);
let range = attr_ids
.invoc_attr()
.find_attr_range_with_source(db, loc.krate, &node)
.3
.path_range();
let span = map.span_for_range(range);
let span = map.span_for_range(
attr_source
.as_ref()
.and_then(|it| it.path())
.map_or_else(|| node.syntax().text_range(), |it| it.syntax().text_range()),
);
// If derive attribute we need to censor the derive input
if matches!(loc.def.kind, MacroDefKind::BuiltInAttr(_, expander) if expander.is_derive())
&& ast::Adt::can_cast(node.syntax().kind())
{
let adt = ast::Adt::cast(node.syntax().clone()).unwrap();
let censor_derive_input = censor_derive_input(invoc_attr_index, &adt);
(censor_derive_input, node, span)
} else {
(attr_source.into_iter().map(|it| it.syntax().clone().into()).collect(), node, span)
}
let is_derive = matches!(loc.def.kind, MacroDefKind::BuiltInAttr(_, expander) if expander.is_derive());
(is_derive, &**attr_ids, node, span)
}
};
let (mut tt, undo_info) = {
let syntax = item_node.syntax();
let censor_cfg = cfg_process::process_cfg_attrs(db, syntax, &loc).unwrap_or_default();
let mut fixups =
fixup::fixup_syntax(map.as_ref(), syntax, span, DocCommentDesugarMode::ProcMacro);
fixups.append.retain(|it, _| match it {
syntax::NodeOrToken::Token(_) => true,
it => !censor.contains(it) && !censor_cfg.contains(it),
});
fixups.remove.extend(censor);
fixups.remove.extend(censor_cfg);
(
syntax_bridge::syntax_node_to_token_tree_modified(
syntax,
map,
fixups.append,
fixups.remove,
span,
DocCommentDesugarMode::ProcMacro,
),
fixups.undo_info,
)
};
let (mut tt, undo_info) = attr_macro_input_to_token_tree(
db,
item_node.syntax(),
map.as_ref(),
span,
is_derive,
censor_item_tree_attr_ids,
loc.krate,
);
if loc.def.is_proc_macro() {
// proc macros expect their inputs without parentheses, MBEs expect it with them included
@ -557,31 +518,6 @@ fn macro_arg(db: &dyn ExpandDatabase, id: MacroCallId) -> MacroArgResult {
(Arc::new(tt), undo_info, span)
}
// FIXME: Censoring info should be calculated by the caller! Namely by name resolution
/// Derives expect all `#[derive(..)]` invocations up to (and including) the currently invoked one to be stripped
fn censor_derive_input(derive_attr_index: AttrId, node: &ast::Adt) -> FxHashSet<SyntaxElement> {
// FIXME: handle `cfg_attr`
cov_mark::hit!(derive_censoring);
collect_attrs(node)
.take(derive_attr_index.ast_index() + 1)
.filter_map(|(_, attr)| Either::left(attr))
// FIXME, this resolution should not be done syntactically
// derive is a proper macro now, no longer builtin
// But we do not have resolution at this stage, this means
// we need to know about all macro calls for the given ast item here
// so we require some kind of mapping...
.filter(|attr| attr.simple_name().as_deref() == Some("derive"))
.map(|it| it.syntax().clone().into())
.collect()
}
/// Attributes expect the invoking attribute to be stripped
fn attr_source(invoc_attr_index: AttrId, node: &ast::Item) -> Option<ast::Attr> {
// FIXME: handle `cfg_attr`
cov_mark::hit!(attribute_macro_attr_censoring);
collect_attrs(node).nth(invoc_attr_index.ast_index()).and_then(|(_, attr)| Either::left(attr))
}
impl TokenExpander {
fn macro_expander(db: &dyn ExpandDatabase, id: MacroDefId) -> TokenExpander {
match id.kind {
@ -731,7 +667,6 @@ pub(crate) fn token_tree_to_syntax_node(
db: &dyn ExpandDatabase,
tt: &tt::TopSubtree,
expand_to: ExpandTo,
edition: parser::Edition,
) -> (Parse<SyntaxNode>, ExpansionSpanMap) {
let entry_point = match expand_to {
ExpandTo::Statements => syntax_bridge::TopEntryPoint::MacroStmts,
@ -740,7 +675,7 @@ pub(crate) fn token_tree_to_syntax_node(
ExpandTo::Type => syntax_bridge::TopEntryPoint::Type,
ExpandTo::Expr => syntax_bridge::TopEntryPoint::Expr,
};
syntax_bridge::token_tree_to_syntax_node(tt, entry_point, &mut |ctx| ctx.edition(db), edition)
syntax_bridge::token_tree_to_syntax_node(tt, entry_point, &mut |ctx| ctx.edition(db))
}
fn check_tt_count(tt: &tt::TopSubtree) -> Result<(), ExpandResult<()>> {

View file

@ -1,17 +1,21 @@
//! Compiled declarative macro expanders (`macro_rules!` and `macro`)
use std::{cell::OnceCell, ops::ControlFlow};
use base_db::Crate;
use intern::sym;
use span::{Edition, Span, SyntaxContext};
use stdx::TupleExt;
use syntax::{AstNode, ast};
use syntax::{
AstNode, AstToken,
ast::{self, HasAttrs},
};
use syntax_bridge::DocCommentDesugarMode;
use triomphe::Arc;
use crate::{
AstId, ExpandError, ExpandErrorKind, ExpandResult, HirFileId, Lookup, MacroCallId,
MacroCallStyle,
attrs::RawAttrs,
attrs::{Meta, expand_cfg_attr},
db::ExpandDatabase,
hygiene::{Transparency, apply_mark},
tt,
@ -42,6 +46,7 @@ impl DeclarativeMacroExpander {
None => self
.mac
.expand(
db,
&tt,
|s| {
s.ctx =
@ -49,7 +54,6 @@ impl DeclarativeMacroExpander {
},
loc.kind.call_style(),
span,
loc.def.edition,
)
.map_err(Into::into),
}
@ -57,10 +61,10 @@ impl DeclarativeMacroExpander {
pub fn expand_unhygienic(
&self,
db: &dyn ExpandDatabase,
tt: tt::TopSubtree,
call_style: MacroCallStyle,
call_site: Span,
def_site_edition: Edition,
) -> ExpandResult<tt::TopSubtree> {
match self.mac.err() {
Some(_) => ExpandResult::new(
@ -69,7 +73,7 @@ impl DeclarativeMacroExpander {
),
None => self
.mac
.expand(&tt, |_| (), call_style, call_site, def_site_edition)
.expand(db, &tt, |_| (), call_style, call_site)
.map(TupleExt::head)
.map_err(Into::into),
}
@ -83,29 +87,28 @@ impl DeclarativeMacroExpander {
let (root, map) = crate::db::parse_with_map(db, id.file_id);
let root = root.syntax_node();
let transparency = |node| {
// ... would be nice to have the item tree here
let attrs = RawAttrs::new_expanded(db, node, map.as_ref(), def_crate.cfg_options(db));
match attrs
.iter()
.find(|it| {
it.path
.as_ident()
.map(|it| *it == sym::rustc_macro_transparency)
.unwrap_or(false)
})?
.token_tree_value()?
.token_trees()
.flat_tokens()
{
[tt::TokenTree::Leaf(tt::Leaf::Ident(i)), ..] => match &i.sym {
s if *s == sym::transparent => Some(Transparency::Transparent),
s if *s == sym::semitransparent => Some(Transparency::SemiTransparent),
s if *s == sym::opaque => Some(Transparency::Opaque),
_ => None,
let transparency = |node: ast::AnyHasAttrs| {
let cfg_options = OnceCell::new();
expand_cfg_attr(
node.attrs(),
|| cfg_options.get_or_init(|| def_crate.cfg_options(db)),
|attr, _, _, _| {
if let Meta::NamedKeyValue { name: Some(name), value, .. } = attr
&& name.text() == "rustc_macro_transparency"
&& let Some(value) = value.and_then(ast::String::cast)
&& let Ok(value) = value.value()
{
match &*value {
"transparent" => ControlFlow::Break(Transparency::Transparent),
"semitransparent" => ControlFlow::Break(Transparency::SemiTransparent),
"opaque" => ControlFlow::Break(Transparency::Opaque),
_ => ControlFlow::Continue(()),
}
} else {
ControlFlow::Continue(())
}
},
_ => None,
}
)
};
let ctx_edition = |ctx: SyntaxContext| {
if ctx.is_root() {
@ -136,7 +139,8 @@ impl DeclarativeMacroExpander {
"expected a token tree".into(),
)),
},
transparency(&macro_rules).unwrap_or(Transparency::SemiTransparent),
transparency(ast::AnyHasAttrs::from(macro_rules))
.unwrap_or(Transparency::SemiTransparent),
),
ast::Macro::MacroDef(macro_def) => (
match macro_def.body() {
@ -164,7 +168,7 @@ impl DeclarativeMacroExpander {
"expected a token tree".into(),
)),
},
transparency(&macro_def).unwrap_or(Transparency::Opaque),
transparency(macro_def.into()).unwrap_or(Transparency::Opaque),
),
};
let edition = ctx_edition(match id.file_id {

View file

@ -55,30 +55,6 @@ impl From<FilePosition> for HirFilePosition {
}
}
impl FilePositionWrapper<span::FileId> {
pub fn with_edition(self, db: &dyn ExpandDatabase, edition: span::Edition) -> FilePosition {
FilePositionWrapper {
file_id: EditionedFileId::new(db, self.file_id, edition),
offset: self.offset,
}
}
}
impl FileRangeWrapper<span::FileId> {
pub fn with_edition(self, db: &dyn ExpandDatabase, edition: span::Edition) -> FileRange {
FileRangeWrapper {
file_id: EditionedFileId::new(db, self.file_id, edition),
range: self.range,
}
}
}
impl<T> InFileWrapper<span::FileId, T> {
pub fn with_edition(self, db: &dyn ExpandDatabase, edition: span::Edition) -> InRealFile<T> {
InRealFile { file_id: EditionedFileId::new(db, self.file_id, edition), value: self.value }
}
}
impl HirFileRange {
pub fn file_range(self) -> Option<FileRange> {
Some(FileRange { file_id: self.file_id.file_id()?, range: self.range })
@ -407,7 +383,7 @@ impl InFile<SyntaxToken> {
// Fall back to whole macro call.
let loc = db.lookup_intern_macro_call(mac_file);
loc.kind.original_call_range(db)
loc.kind.original_call_range(db, loc.krate)
}
}
}
@ -453,7 +429,10 @@ impl InFile<TextRange> {
Some(it) => it,
None => {
let loc = db.lookup_intern_macro_call(mac_file);
(loc.kind.original_call_range(db), SyntaxContext::root(loc.def.edition))
(
loc.kind.original_call_range(db, loc.krate),
SyntaxContext::root(loc.def.edition),
)
}
}
}
@ -468,7 +447,7 @@ impl InFile<TextRange> {
Some(it) => it,
_ => {
let loc = db.lookup_intern_macro_call(mac_file);
loc.kind.original_call_range(db)
loc.kind.original_call_range(db, loc.krate)
}
}
}

View file

@ -523,6 +523,7 @@ mod tests {
fixups.remove,
span_map.span_for_range(TextRange::empty(0.into())),
DocCommentDesugarMode::Mbe,
|_, _| (true, Vec::new()),
);
let actual = format!("{tt}\n");
@ -535,7 +536,6 @@ mod tests {
&tt,
syntax_bridge::TopEntryPoint::MacroItems,
&mut |_| parser::Edition::CURRENT,
parser::Edition::CURRENT,
);
assert!(
parse.errors().is_empty(),
@ -698,7 +698,7 @@ fn foo() {
}
"#,
expect![[r#"
fn foo () {a . __ra_fixup ;}
fn foo () {a .__ra_fixup ;}
"#]],
)
}
@ -713,7 +713,7 @@ fn foo() {
}
"#,
expect![[r#"
fn foo () {a . __ra_fixup ; bar () ;}
fn foo () {a .__ra_fixup ; bar () ;}
"#]],
)
}

View file

@ -25,18 +25,17 @@ mod cfg_process;
mod fixup;
mod prettify_macro_expansion_;
use attrs::collect_attrs;
use rustc_hash::FxHashMap;
use salsa::plumbing::{AsId, FromId};
use stdx::TupleExt;
use thin_vec::ThinVec;
use triomphe::Arc;
use core::fmt;
use std::hash::Hash;
use std::{hash::Hash, ops};
use base_db::Crate;
use either::Either;
use span::{Edition, ErasedFileAstId, FileAstId, Span, SpanAnchor, SyntaxContext};
use span::{Edition, ErasedFileAstId, FileAstId, Span, SyntaxContext};
use syntax::{
SyntaxNode, SyntaxToken, TextRange, TextSize,
ast::{self, AstNode},
@ -317,9 +316,6 @@ pub enum MacroCallKind {
Derive {
ast_id: AstId<ast::Adt>,
/// Syntactical index of the invoking `#[derive]` attribute.
///
/// Outer attributes are counted first, then inner attributes. This does not support
/// out-of-line modules, which may have attributes spread across 2 files!
derive_attr_index: AttrId,
/// Index of the derive macro in the derive attribute
derive_index: u32,
@ -329,17 +325,68 @@ pub enum MacroCallKind {
},
Attr {
ast_id: AstId<ast::Item>,
// FIXME: This shouldn't be here, we can derive this from `invoc_attr_index`
// but we need to fix the `cfg_attr` handling first.
// FIXME: This shouldn't be here, we can derive this from `invoc_attr_index`.
attr_args: Option<Arc<tt::TopSubtree>>,
/// Syntactical index of the invoking `#[attribute]`.
/// This contains the list of all *active* attributes (derives and attr macros) preceding this
/// attribute, including this attribute. You can retrieve the [`AttrId`] of the current attribute
/// by calling [`invoc_attr()`] on this.
///
/// Outer attributes are counted first, then inner attributes. This does not support
/// out-of-line modules, which may have attributes spread across 2 files!
invoc_attr_index: AttrId,
/// The macro should not see the attributes here.
///
/// [`invoc_attr()`]: AttrMacroAttrIds::invoc_attr
censored_attr_ids: AttrMacroAttrIds,
},
}
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub struct AttrMacroAttrIds(AttrMacroAttrIdsRepr);
impl AttrMacroAttrIds {
#[inline]
pub fn from_one(id: AttrId) -> Self {
Self(AttrMacroAttrIdsRepr::One(id))
}
#[inline]
pub fn from_many(ids: &[AttrId]) -> Self {
if let &[id] = ids {
Self(AttrMacroAttrIdsRepr::One(id))
} else {
Self(AttrMacroAttrIdsRepr::ManyDerives(ids.iter().copied().collect()))
}
}
}
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
enum AttrMacroAttrIdsRepr {
One(AttrId),
ManyDerives(ThinVec<AttrId>),
}
impl ops::Deref for AttrMacroAttrIds {
type Target = [AttrId];
#[inline]
fn deref(&self) -> &Self::Target {
match &self.0 {
AttrMacroAttrIdsRepr::One(one) => std::slice::from_ref(one),
AttrMacroAttrIdsRepr::ManyDerives(many) => many,
}
}
}
impl AttrMacroAttrIds {
#[inline]
pub fn invoc_attr(&self) -> AttrId {
match &self.0 {
AttrMacroAttrIdsRepr::One(it) => *it,
AttrMacroAttrIdsRepr::ManyDerives(it) => {
*it.last().expect("should always have at least one `AttrId`")
}
}
}
}
impl MacroCallKind {
pub(crate) fn call_style(&self) -> MacroCallStyle {
match self {
@ -597,34 +644,20 @@ impl MacroDefId {
impl MacroCallLoc {
pub fn to_node(&self, db: &dyn ExpandDatabase) -> InFile<SyntaxNode> {
match self.kind {
match &self.kind {
MacroCallKind::FnLike { ast_id, .. } => {
ast_id.with_value(ast_id.to_node(db).syntax().clone())
}
MacroCallKind::Derive { ast_id, derive_attr_index, .. } => {
// FIXME: handle `cfg_attr`
ast_id.with_value(ast_id.to_node(db)).map(|it| {
collect_attrs(&it)
.nth(derive_attr_index.ast_index())
.and_then(|it| match it.1 {
Either::Left(attr) => Some(attr.syntax().clone()),
Either::Right(_) => None,
})
.unwrap_or_else(|| it.syntax().clone())
})
let (attr, _, _, _) = derive_attr_index.find_attr_range(db, self.krate, *ast_id);
ast_id.with_value(attr.syntax().clone())
}
MacroCallKind::Attr { ast_id, invoc_attr_index, .. } => {
MacroCallKind::Attr { ast_id, censored_attr_ids: attr_ids, .. } => {
if self.def.is_attribute_derive() {
// FIXME: handle `cfg_attr`
ast_id.with_value(ast_id.to_node(db)).map(|it| {
collect_attrs(&it)
.nth(invoc_attr_index.ast_index())
.and_then(|it| match it.1 {
Either::Left(attr) => Some(attr.syntax().clone()),
Either::Right(_) => None,
})
.unwrap_or_else(|| it.syntax().clone())
})
let (attr, _, _, _) =
attr_ids.invoc_attr().find_attr_range(db, self.krate, *ast_id);
ast_id.with_value(attr.syntax().clone())
} else {
ast_id.with_value(ast_id.to_node(db).syntax().clone())
}
@ -729,7 +762,7 @@ impl MacroCallKind {
/// Here we try to roughly match what rustc does to improve diagnostics: fn-like macros
/// get the macro path (rustc shows the whole `ast::MacroCall`), attribute macros get the
/// attribute's range, and derives get only the specific derive that is being referred to.
pub fn original_call_range(self, db: &dyn ExpandDatabase) -> FileRange {
pub fn original_call_range(self, db: &dyn ExpandDatabase, krate: Crate) -> FileRange {
let mut kind = self;
let file_id = loop {
match kind.file_id() {
@ -751,24 +784,11 @@ impl MacroCallKind {
}
MacroCallKind::Derive { ast_id, derive_attr_index, .. } => {
// FIXME: should be the range of the macro name, not the whole derive
// FIXME: handle `cfg_attr`
collect_attrs(&ast_id.to_node(db))
.nth(derive_attr_index.ast_index())
.expect("missing derive")
.1
.expect_left("derive is a doc comment?")
.syntax()
.text_range()
derive_attr_index.find_attr_range(db, krate, ast_id).2
}
// FIXME: handle `cfg_attr`
MacroCallKind::Attr { ast_id, invoc_attr_index, .. } => {
collect_attrs(&ast_id.to_node(db))
.nth(invoc_attr_index.ast_index())
.expect("missing attribute")
.1
.expect_left("attribute macro is a doc comment?")
.syntax()
.text_range()
MacroCallKind::Attr { ast_id, censored_attr_ids: attr_ids, .. } => {
attr_ids.invoc_attr().find_attr_range(db, krate, ast_id).2
}
};
@ -887,7 +907,8 @@ impl ExpansionInfo {
let span = self.exp_map.span_at(token.start());
match &self.arg_map {
SpanMap::RealSpanMap(_) => {
let file_id = EditionedFileId::from_span(db, span.anchor.file_id).into();
let file_id =
EditionedFileId::from_span_guess_origin(db, span.anchor.file_id).into();
let anchor_offset =
db.ast_id_map(file_id).get_erased(span.anchor.ast_id).text_range().start();
InFile { file_id, value: smallvec::smallvec![span.range + anchor_offset] }
@ -943,7 +964,7 @@ pub fn map_node_range_up_rooted(
start = start.min(span.range.start());
end = end.max(span.range.end());
}
let file_id = EditionedFileId::from_span(db, anchor.file_id);
let file_id = EditionedFileId::from_span_guess_origin(db, anchor.file_id);
let anchor_offset =
db.ast_id_map(file_id.into()).get_erased(anchor.ast_id).text_range().start();
Some(FileRange { file_id, range: TextRange::new(start, end) + anchor_offset })
@ -969,36 +990,12 @@ pub fn map_node_range_up(
start = start.min(span.range.start());
end = end.max(span.range.end());
}
let file_id = EditionedFileId::from_span(db, anchor.file_id);
let file_id = EditionedFileId::from_span_guess_origin(db, anchor.file_id);
let anchor_offset =
db.ast_id_map(file_id.into()).get_erased(anchor.ast_id).text_range().start();
Some((FileRange { file_id, range: TextRange::new(start, end) + anchor_offset }, ctx))
}
/// Maps up the text range out of the expansion hierarchy back into the original file its from.
/// This version will aggregate the ranges of all spans with the same anchor and syntax context.
pub fn map_node_range_up_aggregated(
db: &dyn ExpandDatabase,
exp_map: &ExpansionSpanMap,
range: TextRange,
) -> FxHashMap<(SpanAnchor, SyntaxContext), TextRange> {
let mut map = FxHashMap::default();
for span in exp_map.spans_for_range(range) {
let range = map.entry((span.anchor, span.ctx)).or_insert_with(|| span.range);
*range = TextRange::new(
range.start().min(span.range.start()),
range.end().max(span.range.end()),
);
}
for ((anchor, _), range) in &mut map {
let file_id = EditionedFileId::from_span(db, anchor.file_id);
let anchor_offset =
db.ast_id_map(file_id.into()).get_erased(anchor.ast_id).text_range().start();
*range += anchor_offset;
}
map
}
/// Looks up the span at the given offset.
pub fn span_for_offset(
db: &dyn ExpandDatabase,
@ -1006,7 +1003,7 @@ pub fn span_for_offset(
offset: TextSize,
) -> (FileRange, SyntaxContext) {
let span = exp_map.span_at(offset);
let file_id = EditionedFileId::from_span(db, span.anchor.file_id);
let file_id = EditionedFileId::from_span_guess_origin(db, span.anchor.file_id);
let anchor_offset =
db.ast_id_map(file_id.into()).get_erased(span.anchor.ast_id).text_range().start();
(FileRange { file_id, range: span.range + anchor_offset }, span.ctx)
@ -1076,7 +1073,7 @@ impl ExpandTo {
}
}
intern::impl_internable!(ModPath, attrs::AttrInput);
intern::impl_internable!(ModPath);
#[salsa_macros::interned(no_lifetime, debug, revisions = usize::MAX)]
#[doc(alias = "MacroFileId")]
@ -1139,6 +1136,14 @@ impl HirFileId {
HirFileId::MacroFile(_) => None,
}
}
#[inline]
pub fn krate(self, db: &dyn ExpandDatabase) -> Crate {
match self {
HirFileId::FileId(it) => it.krate(db),
HirFileId::MacroFile(it) => it.loc(db).krate,
}
}
}
impl PartialEq<EditionedFileId> for HirFileId {

View file

@ -2,7 +2,7 @@
use std::{
fmt::{self, Display as _},
iter,
iter::{self, Peekable},
};
use crate::{
@ -12,10 +12,11 @@ use crate::{
tt,
};
use base_db::Crate;
use intern::sym;
use intern::{Symbol, sym};
use parser::T;
use smallvec::SmallVec;
use span::{Edition, SyntaxContext};
use syntax::{AstNode, ast};
use syntax::{AstNode, SyntaxToken, ast};
#[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)]
pub struct ModPath {
@ -64,6 +65,58 @@ impl ModPath {
ModPath { kind, segments: SmallVec::new_const() }
}
pub fn from_tokens(
db: &dyn ExpandDatabase,
span_for_range: &mut dyn FnMut(::tt::TextRange) -> SyntaxContext,
is_abs: bool,
segments: impl Iterator<Item = SyntaxToken>,
) -> Option<ModPath> {
let mut segments = segments.peekable();
let mut result = SmallVec::new_const();
let path_kind = if is_abs {
PathKind::Abs
} else {
let first = segments.next()?;
match first.kind() {
T![crate] => PathKind::Crate,
T![self] => PathKind::Super(handle_super(&mut segments)),
T![super] => PathKind::Super(1 + handle_super(&mut segments)),
T![ident] => {
let first_text = first.text();
if first_text == "$crate" {
let ctxt = span_for_range(first.text_range());
resolve_crate_root(db, ctxt)
.map(PathKind::DollarCrate)
.unwrap_or(PathKind::Crate)
} else {
result.push(Name::new_symbol_root(Symbol::intern(first_text)));
PathKind::Plain
}
}
_ => return None,
}
};
for segment in segments {
if segment.kind() != T![ident] {
return None;
}
result.push(Name::new_symbol_root(Symbol::intern(segment.text())));
}
if result.is_empty() {
return None;
}
result.shrink_to_fit();
return Some(ModPath { kind: path_kind, segments: result });
fn handle_super(segments: &mut Peekable<impl Iterator<Item = SyntaxToken>>) -> u8 {
let mut result = 0;
while segments.next_if(|it| it.kind() == T![super]).is_some() {
result += 1;
}
result
}
}
pub fn segments(&self) -> &[Name] {
&self.segments
}

View file

@ -1,13 +1,12 @@
//! Span maps for real files and macro expansions.
use span::{Span, SyntaxContext};
use stdx::TupleExt;
use syntax::{AstNode, TextRange, ast};
use triomphe::Arc;
pub use span::RealSpanMap;
use crate::{HirFileId, MacroCallId, attrs::collect_attrs, db::ExpandDatabase};
use crate::{HirFileId, MacroCallId, db::ExpandDatabase};
pub type ExpansionSpanMap = span::SpanMap<SyntaxContext>;
@ -110,26 +109,24 @@ pub(crate) fn real_span_map(
// them anchors too, but only if they have no attributes attached, as those might be proc-macros
// and using different anchors inside of them will prevent spans from being joinable.
tree.items().for_each(|item| match &item {
ast::Item::ExternBlock(it)
if !collect_attrs(it).map(TupleExt::tail).any(|it| it.is_left()) =>
{
ast::Item::ExternBlock(it) if ast::attrs_including_inner(it).next().is_none() => {
if let Some(extern_item_list) = it.extern_item_list() {
pairs.extend(
extern_item_list.extern_items().map(ast::Item::from).map(item_to_entry),
);
}
}
ast::Item::Impl(it) if !collect_attrs(it).map(TupleExt::tail).any(|it| it.is_left()) => {
ast::Item::Impl(it) if ast::attrs_including_inner(it).next().is_none() => {
if let Some(assoc_item_list) = it.assoc_item_list() {
pairs.extend(assoc_item_list.assoc_items().map(ast::Item::from).map(item_to_entry));
}
}
ast::Item::Module(it) if !collect_attrs(it).map(TupleExt::tail).any(|it| it.is_left()) => {
ast::Item::Module(it) if ast::attrs_including_inner(it).next().is_none() => {
if let Some(item_list) = it.item_list() {
pairs.extend(item_list.items().map(item_to_entry));
}
}
ast::Item::Trait(it) if !collect_attrs(it).map(TupleExt::tail).any(|it| it.is_left()) => {
ast::Item::Trait(it) if ast::attrs_including_inner(it).next().is_none() => {
if let Some(assoc_item_list) = it.assoc_item_list() {
pairs.extend(assoc_item_list.assoc_items().map(ast::Item::from).map(item_to_entry));
}

View file

@ -53,6 +53,7 @@ hir-expand.workspace = true
base-db.workspace = true
syntax.workspace = true
span.workspace = true
thin-vec = "0.2.14"
[dev-dependencies]
expect-test = "1.5.1"

View file

@ -5,7 +5,7 @@
use std::fmt;
use hir_def::{TraitId, TypeAliasId, lang_item::LangItem};
use hir_def::{TraitId, TypeAliasId};
use rustc_type_ir::inherent::{IntoKind, Ty as _};
use tracing::debug;
use triomphe::Arc;
@ -38,7 +38,7 @@ pub fn autoderef<'db>(
env: Arc<TraitEnvironment<'db>>,
ty: Canonical<'db, Ty<'db>>,
) -> impl Iterator<Item = Ty<'db>> + use<'db> {
let interner = DbInterner::new_with(db, Some(env.krate), env.block);
let interner = DbInterner::new_with(db, env.krate);
let infcx = interner.infer_ctxt().build(TypingMode::PostAnalysis);
let (ty, _) = infcx.instantiate_canonical(&ty);
let autoderef = Autoderef::new(&infcx, &env, ty);
@ -301,36 +301,28 @@ where
self.infcx().interner
}
#[inline]
fn db(&self) -> &'db dyn HirDatabase {
self.interner().db
}
fn autoderef_traits(&mut self) -> Option<AutoderefTraits> {
let lang_items = self.interner().lang_items();
match &mut self.traits {
Some(it) => Some(*it),
None => {
let traits = if self.use_receiver_trait {
(|| {
Some(AutoderefTraits {
trait_: LangItem::Receiver
.resolve_trait(self.db(), self.env().krate)?,
trait_target: LangItem::ReceiverTarget
.resolve_type_alias(self.db(), self.env().krate)?,
trait_: lang_items.Receiver?,
trait_target: lang_items.ReceiverTarget?,
})
})()
.or_else(|| {
Some(AutoderefTraits {
trait_: LangItem::Deref.resolve_trait(self.db(), self.env().krate)?,
trait_target: LangItem::DerefTarget
.resolve_type_alias(self.db(), self.env().krate)?,
trait_: lang_items.Deref?,
trait_target: lang_items.DerefTarget?,
})
})?
} else {
AutoderefTraits {
trait_: LangItem::Deref.resolve_trait(self.db(), self.env().krate)?,
trait_target: LangItem::DerefTarget
.resolve_type_alias(self.db(), self.env().krate)?,
trait_: lang_items.Deref?,
trait_target: lang_items.DerefTarget?,
}
};
Some(*self.traits.insert(traits))

View file

@ -6,6 +6,7 @@ mod tests;
use base_db::Crate;
use hir_def::{
ConstId, EnumVariantId, GeneralConstId, StaticId,
attrs::AttrFlags,
expr_store::Body,
hir::{Expr, ExprId},
type_ref::LiteralConstRef,
@ -83,7 +84,7 @@ pub fn intern_const_ref<'a>(
ty: Ty<'a>,
krate: Crate,
) -> Const<'a> {
let interner = DbInterner::new_with(db, Some(krate), None);
let interner = DbInterner::new_no_crate(db);
let layout = db.layout_of_ty(ty, TraitEnvironment::empty(krate));
let kind = match value {
LiteralConstRef::Int(i) => {
@ -128,7 +129,7 @@ pub fn usize_const<'db>(db: &'db dyn HirDatabase, value: Option<u128>, krate: Cr
intern_const_ref(
db,
&value.map_or(LiteralConstRef::Unknown, LiteralConstRef::UInt),
Ty::new_uint(DbInterner::new_with(db, Some(krate), None), rustc_type_ir::UintTy::Usize),
Ty::new_uint(DbInterner::new_no_crate(db), rustc_type_ir::UintTy::Usize),
krate,
)
}
@ -183,7 +184,7 @@ pub(crate) fn const_eval_discriminant_variant<'db>(
db: &'db dyn HirDatabase,
variant_id: EnumVariantId,
) -> Result<i128, ConstEvalError<'db>> {
let interner = DbInterner::new_with(db, None, None);
let interner = DbInterner::new_no_crate(db);
let def = variant_id.into();
let body = db.body(def);
let loc = variant_id.lookup(db);
@ -200,7 +201,7 @@ pub(crate) fn const_eval_discriminant_variant<'db>(
return Ok(value);
}
let repr = db.enum_signature(loc.parent).repr;
let repr = AttrFlags::repr(db, loc.parent.into());
let is_signed = repr.and_then(|repr| repr.int).is_none_or(|int| int.is_signed());
let mir_body = db.monomorphized_mir_body(
@ -292,7 +293,7 @@ pub(crate) fn const_eval_static_query<'db>(
db: &'db dyn HirDatabase,
def: StaticId,
) -> Result<Const<'db>, ConstEvalError<'db>> {
let interner = DbInterner::new_with(db, None, None);
let interner = DbInterner::new_no_crate(db);
let body = db.monomorphized_mir_body(
def.into(),
GenericArgs::new_from_iter(interner, []),

View file

@ -123,7 +123,7 @@ fn pretty_print_err(e: ConstEvalError<'_>, db: &TestDB) -> String {
fn eval_goal(db: &TestDB, file_id: EditionedFileId) -> Result<Const<'_>, ConstEvalError<'_>> {
let _tracing = setup_tracing();
let interner = DbInterner::new_with(db, None, None);
let interner = DbInterner::new_no_crate(db);
let module_id = db.module_for_file(file_id.file_id(db));
let def_map = module_id.def_map(db);
let scope = &def_map[module_id.local_id].scope;

View file

@ -12,7 +12,7 @@ use salsa::plumbing::AsId;
use triomphe::Arc;
use crate::{
ImplTraitId, InferenceResult, TraitEnvironment, TyDefId, ValueTyDefId,
ImplTraitId, TraitEnvironment, TyDefId, ValueTyDefId,
consteval::ConstEvalError,
dyn_compatibility::DynCompatibilityViolation,
layout::{Layout, LayoutError},
@ -23,10 +23,6 @@ use crate::{
#[query_group::query_group]
pub trait HirDatabase: DefDatabase + std::fmt::Debug {
#[salsa::invoke(crate::infer::infer_query)]
#[salsa::cycle(cycle_result = crate::infer::infer_cycle_result)]
fn infer<'db>(&'db self, def: DefWithBodyId) -> Arc<InferenceResult<'db>>;
// region:mir
    // FIXME: Collapse `mir_body_for_closure` into `mir_body`

View file

@ -17,8 +17,8 @@ use std::fmt;
use hir_def::{
AdtId, ConstId, EnumId, EnumVariantId, FunctionId, HasModule, ItemContainerId, Lookup,
ModuleDefId, ModuleId, StaticId, StructId, TraitId, TypeAliasId, db::DefDatabase, hir::Pat,
item_tree::FieldsShape, signatures::StaticFlags, src::HasSource,
ModuleDefId, ModuleId, StaticId, StructId, TraitId, TypeAliasId, attrs::AttrFlags,
db::DefDatabase, hir::Pat, item_tree::FieldsShape, signatures::StaticFlags, src::HasSource,
};
use hir_expand::{
HirFileId,
@ -201,7 +201,7 @@ impl<'a> DeclValidator<'a> {
// Don't run the lint on extern "[not Rust]" fn items with the
// #[no_mangle] attribute.
let no_mangle = self.db.attrs(func.into()).by_key(sym::no_mangle).exists();
let no_mangle = AttrFlags::query(self.db, func.into()).contains(AttrFlags::NO_MANGLE);
if no_mangle && data.abi.as_ref().is_some_and(|abi| *abi != sym::Rust) {
cov_mark::hit!(extern_func_no_mangle_ignored);
} else {
@ -563,7 +563,7 @@ impl<'a> DeclValidator<'a> {
cov_mark::hit!(extern_static_incorrect_case_ignored);
return;
}
if self.db.attrs(static_id.into()).by_key(sym::no_mangle).exists() {
if AttrFlags::query(self.db, static_id.into()).contains(AttrFlags::NO_MANGLE) {
cov_mark::hit!(no_mangle_static_incorrect_case_ignored);
return;
}

View file

@ -8,7 +8,7 @@ use base_db::Crate;
use either::Either;
use hir_def::{
AdtId, AssocItemId, DefWithBodyId, HasModule, ItemContainerId, Lookup,
lang_item::LangItem,
lang_item::LangItems,
resolver::{HasResolver, ValueNs},
};
use intern::sym;
@ -76,10 +76,10 @@ impl BodyValidationDiagnostic {
validate_lints: bool,
) -> Vec<BodyValidationDiagnostic> {
let _p = tracing::info_span!("BodyValidationDiagnostic::collect").entered();
let infer = db.infer(owner);
let infer = InferenceResult::for_body(db, owner);
let body = db.body(owner);
let env = db.trait_environment_for_body(owner);
let interner = DbInterner::new_with(db, Some(env.krate), env.block);
let interner = DbInterner::new_with(db, env.krate);
let infcx =
interner.infer_ctxt().build(TypingMode::typeck_for_body(interner, owner.into()));
let mut validator = ExprValidator {
@ -99,7 +99,7 @@ impl BodyValidationDiagnostic {
struct ExprValidator<'db> {
owner: DefWithBodyId,
body: Arc<Body>,
infer: Arc<InferenceResult<'db>>,
infer: &'db InferenceResult<'db>,
env: Arc<TraitEnvironment<'db>>,
diagnostics: Vec<BodyValidationDiagnostic>,
validate_lints: bool,
@ -124,7 +124,7 @@ impl<'db> ExprValidator<'db> {
for (id, expr) in body.exprs() {
if let Some((variant, missed_fields, true)) =
record_literal_missing_fields(db, &self.infer, id, expr)
record_literal_missing_fields(db, self.infer, id, expr)
{
self.diagnostics.push(BodyValidationDiagnostic::RecordMissingFields {
record: Either::Left(id),
@ -155,7 +155,7 @@ impl<'db> ExprValidator<'db> {
for (id, pat) in body.pats() {
if let Some((variant, missed_fields, true)) =
record_pattern_missing_fields(db, &self.infer, id, pat)
record_pattern_missing_fields(db, self.infer, id, pat)
{
self.diagnostics.push(BodyValidationDiagnostic::RecordMissingFields {
record: Either::Right(id),
@ -187,7 +187,7 @@ impl<'db> ExprValidator<'db> {
};
let checker = filter_map_next_checker.get_or_insert_with(|| {
FilterMapNextChecker::new(&self.owner.resolver(self.db()), self.db())
FilterMapNextChecker::new(self.infcx.interner.lang_items(), self.db())
});
if checker.check(call_id, receiver, &callee).is_some() {
@ -240,7 +240,7 @@ impl<'db> ExprValidator<'db> {
.as_reference()
.map(|(match_expr_ty, ..)| match_expr_ty == pat_ty)
.unwrap_or(false))
&& types_of_subpatterns_do_match(arm.pat, &self.body, &self.infer)
&& types_of_subpatterns_do_match(arm.pat, &self.body, self.infer)
{
// If we had a NotUsefulMatchArm diagnostic, we could
// check the usefulness of each pattern as we added it
@ -388,7 +388,7 @@ impl<'db> ExprValidator<'db> {
pat: PatId,
have_errors: &mut bool,
) -> DeconstructedPat<'a, 'db> {
let mut patcx = match_check::PatCtxt::new(self.db(), &self.infer, &self.body);
let mut patcx = match_check::PatCtxt::new(self.db(), self.infer, &self.body);
let pattern = patcx.lower_pattern(pat);
let pattern = cx.lower_pat(&pattern);
if !patcx.errors.is_empty() {
@ -497,11 +497,9 @@ struct FilterMapNextChecker<'db> {
}
impl<'db> FilterMapNextChecker<'db> {
fn new(resolver: &hir_def::resolver::Resolver<'db>, db: &'db dyn HirDatabase) -> Self {
fn new(lang_items: &'db LangItems, db: &'db dyn HirDatabase) -> Self {
// Find and store the FunctionIds for Iterator::filter_map and Iterator::next
let (next_function_id, filter_map_function_id) = match LangItem::IteratorNext
.resolve_function(db, resolver.krate())
{
let (next_function_id, filter_map_function_id) = match lang_items.IteratorNext {
Some(next_function_id) => (
Some(next_function_id),
match next_function_id.lookup(db).container {

View file

@ -2,7 +2,9 @@
use std::{cell::LazyCell, fmt};
use hir_def::{EnumId, EnumVariantId, HasModule, LocalFieldId, ModuleId, VariantId};
use hir_def::{
EnumId, EnumVariantId, HasModule, LocalFieldId, ModuleId, VariantId, attrs::AttrFlags,
};
use intern::sym;
use rustc_pattern_analysis::{
IndexVec, PatCx, PrivateUninhabitedField,
@ -118,7 +120,7 @@ impl<'a, 'db> MatchCheckCtx<'a, 'db> {
/// Returns whether the given ADT is from another crate declared `#[non_exhaustive]`.
fn is_foreign_non_exhaustive(&self, adt: hir_def::AdtId) -> bool {
let is_local = adt.krate(self.db) == self.module.krate();
!is_local && self.db.attrs(adt.into()).by_key(sym::non_exhaustive).exists()
!is_local && AttrFlags::query(self.db, adt.into()).contains(AttrFlags::NON_EXHAUSTIVE)
}
fn variant_id_for_adt(

View file

@ -42,7 +42,7 @@ pub fn missing_unsafe(db: &dyn HirDatabase, def: DefWithBodyId) -> MissingUnsafe
let mut res = MissingUnsafeResult { fn_is_unsafe: is_unsafe, ..MissingUnsafeResult::default() };
let body = db.body(def);
let infer = db.infer(def);
let infer = InferenceResult::for_body(db, def);
let mut callback = |diag| match diag {
UnsafeDiagnostic::UnsafeOperation { node, inside_unsafe_block, reason } => {
if inside_unsafe_block == InsideUnsafeBlock::No {
@ -55,7 +55,7 @@ pub fn missing_unsafe(db: &dyn HirDatabase, def: DefWithBodyId) -> MissingUnsafe
}
}
};
let mut visitor = UnsafeVisitor::new(db, &infer, &body, def, &mut callback);
let mut visitor = UnsafeVisitor::new(db, infer, &body, def, &mut callback);
visitor.walk_expr(body.body_expr);
if !is_unsafe {
@ -144,7 +144,7 @@ struct UnsafeVisitor<'db> {
inside_assignment: bool,
inside_union_destructure: bool,
callback: &'db mut dyn FnMut(UnsafeDiagnostic),
def_target_features: TargetFeatures,
def_target_features: TargetFeatures<'db>,
// FIXME: This needs to be the edition of the span of each call.
edition: Edition,
/// On some targets (WASM), calling safe functions with `#[target_feature]` is always safe, even when
@ -162,7 +162,7 @@ impl<'db> UnsafeVisitor<'db> {
) -> Self {
let resolver = def.resolver(db);
let def_target_features = match def {
DefWithBodyId::FunctionId(func) => TargetFeatures::from_attrs(&db.attrs(func.into())),
DefWithBodyId::FunctionId(func) => TargetFeatures::from_fn(db, func),
_ => TargetFeatures::default(),
};
let krate = resolver.module().krate();

View file

@ -17,7 +17,7 @@ use hir_def::{
hir::generics::{TypeOrConstParamData, TypeParamProvenance, WherePredicate},
item_scope::ItemInNs,
item_tree::FieldsShape,
lang_item::LangItem,
lang_item::LangItems,
nameres::DefMap,
signatures::VariantFields,
type_ref::{
@ -47,7 +47,7 @@ use stdx::never;
use triomphe::Arc;
use crate::{
CallableDefId, FnAbi, ImplTraitId, MemoryMap, TraitEnvironment, consteval,
CallableDefId, FnAbi, ImplTraitId, InferenceResult, MemoryMap, TraitEnvironment, consteval,
db::{HirDatabase, InternedClosure, InternedCoroutine},
generics::generics,
layout::Layout,
@ -61,7 +61,7 @@ use crate::{
infer::{DbInternerInferExt, traits::ObligationCause},
},
primitive,
utils::{self, detect_variant_from_bytes},
utils::{detect_variant_from_bytes, fn_traits},
};
pub trait HirWrite: fmt::Write {
@ -309,8 +309,7 @@ pub trait HirDisplay<'db> {
allow_opaque: bool,
) -> Result<String, DisplaySourceCodeError> {
let mut result = String::new();
let interner =
DbInterner::new_with(db, Some(module_id.krate()), module_id.containing_block());
let interner = DbInterner::new_with(db, module_id.krate());
match self.hir_fmt(&mut HirFormatter {
db,
interner,
@ -392,6 +391,11 @@ impl<'db> HirFormatter<'_, 'db> {
self.display_target.edition
}
#[inline]
pub fn lang_items(&self) -> &'db LangItems {
self.interner.lang_items()
}
pub fn write_joined<T: HirDisplay<'db>>(
&mut self,
iter: impl IntoIterator<Item = T>,
@ -540,11 +544,7 @@ pub enum ClosureStyle {
impl<'db, T: HirDisplay<'db>> HirDisplayWrapper<'_, 'db, T> {
pub fn write_to<F: HirWrite>(&self, f: &mut F) -> Result<(), HirDisplayError> {
let krate = self.display_target.krate;
let block = match self.display_kind {
DisplayKind::SourceCode { target_module_id, .. } => target_module_id.containing_block(),
DisplayKind::Diagnostics | DisplayKind::Test => None,
};
let interner = DbInterner::new_with(self.db, Some(krate), block);
let interner = DbInterner::new_with(self.db, krate);
self.t.hir_fmt(&mut HirFormatter {
db: self.db,
interner,
@ -1102,7 +1102,7 @@ impl<'db> HirDisplay<'db> for Ty<'db> {
bounds.iter().any(|bound| match bound.skip_binder() {
ExistentialPredicate::Trait(trait_ref) => {
let trait_ = trait_ref.def_id.0;
fn_traits(db, trait_).any(|it| it == trait_)
fn_traits(f.lang_items()).any(|it| it == trait_)
}
_ => false,
});
@ -1146,7 +1146,7 @@ impl<'db> HirDisplay<'db> for Ty<'db> {
let contains_impl_fn = bounds().any(|bound| {
if let ClauseKind::Trait(trait_ref) = bound.kind().skip_binder() {
let trait_ = trait_ref.def_id().0;
fn_traits(db, trait_).any(|it| it == trait_)
fn_traits(f.lang_items()).any(|it| it == trait_)
} else {
false
}
@ -1394,7 +1394,7 @@ impl<'db> HirDisplay<'db> for Ty<'db> {
if let Some(sig) = sig {
let sig = sig.skip_binder();
let InternedClosure(def, _) = db.lookup_intern_closure(id);
let infer = db.infer(def);
let infer = InferenceResult::for_body(db, def);
let (_, kind) = infer.closure_info(id);
match f.closure_style {
ClosureStyle::ImplFn => write!(f, "impl {kind:?}(")?,
@ -1588,8 +1588,7 @@ impl<'db> HirDisplay<'db> for Ty<'db> {
..
}
| hir_def::hir::Expr::Async { .. } => {
let future_trait =
LangItem::Future.resolve_trait(db, owner.module(db).krate());
let future_trait = f.lang_items().Future;
let output = future_trait.and_then(|t| {
t.trait_items(db)
.associated_type_by_name(&Name::new_symbol_root(sym::Output))
@ -1799,11 +1798,6 @@ impl<'db> HirDisplay<'db> for Term<'db> {
}
}
fn fn_traits(db: &dyn DefDatabase, trait_: TraitId) -> impl Iterator<Item = TraitId> + '_ {
let krate = trait_.lookup(db).container.krate();
utils::fn_traits(db, krate)
}
#[derive(Clone, Copy, PartialEq, Eq)]
pub enum SizedByDefault {
NotSized,
@ -1815,7 +1809,7 @@ impl SizedByDefault {
match self {
Self::NotSized => false,
Self::Sized { anchor } => {
let sized_trait = LangItem::Sized.resolve_trait(db, anchor);
let sized_trait = hir_def::lang_item::lang_items(db, anchor).Sized;
Some(trait_) == sized_trait
}
}
@ -1868,7 +1862,7 @@ fn write_bounds_like_dyn_trait<'db>(
}
}
if !is_fn_trait {
is_fn_trait = fn_traits(f.db, trait_).any(|it| it == trait_);
is_fn_trait = fn_traits(f.lang_items()).any(|it| it == trait_);
}
if !is_fn_trait && angle_open {
write!(f, ">")?;
@ -1966,7 +1960,7 @@ fn write_bounds_like_dyn_trait<'db>(
write!(f, ">")?;
}
if let SizedByDefault::Sized { anchor } = default_sized {
let sized_trait = LangItem::Sized.resolve_trait(f.db, anchor);
let sized_trait = hir_def::lang_item::lang_items(f.db, anchor).Sized;
if !is_sized {
if !first {
write!(f, " + ")?;

View file

@ -1,29 +1,29 @@
//! Utilities for computing drop info about types.
use hir_def::{AdtId, lang_item::LangItem, signatures::StructFlags};
use hir_def::{AdtId, signatures::StructFlags};
use rustc_hash::FxHashSet;
use rustc_type_ir::inherent::{AdtDef, IntoKind, SliceLike};
use stdx::never;
use triomphe::Arc;
use crate::{
TraitEnvironment, consteval,
db::HirDatabase,
InferenceResult, TraitEnvironment, consteval,
method_resolution::TraitImpls,
next_solver::{
SimplifiedType, Ty, TyKind,
DbInterner, SimplifiedType, Ty, TyKind,
infer::{InferCtxt, traits::ObligationCause},
obligation_ctxt::ObligationCtxt,
},
};
fn has_destructor(db: &dyn HirDatabase, adt: AdtId) -> bool {
fn has_destructor(interner: DbInterner<'_>, adt: AdtId) -> bool {
let db = interner.db;
let module = match adt {
AdtId::EnumId(id) => db.lookup_intern_enum(id).container,
AdtId::StructId(id) => db.lookup_intern_struct(id).container,
AdtId::UnionId(id) => db.lookup_intern_union(id).container,
};
let Some(drop_trait) = LangItem::Drop.resolve_trait(db, module.krate()) else {
let Some(drop_trait) = interner.lang_items().Drop else {
return false;
};
let impls = match module.containing_block() {
@ -73,7 +73,7 @@ fn has_drop_glue_impl<'db>(
match ty.kind() {
TyKind::Adt(adt_def, subst) => {
let adt_id = adt_def.def_id().0;
if has_destructor(db, adt_id) {
if has_destructor(infcx.interner, adt_id) {
return DropGlue::HasDropGlue;
}
match adt_id {
@ -137,7 +137,7 @@ fn has_drop_glue_impl<'db>(
TyKind::Slice(ty) => has_drop_glue_impl(infcx, ty, env, visited),
TyKind::Closure(closure_id, subst) => {
let owner = db.lookup_intern_closure(closure_id.0).0;
let infer = db.infer(owner);
let infer = InferenceResult::for_body(db, owner);
let (captures, _) = infer.closure_info(closure_id.0);
let env = db.trait_environment_for_body(owner);
captures

View file

@ -5,7 +5,7 @@ use std::ops::ControlFlow;
use hir_def::{
AssocItemId, ConstId, CrateRootModuleId, FunctionId, GenericDefId, HasModule, TraitId,
TypeAliasId, TypeOrConstParamId, TypeParamId, hir::generics::LocalTypeOrConstParamId,
lang_item::LangItem, signatures::TraitFlags,
signatures::TraitFlags,
};
use rustc_hash::FxHashSet;
use rustc_type_ir::{
@ -53,7 +53,7 @@ pub fn dyn_compatibility(
db: &dyn HirDatabase,
trait_: TraitId,
) -> Option<DynCompatibilityViolation> {
let interner = DbInterner::new_with(db, Some(trait_.krate(db)), None);
let interner = DbInterner::new_no_crate(db);
for super_trait in elaborate::supertrait_def_ids(interner, trait_.into()) {
if let Some(v) = db.dyn_compatibility_of_trait(super_trait.0) {
return if super_trait.0 == trait_ {
@ -75,7 +75,7 @@ pub fn dyn_compatibility_with_callback<F>(
where
F: FnMut(DynCompatibilityViolation) -> ControlFlow<()>,
{
let interner = DbInterner::new_with(db, Some(trait_.krate(db)), None);
let interner = DbInterner::new_no_crate(db);
for super_trait in elaborate::supertrait_def_ids(interner, trait_.into()).skip(1) {
if db.dyn_compatibility_of_trait(super_trait.0).is_some() {
cb(DynCompatibilityViolation::HasNonCompatibleSuperTrait(trait_))?;
@ -131,11 +131,11 @@ pub fn dyn_compatibility_of_trait_query(
pub fn generics_require_sized_self(db: &dyn HirDatabase, def: GenericDefId) -> bool {
let krate = def.module(db).krate();
let Some(sized) = LangItem::Sized.resolve_trait(db, krate) else {
let interner = DbInterner::new_with(db, krate);
let Some(sized) = interner.lang_items().Sized else {
return false;
};
let interner = DbInterner::new_with(db, Some(krate), None);
let predicates = GenericPredicates::query_explicit(db, def);
// FIXME: We should use `explicit_predicates_of` here, which hasn't been implemented to
// rust-analyzer yet
@ -234,34 +234,34 @@ fn contains_illegal_self_type_reference<'db, T: rustc_type_ir::TypeVisitable<DbI
&mut self,
ty: <DbInterner<'db> as rustc_type_ir::Interner>::Ty,
) -> Self::Result {
let interner = DbInterner::new_with(self.db, None, None);
let interner = DbInterner::new_no_crate(self.db);
match ty.kind() {
rustc_type_ir::TyKind::Param(param) if param.index == 0 => ControlFlow::Break(()),
rustc_type_ir::TyKind::Param(_) => ControlFlow::Continue(()),
rustc_type_ir::TyKind::Alias(AliasTyKind::Projection, proj) => match self
.allow_self_projection
{
AllowSelfProjection::Yes => {
let trait_ = proj.trait_def_id(DbInterner::new_with(self.db, None, None));
let trait_ = match trait_ {
SolverDefId::TraitId(id) => id,
_ => unreachable!(),
};
if self.super_traits.is_none() {
self.super_traits = Some(
elaborate::supertrait_def_ids(interner, self.trait_.into())
.map(|super_trait| super_trait.0)
.collect(),
)
}
if self.super_traits.as_ref().is_some_and(|s| s.contains(&trait_)) {
ControlFlow::Continue(())
} else {
ty.super_visit_with(self)
rustc_type_ir::TyKind::Alias(AliasTyKind::Projection, proj) => {
match self.allow_self_projection {
AllowSelfProjection::Yes => {
let trait_ = proj.trait_def_id(interner);
let trait_ = match trait_ {
SolverDefId::TraitId(id) => id,
_ => unreachable!(),
};
if self.super_traits.is_none() {
self.super_traits = Some(
elaborate::supertrait_def_ids(interner, self.trait_.into())
.map(|super_trait| super_trait.0)
.collect(),
)
}
if self.super_traits.as_ref().is_some_and(|s| s.contains(&trait_)) {
ControlFlow::Continue(())
} else {
ty.super_visit_with(self)
}
}
AllowSelfProjection::No => ty.super_visit_with(self),
}
AllowSelfProjection::No => ty.super_visit_with(self),
},
}
_ => ty.super_visit_with(self),
}
}
@ -401,7 +401,8 @@ fn receiver_is_dispatchable<'db>(
) -> bool {
let sig = sig.instantiate_identity();
let interner: DbInterner<'_> = DbInterner::new_with(db, Some(trait_.krate(db)), None);
let module = trait_.module(db);
let interner = DbInterner::new_with(db, module.krate());
let self_param_id = TypeParamId::from_unchecked(TypeOrConstParamId {
parent: trait_.into(),
local_id: LocalTypeOrConstParamId::from_raw(la_arena::RawIdx::from_u32(0)),
@ -419,16 +420,13 @@ fn receiver_is_dispatchable<'db>(
return false;
};
let krate = func.module(db).krate();
let traits = (
LangItem::Unsize.resolve_trait(db, krate),
LangItem::DispatchFromDyn.resolve_trait(db, krate),
);
let lang_items = interner.lang_items();
let traits = (lang_items.Unsize, lang_items.DispatchFromDyn);
let (Some(unsize_did), Some(dispatch_from_dyn_did)) = traits else {
return false;
};
let meta_sized_did = LangItem::MetaSized.resolve_trait(db, krate);
let meta_sized_did = lang_items.MetaSized;
let Some(meta_sized_did) = meta_sized_did else {
return false;
};

View file

@ -37,10 +37,10 @@ use hir_def::{
ItemContainerId, LocalFieldId, Lookup, TraitId, TupleFieldId, TupleId, TypeAliasId, VariantId,
expr_store::{Body, ExpressionStore, HygieneId, path::Path},
hir::{BindingAnnotation, BindingId, ExprId, ExprOrPatId, LabelId, PatId},
lang_item::{LangItem, LangItemTarget, lang_item},
lang_item::LangItems,
layout::Integer,
resolver::{HasResolver, ResolveValueResult, Resolver, TypeNs, ValueNs},
signatures::{ConstSignature, StaticSignature},
signatures::{ConstSignature, EnumSignature, StaticSignature},
type_ref::{ConstRef, LifetimeRefId, TypeRef, TypeRefId},
};
use hir_expand::{mod_path::ModPath, name::Name};
@ -54,9 +54,10 @@ use rustc_type_ir::{
AliasTyKind, TypeFoldable,
inherent::{AdtDef, IntoKind, Region as _, SliceLike, Ty as _},
};
use salsa::Update;
use span::Edition;
use stdx::never;
use triomphe::Arc;
use thin_vec::ThinVec;
use crate::{
ImplTraitId, IncorrectGenericsLenKind, PathLoweringDiagnostic, TargetFeatures,
@ -94,7 +95,7 @@ use cast::{CastCheck, CastError};
pub(crate) use closure::analysis::{CaptureKind, CapturedItem, CapturedItemWithoutTy};
/// The entry point of type inference.
pub(crate) fn infer_query(db: &dyn HirDatabase, def: DefWithBodyId) -> Arc<InferenceResult<'_>> {
fn infer_query(db: &dyn HirDatabase, def: DefWithBodyId) -> InferenceResult<'_> {
let _p = tracing::info_span!("infer_query").entered();
let resolver = def.resolver(db);
let body = db.body(def);
@ -107,7 +108,7 @@ pub(crate) fn infer_query(db: &dyn HirDatabase, def: DefWithBodyId) -> Arc<Infer
DefWithBodyId::ConstId(c) => ctx.collect_const(c, &db.const_signature(c)),
DefWithBodyId::StaticId(s) => ctx.collect_static(&db.static_signature(s)),
DefWithBodyId::VariantId(v) => {
ctx.return_ty = match db.enum_signature(v.lookup(db).parent).variant_body_type() {
ctx.return_ty = match EnumSignature::variant_body_type(db, v.lookup(db).parent) {
hir_def::layout::IntegerType::Pointer(signed) => match signed {
true => ctx.types.isize,
false => ctx.types.usize,
@ -158,17 +159,14 @@ pub(crate) fn infer_query(db: &dyn HirDatabase, def: DefWithBodyId) -> Arc<Infer
ctx.handle_opaque_type_uses();
Arc::new(ctx.resolve_all())
ctx.resolve_all()
}
pub(crate) fn infer_cycle_result(
db: &dyn HirDatabase,
_: DefWithBodyId,
) -> Arc<InferenceResult<'_>> {
Arc::new(InferenceResult {
fn infer_cycle_result(db: &dyn HirDatabase, _: DefWithBodyId) -> InferenceResult<'_> {
InferenceResult {
has_errors: true,
..InferenceResult::new(Ty::new_error(DbInterner::new_with(db, None, None), ErrorGuaranteed))
})
..InferenceResult::new(Ty::new_error(DbInterner::new_no_crate(db), ErrorGuaranteed))
}
}
/// Binding modes inferred for patterns.
@ -198,7 +196,7 @@ pub enum InferenceTyDiagnosticSource {
Signature,
}
#[derive(Debug, PartialEq, Eq, Clone)]
#[derive(Debug, PartialEq, Eq, Clone, Update)]
pub enum InferenceDiagnostic<'db> {
NoSuchField {
field: ExprOrPatId,
@ -292,7 +290,7 @@ pub enum InferenceDiagnostic<'db> {
}
/// A mismatch between an expected and an inferred type.
#[derive(Clone, PartialEq, Eq, Debug, Hash)]
#[derive(Clone, PartialEq, Eq, Debug, Hash, Update)]
pub struct TypeMismatch<'db> {
pub expected: Ty<'db>,
pub actual: Ty<'db>,
@ -338,7 +336,7 @@ pub struct TypeMismatch<'db> {
/// At some point, of course, `Box` should move out of the compiler, in which
/// case this is analogous to transforming a struct. E.g., Box<[i32; 4]> ->
/// Box<[i32]> is an `Adjust::Unsize` with the target `Box<[i32]>`.
#[derive(Clone, Debug, PartialEq, Eq, Hash, TypeVisitable, TypeFoldable)]
#[derive(Clone, Debug, PartialEq, Eq, Hash, TypeVisitable, TypeFoldable, Update)]
pub struct Adjustment<'db> {
#[type_visitable(ignore)]
#[type_foldable(identity)]
@ -475,9 +473,10 @@ pub enum PointerCast {
/// When you add a field that stores types (including `Substitution` and the like), don't forget
/// `resolve_completely()`'ing them in `InferenceContext::resolve_all()`. Inference variables must
/// not appear in the final inference result.
#[derive(Clone, PartialEq, Eq, Debug)]
#[derive(Clone, PartialEq, Eq, Debug, Update)]
pub struct InferenceResult<'db> {
/// For each method call expr, records the function it resolves to.
#[update(unsafe(with(crate::utils::unsafe_update_eq /* expr id is technically update */)))]
method_resolutions: FxHashMap<ExprId, (FunctionId, GenericArgs<'db>)>,
/// For each field access expr, records the field it resolves to.
field_resolutions: FxHashMap<ExprId, Either<FieldId, TupleFieldId>>,
@ -488,28 +487,41 @@ pub struct InferenceResult<'db> {
/// Whenever a tuple field expression access a tuple field, we allocate a tuple id in
/// [`InferenceContext`] and store the tuples substitution there. This map is the reverse of
/// that which allows us to resolve a [`TupleFieldId`]s type.
tuple_field_access_types: FxHashMap<TupleId, Tys<'db>>,
/// During inference this field is empty and [`InferenceContext::diagnostics`] is filled instead.
diagnostics: Vec<InferenceDiagnostic<'db>>,
#[update(unsafe(with(crate::utils::unsafe_update_eq /* thinvec is technically update */)))]
tuple_field_access_types: ThinVec<Tys<'db>>,
#[update(unsafe(with(crate::utils::unsafe_update_eq /* expr id is technically update */)))]
pub(crate) type_of_expr: ArenaMap<ExprId, Ty<'db>>,
/// For each pattern record the type it resolves to.
///
/// **Note**: When a pattern type is resolved it may still contain
/// unresolved or missing subpatterns or subpatterns of mismatched types.
#[update(unsafe(with(crate::utils::unsafe_update_eq /* pat id is technically update */)))]
pub(crate) type_of_pat: ArenaMap<PatId, Ty<'db>>,
#[update(unsafe(with(crate::utils::unsafe_update_eq /* binding id is technically update */)))]
pub(crate) type_of_binding: ArenaMap<BindingId, Ty<'db>>,
pub(crate) type_of_type_placeholder: ArenaMap<TypeRefId, Ty<'db>>,
#[update(unsafe(with(crate::utils::unsafe_update_eq /* type ref id is technically update */)))]
pub(crate) type_of_type_placeholder: FxHashMap<TypeRefId, Ty<'db>>,
pub(crate) type_of_opaque: FxHashMap<InternedOpaqueTyId, Ty<'db>>,
pub(crate) type_mismatches: FxHashMap<ExprOrPatId, TypeMismatch<'db>>,
pub(crate) type_mismatches: Option<Box<FxHashMap<ExprOrPatId, TypeMismatch<'db>>>>,
/// Whether there are any type-mismatching errors in the result.
// FIXME: This isn't as useful as initially thought due to us falling back placeholders to
    // `TyKind::Error`, which will then mark this field.
pub(crate) has_errors: bool,
/// During inference this field is empty and [`InferenceContext::diagnostics`] is filled instead.
#[update(unsafe(with(crate::utils::unsafe_update_eq /* thinvec is technically update */)))]
diagnostics: ThinVec<InferenceDiagnostic<'db>>,
/// Interned `Error` type to return references to.
// FIXME: Remove this.
error_ty: Ty<'db>,
#[update(unsafe(with(crate::utils::unsafe_update_eq /* expr id is technically update */)))]
pub(crate) expr_adjustments: FxHashMap<ExprId, Box<[Adjustment<'db>]>>,
/// Stores the types which were implicitly dereferenced in pattern binding modes.
#[update(unsafe(with(crate::utils::unsafe_update_eq /* pat id is technically update */)))]
pub(crate) pat_adjustments: FxHashMap<PatId, Vec<Ty<'db>>>,
/// Stores the binding mode (`ref` in `let ref x = 2`) of bindings.
///
@ -525,13 +537,22 @@ pub struct InferenceResult<'db> {
/// ```
/// the first `rest` has implicit `ref` binding mode, but the second `rest` binding mode is `move`.
pub(crate) binding_modes: ArenaMap<PatId, BindingMode>,
pub(crate) expr_adjustments: FxHashMap<ExprId, Box<[Adjustment<'db>]>>,
pub(crate) closure_info: FxHashMap<InternedClosureId, (Vec<CapturedItem<'db>>, FnTrait)>,
// FIXME: remove this field
pub mutated_bindings_in_closure: FxHashSet<BindingId>,
pub(crate) coercion_casts: FxHashSet<ExprId>,
}
#[salsa::tracked]
impl<'db> InferenceResult<'db> {
#[salsa::tracked(returns(ref), cycle_result = infer_cycle_result)]
pub fn for_body(db: &'db dyn HirDatabase, def: DefWithBodyId) -> InferenceResult<'db> {
infer_query(db, def)
}
}
impl<'db> InferenceResult<'db> {
fn new(error_ty: Ty<'db>) -> Self {
Self {
@ -595,25 +616,31 @@ impl<'db> InferenceResult<'db> {
}
}
pub fn type_mismatch_for_expr(&self, expr: ExprId) -> Option<&TypeMismatch<'db>> {
self.type_mismatches.get(&expr.into())
self.type_mismatches.as_deref()?.get(&expr.into())
}
pub fn type_mismatch_for_pat(&self, pat: PatId) -> Option<&TypeMismatch<'db>> {
self.type_mismatches.get(&pat.into())
self.type_mismatches.as_deref()?.get(&pat.into())
}
pub fn type_mismatches(&self) -> impl Iterator<Item = (ExprOrPatId, &TypeMismatch<'db>)> {
self.type_mismatches.iter().map(|(expr_or_pat, mismatch)| (*expr_or_pat, mismatch))
self.type_mismatches
.as_deref()
.into_iter()
.flatten()
.map(|(expr_or_pat, mismatch)| (*expr_or_pat, mismatch))
}
pub fn expr_type_mismatches(&self) -> impl Iterator<Item = (ExprId, &TypeMismatch<'db>)> {
self.type_mismatches.iter().filter_map(|(expr_or_pat, mismatch)| match *expr_or_pat {
ExprOrPatId::ExprId(expr) => Some((expr, mismatch)),
_ => None,
})
self.type_mismatches.as_deref().into_iter().flatten().filter_map(
|(expr_or_pat, mismatch)| match *expr_or_pat {
ExprOrPatId::ExprId(expr) => Some((expr, mismatch)),
_ => None,
},
)
}
pub fn placeholder_types(&self) -> impl Iterator<Item = (TypeRefId, &Ty<'db>)> {
self.type_of_type_placeholder.iter()
self.type_of_type_placeholder.iter().map(|(&type_ref, ty)| (type_ref, ty))
}
pub fn type_of_type_placeholder(&self, type_ref: TypeRefId) -> Option<Ty<'db>> {
self.type_of_type_placeholder.get(type_ref).copied()
self.type_of_type_placeholder.get(&type_ref).copied()
}
pub fn closure_info(&self, closure: InternedClosureId) -> &(Vec<CapturedItem<'db>>, FnTrait) {
self.closure_info.get(&closure).unwrap()
@ -659,7 +686,7 @@ impl<'db> InferenceResult<'db> {
}
pub fn tuple_field_access_type(&self, id: TupleId) -> Tys<'db> {
self.tuple_field_access_types[&id]
self.tuple_field_access_types[id.0 as usize]
}
pub fn pat_adjustment(&self, id: PatId) -> Option<&[Ty<'db>]> {
@ -816,11 +843,12 @@ pub(crate) struct InferenceContext<'body, 'db> {
/// Generally you should not resolve things via this resolver. Instead create a TyLoweringContext
/// and resolve the path via its methods. This will ensure proper error reporting.
pub(crate) resolver: Resolver<'db>,
target_features: OnceCell<(TargetFeatures, TargetFeatureIsSafeInTarget)>,
target_features: OnceCell<(TargetFeatures<'db>, TargetFeatureIsSafeInTarget)>,
pub(crate) unstable_features: MethodResolutionUnstableFeatures,
pub(crate) edition: Edition,
pub(crate) generic_def: GenericDefId,
pub(crate) table: unify::InferenceTable<'db>,
pub(crate) lang_items: &'db LangItems,
/// The traits in scope, disregarding block modules. This is used for caching purposes.
traits_in_scope: FxHashSet<TraitId>,
pub(crate) result: InferenceResult<'db>,
@ -926,6 +954,7 @@ impl<'body, 'db> InferenceContext<'body, 'db> {
unstable_features: MethodResolutionUnstableFeatures::from_def_map(
resolver.top_level_def_map(),
),
lang_items: table.interner().lang_items(),
edition: resolver.krate().data(db).edition,
table,
tuple_field_accesses_rev: Default::default(),
@ -960,12 +989,10 @@ impl<'body, 'db> InferenceContext<'body, 'db> {
self.resolver.krate()
}
fn target_features(&self) -> (&TargetFeatures, TargetFeatureIsSafeInTarget) {
fn target_features(&self) -> (&TargetFeatures<'db>, TargetFeatureIsSafeInTarget) {
let (target_features, target_feature_is_safe) = self.target_features.get_or_init(|| {
let target_features = match self.owner {
DefWithBodyId::FunctionId(id) => {
TargetFeatures::from_attrs(&self.db.attrs(id.into()))
}
DefWithBodyId::FunctionId(id) => TargetFeatures::from_fn(self.db, id),
_ => TargetFeatures::default(),
};
let target_feature_is_safe = match &self.krate().workspace_data(self.db).target {
@ -1063,13 +1090,14 @@ impl<'body, 'db> InferenceContext<'body, 'db> {
type_of_type_placeholder.shrink_to_fit();
type_of_opaque.shrink_to_fit();
*has_errors |= !type_mismatches.is_empty();
for mismatch in (*type_mismatches).values_mut() {
mismatch.expected = table.resolve_completely(mismatch.expected);
mismatch.actual = table.resolve_completely(mismatch.actual);
if let Some(type_mismatches) = type_mismatches {
*has_errors = true;
for mismatch in type_mismatches.values_mut() {
mismatch.expected = table.resolve_completely(mismatch.expected);
mismatch.actual = table.resolve_completely(mismatch.actual);
}
type_mismatches.shrink_to_fit();
}
type_mismatches.shrink_to_fit();
diagnostics.retain_mut(|diagnostic| {
use InferenceDiagnostic::*;
match diagnostic {
@ -1121,9 +1149,8 @@ impl<'body, 'db> InferenceContext<'body, 'db> {
pat_adjustments.shrink_to_fit();
result.tuple_field_access_types = tuple_field_accesses_rev
.into_iter()
.enumerate()
.map(|(idx, subst)| (TupleId(idx as u32), table.resolve_completely(subst)))
.inspect(|(_, subst)| {
.map(|subst| table.resolve_completely(subst))
.inspect(|subst| {
*has_errors = *has_errors || subst.iter().any(|ty| ty.references_non_lt_error());
})
.collect();
@ -1520,7 +1547,10 @@ impl<'body, 'db> InferenceContext<'body, 'db> {
) -> Result<(), ()> {
let result = self.demand_eqtype_fixme_no_diag(expected, actual);
if result.is_err() {
self.result.type_mismatches.insert(id, TypeMismatch { expected, actual });
self.result
.type_mismatches
.get_or_insert_default()
.insert(id, TypeMismatch { expected, actual });
}
result
}
@ -1837,21 +1867,13 @@ impl<'body, 'db> InferenceContext<'body, 'db> {
}
}
fn resolve_lang_item(&self, item: LangItem) -> Option<LangItemTarget> {
let krate = self.resolver.krate();
lang_item(self.db, krate, item)
}
fn resolve_output_on(&self, trait_: TraitId) -> Option<TypeAliasId> {
trait_.trait_items(self.db).associated_type_by_name(&Name::new_symbol_root(sym::Output))
}
fn resolve_future_future_output(&self) -> Option<TypeAliasId> {
let ItemContainerId::TraitId(trait_) = self
.resolve_lang_item(LangItem::IntoFutureIntoFuture)?
.as_function()?
.lookup(self.db)
.container
let ItemContainerId::TraitId(trait_) =
self.lang_items.IntoFutureIntoFuture?.lookup(self.db).container
else {
return None;
};
@ -1859,42 +1881,42 @@ impl<'body, 'db> InferenceContext<'body, 'db> {
}
fn resolve_boxed_box(&self) -> Option<AdtId> {
let struct_ = self.resolve_lang_item(LangItem::OwnedBox)?.as_struct()?;
let struct_ = self.lang_items.OwnedBox?;
Some(struct_.into())
}
fn resolve_range_full(&self) -> Option<AdtId> {
let struct_ = self.resolve_lang_item(LangItem::RangeFull)?.as_struct()?;
let struct_ = self.lang_items.RangeFull?;
Some(struct_.into())
}
fn resolve_range(&self) -> Option<AdtId> {
let struct_ = self.resolve_lang_item(LangItem::Range)?.as_struct()?;
let struct_ = self.lang_items.Range?;
Some(struct_.into())
}
fn resolve_range_inclusive(&self) -> Option<AdtId> {
let struct_ = self.resolve_lang_item(LangItem::RangeInclusiveStruct)?.as_struct()?;
let struct_ = self.lang_items.RangeInclusiveStruct?;
Some(struct_.into())
}
fn resolve_range_from(&self) -> Option<AdtId> {
let struct_ = self.resolve_lang_item(LangItem::RangeFrom)?.as_struct()?;
let struct_ = self.lang_items.RangeFrom?;
Some(struct_.into())
}
fn resolve_range_to(&self) -> Option<AdtId> {
let struct_ = self.resolve_lang_item(LangItem::RangeTo)?.as_struct()?;
let struct_ = self.lang_items.RangeTo?;
Some(struct_.into())
}
fn resolve_range_to_inclusive(&self) -> Option<AdtId> {
let struct_ = self.resolve_lang_item(LangItem::RangeToInclusive)?.as_struct()?;
let struct_ = self.lang_items.RangeToInclusive?;
Some(struct_.into())
}
fn resolve_va_list(&self) -> Option<AdtId> {
let struct_ = self.resolve_lang_item(LangItem::VaList)?.as_struct()?;
let struct_ = self.lang_items.VaList?;
Some(struct_.into())
}

View file

@ -7,7 +7,6 @@ use std::{iter, mem, ops::ControlFlow};
use hir_def::{
TraitId,
hir::{ClosureKind, ExprId, PatId},
lang_item::LangItem,
type_ref::TypeRefId,
};
use rustc_type_ir::{
@ -220,11 +219,12 @@ impl<'db> InferenceContext<'_, 'db> {
}
fn fn_trait_kind_from_def_id(&self, trait_id: TraitId) -> Option<rustc_type_ir::ClosureKind> {
let lang_item = self.db.lang_attr(trait_id.into())?;
match lang_item {
LangItem::Fn => Some(rustc_type_ir::ClosureKind::Fn),
LangItem::FnMut => Some(rustc_type_ir::ClosureKind::FnMut),
LangItem::FnOnce => Some(rustc_type_ir::ClosureKind::FnOnce),
match trait_id {
_ if self.lang_items.Fn == Some(trait_id) => Some(rustc_type_ir::ClosureKind::Fn),
_ if self.lang_items.FnMut == Some(trait_id) => Some(rustc_type_ir::ClosureKind::FnMut),
_ if self.lang_items.FnOnce == Some(trait_id) => {
Some(rustc_type_ir::ClosureKind::FnOnce)
}
_ => None,
}
}
@ -233,11 +233,14 @@ impl<'db> InferenceContext<'_, 'db> {
&self,
trait_id: TraitId,
) -> Option<rustc_type_ir::ClosureKind> {
let lang_item = self.db.lang_attr(trait_id.into())?;
match lang_item {
LangItem::AsyncFn => Some(rustc_type_ir::ClosureKind::Fn),
LangItem::AsyncFnMut => Some(rustc_type_ir::ClosureKind::FnMut),
LangItem::AsyncFnOnce => Some(rustc_type_ir::ClosureKind::FnOnce),
match trait_id {
_ if self.lang_items.AsyncFn == Some(trait_id) => Some(rustc_type_ir::ClosureKind::Fn),
_ if self.lang_items.AsyncFnMut == Some(trait_id) => {
Some(rustc_type_ir::ClosureKind::FnMut)
}
_ if self.lang_items.AsyncFnOnce == Some(trait_id) => {
Some(rustc_type_ir::ClosureKind::FnOnce)
}
_ => None,
}
}
@ -433,21 +436,20 @@ impl<'db> InferenceContext<'_, 'db> {
projection: PolyProjectionPredicate<'db>,
) -> Option<PolyFnSig<'db>> {
let SolverDefId::TypeAliasId(def_id) = projection.item_def_id() else { unreachable!() };
let lang_item = self.db.lang_attr(def_id.into());
// For now, we only do signature deduction based off of the `Fn` and `AsyncFn` traits,
// for closures and async closures, respectively.
match closure_kind {
ClosureKind::Closure if lang_item == Some(LangItem::FnOnceOutput) => {
ClosureKind::Closure if Some(def_id) == self.lang_items.FnOnceOutput => {
self.extract_sig_from_projection(projection)
}
ClosureKind::Async if lang_item == Some(LangItem::AsyncFnOnceOutput) => {
ClosureKind::Async if Some(def_id) == self.lang_items.AsyncFnOnceOutput => {
self.extract_sig_from_projection(projection)
}
// It's possible we've passed the closure to a (somewhat out-of-fashion)
// `F: FnOnce() -> Fut, Fut: Future<Output = T>` style bound. Let's still
// guide inference here, since it's beneficial for the user.
ClosureKind::Async if lang_item == Some(LangItem::FnOnceOutput) => {
ClosureKind::Async if Some(def_id) == self.lang_items.FnOnceOutput => {
self.extract_sig_from_projection_and_future_bound(projection)
}
_ => None,
@ -538,7 +540,7 @@ impl<'db> InferenceContext<'_, 'db> {
&& let ret_projection = bound.predicate.kind().rebind(ret_projection)
&& let Some(ret_projection) = ret_projection.no_bound_vars()
&& let SolverDefId::TypeAliasId(assoc_type) = ret_projection.def_id()
&& self.db.lang_attr(assoc_type.into()) == Some(LangItem::FutureOutput)
&& Some(assoc_type) == self.lang_items.FutureOutput
{
return_ty = Some(ret_projection.term.expect_type());
break;

View file

@ -31,10 +31,10 @@ use crate::{
// The below functions handle capture and closure kind (Fn, FnMut, ..)
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
#[derive(Debug, Clone, PartialEq, Eq, Hash, salsa::Update)]
pub(crate) struct HirPlace<'db> {
pub(crate) local: BindingId,
pub(crate) projections: Vec<ProjectionElem<Infallible, Ty<'db>>>,
pub(crate) projections: Vec<ProjectionElem<'db, Infallible>>,
}
impl<'db> HirPlace<'db> {
@ -76,7 +76,7 @@ pub enum CaptureKind {
ByValue,
}
#[derive(Debug, Clone, PartialEq, Eq)]
#[derive(Debug, Clone, PartialEq, Eq, salsa::Update)]
pub struct CapturedItem<'db> {
pub(crate) place: HirPlace<'db>,
pub(crate) kind: CaptureKind,
@ -87,6 +87,7 @@ pub struct CapturedItem<'db> {
/// copy all captures of the inner closure to the outer closure, and then we may
/// truncate them, and we want the correct span to be reported.
span_stacks: SmallVec<[SmallVec<[MirSpan; 3]>; 3]>,
#[update(unsafe(with(crate::utils::unsafe_update_eq)))]
pub(crate) ty: EarlyBinder<'db, Ty<'db>>,
}
@ -101,7 +102,7 @@ impl<'db> CapturedItem<'db> {
}
pub fn ty(&self, db: &'db dyn HirDatabase, subst: GenericArgs<'db>) -> Ty<'db> {
let interner = DbInterner::new_with(db, None, None);
let interner = DbInterner::new_no_crate(db);
self.ty.instantiate(interner, subst.split_closure_args_untupled().parent_args)
}

View file

@ -37,11 +37,10 @@
use hir_def::{
CallableDefId,
attrs::AttrFlags,
hir::{ExprId, ExprOrPatId},
lang_item::LangItem,
signatures::FunctionSignature,
};
use intern::sym;
use rustc_ast_ir::Mutability;
use rustc_type_ir::{
BoundVar, DebruijnIndex, TyVid, TypeAndMut, TypeFoldable, TypeFolder, TypeSuperFoldable,
@ -79,7 +78,7 @@ use crate::{
trait CoerceDelegate<'db> {
fn infcx(&self) -> &InferCtxt<'db>;
fn env(&self) -> &TraitEnvironment<'db>;
fn target_features(&self) -> (&TargetFeatures, TargetFeatureIsSafeInTarget);
fn target_features(&self) -> (&TargetFeatures<'db>, TargetFeatureIsSafeInTarget);
fn set_diverging(&mut self, diverging_ty: Ty<'db>);
@ -612,10 +611,8 @@ where
return Err(TypeError::Mismatch);
}
let traits = (
LangItem::Unsize.resolve_trait(self.db(), self.env().krate),
LangItem::CoerceUnsized.resolve_trait(self.db(), self.env().krate),
);
let lang_items = self.interner().lang_items();
let traits = (lang_items.Unsize, lang_items.CoerceUnsized);
let (Some(unsize_did), Some(coerce_unsized_did)) = traits else {
debug!("missing Unsize or CoerceUnsized traits");
return Err(TypeError::Mismatch);
@ -855,14 +852,14 @@ where
return Err(TypeError::IntrinsicCast);
}
let attrs = self.db().attrs(def_id.into());
if attrs.by_key(sym::rustc_force_inline).exists() {
let attrs = AttrFlags::query(self.db(), def_id.into());
if attrs.contains(AttrFlags::RUSTC_FORCE_INLINE) {
return Err(TypeError::ForceInlineCast);
}
if b_hdr.safety.is_safe() && attrs.by_key(sym::target_feature).exists() {
if b_hdr.safety.is_safe() && attrs.contains(AttrFlags::HAS_TARGET_FEATURE) {
let fn_target_features =
TargetFeatures::from_attrs_no_implications(&attrs);
TargetFeatures::from_fn_no_implications(self.db(), def_id);
// Allow the coercion if the current function has all the features that would be
// needed to call the coercee safely.
let (target_features, target_feature_is_safe) =
@ -981,8 +978,9 @@ impl<'db> CoerceDelegate<'db> for InferenceCoercionDelegate<'_, '_, 'db> {
fn env(&self) -> &TraitEnvironment<'db> {
&self.0.table.trait_env
}
#[inline]
fn target_features(&self) -> (&TargetFeatures, TargetFeatureIsSafeInTarget) {
fn target_features(&self) -> (&TargetFeatures<'db>, TargetFeatureIsSafeInTarget) {
self.0.target_features()
}
@ -1075,7 +1073,7 @@ impl<'db> InferenceContext<'_, 'db> {
let is_force_inline = |ty: Ty<'db>| {
if let TyKind::FnDef(CallableIdWrapper(CallableDefId::FunctionId(did)), _) = ty.kind() {
self.db.attrs(did.into()).by_key(sym::rustc_force_inline).exists()
AttrFlags::query(self.db, did.into()).contains(AttrFlags::RUSTC_FORCE_INLINE)
} else {
false
}
@ -1514,7 +1512,7 @@ impl<'db, 'exprs> CoerceMany<'db, 'exprs> {
self.final_ty = Some(icx.types.error);
icx.result.type_mismatches.insert(
icx.result.type_mismatches.get_or_insert_default().insert(
expression.into(),
if label_expression_as_expected {
TypeMismatch { expected: found, actual: expected }
@ -1551,7 +1549,7 @@ pub fn could_coerce<'db>(
struct HirCoercionDelegate<'a, 'db> {
infcx: &'a InferCtxt<'db>,
env: &'a TraitEnvironment<'db>,
target_features: &'a TargetFeatures,
target_features: &'a TargetFeatures<'db>,
}
impl<'db> CoerceDelegate<'db> for HirCoercionDelegate<'_, 'db> {
@ -1563,7 +1561,7 @@ impl<'db> CoerceDelegate<'db> for HirCoercionDelegate<'_, 'db> {
fn env(&self) -> &TraitEnvironment<'db> {
self.env
}
fn target_features(&self) -> (&TargetFeatures, TargetFeatureIsSafeInTarget) {
fn target_features(&self) -> (&TargetFeatures<'db>, TargetFeatureIsSafeInTarget) {
(self.target_features, TargetFeatureIsSafeInTarget::No)
}
fn set_diverging(&mut self, _diverging_ty: Ty<'db>) {}
@ -1578,7 +1576,7 @@ fn coerce<'db>(
env: Arc<TraitEnvironment<'db>>,
tys: &Canonical<'db, (Ty<'db>, Ty<'db>)>,
) -> Result<(Vec<Adjustment<'db>>, Ty<'db>), TypeError<DbInterner<'db>>> {
let interner = DbInterner::new_with(db, Some(env.krate), env.block);
let interner = DbInterner::new_with(db, env.krate);
let infcx = interner.infer_ctxt().build(TypingMode::PostAnalysis);
let ((ty1_with_vars, ty2_with_vars), vars) = infcx.instantiate_canonical(tys);

View file

@ -11,6 +11,7 @@ use hir_def::expr_store::ExpressionStore;
use hir_def::expr_store::path::Path;
use hir_def::{hir::ExprOrPatId, resolver::Resolver};
use la_arena::{Idx, RawIdx};
use thin_vec::ThinVec;
use crate::{
InferenceDiagnostic, InferenceTyDiagnosticSource, TyLoweringDiagnostic,
@ -24,7 +25,7 @@ use crate::{
// to our resolver and so we cannot have mutable reference, but we really want to have
// ability to dispatch diagnostics during this work otherwise the code becomes a complete mess.
#[derive(Debug, Default, Clone)]
pub(super) struct Diagnostics<'db>(RefCell<Vec<InferenceDiagnostic<'db>>>);
pub(super) struct Diagnostics<'db>(RefCell<ThinVec<InferenceDiagnostic<'db>>>);
impl<'db> Diagnostics<'db> {
pub(super) fn push(&self, diagnostic: InferenceDiagnostic<'db>) {
@ -41,7 +42,7 @@ impl<'db> Diagnostics<'db> {
);
}
pub(super) fn finish(self) -> Vec<InferenceDiagnostic<'db>> {
pub(super) fn finish(self) -> ThinVec<InferenceDiagnostic<'db>> {
self.0.into_inner()
}
}

View file

@ -10,7 +10,6 @@ use hir_def::{
Array, AsmOperand, AsmOptions, BinaryOp, BindingAnnotation, Expr, ExprId, ExprOrPatId,
LabelId, Literal, Pat, PatId, Statement, UnaryOp,
},
lang_item::{LangItem, LangItemTarget},
resolver::ValueNs,
};
use hir_def::{FunctionId, hir::ClosureKind};
@ -71,6 +70,7 @@ impl<'db> InferenceContext<'_, 'db> {
if !could_unify {
self.result
.type_mismatches
.get_or_insert_default()
.insert(tgt_expr.into(), TypeMismatch { expected: expected_ty, actual: ty });
}
}
@ -100,6 +100,7 @@ impl<'db> InferenceContext<'_, 'db> {
Err(_) => {
self.result
.type_mismatches
.get_or_insert_default()
.insert(expr.into(), TypeMismatch { expected: target, actual: ty });
target
}
@ -293,6 +294,7 @@ impl<'db> InferenceContext<'_, 'db> {
if !could_unify {
self.result
.type_mismatches
.get_or_insert_default()
.insert(expr.into(), TypeMismatch { expected: expected_ty, actual: ty });
}
}
@ -874,14 +876,10 @@ impl<'db> InferenceContext<'_, 'db> {
Literal::CString(..) => Ty::new_ref(
self.interner(),
self.types.re_static,
self.resolve_lang_item(LangItem::CStr)
.and_then(LangItemTarget::as_struct)
.map_or_else(
|| self.err_ty(),
|strukt| {
Ty::new_adt(self.interner(), strukt.into(), self.types.empty_args)
},
),
self.lang_items.CStr.map_or_else(
|| self.err_ty(),
|strukt| Ty::new_adt(self.interner(), strukt.into(), self.types.empty_args),
),
Mutability::Not,
),
Literal::Char(..) => self.types.char,
@ -1188,6 +1186,7 @@ impl<'db> InferenceContext<'_, 'db> {
Err(_) => {
this.result
.type_mismatches
.get_or_insert_default()
.insert(tgt_expr.into(), TypeMismatch { expected: target, actual: ty });
target
}
@ -1279,7 +1278,7 @@ impl<'db> InferenceContext<'_, 'db> {
}
}
}
let Some(trait_) = fn_x.get_id(self.db, self.table.trait_env.krate) else {
let Some(trait_) = fn_x.get_id(self.lang_items) else {
return;
};
let trait_data = trait_.trait_items(self.db);
@ -1456,11 +1455,10 @@ impl<'db> InferenceContext<'_, 'db> {
) -> Ty<'db> {
let coerce_ty = expected.coercion_target_type(&mut self.table);
let g = self.resolver.update_to_inner_scope(self.db, self.owner, expr);
let prev_state = block_id.map(|block_id| {
let prev_env = block_id.map(|block_id| {
let prev_env = self.table.trait_env.clone();
TraitEnvironment::with_block(&mut self.table.trait_env, block_id);
let prev_block = self.table.infer_ctxt.interner.block.replace(block_id);
(prev_env, prev_block)
prev_env
});
let (break_ty, ty) =
@ -1556,7 +1554,7 @@ impl<'db> InferenceContext<'_, 'db> {
)
.is_err()
{
this.result.type_mismatches.insert(
this.result.type_mismatches.get_or_insert_default().insert(
expr.into(),
TypeMismatch { expected: t, actual: this.types.unit },
);
@ -1568,9 +1566,8 @@ impl<'db> InferenceContext<'_, 'db> {
}
});
self.resolver.reset_to_guard(g);
if let Some((prev_env, prev_block)) = prev_state {
if let Some(prev_env) = prev_env {
self.table.trait_env = prev_env;
self.table.infer_ctxt.interner.block = prev_block;
}
break_ty.unwrap_or(ty)
@ -2130,6 +2127,7 @@ impl<'db> InferenceContext<'_, 'db> {
// Don't report type mismatches if there is a mismatch in args count.
self.result
.type_mismatches
.get_or_insert_default()
.insert((*arg).into(), TypeMismatch { expected, actual: found });
}
}
@ -2188,9 +2186,11 @@ impl<'db> InferenceContext<'_, 'db> {
};
let data = self.db.function_signature(func);
let Some(legacy_const_generics_indices) = &data.legacy_const_generics_indices else {
let Some(legacy_const_generics_indices) = data.legacy_const_generics_indices(self.db, func)
else {
return Default::default();
};
let mut legacy_const_generics_indices = Box::<[u32]>::from(legacy_const_generics_indices);
// only use legacy const generics if the param count matches with them
if data.params.len() + legacy_const_generics_indices.len() != args.len() {
@ -2199,9 +2199,8 @@ impl<'db> InferenceContext<'_, 'db> {
} else {
// there are more parameters than there should be without legacy
// const params; use them
let mut indices = legacy_const_generics_indices.as_ref().clone();
indices.sort();
return indices;
legacy_const_generics_indices.sort_unstable();
return legacy_const_generics_indices;
}
}
@ -2214,9 +2213,8 @@ impl<'db> InferenceContext<'_, 'db> {
self.infer_expr(args[arg_idx as usize], &expected, ExprIsRead::Yes);
// FIXME: evaluate and unify with the const
}
let mut indices = legacy_const_generics_indices.as_ref().clone();
indices.sort();
indices
legacy_const_generics_indices.sort_unstable();
legacy_const_generics_indices
}
pub(super) fn with_breakable_ctx<T>(

View file

@ -18,7 +18,6 @@ impl<'db> InferenceContext<'_, 'db> {
}
fn infer_mut_expr(&mut self, tgt_expr: ExprId, mut mutability: Mutability) {
let krate = self.krate();
if let Some(adjustments) = self.result.expr_adjustments.get_mut(&tgt_expr) {
let mut adjustments = adjustments.iter_mut().rev().peekable();
while let Some(adj) = adjustments.next() {
@ -32,7 +31,6 @@ impl<'db> InferenceContext<'_, 'db> {
};
if let Some(infer_ok) = Self::try_mutable_overloaded_place_op(
&self.table,
krate,
source_ty,
None,
PlaceOp::Deref,

View file

@ -2,7 +2,7 @@
use std::collections::hash_map;
use hir_def::{GenericParamId, TraitId, hir::ExprId, lang_item::LangItem};
use hir_def::{GenericParamId, TraitId, hir::ExprId};
use intern::{Symbol, sym};
use rustc_ast_ir::Mutability;
use rustc_type_ir::inherent::{IntoKind, Ty as _};
@ -355,17 +355,18 @@ impl<'a, 'db> InferenceContext<'a, 'db> {
fn lang_item_for_bin_op(&self, op: BinaryOp) -> (Symbol, Option<TraitId>) {
let (method_name, trait_lang_item) =
crate::lang_items::lang_items_for_bin_op(op).expect("invalid operator provided");
(method_name, trait_lang_item.resolve_trait(self.db, self.krate()))
crate::lang_items::lang_items_for_bin_op(self.lang_items, op)
.expect("invalid operator provided");
(method_name, trait_lang_item)
}
fn lang_item_for_unop(&self, op: UnaryOp) -> (Symbol, Option<TraitId>) {
let (method_name, trait_lang_item) = match op {
UnaryOp::Not => (sym::not, LangItem::Not),
UnaryOp::Neg => (sym::neg, LangItem::Neg),
UnaryOp::Not => (sym::not, self.lang_items.Not),
UnaryOp::Neg => (sym::neg, self.lang_items.Neg),
UnaryOp::Deref => panic!("Deref is not overloadable"),
};
(method_name, trait_lang_item.resolve_trait(self.db, self.krate()))
(method_name, trait_lang_item)
}
}

View file

@ -331,7 +331,7 @@ impl<'db> InferenceContext<'_, 'db> {
return self.pat_ty_after_adjustment(pat);
}
Err(_) => {
self.result.type_mismatches.insert(
self.result.type_mismatches.get_or_insert_default().insert(
pat.into(),
TypeMismatch { expected, actual: ty_inserted_vars },
);
@ -415,6 +415,7 @@ impl<'db> InferenceContext<'_, 'db> {
Err(_) => {
self.result
.type_mismatches
.get_or_insert_default()
.insert(pat.into(), TypeMismatch { expected, actual: lhs_ty });
// `rhs_ty` is returned so no further type mismatches are
// reported because of this mismatch.
@ -431,7 +432,10 @@ impl<'db> InferenceContext<'_, 'db> {
let ty = self.insert_type_vars_shallow(ty);
// FIXME: This never check is odd, but required with out we do inference right now
if !expected.is_never() && !self.unify(ty, expected) {
self.result.type_mismatches.insert(pat.into(), TypeMismatch { expected, actual: ty });
self.result
.type_mismatches
.get_or_insert_default()
.insert(pat.into(), TypeMismatch { expected, actual: ty });
}
self.write_pat_ty(pat, ty);
self.pat_ty_after_adjustment(pat)

View file

@ -1,7 +1,6 @@
//! Inference of *place operators*: deref and indexing (operators that create places, as opposed to values).
use base_db::Crate;
use hir_def::{hir::ExprId, lang_item::LangItem};
use hir_def::hir::ExprId;
use intern::sym;
use rustc_ast_ir::Mutability;
use rustc_type_ir::inherent::{IntoKind, Ty as _};
@ -187,8 +186,8 @@ impl<'a, 'db> InferenceContext<'a, 'db> {
debug!("try_overloaded_place_op({:?},{:?})", base_ty, op);
let (Some(imm_tr), imm_op) = (match op {
PlaceOp::Deref => (LangItem::Deref.resolve_trait(self.db, self.krate()), sym::deref),
PlaceOp::Index => (LangItem::Index.resolve_trait(self.db, self.krate()), sym::index),
PlaceOp::Deref => (self.lang_items.Deref, sym::deref),
PlaceOp::Index => (self.lang_items.Index, sym::index),
}) else {
// Bail if `Deref` or `Index` isn't defined.
return None;
@ -209,16 +208,16 @@ impl<'a, 'db> InferenceContext<'a, 'db> {
pub(super) fn try_mutable_overloaded_place_op(
table: &InferenceTable<'db>,
krate: Crate,
base_ty: Ty<'db>,
opt_rhs_ty: Option<Ty<'db>>,
op: PlaceOp,
) -> Option<InferOk<'db, MethodCallee<'db>>> {
debug!("try_mutable_overloaded_place_op({:?},{:?})", base_ty, op);
let lang_items = table.interner().lang_items();
let (Some(mut_tr), mut_op) = (match op {
PlaceOp::Deref => (LangItem::DerefMut.resolve_trait(table.db, krate), sym::deref_mut),
PlaceOp::Index => (LangItem::IndexMut.resolve_trait(table.db, krate), sym::index_mut),
PlaceOp::Deref => (lang_items.DerefMut, sym::deref_mut),
PlaceOp::Index => (lang_items.IndexMut, sym::index_mut),
}) else {
// Bail if `DerefMut` or `IndexMut` isn't defined.
return None;
@ -276,8 +275,7 @@ impl<'a, 'db> InferenceContext<'a, 'db> {
))
}
};
let method =
Self::try_mutable_overloaded_place_op(&self.table, self.krate(), base_ty, arg_ty, op);
let method = Self::try_mutable_overloaded_place_op(&self.table, base_ty, arg_ty, op);
let method = match method {
Some(ok) => self.table.register_infer_ok(ok),
// Couldn't find the mutable variant of the place op, keep the

View file

@ -2,7 +2,7 @@
use std::fmt;
use hir_def::{AdtId, DefWithBodyId, GenericParamId, lang_item::LangItem};
use hir_def::{AdtId, DefWithBodyId, GenericParamId};
use hir_expand::name::Name;
use intern::sym;
use rustc_hash::FxHashSet;
@ -113,7 +113,7 @@ fn could_unify_impl<'db>(
tys: &Canonical<'db, (Ty<'db>, Ty<'db>)>,
select: for<'a> fn(&mut ObligationCtxt<'a, 'db>) -> Vec<NextSolverError<'db>>,
) -> bool {
let interner = DbInterner::new_with(db, Some(env.krate), env.block);
let interner = DbInterner::new_with(db, env.krate);
let infcx = interner.infer_ctxt().build(TypingMode::PostAnalysis);
let cause = ObligationCause::dummy();
let at = infcx.at(&cause, env.env);
@ -148,7 +148,7 @@ impl<'db> InferenceTable<'db> {
trait_env: Arc<TraitEnvironment<'db>>,
owner: Option<DefWithBodyId>,
) -> Self {
let interner = DbInterner::new_with(db, Some(trait_env.krate), trait_env.block);
let interner = DbInterner::new_with(db, trait_env.krate);
let typing_mode = match owner {
Some(owner) => TypingMode::typeck_for_body(interner, owner.into()),
// IDE things wants to reveal opaque types.
@ -174,7 +174,7 @@ impl<'db> InferenceTable<'db> {
}
pub(crate) fn type_var_is_sized(&self, self_ty: TyVid) -> bool {
let Some(sized_did) = LangItem::Sized.resolve_trait(self.db, self.trait_env.krate) else {
let Some(sized_did) = self.interner().lang_items().Sized else {
return true;
};
self.obligations_for_self_ty(self_ty).into_iter().any(|obligation| {
@ -520,13 +520,13 @@ impl<'db> InferenceTable<'db> {
ty: Ty<'db>,
num_args: usize,
) -> Option<(FnTrait, Vec<Ty<'db>>, Ty<'db>)> {
let lang_items = self.interner().lang_items();
for (fn_trait_name, output_assoc_name, subtraits) in [
(FnTrait::FnOnce, sym::Output, &[FnTrait::Fn, FnTrait::FnMut][..]),
(FnTrait::AsyncFnMut, sym::CallRefFuture, &[FnTrait::AsyncFn]),
(FnTrait::AsyncFnOnce, sym::CallOnceFuture, &[]),
] {
let krate = self.trait_env.krate;
let fn_trait = fn_trait_name.get_id(self.db, krate)?;
let fn_trait = fn_trait_name.get_id(lang_items)?;
let trait_data = fn_trait.trait_items(self.db);
let output_assoc_type =
trait_data.associated_type_by_name(&Name::new_symbol_root(output_assoc_name))?;
@ -558,7 +558,7 @@ impl<'db> InferenceTable<'db> {
self.register_obligation(pred);
let return_ty = self.normalize_alias_ty(projection);
for &fn_x in subtraits {
let fn_x_trait = fn_x.get_id(self.db, krate)?;
let fn_x_trait = fn_x.get_id(lang_items)?;
let trait_ref = TraitRef::new(self.interner(), fn_x_trait.into(), args);
let pred = Predicate::upcast_from(trait_ref, self.interner());
if !self.try_obligation(pred).no_solution() {
@ -658,7 +658,7 @@ impl<'db> InferenceTable<'db> {
}
}
let Some(sized) = LangItem::Sized.resolve_trait(self.db, self.trait_env.krate) else {
let Some(sized) = self.interner().lang_items().Sized else {
return false;
};
let sized_pred = Predicate::upcast_from(

View file

@ -1,6 +1,6 @@
//! Functions to detect special lang items
use hir_def::{AdtId, lang_item::LangItem, signatures::StructFlags};
use hir_def::{AdtId, TraitId, lang_item::LangItems, signatures::StructFlags};
use intern::{Symbol, sym};
use crate::db::HirDatabase;
@ -10,48 +10,51 @@ pub fn is_box(db: &dyn HirDatabase, adt: AdtId) -> bool {
db.struct_signature(id).flags.contains(StructFlags::IS_BOX)
}
pub fn lang_items_for_bin_op(op: syntax::ast::BinaryOp) -> Option<(Symbol, LangItem)> {
pub fn lang_items_for_bin_op(
lang_items: &LangItems,
op: syntax::ast::BinaryOp,
) -> Option<(Symbol, Option<TraitId>)> {
use syntax::ast::{ArithOp, BinaryOp, CmpOp, Ordering};
Some(match op {
BinaryOp::LogicOp(_) => return None,
BinaryOp::ArithOp(aop) => match aop {
ArithOp::Add => (sym::add, LangItem::Add),
ArithOp::Mul => (sym::mul, LangItem::Mul),
ArithOp::Sub => (sym::sub, LangItem::Sub),
ArithOp::Div => (sym::div, LangItem::Div),
ArithOp::Rem => (sym::rem, LangItem::Rem),
ArithOp::Shl => (sym::shl, LangItem::Shl),
ArithOp::Shr => (sym::shr, LangItem::Shr),
ArithOp::BitXor => (sym::bitxor, LangItem::BitXor),
ArithOp::BitOr => (sym::bitor, LangItem::BitOr),
ArithOp::BitAnd => (sym::bitand, LangItem::BitAnd),
ArithOp::Add => (sym::add, lang_items.Add),
ArithOp::Mul => (sym::mul, lang_items.Mul),
ArithOp::Sub => (sym::sub, lang_items.Sub),
ArithOp::Div => (sym::div, lang_items.Div),
ArithOp::Rem => (sym::rem, lang_items.Rem),
ArithOp::Shl => (sym::shl, lang_items.Shl),
ArithOp::Shr => (sym::shr, lang_items.Shr),
ArithOp::BitXor => (sym::bitxor, lang_items.BitXor),
ArithOp::BitOr => (sym::bitor, lang_items.BitOr),
ArithOp::BitAnd => (sym::bitand, lang_items.BitAnd),
},
BinaryOp::Assignment { op: Some(aop) } => match aop {
ArithOp::Add => (sym::add_assign, LangItem::AddAssign),
ArithOp::Mul => (sym::mul_assign, LangItem::MulAssign),
ArithOp::Sub => (sym::sub_assign, LangItem::SubAssign),
ArithOp::Div => (sym::div_assign, LangItem::DivAssign),
ArithOp::Rem => (sym::rem_assign, LangItem::RemAssign),
ArithOp::Shl => (sym::shl_assign, LangItem::ShlAssign),
ArithOp::Shr => (sym::shr_assign, LangItem::ShrAssign),
ArithOp::BitXor => (sym::bitxor_assign, LangItem::BitXorAssign),
ArithOp::BitOr => (sym::bitor_assign, LangItem::BitOrAssign),
ArithOp::BitAnd => (sym::bitand_assign, LangItem::BitAndAssign),
ArithOp::Add => (sym::add_assign, lang_items.AddAssign),
ArithOp::Mul => (sym::mul_assign, lang_items.MulAssign),
ArithOp::Sub => (sym::sub_assign, lang_items.SubAssign),
ArithOp::Div => (sym::div_assign, lang_items.DivAssign),
ArithOp::Rem => (sym::rem_assign, lang_items.RemAssign),
ArithOp::Shl => (sym::shl_assign, lang_items.ShlAssign),
ArithOp::Shr => (sym::shr_assign, lang_items.ShrAssign),
ArithOp::BitXor => (sym::bitxor_assign, lang_items.BitXorAssign),
ArithOp::BitOr => (sym::bitor_assign, lang_items.BitOrAssign),
ArithOp::BitAnd => (sym::bitand_assign, lang_items.BitAndAssign),
},
BinaryOp::CmpOp(cop) => match cop {
CmpOp::Eq { negated: false } => (sym::eq, LangItem::PartialEq),
CmpOp::Eq { negated: true } => (sym::ne, LangItem::PartialEq),
CmpOp::Eq { negated: false } => (sym::eq, lang_items.PartialEq),
CmpOp::Eq { negated: true } => (sym::ne, lang_items.PartialEq),
CmpOp::Ord { ordering: Ordering::Less, strict: false } => {
(sym::le, LangItem::PartialOrd)
(sym::le, lang_items.PartialOrd)
}
CmpOp::Ord { ordering: Ordering::Less, strict: true } => {
(sym::lt, LangItem::PartialOrd)
(sym::lt, lang_items.PartialOrd)
}
CmpOp::Ord { ordering: Ordering::Greater, strict: false } => {
(sym::ge, LangItem::PartialOrd)
(sym::ge, lang_items.PartialOrd)
}
CmpOp::Ord { ordering: Ordering::Greater, strict: true } => {
(sym::gt, LangItem::PartialOrd)
(sym::gt, lang_items.PartialOrd)
}
},
BinaryOp::Assignment { op: None } => return None,

View file

@ -4,6 +4,7 @@ use std::fmt;
use hir_def::{
AdtId, LocalFieldId, StructId,
attrs::AttrFlags,
layout::{LayoutCalculatorError, LayoutData},
};
use la_arena::{Idx, RawIdx};
@ -20,7 +21,7 @@ use rustc_type_ir::{
use triomphe::Arc;
use crate::{
TraitEnvironment,
InferenceResult, TraitEnvironment,
consteval::try_const_usize,
db::HirDatabase,
next_solver::{
@ -143,7 +144,7 @@ fn layout_of_simd_ty<'db>(
let Some(TyKind::Array(e_ty, e_len)) = fields
.next()
.filter(|_| fields.next().is_none())
.map(|f| (*f.1).instantiate(DbInterner::new_with(db, None, None), args).kind())
.map(|f| (*f.1).instantiate(DbInterner::new_no_crate(db), args).kind())
else {
return Err(LayoutError::InvalidSimdType);
};
@ -161,7 +162,7 @@ pub fn layout_of_ty_query<'db>(
trait_env: Arc<TraitEnvironment<'db>>,
) -> Result<Arc<Layout>, LayoutError> {
let krate = trait_env.krate;
let interner = DbInterner::new_with(db, Some(krate), trait_env.block);
let interner = DbInterner::new_with(db, krate);
let Ok(target) = db.target_data_layout(krate) else {
return Err(LayoutError::TargetLayoutNotAvailable);
};
@ -174,8 +175,7 @@ pub fn layout_of_ty_query<'db>(
TyKind::Adt(def, args) => {
match def.inner().id {
hir_def::AdtId::StructId(s) => {
let data = db.struct_signature(s);
let repr = data.repr.unwrap_or_default();
let repr = AttrFlags::repr(db, s.into()).unwrap_or_default();
if repr.simd() {
return layout_of_simd_ty(db, s, repr.packed(), &args, trait_env, &target);
}
@ -322,7 +322,7 @@ pub fn layout_of_ty_query<'db>(
}
TyKind::Closure(id, args) => {
let def = db.lookup_intern_closure(id.0);
let infer = db.infer(def.0);
let infer = InferenceResult::for_body(db, def.0);
let (captures, _) = infer.closure_info(id.0);
let fields = captures
.iter()
@ -401,7 +401,7 @@ fn field_ty<'a>(
fd: LocalFieldId,
args: &GenericArgs<'a>,
) -> Ty<'a> {
db.field_types(def)[fd].instantiate(DbInterner::new_with(db, None, None), args)
db.field_types(def)[fd].instantiate(DbInterner::new_no_crate(db), args)
}
fn scalar_unit(dl: &TargetDataLayout, value: Primitive) -> Scalar {

View file

@ -4,9 +4,9 @@ use std::{cmp, ops::Bound};
use hir_def::{
AdtId, VariantId,
attrs::AttrFlags,
signatures::{StructFlags, VariantFields},
};
use intern::sym;
use rustc_abi::{Integer, ReprOptions, TargetDataLayout};
use rustc_index::IndexVec;
use smallvec::SmallVec;
@ -44,15 +44,15 @@ pub fn layout_of_adt_query<'db>(
r.push(handle_variant(s.into(), s.fields(db))?);
(
r,
sig.repr.unwrap_or_default(),
AttrFlags::repr(db, s.into()).unwrap_or_default(),
sig.flags.intersects(StructFlags::IS_UNSAFE_CELL | StructFlags::IS_UNSAFE_PINNED),
)
}
AdtId::UnionId(id) => {
let data = db.union_signature(id);
let repr = AttrFlags::repr(db, id.into());
let mut r = SmallVec::new();
r.push(handle_variant(id.into(), id.fields(db))?);
(r, data.repr.unwrap_or_default(), false)
(r, repr.unwrap_or_default(), false)
}
AdtId::EnumId(e) => {
let variants = e.enum_variants(db);
@ -61,7 +61,7 @@ pub fn layout_of_adt_query<'db>(
.iter()
.map(|&(v, _, _)| handle_variant(v.into(), v.fields(db)))
.collect::<Result<SmallVec<_>, _>>()?;
(r, db.enum_signature(e).repr.unwrap_or_default(), false)
(r, AttrFlags::repr(db, e.into()).unwrap_or_default(), false)
}
};
let variants = variants
@ -105,27 +105,12 @@ pub(crate) fn layout_of_adt_cycle_result<'db>(
}
fn layout_scalar_valid_range(db: &dyn HirDatabase, def: AdtId) -> (Bound<u128>, Bound<u128>) {
let attrs = db.attrs(def.into());
let get = |name| {
let attr = attrs.by_key(name).tt_values();
for tree in attr {
if let Some(it) = tree.iter().next_as_view() {
let text = it.to_string().replace('_', "");
let (text, base) = match text.as_bytes() {
[b'0', b'x', ..] => (&text[2..], 16),
[b'0', b'o', ..] => (&text[2..], 8),
[b'0', b'b', ..] => (&text[2..], 2),
_ => (&*text, 10),
};
if let Ok(it) = u128::from_str_radix(text, base) {
return Bound::Included(it);
}
}
}
Bound::Unbounded
let range = AttrFlags::rustc_layout_scalar_valid_range(db, def);
let get = |value| match value {
Some(it) => Bound::Included(it),
None => Bound::Unbounded,
};
(get(sym::rustc_layout_scalar_valid_range_start), get(sym::rustc_layout_scalar_valid_range_end))
(get(range.start), get(range.end))
}
/// Finds the appropriate Integer type and signedness for the given

View file

@ -9,6 +9,7 @@ use test_fixture::WithFixture;
use triomphe::Arc;
use crate::{
InferenceResult,
db::HirDatabase,
layout::{Layout, LayoutError},
next_solver::{DbInterner, GenericArgs},
@ -80,7 +81,7 @@ fn eval_goal(
})
.unwrap();
crate::attach_db(&db, || {
let interner = DbInterner::new_with(&db, None, None);
let interner = DbInterner::new_no_crate(&db);
let goal_ty = match adt_or_type_alias_id {
Either::Left(adt_id) => crate::next_solver::Ty::new_adt(
interner,
@ -136,7 +137,7 @@ fn eval_expr(
.find(|x| x.1.name.display_no_db(file_id.edition(&db)).to_smolstr() == "goal")
.unwrap()
.0;
let infer = db.infer(function_id.into());
let infer = InferenceResult::for_body(&db, function_id.into());
let goal_ty = infer.type_of_binding[b];
db.layout_of_ty(goal_ty, db.trait_environment(function_id.into()))
})

View file

@ -477,14 +477,14 @@ pub fn callable_sig_from_fn_trait<'db>(
trait_env: Arc<TraitEnvironment<'db>>,
db: &'db dyn HirDatabase,
) -> Option<(FnTrait, PolyFnSig<'db>)> {
let krate = trait_env.krate;
let fn_once_trait = FnTrait::FnOnce.get_id(db, krate)?;
let mut table = InferenceTable::new(db, trait_env.clone(), None);
let lang_items = table.interner().lang_items();
let fn_once_trait = FnTrait::FnOnce.get_id(lang_items)?;
let output_assoc_type = fn_once_trait
.trait_items(db)
.associated_type_by_name(&Name::new_symbol_root(sym::Output))?;
let mut table = InferenceTable::new(db, trait_env.clone(), None);
// Register two obligations:
// - Self: FnOnce<?args_ty>
// - <Self as FnOnce<?args_ty>>::Output == ?ret_ty
@ -502,7 +502,7 @@ pub fn callable_sig_from_fn_trait<'db>(
table.register_obligation(pred);
let return_ty = table.normalize_alias_ty(projection);
for fn_x in [FnTrait::Fn, FnTrait::FnMut, FnTrait::FnOnce] {
let fn_x_trait = fn_x.get_id(db, krate)?;
let fn_x_trait = fn_x.get_id(lang_items)?;
let trait_ref = TraitRef::new(table.interner(), fn_x_trait.into(), args);
if !table
.try_obligation(Predicate::upcast_from(trait_ref, table.interner()))

View file

@ -11,7 +11,6 @@ pub(crate) mod path;
use std::{cell::OnceCell, iter, mem};
use arrayvec::ArrayVec;
use base_db::Crate;
use either::Either;
use hir_def::{
AdtId, AssocItemId, CallableDefId, ConstId, ConstParamId, DefWithBodyId, EnumId, EnumVariantId,
@ -24,7 +23,7 @@ use hir_def::{
GenericParamDataRef, TypeOrConstParamData, TypeParamProvenance, WherePredicate,
},
item_tree::FieldsShape,
lang_item::LangItem,
lang_item::LangItems,
resolver::{HasResolver, LifetimeNs, Resolver, TypeNs, ValueNs},
signatures::{FunctionSignature, TraitFlags, TypeAliasFlags},
type_ref::{
@ -40,14 +39,17 @@ use rustc_hash::FxHashSet;
use rustc_pattern_analysis::Captures;
use rustc_type_ir::{
AliasTyKind, BoundVarIndexKind, ConstKind, DebruijnIndex, ExistentialPredicate,
ExistentialProjection, ExistentialTraitRef, FnSig, OutlivesPredicate,
ExistentialProjection, ExistentialTraitRef, FnSig, Interner, OutlivesPredicate, TermKind,
TyKind::{self},
TypeVisitableExt, Upcast,
inherent::{GenericArg as _, GenericArgs as _, IntoKind as _, Region as _, SliceLike, Ty as _},
TypeFoldable, TypeVisitableExt, Upcast, UpcastFrom, elaborate,
inherent::{
Clause as _, GenericArg as _, GenericArgs as _, IntoKind as _, Region as _, SliceLike,
Ty as _,
},
};
use salsa::plumbing::AsId;
use smallvec::{SmallVec, smallvec};
use stdx::{impl_from, never};
use tracing::debug;
use triomphe::{Arc, ThinArc};
use crate::{
@ -57,9 +59,9 @@ use crate::{
generics::{Generics, generics, trait_self_param_idx},
next_solver::{
AliasTy, Binder, BoundExistentialPredicates, Clause, ClauseKind, Clauses, Const,
DbInterner, EarlyBinder, EarlyParamRegion, ErrorGuaranteed, GenericArg, GenericArgs,
ParamConst, ParamEnv, PolyFnSig, Predicate, Region, SolverDefId, TraitPredicate, TraitRef,
Ty, Tys, UnevaluatedConst, abi::Safety,
DbInterner, EarlyBinder, EarlyParamRegion, ErrorGuaranteed, FxIndexMap, GenericArg,
GenericArgs, ParamConst, ParamEnv, PolyFnSig, Predicate, Region, SolverDefId,
TraitPredicate, TraitRef, Ty, Tys, UnevaluatedConst, abi::Safety, util::BottomUpFolder,
},
};
@ -166,6 +168,7 @@ impl<'db> LifetimeElisionKind<'db> {
pub struct TyLoweringContext<'db, 'a> {
pub db: &'db dyn HirDatabase,
interner: DbInterner<'db>,
lang_items: &'db LangItems,
resolver: &'a Resolver<'db>,
store: &'a ExpressionStore,
def: GenericDefId,
@ -191,9 +194,12 @@ impl<'db, 'a> TyLoweringContext<'db, 'a> {
) -> Self {
let impl_trait_mode = ImplTraitLoweringState::new(ImplTraitLoweringMode::Disallowed);
let in_binders = DebruijnIndex::ZERO;
let interner = DbInterner::new_with(db, resolver.krate());
Self {
db,
interner: DbInterner::new_with(db, Some(resolver.krate()), None),
// Can provide no block since we don't use it for trait solving.
interner,
lang_items: interner.lang_items(),
resolver,
def,
generics: Default::default(),
@ -490,7 +496,7 @@ impl<'db, 'a> TyLoweringContext<'db, 'a> {
// away instead of two.
let actual_opaque_type_data = self
.with_debruijn(DebruijnIndex::ZERO, |ctx| {
ctx.lower_impl_trait(opaque_ty_id, bounds, self.resolver.krate())
ctx.lower_impl_trait(opaque_ty_id, bounds)
});
self.impl_trait_mode.opaque_type_data[idx] = actual_opaque_type_data;
@ -658,6 +664,8 @@ impl<'db, 'a> TyLoweringContext<'db, 'a> {
ignore_bindings: bool,
) -> impl Iterator<Item = Clause<'db>> + use<'b, 'a, 'db> {
let interner = self.interner;
let meta_sized = self.lang_items.MetaSized;
let pointee_sized = self.lang_items.PointeeSized;
let mut assoc_bounds = None;
let mut clause = None;
match bound {
@ -666,10 +674,6 @@ impl<'db, 'a> TyLoweringContext<'db, 'a> {
if let Some((trait_ref, mut ctx)) = self.lower_trait_ref_from_path(path, self_ty) {
// FIXME(sized-hierarchy): Remove this bound modifications once we have implemented
// sized-hierarchy correctly.
let meta_sized = LangItem::MetaSized
.resolve_trait(ctx.ty_ctx().db, ctx.ty_ctx().resolver.krate());
let pointee_sized = LangItem::PointeeSized
.resolve_trait(ctx.ty_ctx().db, ctx.ty_ctx().resolver.krate());
if meta_sized.is_some_and(|it| it == trait_ref.def_id.0) {
// Ignore this bound
} else if pointee_sized.is_some_and(|it| it == trait_ref.def_id.0) {
@ -692,7 +696,7 @@ impl<'db, 'a> TyLoweringContext<'db, 'a> {
}
}
&TypeBound::Path(path, TraitBoundModifier::Maybe) => {
let sized_trait = LangItem::Sized.resolve_trait(self.db, self.resolver.krate());
let sized_trait = self.lang_items.Sized;
// Don't lower associated type bindings as the only possible relaxed trait bound
// `?Sized` has no of them.
// If we got another trait here ignore the bound completely.
@ -721,138 +725,250 @@ impl<'db, 'a> TyLoweringContext<'db, 'a> {
fn lower_dyn_trait(&mut self, bounds: &[TypeBound]) -> Ty<'db> {
let interner = self.interner;
// FIXME: we should never create non-existential predicates in the first place
// For now, use an error type so we don't run into dummy binder issues
let self_ty = Ty::new_error(interner, ErrorGuaranteed);
let dummy_self_ty = dyn_trait_dummy_self(interner);
let mut region = None;
// INVARIANT: The principal trait bound, if present, must come first. Others may be in any
// order but should be in the same order for the same set but possibly different order of
// bounds in the input.
// INVARIANT: If this function returns `DynTy`, there should be at least one trait bound.
// These invariants are utilized by `TyExt::dyn_trait()` and chalk.
let mut lifetime = None;
let bounds = self.with_shifted_in(DebruijnIndex::from_u32(1), |ctx| {
let mut lowered_bounds: Vec<
rustc_type_ir::Binder<DbInterner<'db>, ExistentialPredicate<DbInterner<'db>>>,
> = Vec::new();
let mut principal = None;
let mut auto_traits = SmallVec::<[_; 3]>::new();
let mut projections = Vec::new();
let mut had_error = false;
for b in bounds {
let db = ctx.db;
ctx.lower_type_bound(b, self_ty, false).for_each(|b| {
if let Some(bound) = b
.kind()
.map_bound(|c| match c {
rustc_type_ir::ClauseKind::Trait(t) => {
let id = t.def_id();
let is_auto =
db.trait_signature(id.0).flags.contains(TraitFlags::AUTO);
if is_auto {
Some(ExistentialPredicate::AutoTrait(t.def_id()))
} else {
Some(ExistentialPredicate::Trait(
ExistentialTraitRef::new_from_args(
interner,
t.def_id(),
GenericArgs::new_from_iter(
interner,
t.trait_ref.args.iter().skip(1),
),
),
))
ctx.lower_type_bound(b, dummy_self_ty, false).for_each(|b| {
match b.kind().skip_binder() {
rustc_type_ir::ClauseKind::Trait(t) => {
let id = t.def_id();
let is_auto = db.trait_signature(id.0).flags.contains(TraitFlags::AUTO);
if is_auto {
auto_traits.push(t.def_id().0);
} else {
if principal.is_some() {
// FIXME: Report an error.
had_error = true;
}
principal = Some(b.kind().rebind(t.trait_ref));
}
rustc_type_ir::ClauseKind::Projection(p) => {
Some(ExistentialPredicate::Projection(
ExistentialProjection::new_from_args(
interner,
p.def_id(),
GenericArgs::new_from_iter(
interner,
p.projection_term.args.iter().skip(1),
),
p.term,
),
))
}
rustc_type_ir::ClauseKind::Projection(p) => {
projections.push(b.kind().rebind(p));
}
rustc_type_ir::ClauseKind::TypeOutlives(outlives_predicate) => {
if region.is_some() {
// FIXME: Report an error.
had_error = true;
}
rustc_type_ir::ClauseKind::TypeOutlives(outlives_predicate) => {
lifetime = Some(outlives_predicate.1);
None
}
rustc_type_ir::ClauseKind::RegionOutlives(_)
| rustc_type_ir::ClauseKind::ConstArgHasType(_, _)
| rustc_type_ir::ClauseKind::WellFormed(_)
| rustc_type_ir::ClauseKind::ConstEvaluatable(_)
| rustc_type_ir::ClauseKind::HostEffect(_)
| rustc_type_ir::ClauseKind::UnstableFeature(_) => unreachable!(),
})
.transpose()
{
lowered_bounds.push(bound);
region = Some(outlives_predicate.1);
}
rustc_type_ir::ClauseKind::RegionOutlives(_)
| rustc_type_ir::ClauseKind::ConstArgHasType(_, _)
| rustc_type_ir::ClauseKind::WellFormed(_)
| rustc_type_ir::ClauseKind::ConstEvaluatable(_)
| rustc_type_ir::ClauseKind::HostEffect(_)
| rustc_type_ir::ClauseKind::UnstableFeature(_) => unreachable!(),
}
})
}
let mut multiple_regular_traits = false;
let mut multiple_same_projection = false;
lowered_bounds.sort_unstable_by(|lhs, rhs| {
use std::cmp::Ordering;
match ((*lhs).skip_binder(), (*rhs).skip_binder()) {
(ExistentialPredicate::Trait(_), ExistentialPredicate::Trait(_)) => {
multiple_regular_traits = true;
// Order doesn't matter - we error
Ordering::Equal
}
(
ExistentialPredicate::AutoTrait(lhs_id),
ExistentialPredicate::AutoTrait(rhs_id),
) => lhs_id.0.cmp(&rhs_id.0),
(ExistentialPredicate::Trait(_), _) => Ordering::Less,
(_, ExistentialPredicate::Trait(_)) => Ordering::Greater,
(ExistentialPredicate::AutoTrait(_), _) => Ordering::Less,
(_, ExistentialPredicate::AutoTrait(_)) => Ordering::Greater,
(
ExistentialPredicate::Projection(lhs),
ExistentialPredicate::Projection(rhs),
) => {
let lhs_id = match lhs.def_id {
SolverDefId::TypeAliasId(id) => id,
_ => unreachable!(),
};
let rhs_id = match rhs.def_id {
SolverDefId::TypeAliasId(id) => id,
_ => unreachable!(),
};
// We only compare the `associated_ty_id`s. We shouldn't have
// multiple bounds for an associated type in the correct Rust code,
// and if we do, we error out.
if lhs_id == rhs_id {
multiple_same_projection = true;
if had_error {
return None;
}
if principal.is_none() && auto_traits.is_empty() {
// No traits is not allowed.
return None;
}
// `Send + Sync` is the same as `Sync + Send`.
auto_traits.sort_unstable();
// Duplicate auto traits are permitted.
auto_traits.dedup();
// Map the projection bounds onto a key that makes it easy to remove redundant
// bounds that are constrained by supertraits of the principal def id.
//
// Also make sure we detect conflicting bounds from expanding a trait alias and
// also specifying it manually, like:
// ```
// type Alias = Trait<Assoc = i32>;
// let _: &dyn Alias<Assoc = u32> = /* ... */;
// ```
let mut projection_bounds = FxIndexMap::default();
for proj in projections {
let key = (
proj.skip_binder().def_id().expect_type_alias(),
interner.anonymize_bound_vars(
proj.map_bound(|proj| proj.projection_term.trait_ref(interner)),
),
);
if let Some(old_proj) = projection_bounds.insert(key, proj)
&& interner.anonymize_bound_vars(proj)
!= interner.anonymize_bound_vars(old_proj)
{
// FIXME: Report "conflicting associated type" error.
}
}
// A stable ordering of associated types from the principal trait and all its
// supertraits. We use this to ensure that different substitutions of a trait
// don't result in `dyn Trait` types with different projections lists, which
// can be unsound: <https://github.com/rust-lang/rust/pull/136458>.
// We achieve a stable ordering by walking over the unsubstituted principal
// trait ref.
let mut ordered_associated_types = vec![];
if let Some(principal_trait) = principal {
for clause in elaborate::elaborate(
interner,
[Clause::upcast_from(
TraitRef::identity(interner, principal_trait.def_id()),
interner,
)],
)
.filter_only_self()
{
let clause = clause.instantiate_supertrait(interner, principal_trait);
debug!("observing object predicate `{clause:?}`");
let bound_predicate = clause.kind();
match bound_predicate.skip_binder() {
ClauseKind::Trait(pred) => {
// FIXME(negative_bounds): Handle this correctly...
let trait_ref = interner
.anonymize_bound_vars(bound_predicate.rebind(pred.trait_ref));
ordered_associated_types.extend(
pred.trait_ref
.def_id
.0
.trait_items(self.db)
.associated_types()
.map(|item| (item, trait_ref)),
);
}
lhs_id.as_id().index().cmp(&rhs_id.as_id().index())
ClauseKind::Projection(pred) => {
let pred = bound_predicate.rebind(pred);
// A `Self` within the original bound will be instantiated with a
// `trait_object_dummy_self`, so check for that.
let references_self = match pred.skip_binder().term.kind() {
TermKind::Ty(ty) => {
ty.walk().any(|arg| arg == dummy_self_ty.into())
}
// FIXME(associated_const_equality): We should walk the const instead of not doing anything
TermKind::Const(_) => false,
};
// If the projection output contains `Self`, force the user to
// elaborate it explicitly to avoid a lot of complexity.
//
// The "classically useful" case is the following:
// ```
// trait MyTrait: FnMut() -> <Self as MyTrait>::MyOutput {
// type MyOutput;
// }
// ```
//
// Here, the user could theoretically write `dyn MyTrait<MyOutput = X>`,
// but actually supporting that would "expand" to an infinitely-long type
// `fix $ τ → dyn MyTrait<MyOutput = X, Output = <τ as MyTrait>::MyOutput`.
//
// Instead, we force the user to write
// `dyn MyTrait<MyOutput = X, Output = X>`, which is uglier but works. See
// the discussion in #56288 for alternatives.
if !references_self {
let key = (
pred.skip_binder().projection_term.def_id.expect_type_alias(),
interner.anonymize_bound_vars(pred.map_bound(|proj| {
proj.projection_term.trait_ref(interner)
})),
);
if !projection_bounds.contains_key(&key) {
projection_bounds.insert(key, pred);
}
}
}
_ => (),
}
}
}
// We compute the list of projection bounds taking the ordered associated types,
// and check if there was an entry in the collected `projection_bounds`. Those
// are computed by first taking the user-written associated types, then elaborating
// the principal trait ref, and only using those if there was no user-written.
// See note below about how we handle missing associated types with `Self: Sized`,
// which are not required to be provided, but are still used if they are provided.
let mut projection_bounds: Vec<_> = ordered_associated_types
.into_iter()
.filter_map(|key| projection_bounds.get(&key).copied())
.collect();
projection_bounds.sort_unstable_by_key(|proj| proj.skip_binder().def_id());
let principal = principal.map(|principal| {
principal.map_bound(|principal| {
// Verify that `dummy_self` did not leak inside default type parameters.
let args: Vec<_> = principal
.args
.iter()
// Skip `Self`
.skip(1)
.map(|arg| {
if arg.walk().any(|arg| arg == dummy_self_ty.into()) {
// FIXME: Report an error.
Ty::new_error(interner, ErrorGuaranteed).into()
} else {
arg
}
})
.collect();
ExistentialPredicate::Trait(ExistentialTraitRef::new(
interner,
principal.def_id,
args,
))
})
});
if multiple_regular_traits || multiple_same_projection {
return None;
}
let projections = projection_bounds.into_iter().map(|proj| {
proj.map_bound(|mut proj| {
// Like for trait refs, verify that `dummy_self` did not leak inside default type
// parameters.
let references_self = proj.projection_term.args.iter().skip(1).any(|arg| {
if arg.walk().any(|arg| arg == dummy_self_ty.into()) {
return true;
}
false
});
if references_self {
proj.projection_term =
replace_dummy_self_with_error(interner, proj.projection_term);
}
if !lowered_bounds.first().map_or(false, |b| {
matches!(
b.as_ref().skip_binder(),
ExistentialPredicate::Trait(_) | ExistentialPredicate::AutoTrait(_)
)
}) {
return None;
}
ExistentialPredicate::Projection(ExistentialProjection::erase_self_ty(
interner, proj,
))
})
});
// As multiple occurrences of the same auto traits *are* permitted, we deduplicate the
// bounds. We shouldn't have repeated elements besides auto traits at this point.
lowered_bounds.dedup();
let auto_traits = auto_traits.into_iter().map(|auto_trait| {
Binder::dummy(ExistentialPredicate::AutoTrait(auto_trait.into()))
});
Some(BoundExistentialPredicates::new_from_iter(interner, lowered_bounds))
// N.b. principal, projections, auto traits
Some(BoundExistentialPredicates::new_from_iter(
interner,
principal.into_iter().chain(projections).chain(auto_traits),
))
});
if let Some(bounds) = bounds {
let region = match lifetime {
let region = match region {
Some(it) => match it.kind() {
rustc_type_ir::RegionKind::ReBound(BoundVarIndexKind::Bound(db), var) => {
Region::new_bound(
@ -873,12 +989,7 @@ impl<'db, 'a> TyLoweringContext<'db, 'a> {
}
}
fn lower_impl_trait(
&mut self,
def_id: SolverDefId,
bounds: &[TypeBound],
krate: Crate,
) -> ImplTrait<'db> {
fn lower_impl_trait(&mut self, def_id: SolverDefId, bounds: &[TypeBound]) -> ImplTrait<'db> {
let interner = self.interner;
cov_mark::hit!(lower_rpit);
let args = GenericArgs::identity_for_item(interner, def_id);
@ -894,7 +1005,7 @@ impl<'db, 'a> TyLoweringContext<'db, 'a> {
}
if !ctx.unsized_types.contains(&self_ty) {
let sized_trait = LangItem::Sized.resolve_trait(self.db, krate);
let sized_trait = self.lang_items.Sized;
let sized_clause = sized_trait.map(|trait_id| {
let trait_ref = TraitRef::new_from_args(
interner,
@ -935,6 +1046,26 @@ impl<'db, 'a> TyLoweringContext<'db, 'a> {
}
}
fn dyn_trait_dummy_self(interner: DbInterner<'_>) -> Ty<'_> {
// This type must not appear anywhere except here.
Ty::new_fresh(interner, 0)
}
fn replace_dummy_self_with_error<'db, T: TypeFoldable<DbInterner<'db>>>(
interner: DbInterner<'db>,
t: T,
) -> T {
let dyn_trait_dummy_self = dyn_trait_dummy_self(interner);
t.fold_with(&mut BottomUpFolder {
interner,
ty_op: |ty| {
if ty == dyn_trait_dummy_self { Ty::new_error(interner, ErrorGuaranteed) } else { ty }
},
lt_op: |lt| lt,
ct_op: |ct| ct,
})
}
pub(crate) fn lower_mutability(m: hir_def::type_ref::Mutability) -> Mutability {
match m {
hir_def::type_ref::Mutability::Shared => Mutability::Not,
@ -1101,7 +1232,7 @@ impl ValueTyDefId {
/// the constructor function `(usize) -> Foo` which lives in the values
/// namespace.
pub(crate) fn ty_query<'db>(db: &'db dyn HirDatabase, def: TyDefId) -> EarlyBinder<'db, Ty<'db>> {
let interner = DbInterner::new_with(db, None, None);
let interner = DbInterner::new_no_crate(db);
match def {
TyDefId::BuiltinType(it) => EarlyBinder::bind(Ty::from_builtin_type(interner, it)),
TyDefId::AdtId(it) => EarlyBinder::bind(Ty::new_adt(
@ -1116,7 +1247,7 @@ pub(crate) fn ty_query<'db>(db: &'db dyn HirDatabase, def: TyDefId) -> EarlyBind
/// Build the declared type of a function. This should not need to look at the
/// function body.
fn type_for_fn<'db>(db: &'db dyn HirDatabase, def: FunctionId) -> EarlyBinder<'db, Ty<'db>> {
let interner = DbInterner::new_with(db, None, None);
let interner = DbInterner::new_no_crate(db);
EarlyBinder::bind(Ty::new_fn_def(
interner,
CallableDefId::FunctionId(def).into(),
@ -1165,7 +1296,7 @@ fn type_for_struct_constructor<'db>(
FieldsShape::Record => None,
FieldsShape::Unit => Some(type_for_adt(db, def.into())),
FieldsShape::Tuple => {
let interner = DbInterner::new_with(db, None, None);
let interner = DbInterner::new_no_crate(db);
Some(EarlyBinder::bind(Ty::new_fn_def(
interner,
CallableDefId::StructId(def).into(),
@ -1185,7 +1316,7 @@ fn type_for_enum_variant_constructor<'db>(
FieldsShape::Record => None,
FieldsShape::Unit => Some(type_for_adt(db, def.loc(db).parent.into())),
FieldsShape::Tuple => {
let interner = DbInterner::new_with(db, None, None);
let interner = DbInterner::new_no_crate(db);
Some(EarlyBinder::bind(Ty::new_fn_def(
interner,
CallableDefId::EnumVariantId(def).into(),
@ -1216,7 +1347,7 @@ pub(crate) fn type_for_type_alias_with_diagnostics_query<'db>(
let type_alias_data = db.type_alias_signature(t);
let mut diags = None;
let resolver = t.resolver(db);
let interner = DbInterner::new_with(db, Some(resolver.krate()), None);
let interner = DbInterner::new_no_crate(db);
let inner = if type_alias_data.flags.contains(TypeAliasFlags::IS_EXTERN) {
EarlyBinder::bind(Ty::new_foreign(interner, t.into()))
} else {
@ -1244,7 +1375,7 @@ pub(crate) fn type_for_type_alias_with_diagnostics_cycle_result<'db>(
db: &'db dyn HirDatabase,
_adt: TypeAliasId,
) -> (EarlyBinder<'db, Ty<'db>>, Diagnostics) {
(EarlyBinder::bind(Ty::new_error(DbInterner::new_with(db, None, None), ErrorGuaranteed)), None)
(EarlyBinder::bind(Ty::new_error(DbInterner::new_no_crate(db), ErrorGuaranteed)), None)
}
pub(crate) fn impl_self_ty_query<'db>(
@ -1277,7 +1408,7 @@ pub(crate) fn impl_self_ty_with_diagnostics_cycle_result(
db: &dyn HirDatabase,
_impl_id: ImplId,
) -> (EarlyBinder<'_, Ty<'_>>, Diagnostics) {
(EarlyBinder::bind(Ty::new_error(DbInterner::new_with(db, None, None), ErrorGuaranteed)), None)
(EarlyBinder::bind(Ty::new_error(DbInterner::new_no_crate(db), ErrorGuaranteed)), None)
}
pub(crate) fn const_param_ty_query<'db>(db: &'db dyn HirDatabase, def: ConstParamId) -> Ty<'db> {
@ -1292,7 +1423,7 @@ pub(crate) fn const_param_ty_with_diagnostics_query<'db>(
let (parent_data, store) = db.generic_params_and_store(def.parent());
let data = &parent_data[def.local_id()];
let resolver = def.parent().resolver(db);
let interner = DbInterner::new_with(db, Some(resolver.krate()), None);
let interner = DbInterner::new_no_crate(db);
let mut ctx = TyLoweringContext::new(
db,
&resolver,
@ -1313,10 +1444,9 @@ pub(crate) fn const_param_ty_with_diagnostics_query<'db>(
pub(crate) fn const_param_ty_with_diagnostics_cycle_result<'db>(
db: &'db dyn HirDatabase,
_: crate::db::HirDatabaseData,
def: ConstParamId,
_def: ConstParamId,
) -> (Ty<'db>, Diagnostics) {
let resolver = def.parent().resolver(db);
let interner = DbInterner::new_with(db, Some(resolver.krate()), None);
let interner = DbInterner::new_no_crate(db);
(Ty::new_error(interner, ErrorGuaranteed), None)
}
@ -1374,7 +1504,7 @@ pub(crate) fn generic_predicates_for_param<'db>(
assoc_name: Option<Name>,
) -> EarlyBinder<'db, Box<[Clause<'db>]>> {
let generics = generics(db, def);
let interner = DbInterner::new_with(db, None, None);
let interner = DbInterner::new_no_crate(db);
let resolver = def.resolver(db);
let mut ctx = TyLoweringContext::new(
db,
@ -1401,9 +1531,7 @@ pub(crate) fn generic_predicates_for_param<'db>(
let TypeRef::Path(path) = &ctx.store[path.type_ref()] else {
return false;
};
let Some(pointee_sized) =
LangItem::PointeeSized.resolve_trait(ctx.db, ctx.resolver.krate())
else {
let Some(pointee_sized) = ctx.lang_items.PointeeSized else {
return false;
};
// Lower the path directly with `Resolver` instead of PathLoweringContext`
@ -1466,9 +1594,13 @@ pub(crate) fn generic_predicates_for_param<'db>(
let args = GenericArgs::identity_for_item(interner, def.into());
if !args.is_empty() {
let explicitly_unsized_tys = ctx.unsized_types;
if let Some(implicitly_sized_predicates) =
implicitly_sized_clauses(db, param_id.parent, &explicitly_unsized_tys, &args, &resolver)
{
if let Some(implicitly_sized_predicates) = implicitly_sized_clauses(
db,
ctx.lang_items,
param_id.parent,
&explicitly_unsized_tys,
&args,
) {
predicates.extend(implicitly_sized_predicates);
};
}
@ -1520,8 +1652,7 @@ pub fn type_alias_bounds_with_diagnostics<'db>(
}
if !ctx.unsized_types.contains(&interner_ty) {
let sized_trait = LangItem::Sized
.resolve_trait(ctx.db, interner.krate.expect("Must have interner.krate"));
let sized_trait = ctx.lang_items.Sized;
if let Some(sized_trait) = sized_trait {
let trait_ref = TraitRef::new_from_args(
interner,
@ -1625,7 +1756,7 @@ pub(crate) fn trait_environment_query<'db>(
def: GenericDefId,
) -> Arc<TraitEnvironment<'db>> {
let module = def.module(db);
let interner = DbInterner::new_with(db, Some(module.krate()), module.containing_block());
let interner = DbInterner::new_with(db, module.krate());
let predicates = GenericPredicates::query_all(db, def);
let traits_in_scope = predicates
.iter_identity_copied()
@ -1663,7 +1794,7 @@ where
{
let generics = generics(db, def);
let resolver = def.resolver(db);
let interner = DbInterner::new_with(db, Some(resolver.krate()), None);
let interner = DbInterner::new_no_crate(db);
let mut ctx = TyLoweringContext::new(
db,
&resolver,
@ -1671,7 +1802,7 @@ where
def,
LifetimeElisionKind::AnonymousReportError,
);
let sized_trait = LangItem::Sized.resolve_trait(db, resolver.krate());
let sized_trait = ctx.lang_items.Sized;
let mut predicates = Vec::new();
let all_generics =
@ -1811,7 +1942,7 @@ fn push_const_arg_has_type_predicates<'db>(
predicates: &mut Vec<Clause<'db>>,
generics: &Generics,
) {
let interner = DbInterner::new_with(db, None, None);
let interner = DbInterner::new_no_crate(db);
let const_params_offset = generics.len_parent() + generics.len_lifetimes_self();
for (param_index, (param_idx, param_data)) in generics.iter_self_type_or_consts().enumerate() {
if !matches!(param_data, TypeOrConstParamData::ConstParamData(_)) {
@ -1839,13 +1970,13 @@ fn push_const_arg_has_type_predicates<'db>(
/// Exception is Self of a trait def.
fn implicitly_sized_clauses<'a, 'subst, 'db>(
db: &'db dyn HirDatabase,
lang_items: &LangItems,
def: GenericDefId,
explicitly_unsized_tys: &'a FxHashSet<Ty<'db>>,
args: &'subst GenericArgs<'db>,
resolver: &Resolver<'db>,
) -> Option<impl Iterator<Item = Clause<'db>> + Captures<'a> + Captures<'subst>> {
let interner = DbInterner::new_with(db, Some(resolver.krate()), None);
let sized_trait = LangItem::Sized.resolve_trait(db, resolver.krate())?;
let interner = DbInterner::new_no_crate(db);
let sized_trait = lang_items.Sized?;
let trait_self_idx = trait_self_param_idx(db, def);
@ -1992,7 +2123,7 @@ fn fn_sig_for_fn<'db>(
) -> EarlyBinder<'db, PolyFnSig<'db>> {
let data = db.function_signature(def);
let resolver = def.resolver(db);
let interner = DbInterner::new_with(db, Some(resolver.krate()), None);
let interner = DbInterner::new_no_crate(db);
let mut ctx_params = TyLoweringContext::new(
db,
&resolver,
@ -2028,7 +2159,7 @@ fn fn_sig_for_fn<'db>(
}
fn type_for_adt<'db>(db: &'db dyn HirDatabase, adt: AdtId) -> EarlyBinder<'db, Ty<'db>> {
let interner = DbInterner::new_with(db, None, None);
let interner = DbInterner::new_no_crate(db);
let args = GenericArgs::identity_for_item(interner, adt.into());
let ty = Ty::new_adt(interner, adt, args);
EarlyBinder::bind(ty)
@ -2043,7 +2174,7 @@ fn fn_sig_for_struct_constructor<'db>(
let ret = type_for_adt(db, def.into()).skip_binder();
let inputs_and_output =
Tys::new_from_iter(DbInterner::new_with(db, None, None), params.chain(Some(ret)));
Tys::new_from_iter(DbInterner::new_no_crate(db), params.chain(Some(ret)));
EarlyBinder::bind(Binder::dummy(FnSig {
abi: FnAbi::RustCall,
c_variadic: false,
@ -2062,7 +2193,7 @@ fn fn_sig_for_enum_variant_constructor<'db>(
let ret = type_for_adt(db, parent.into()).skip_binder();
let inputs_and_output =
Tys::new_from_iter(DbInterner::new_with(db, None, None), params.chain(Some(ret)));
Tys::new_from_iter(DbInterner::new_no_crate(db), params.chain(Some(ret)));
EarlyBinder::bind(Binder::dummy(FnSig {
abi: FnAbi::RustCall,
c_variadic: false,
@ -2078,7 +2209,7 @@ pub(crate) fn associated_ty_item_bounds<'db>(
) -> EarlyBinder<'db, BoundExistentialPredicates<'db>> {
let type_alias_data = db.type_alias_signature(type_alias);
let resolver = hir_def::resolver::HasResolver::resolver(type_alias, db);
let interner = DbInterner::new_with(db, Some(resolver.krate()), None);
let interner = DbInterner::new_no_crate(db);
let mut ctx = TyLoweringContext::new(
db,
&resolver,
@ -2139,7 +2270,7 @@ pub(crate) fn associated_ty_item_bounds<'db>(
}
if !ctx.unsized_types.contains(&self_ty)
&& let Some(sized_trait) = LangItem::Sized.resolve_trait(db, resolver.krate())
&& let Some(sized_trait) = ctx.lang_items.Sized
{
let sized_clause = Binder::dummy(ExistentialPredicate::Trait(ExistentialTraitRef::new(
interner,
@ -2157,7 +2288,8 @@ pub(crate) fn associated_type_by_name_including_super_traits<'db>(
trait_ref: TraitRef<'db>,
name: &Name,
) -> Option<(TraitRef<'db>, TypeAliasId)> {
let interner = DbInterner::new_with(db, None, None);
let module = trait_ref.def_id.0.module(db);
let interner = DbInterner::new_with(db, module.krate());
rustc_type_ir::elaborate::supertraits(interner, Binder::dummy(trait_ref)).find_map(|t| {
let trait_id = t.as_ref().skip_binder().def_id.0;
let assoc_type = trait_id.trait_items(db).associated_type_by_name(name)?;
@ -2171,7 +2303,7 @@ pub fn associated_type_shorthand_candidates(
res: TypeNs,
mut cb: impl FnMut(&Name, TypeAliasId) -> bool,
) -> Option<TypeAliasId> {
let interner = DbInterner::new_with(db, None, None);
let interner = DbInterner::new_no_crate(db);
named_associated_type_shorthand_candidates(interner, def, res, None, |name, _, id| {
cb(name, id).then_some(id)
})

View file

@ -1100,7 +1100,7 @@ pub(crate) fn substs_from_args_and_bindings<'db>(
explicit_self_ty: Option<Ty<'db>>,
ctx: &mut impl GenericArgsLowerer<'db>,
) -> GenericArgs<'db> {
let interner = DbInterner::new_with(db, None, None);
let interner = DbInterner::new_no_crate(db);
tracing::debug!(?args_and_bindings);

View file

@ -15,6 +15,7 @@ use base_db::Crate;
use hir_def::{
AssocItemId, BlockId, ConstId, FunctionId, GenericParamId, HasModule, ImplId, ItemContainerId,
ModuleId, TraitId,
attrs::AttrFlags,
expr_store::path::GenericArgs as HirGenericArgs,
hir::ExprId,
nameres::{DefMap, block_def_map, crate_def_map},
@ -80,7 +81,7 @@ pub struct MethodResolutionContext<'a, 'db> {
pub unstable_features: &'a MethodResolutionUnstableFeatures,
}
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, salsa::Update)]
pub enum CandidateId {
FunctionId(FunctionId),
ConstId(ConstId),
@ -418,7 +419,7 @@ pub(crate) fn lookup_impl_method_query<'db>(
func: FunctionId,
fn_subst: GenericArgs<'db>,
) -> (FunctionId, GenericArgs<'db>) {
let interner = DbInterner::new_with(db, Some(env.krate), env.block);
let interner = DbInterner::new_with(db, env.krate);
let infcx = interner.infer_ctxt().build(TypingMode::PostAnalysis);
let ItemContainerId::TraitId(trait_id) = func.loc(db).container else {
@ -509,9 +510,8 @@ fn crates_containing_incoherent_inherent_impls(db: &dyn HirDatabase) -> Box<[Cra
pub fn incoherent_inherent_impls(db: &dyn HirDatabase, self_ty: SimplifiedType) -> &[ImplId] {
let has_incoherent_impls = match self_ty.def() {
Some(def_id) => match def_id.try_into() {
Ok(def_id) => {
db.attrs(def_id).by_key(sym::rustc_has_incoherent_inherent_impls).exists()
}
Ok(def_id) => AttrFlags::query(db, def_id)
.contains(AttrFlags::RUSTC_HAS_INCOHERENT_INHERENT_IMPLS),
Err(()) => true,
},
_ => true,
@ -597,7 +597,7 @@ impl InherentImpls {
continue;
}
let interner = DbInterner::new_with(db, None, None);
let interner = DbInterner::new_no_crate(db);
let self_ty = db.impl_self_ty(impl_id);
let self_ty = self_ty.instantiate_identity();
if let Some(self_ty) =
@ -715,7 +715,9 @@ impl TraitImpls {
// FIXME: Reservation impls should be considered during coherence checks. If we are
// (ever) to implement coherence checks, this filtering should be done by the trait
// solver.
if db.attrs(impl_id.into()).by_key(sym::rustc_reservation_impl).exists() {
if AttrFlags::query(db, impl_id.into())
.contains(AttrFlags::RUSTC_RESERVATION_IMPL)
{
continue;
}
let trait_ref = match db.impl_trait(impl_id) {
@ -723,7 +725,7 @@ impl TraitImpls {
None => continue,
};
let self_ty = trait_ref.self_ty();
let interner = DbInterner::new_with(db, None, None);
let interner = DbInterner::new_no_crate(db);
let entry = map.entry(trait_ref.def_id.0).or_default();
match simplify_type(interner, self_ty, TreatParams::InstantiateWithInfer) {
Some(self_ty) => {

View file

@ -5,7 +5,6 @@ use hir_def::{
FunctionId, GenericDefId, GenericParamId, ItemContainerId, TraitId,
expr_store::path::{GenericArg as HirGenericArg, GenericArgs as HirGenericArgs},
hir::{ExprId, generics::GenericParamDataRef},
lang_item::LangItem,
};
use rustc_type_ir::{
TypeFoldable,
@ -481,7 +480,7 @@ impl<'a, 'b, 'db> ConfirmContext<'a, 'b, 'db> {
}
Err(_) => {
if self.ctx.unstable_features.arbitrary_self_types {
self.ctx.result.type_mismatches.insert(
self.ctx.result.type_mismatches.get_or_insert_default().insert(
self.expr.into(),
TypeMismatch { expected: method_self_ty, actual: self_ty },
);
@ -550,9 +549,7 @@ impl<'a, 'b, 'db> ConfirmContext<'a, 'b, 'db> {
&self,
predicates: impl Iterator<Item = Clause<'db>>,
) -> bool {
let Some(sized_def_id) =
LangItem::Sized.resolve_trait(self.db(), self.ctx.resolver.krate())
else {
let Some(sized_def_id) = self.ctx.lang_items.Sized else {
return false;
};
@ -570,9 +567,7 @@ impl<'a, 'b, 'db> ConfirmContext<'a, 'b, 'db> {
fn check_for_illegal_method_calls(&self) {
// Disallow calls to the method `drop` defined in the `Drop` trait.
if let ItemContainerId::TraitId(trait_def_id) = self.candidate.loc(self.db()).container
&& LangItem::Drop
.resolve_trait(self.db(), self.ctx.resolver.krate())
.is_some_and(|drop_trait| drop_trait == trait_def_id)
&& self.ctx.lang_items.Drop.is_some_and(|drop_trait| drop_trait == trait_def_id)
{
// FIXME: Report an error.
}

View file

@ -134,27 +134,27 @@ impl<'db> Operand<'db> {
func_id: hir_def::FunctionId,
generic_args: GenericArgs<'db>,
) -> Operand<'db> {
let interner = DbInterner::new_with(db, None, None);
let interner = DbInterner::new_no_crate(db);
let ty = Ty::new_fn_def(interner, CallableDefId::FunctionId(func_id).into(), generic_args);
Operand::from_bytes(Box::default(), ty)
}
}
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub enum ProjectionElem<V, T> {
#[derive(Debug, Clone, PartialEq, Eq, Hash, salsa::Update)]
pub enum ProjectionElem<'db, V: PartialEq> {
Deref,
Field(Either<FieldId, TupleFieldId>),
// FIXME: get rid of this, and use FieldId for tuples and closures
ClosureField(usize),
Index(V),
Index(#[update(unsafe(with(crate::utils::unsafe_update_eq)))] V),
ConstantIndex { offset: u64, from_end: bool },
Subslice { from: u64, to: u64 },
//Downcast(Option<Symbol>, VariantIdx),
OpaqueCast(T),
OpaqueCast(Ty<'db>),
}
impl<V, T> ProjectionElem<V, T> {
pub fn projected_ty<'db>(
impl<'db, V: PartialEq> ProjectionElem<'db, V> {
pub fn projected_ty(
&self,
infcx: &InferCtxt<'db>,
mut base: Ty<'db>,
@ -254,7 +254,7 @@ impl<V, T> ProjectionElem<V, T> {
}
}
type PlaceElem<'db> = ProjectionElem<LocalId<'db>, Ty<'db>>;
type PlaceElem<'db> = ProjectionElem<'db, LocalId<'db>>;
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub struct ProjectionId(u32);

View file

@ -12,7 +12,7 @@ use stdx::never;
use triomphe::Arc;
use crate::{
TraitEnvironment,
InferenceResult, TraitEnvironment,
db::{HirDatabase, InternedClosure, InternedClosureId},
display::DisplayTarget,
mir::OperandKind,
@ -97,7 +97,7 @@ pub fn borrowck_query<'db>(
) -> Result<Arc<[BorrowckResult<'db>]>, MirLowerError<'db>> {
let _p = tracing::info_span!("borrowck_query").entered();
let module = def.module(db);
let interner = DbInterner::new_with(db, Some(module.krate()), module.containing_block());
let interner = DbInterner::new_with(db, module.krate());
let env = db.trait_environment_for_body(def);
let mut res = vec![];
// This calculates opaques defining scope which is a bit costly therefore is put outside `all_mir_bodies()`.
@ -121,10 +121,10 @@ fn make_fetch_closure_field<'db>(
) -> impl FnOnce(InternedClosureId, GenericArgs<'db>, usize) -> Ty<'db> + use<'db> {
|c: InternedClosureId, subst: GenericArgs<'db>, f: usize| {
let InternedClosure(def, _) = db.lookup_intern_closure(c);
let infer = db.infer(def);
let infer = InferenceResult::for_body(db, def);
let (captures, _) = infer.closure_info(c);
let parent_subst = subst.split_closure_args_untupled().parent_args;
let interner = DbInterner::new_with(db, None, None);
let interner = DbInterner::new_no_crate(db);
captures.get(f).expect("broken closure field").ty.instantiate(interner, parent_subst)
}
}

View file

@ -9,7 +9,7 @@ use hir_def::{
Lookup, StaticId, VariantId,
expr_store::HygieneId,
item_tree::FieldsShape,
lang_item::LangItem,
lang_item::LangItems,
layout::{TagEncoding, Variants},
resolver::{HasResolver, TypeNs, ValueNs},
signatures::{StaticFlags, StructFlags},
@ -34,7 +34,7 @@ use syntax::{SyntaxNodePtr, TextRange};
use triomphe::Arc;
use crate::{
CallableDefId, ComplexMemoryMap, MemoryMap, TraitEnvironment,
CallableDefId, ComplexMemoryMap, InferenceResult, MemoryMap, TraitEnvironment,
consteval::{self, ConstEvalError, try_const_usize},
db::{HirDatabase, InternedClosure, InternedClosureId},
display::{ClosureStyle, DisplayTarget, HirDisplay},
@ -641,8 +641,9 @@ impl<'db> Evaluator<'db> {
Err(e) => return Err(MirEvalError::TargetDataLayoutNotAvailable(e)),
};
let cached_ptr_size = target_data_layout.pointer_size().bytes_usize();
let interner = DbInterner::new_with(db, Some(crate_id), module.containing_block());
let interner = DbInterner::new_with(db, crate_id);
let infcx = interner.infer_ctxt().build(TypingMode::PostAnalysis);
let lang_items = interner.lang_items();
Ok(Evaluator {
target_data_layout,
stack: vec![0],
@ -667,13 +668,13 @@ impl<'db> Evaluator<'db> {
mir_or_dyn_index_cache: RefCell::new(Default::default()),
unused_locals_store: RefCell::new(Default::default()),
cached_ptr_size,
cached_fn_trait_func: LangItem::Fn
.resolve_trait(db, crate_id)
cached_fn_trait_func: lang_items
.Fn
.and_then(|x| x.trait_items(db).method_by_name(&Name::new_symbol_root(sym::call))),
cached_fn_mut_trait_func: LangItem::FnMut.resolve_trait(db, crate_id).and_then(|x| {
cached_fn_mut_trait_func: lang_items.FnMut.and_then(|x| {
x.trait_items(db).method_by_name(&Name::new_symbol_root(sym::call_mut))
}),
cached_fn_once_trait_func: LangItem::FnOnce.resolve_trait(db, crate_id).and_then(|x| {
cached_fn_once_trait_func: lang_items.FnOnce.and_then(|x| {
x.trait_items(db).method_by_name(&Name::new_symbol_root(sym::call_once))
}),
infcx,
@ -685,6 +686,11 @@ impl<'db> Evaluator<'db> {
self.infcx.interner
}
#[inline]
fn lang_items(&self) -> &'db LangItems {
self.infcx.interner.lang_items()
}
fn place_addr(&self, p: &Place<'db>, locals: &Locals<'db>) -> Result<'db, Address> {
Ok(self.place_addr_and_ty_and_metadata(p, locals)?.0)
}
@ -716,7 +722,7 @@ impl<'db> Evaluator<'db> {
ty,
|c, subst, f| {
let InternedClosure(def, _) = self.db.lookup_intern_closure(c);
let infer = self.db.infer(def);
let infer = InferenceResult::for_body(self.db, def);
let (captures, _) = infer.closure_info(c);
let parent_subst = subst.split_closure_args_untupled().parent_args;
captures
@ -877,7 +883,8 @@ impl<'db> Evaluator<'db> {
OperandKind::Copy(p) | OperandKind::Move(p) => self.place_ty(p, locals)?,
OperandKind::Constant { konst: _, ty } => *ty,
&OperandKind::Static(s) => {
let ty = self.db.infer(s.into())[self.db.body(s.into()).body_expr];
let ty =
InferenceResult::for_body(self.db, s.into())[self.db.body(s.into()).body_expr];
Ty::new_ref(
self.interner(),
Region::new_static(self.interner()),
@ -2803,7 +2810,8 @@ impl<'db> Evaluator<'db> {
})?;
self.allocate_const_in_heap(locals, konst)?
} else {
let ty = self.db.infer(st.into())[self.db.body(st.into()).body_expr];
let ty =
InferenceResult::for_body(self.db, st.into())[self.db.body(st.into()).body_expr];
let Some((size, align)) = self.size_align_of(ty, locals)? else {
not_supported!("unsized extern static");
};
@ -2864,7 +2872,7 @@ impl<'db> Evaluator<'db> {
span: MirSpan,
) -> Result<'db, ()> {
let Some(drop_fn) = (|| {
let drop_trait = LangItem::Drop.resolve_trait(self.db, self.crate_id)?;
let drop_trait = self.lang_items().Drop?;
drop_trait.trait_items(self.db).method_by_name(&Name::new_symbol_root(sym::drop))
})() else {
// in some tests we don't have drop trait in minicore, and

View file

@ -3,19 +3,20 @@
//!
use std::cmp::{self, Ordering};
use hir_def::{CrateRootModuleId, resolver::HasResolver, signatures::FunctionSignature};
use hir_def::{attrs::AttrFlags, signatures::FunctionSignature};
use hir_expand::name::Name;
use intern::{Symbol, sym};
use intern::sym;
use rustc_type_ir::inherent::{AdtDef, IntoKind, SliceLike, Ty as _};
use stdx::never;
use crate::{
InferenceResult,
display::DisplayTarget,
drop::{DropGlue, has_drop_glue},
mir::eval::{
Address, AdtId, Arc, Evaluator, FunctionId, GenericArgs, HasModule, HirDisplay,
InternedClosure, Interval, IntervalAndTy, IntervalOrOwned, ItemContainerId, LangItem,
Layout, Locals, Lookup, MirEvalError, MirSpan, Mutability, Result, Ty, TyKind, pad16,
InternedClosure, Interval, IntervalAndTy, IntervalOrOwned, ItemContainerId, Layout, Locals,
Lookup, MirEvalError, MirSpan, Mutability, Result, Ty, TyKind, pad16,
},
next_solver::Region,
};
@ -38,6 +39,13 @@ macro_rules! not_supported {
};
}
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
enum EvalLangItem {
BeginPanic,
SliceLen,
DropInPlace,
}
impl<'db> Evaluator<'db> {
pub(super) fn detect_and_exec_special_function(
&mut self,
@ -53,7 +61,7 @@ impl<'db> Evaluator<'db> {
}
let function_data = self.db.function_signature(def);
let attrs = self.db.attrs(def.into());
let attrs = AttrFlags::query(self.db, def.into());
let is_intrinsic = FunctionSignature::is_intrinsic(self.db, def);
if is_intrinsic {
@ -65,7 +73,7 @@ impl<'db> Evaluator<'db> {
locals,
span,
!function_data.has_body()
|| attrs.by_key(sym::rustc_intrinsic_must_be_overridden).exists(),
|| attrs.contains(AttrFlags::RUSTC_INTRINSIC_MUST_BE_OVERRIDDEN),
);
}
let is_extern_c = match def.lookup(self.db).container {
@ -85,18 +93,13 @@ impl<'db> Evaluator<'db> {
.map(|()| true);
}
let alloc_fn =
attrs.iter().filter_map(|it| it.path().as_ident()).map(|it| it.symbol()).find(|it| {
[
&sym::rustc_allocator,
&sym::rustc_deallocator,
&sym::rustc_reallocator,
&sym::rustc_allocator_zeroed,
]
.contains(it)
});
if let Some(alloc_fn) = alloc_fn {
self.exec_alloc_fn(alloc_fn, args, destination)?;
if attrs.intersects(
AttrFlags::RUSTC_ALLOCATOR
| AttrFlags::RUSTC_DEALLOCATOR
| AttrFlags::RUSTC_REALLOCATOR
| AttrFlags::RUSTC_ALLOCATOR_ZEROED,
) {
self.exec_alloc_fn(attrs, args, destination)?;
return Ok(true);
}
if let Some(it) = self.detect_lang_function(def) {
@ -105,7 +108,7 @@ impl<'db> Evaluator<'db> {
return Ok(true);
}
if let ItemContainerId::TraitId(t) = def.lookup(self.db).container
&& self.db.lang_attr(t.into()) == Some(LangItem::Clone)
&& Some(t) == self.lang_items().Clone
{
let [self_ty] = generic_args.as_slice() else {
not_supported!("wrong generic arg count for clone");
@ -131,12 +134,8 @@ impl<'db> Evaluator<'db> {
def: FunctionId,
) -> Result<'db, Option<FunctionId>> {
// `PanicFmt` is redirected to `ConstPanicFmt`
if let Some(LangItem::PanicFmt) = self.db.lang_attr(def.into()) {
let resolver = CrateRootModuleId::from(self.crate_id).resolver(self.db);
let Some(const_panic_fmt) =
LangItem::ConstPanicFmt.resolve_function(self.db, resolver.krate())
else {
if Some(def) == self.lang_items().PanicFmt {
let Some(const_panic_fmt) = self.lang_items().ConstPanicFmt else {
not_supported!("const_panic_fmt lang item not found or not a function");
};
return Ok(Some(const_panic_fmt));
@ -169,7 +168,7 @@ impl<'db> Evaluator<'db> {
};
let addr = Address::from_bytes(arg.get(self)?)?;
let InternedClosure(closure_owner, _) = self.db.lookup_intern_closure(id.0);
let infer = self.db.infer(closure_owner);
let infer = InferenceResult::for_body(self.db, closure_owner);
let (captures, _) = infer.closure_info(id.0);
let layout = self.layout(self_ty)?;
let db = self.db;
@ -245,12 +244,14 @@ impl<'db> Evaluator<'db> {
fn exec_alloc_fn(
&mut self,
alloc_fn: &Symbol,
alloc_fn: AttrFlags,
args: &[IntervalAndTy<'db>],
destination: Interval,
) -> Result<'db, ()> {
match alloc_fn {
_ if *alloc_fn == sym::rustc_allocator_zeroed || *alloc_fn == sym::rustc_allocator => {
_ if alloc_fn
.intersects(AttrFlags::RUSTC_ALLOCATOR_ZEROED | AttrFlags::RUSTC_ALLOCATOR) =>
{
let [size, align] = args else {
return Err(MirEvalError::InternalError(
"rustc_allocator args are not provided".into(),
@ -261,8 +262,8 @@ impl<'db> Evaluator<'db> {
let result = self.heap_allocate(size, align)?;
destination.write_from_bytes(self, &result.to_bytes())?;
}
_ if *alloc_fn == sym::rustc_deallocator => { /* no-op for now */ }
_ if *alloc_fn == sym::rustc_reallocator => {
_ if alloc_fn.contains(AttrFlags::RUSTC_DEALLOCATOR) => { /* no-op for now */ }
_ if alloc_fn.contains(AttrFlags::RUSTC_REALLOCATOR) => {
let [ptr, old_size, align, new_size] = args else {
return Err(MirEvalError::InternalError(
"rustc_allocator args are not provided".into(),
@ -286,19 +287,26 @@ impl<'db> Evaluator<'db> {
Ok(())
}
fn detect_lang_function(&self, def: FunctionId) -> Option<LangItem> {
use LangItem::*;
let attrs = self.db.attrs(def.into());
fn detect_lang_function(&self, def: FunctionId) -> Option<EvalLangItem> {
use EvalLangItem::*;
let lang_items = self.lang_items();
let attrs = AttrFlags::query(self.db, def.into());
if attrs.by_key(sym::rustc_const_panic_str).exists() {
if attrs.contains(AttrFlags::RUSTC_CONST_PANIC_STR) {
// `#[rustc_const_panic_str]` is treated like `lang = "begin_panic"` by rustc CTFE.
return Some(LangItem::BeginPanic);
return Some(BeginPanic);
}
let candidate = attrs.lang_item()?;
// We want to execute these functions with special logic
// `PanicFmt` is not detected here as it's redirected later.
if [BeginPanic, SliceLen, DropInPlace].contains(&candidate) {
if let Some((_, candidate)) = [
(lang_items.BeginPanic, BeginPanic),
(lang_items.SliceLen, SliceLen),
(lang_items.DropInPlace, DropInPlace),
]
.iter()
.find(|&(candidate, _)| candidate == Some(def))
{
return Some(candidate);
}
@ -307,13 +315,13 @@ impl<'db> Evaluator<'db> {
fn exec_lang_item(
&mut self,
it: LangItem,
it: EvalLangItem,
generic_args: GenericArgs<'db>,
args: &[IntervalAndTy<'db>],
locals: &Locals<'db>,
span: MirSpan,
) -> Result<'db, Vec<u8>> {
use LangItem::*;
use EvalLangItem::*;
let mut args = args.iter();
match it {
BeginPanic => {
@ -374,7 +382,6 @@ impl<'db> Evaluator<'db> {
)?;
Ok(vec![])
}
it => not_supported!("Executing lang item {it:?}"),
}
}
@ -1219,7 +1226,7 @@ impl<'db> Evaluator<'db> {
let addr = tuple.interval.addr.offset(offset);
args.push(IntervalAndTy::new(addr, field, self, locals)?);
}
if let Some(target) = LangItem::FnOnce.resolve_trait(self.db, self.crate_id)
if let Some(target) = self.lang_items().FnOnce
&& let Some(def) = target
.trait_items(self.db)
.method_by_name(&Name::new_symbol_root(sym::call_once))
@ -1329,7 +1336,7 @@ impl<'db> Evaluator<'db> {
{
result = (l as i8).cmp(&(r as i8));
}
if let Some(e) = LangItem::Ordering.resolve_enum(self.db, self.crate_id) {
if let Some(e) = self.lang_items().Ordering {
let ty = self.db.ty(e.into()).skip_binder();
let r = self.compute_discriminant(ty, &[result as i8 as u8])?;
destination.write_from_bytes(self, &r.to_le_bytes()[0..destination.size])?;

View file

@ -17,7 +17,7 @@ use super::{MirEvalError, interpret_mir};
fn eval_main(db: &TestDB, file_id: EditionedFileId) -> Result<(String, String), MirEvalError<'_>> {
crate::attach_db(db, || {
let interner = DbInterner::new_with(db, None, None);
let interner = DbInterner::new_no_crate(db);
let module_id = db.module_for_file(file_id.file_id(db));
let def_map = module_id.def_map(db);
let scope = &def_map[module_id.local_id].scope;

View file

@ -12,7 +12,7 @@ use hir_def::{
Pat, PatId, RecordFieldPat, RecordLitField,
},
item_tree::FieldsShape,
lang_item::{LangItem, LangItemTarget, lang_item},
lang_item::LangItems,
resolver::{HasResolver, ResolveValueResult, Resolver, ValueNs},
};
use hir_expand::name::Name;
@ -110,7 +110,7 @@ pub enum MirLowerError<'db> {
Loop,
/// Something that should never happen and is definitely a bug, but we don't want to panic if it happened
ImplementationError(String),
LangItemNotFound(LangItem),
LangItemNotFound,
MutatingRvalue,
UnresolvedLabel,
UnresolvedUpvar(Place<'db>),
@ -232,7 +232,7 @@ impl MirLowerError<'_> {
| MirLowerError::BreakWithoutLoop
| MirLowerError::Loop
| MirLowerError::ImplementationError(_)
| MirLowerError::LangItemNotFound(_)
| MirLowerError::LangItemNotFound
| MirLowerError::MutatingRvalue
| MirLowerError::UnresolvedLabel
| MirLowerError::UnresolvedUpvar(_) => writeln!(f, "{self:?}")?,
@ -302,7 +302,7 @@ impl<'a, 'db> MirLowerCtx<'a, 'db> {
};
let resolver = owner.resolver(db);
let env = db.trait_environment_for_body(owner);
let interner = DbInterner::new_with(db, Some(env.krate), env.block);
let interner = DbInterner::new_with(db, env.krate);
// FIXME(next-solver): Is `non_body_analysis()` correct here? Don't we want to reveal opaque types defined by this body?
let infcx = interner.infer_ctxt().build(TypingMode::non_body_analysis());
@ -327,6 +327,11 @@ impl<'a, 'db> MirLowerCtx<'a, 'db> {
self.infcx.interner
}
#[inline]
fn lang_items(&self) -> &'db LangItems {
self.infcx.interner.lang_items()
}
fn temp(
&mut self,
ty: Ty<'db>,
@ -1816,11 +1821,6 @@ impl<'a, 'db> MirLowerCtx<'a, 'db> {
Ok(())
}
fn resolve_lang_item(&self, item: LangItem) -> Result<'db, LangItemTarget> {
let crate_id = self.owner.module(self.db).krate();
lang_item(self.db, crate_id, item).ok_or(MirLowerError::LangItemNotFound(item))
}
fn lower_block_to_place(
&mut self,
statements: &[hir_def::hir::Statement],
@ -2111,7 +2111,7 @@ pub fn mir_body_for_closure_query<'db>(
) -> Result<'db, Arc<MirBody<'db>>> {
let InternedClosure(owner, expr) = db.lookup_intern_closure(closure);
let body = db.body(owner);
let infer = db.infer(owner);
let infer = InferenceResult::for_body(db, owner);
let Expr::Closure { args, body: root, .. } = &body[expr] else {
implementation_error!("closure expression is not closure");
};
@ -2119,7 +2119,7 @@ pub fn mir_body_for_closure_query<'db>(
implementation_error!("closure expression is not closure");
};
let (captures, kind) = infer.closure_info(closure);
let mut ctx = MirLowerCtx::new(db, owner, &body, &infer);
let mut ctx = MirLowerCtx::new(db, owner, &body, infer);
// 0 is return local
ctx.result.locals.alloc(Local { ty: infer[*root] });
let closure_local = ctx.result.locals.alloc(Local {
@ -2249,8 +2249,8 @@ pub fn mir_body_query<'db>(
};
let _p = tracing::info_span!("mir_body_query", ?detail).entered();
let body = db.body(def);
let infer = db.infer(def);
let mut result = lower_to_mir(db, def, &body, &infer, body.body_expr)?;
let infer = InferenceResult::for_body(db, def);
let mut result = lower_to_mir(db, def, &body, infer, body.body_expr)?;
result.shrink_to_fit();
Ok(Arc::new(result))
}

View file

@ -2,7 +2,7 @@
use hir_def::FunctionId;
use intern::sym;
use rustc_type_ir::inherent::{AdtDef, Region as _, Ty as _};
use rustc_type_ir::inherent::{Region as _, Ty as _};
use super::*;
use crate::{
@ -177,13 +177,7 @@ impl<'db> MirLowerCtx<'_, 'db> {
Expr::UnaryOp { expr, op: hir_def::hir::UnaryOp::Deref } => {
let is_builtin = match self.expr_ty_without_adjust(*expr).kind() {
TyKind::Ref(..) | TyKind::RawPtr(..) => true,
TyKind::Adt(id, _) => {
if let Some(lang_item) = self.db.lang_attr(id.def_id().0.into()) {
lang_item == LangItem::OwnedBox
} else {
false
}
}
TyKind::Adt(id, _) => id.is_box(),
_ => false,
};
if !is_builtin {
@ -198,8 +192,7 @@ impl<'db> MirLowerCtx<'_, 'db> {
expr_id.into(),
'b: {
if let Some((f, _)) = self.infer.method_resolution(expr_id)
&& let Some(deref_trait) =
self.resolve_lang_item(LangItem::DerefMut)?.as_trait()
&& let Some(deref_trait) = self.lang_items().DerefMut
&& let Some(deref_fn) = deref_trait
.trait_items(self.db)
.method_by_name(&Name::new_symbol_root(sym::deref_mut))
@ -330,17 +323,18 @@ impl<'db> MirLowerCtx<'_, 'db> {
span: MirSpan,
mutability: bool,
) -> Result<'db, Option<(Place<'db>, BasicBlockId<'db>)>> {
let lang_items = self.lang_items();
let (mutability, trait_lang_item, trait_method_name, borrow_kind) = if !mutability {
(
Mutability::Not,
LangItem::Deref,
lang_items.Deref,
Name::new_symbol_root(sym::deref),
BorrowKind::Shared,
)
} else {
(
Mutability::Mut,
LangItem::DerefMut,
lang_items.DerefMut,
Name::new_symbol_root(sym::deref_mut),
BorrowKind::Mut { kind: MutBorrowKind::Default },
)
@ -350,14 +344,11 @@ impl<'db> MirLowerCtx<'_, 'db> {
let target_ty_ref = Ty::new_ref(self.interner(), error_region, target_ty, mutability);
let ref_place: Place<'db> = self.temp(ty_ref, current, span)?.into();
self.push_assignment(current, ref_place, Rvalue::Ref(borrow_kind, place), span);
let deref_trait = self
.resolve_lang_item(trait_lang_item)?
.as_trait()
.ok_or(MirLowerError::LangItemNotFound(trait_lang_item))?;
let deref_trait = trait_lang_item.ok_or(MirLowerError::LangItemNotFound)?;
let deref_fn = deref_trait
.trait_items(self.db)
.method_by_name(&trait_method_name)
.ok_or(MirLowerError::LangItemNotFound(trait_lang_item))?;
.ok_or(MirLowerError::LangItemNotFound)?;
let deref_fn_op = Operand::const_zst(Ty::new_fn_def(
self.interner(),
CallableDefId::FunctionId(deref_fn).into(),

View file

@ -98,7 +98,7 @@ impl<'db> Filler<'db> {
env: Arc<TraitEnvironment<'db>>,
subst: GenericArgs<'db>,
) -> Self {
let interner = DbInterner::new_with(db, Some(env.krate), env.block);
let interner = DbInterner::new_with(db, env.krate);
let infcx = interner.infer_ctxt().build(TypingMode::PostAnalysis);
Self { infcx, trait_env: env, subst }
}

View file

@ -7,6 +7,7 @@ use rustc_type_ir::{
GenericArgKind, Interner, TermKind, TyKind, TyVid, Variance,
inherent::{GenericArg as _, GenericsOf, IntoKind, SliceLike, Term as _, Ty as _},
relate::{Relate, VarianceDiagInfo},
walk::TypeWalker,
};
use smallvec::SmallVec;
@ -78,6 +79,11 @@ impl<'db> GenericArg<'db> {
GenericParamId::LifetimeParamId(_) => Region::error(interner).into(),
}
}
#[inline]
pub fn walk(self) -> TypeWalker<DbInterner<'db>> {
TypeWalker::new(self)
}
}
impl<'db> From<Term<'db>> for GenericArg<'db> {

View file

@ -8,7 +8,6 @@ use std::sync::Arc;
pub use BoundRegionConversionTime::*;
use ena::unify as ut;
use hir_def::GenericParamId;
use hir_def::lang_item::LangItem;
use opaque_types::{OpaqueHiddenType, OpaqueTypeStorage};
use region_constraints::{RegionConstraintCollector, RegionConstraintStorage};
use rustc_next_trait_solver::solve::SolverDelegateEvalExt;
@ -542,9 +541,7 @@ impl<'db> InferCtxt<'db> {
pub fn type_is_copy_modulo_regions(&self, param_env: ParamEnv<'db>, ty: Ty<'db>) -> bool {
let ty = self.resolve_vars_if_possible(ty);
let Some(copy_def_id) =
LangItem::Copy.resolve_trait(self.interner.db, self.interner.krate.unwrap())
else {
let Some(copy_def_id) = self.interner.lang_items().Copy else {
return false;
};

View file

@ -8,9 +8,10 @@ pub use tls_db::{attach_db, attach_db_allow_change, with_attached_db};
use base_db::Crate;
use hir_def::{
AdtId, AttrDefId, BlockId, CallableDefId, DefWithBodyId, EnumVariantId, HasModule,
ItemContainerId, StructId, UnionId, VariantId,
lang_item::LangItem,
AdtId, CallableDefId, DefWithBodyId, EnumVariantId, HasModule, ItemContainerId, StructId,
UnionId, VariantId,
attrs::AttrFlags,
lang_item::LangItems,
signatures::{FieldData, FnFlags, ImplFlags, StructFlags, TraitFlags},
};
use la_arena::Idx;
@ -270,8 +271,8 @@ pub use crate::_interned_vec_db as interned_vec_db;
#[derive(Debug, Copy, Clone)]
pub struct DbInterner<'db> {
pub(crate) db: &'db dyn HirDatabase,
pub(crate) krate: Option<Crate>,
pub(crate) block: Option<BlockId>,
krate: Option<Crate>,
lang_items: Option<&'db LangItems>,
}
// FIXME: very wrong, see https://github.com/rust-lang/rust/pull/144808
@ -284,22 +285,42 @@ impl<'db> DbInterner<'db> {
crate::with_attached_db(|db| DbInterner {
db: unsafe { std::mem::transmute::<&dyn HirDatabase, &'db dyn HirDatabase>(db) },
krate: None,
block: None,
lang_items: None,
})
}
pub fn new_with(
db: &'db dyn HirDatabase,
krate: Option<Crate>,
block: Option<BlockId>,
) -> DbInterner<'db> {
DbInterner { db, krate, block }
/// Creates a new interner without an active crate. Good only for interning things, not for trait solving etc..
/// As a rule of thumb, when you create an `InferCtxt`, you need to provide the crate (and the block).
///
/// Elaboration is a special kind: it needs lang items (for `Sized`), therefore it needs `new_with()`.
pub fn new_no_crate(db: &'db dyn HirDatabase) -> Self {
DbInterner { db, krate: None, lang_items: None }
}
pub fn new_with(db: &'db dyn HirDatabase, krate: Crate) -> DbInterner<'db> {
DbInterner {
db,
krate: Some(krate),
// As an approximation, when we call `new_with` we're trait solving, therefore we need the lang items.
// This is also convenient since here we have a starting crate but not in `new_no_crate`.
lang_items: Some(hir_def::lang_item::lang_items(db, krate)),
}
}
#[inline]
pub fn db(&self) -> &'db dyn HirDatabase {
self.db
}
#[inline]
#[track_caller]
pub fn lang_items(&self) -> &'db LangItems {
self.lang_items.expect(
"Must have `DbInterner::lang_items`.\n\n\
Note: you might have called `DbInterner::new_no_crate()` \
where you should've called `DbInterner::new_with()`",
)
}
}
// This is intentionally left as `()`
@ -479,28 +500,28 @@ impl AdtDef {
let variants = vec![(VariantIdx(0), VariantDef::Struct(struct_id))];
let mut repr = ReprOptions::default();
repr.align = data.repr.and_then(|r| r.align);
repr.pack = data.repr.and_then(|r| r.pack);
repr.int = data.repr.and_then(|r| r.int);
let data_repr = data.repr(db, struct_id);
let mut repr_flags = ReprFlags::empty();
if flags.is_box {
repr_flags.insert(ReprFlags::IS_LINEAR);
}
if data.repr.is_some_and(|r| r.c()) {
if data_repr.is_some_and(|r| r.c()) {
repr_flags.insert(ReprFlags::IS_C);
}
if data.repr.is_some_and(|r| r.simd()) {
if data_repr.is_some_and(|r| r.simd()) {
repr_flags.insert(ReprFlags::IS_SIMD);
}
repr.flags = repr_flags;
let repr = ReprOptions {
align: data_repr.and_then(|r| r.align),
pack: data_repr.and_then(|r| r.pack),
int: data_repr.and_then(|r| r.int),
flags: repr_flags,
..ReprOptions::default()
};
(flags, variants, repr)
}
AdtId::UnionId(union_id) => {
let data = db.union_signature(union_id);
let flags = AdtFlags {
is_enum: false,
is_union: true,
@ -513,22 +534,24 @@ impl AdtDef {
let variants = vec![(VariantIdx(0), VariantDef::Union(union_id))];
let mut repr = ReprOptions::default();
repr.align = data.repr.and_then(|r| r.align);
repr.pack = data.repr.and_then(|r| r.pack);
repr.int = data.repr.and_then(|r| r.int);
let data_repr = AttrFlags::repr(db, union_id.into());
let mut repr_flags = ReprFlags::empty();
if flags.is_box {
repr_flags.insert(ReprFlags::IS_LINEAR);
}
if data.repr.is_some_and(|r| r.c()) {
if data_repr.is_some_and(|r| r.c()) {
repr_flags.insert(ReprFlags::IS_C);
}
if data.repr.is_some_and(|r| r.simd()) {
if data_repr.is_some_and(|r| r.simd()) {
repr_flags.insert(ReprFlags::IS_SIMD);
}
repr.flags = repr_flags;
let repr = ReprOptions {
align: data_repr.and_then(|r| r.align),
pack: data_repr.and_then(|r| r.pack),
int: data_repr.and_then(|r| r.int),
flags: repr_flags,
..ReprOptions::default()
};
(flags, variants, repr)
}
@ -552,24 +575,26 @@ impl AdtDef {
.map(|(idx, v)| (idx, VariantDef::Enum(v.0)))
.collect();
let data = db.enum_signature(enum_id);
let mut repr = ReprOptions::default();
repr.align = data.repr.and_then(|r| r.align);
repr.pack = data.repr.and_then(|r| r.pack);
repr.int = data.repr.and_then(|r| r.int);
let data_repr = AttrFlags::repr(db, enum_id.into());
let mut repr_flags = ReprFlags::empty();
if flags.is_box {
repr_flags.insert(ReprFlags::IS_LINEAR);
}
if data.repr.is_some_and(|r| r.c()) {
if data_repr.is_some_and(|r| r.c()) {
repr_flags.insert(ReprFlags::IS_C);
}
if data.repr.is_some_and(|r| r.simd()) {
if data_repr.is_some_and(|r| r.simd()) {
repr_flags.insert(ReprFlags::IS_SIMD);
}
repr.flags = repr_flags;
let repr = ReprOptions {
align: data_repr.and_then(|r| r.align),
pack: data_repr.and_then(|r| r.pack),
int: data_repr.and_then(|r| r.int),
flags: repr_flags,
..ReprOptions::default()
};
(flags, variants, repr)
}
@ -849,7 +874,7 @@ interned_vec_db!(PatList, Pattern);
macro_rules! as_lang_item {
(
$solver_enum:ident, $var:ident;
$solver_enum:ident, $self:ident, $def_id:expr;
ignore = {
$( $ignore:ident ),* $(,)?
@ -857,6 +882,7 @@ macro_rules! as_lang_item {
$( $variant:ident ),* $(,)?
) => {{
let lang_items = $self.lang_items();
// Ensure exhaustiveness.
if let Some(it) = None::<$solver_enum> {
match it {
@ -864,13 +890,32 @@ macro_rules! as_lang_item {
$( $solver_enum::$ignore => {} )*
}
}
match $var {
$( LangItem::$variant => Some($solver_enum::$variant), )*
match $def_id {
$( def_id if lang_items.$variant.is_some_and(|it| it == def_id) => Some($solver_enum::$variant), )*
_ => None
}
}};
}
macro_rules! is_lang_item {
(
$solver_enum:ident, $self:ident, $def_id:expr, $expected_variant:ident;
ignore = {
$( $ignore:ident ),* $(,)?
}
$( $variant:ident ),* $(,)?
) => {{
let lang_items = $self.lang_items();
let def_id = $def_id;
match $expected_variant {
$( $solver_enum::$variant => lang_items.$variant.is_some_and(|it| it == def_id), )*
$( $solver_enum::$ignore => false, )*
}
}};
}
impl<'db> Interner for DbInterner<'db> {
type DefId = SolverDefId;
type LocalDefId = SolverDefId;
@ -1253,8 +1298,7 @@ impl<'db> Interner for DbInterner<'db> {
}
fn generics_require_sized_self(self, def_id: Self::DefId) -> bool {
let sized_trait =
LangItem::Sized.resolve_trait(self.db(), self.krate.expect("Must have self.krate"));
let sized_trait = self.lang_items().Sized;
let Some(sized_id) = sized_trait else {
return false; /* No Sized trait, can't require it! */
};
@ -1428,84 +1472,69 @@ impl<'db> Interner for DbInterner<'db> {
}
fn require_lang_item(self, lang_item: SolverLangItem) -> Self::DefId {
let lang_items = self.lang_items();
let lang_item = match lang_item {
SolverLangItem::AsyncFnKindUpvars => unimplemented!(),
SolverLangItem::AsyncFnOnceOutput => LangItem::AsyncFnOnceOutput,
SolverLangItem::CallOnceFuture => LangItem::CallOnceFuture,
SolverLangItem::CallRefFuture => LangItem::CallRefFuture,
SolverLangItem::CoroutineReturn => LangItem::CoroutineReturn,
SolverLangItem::CoroutineYield => LangItem::CoroutineYield,
SolverLangItem::DynMetadata => LangItem::DynMetadata,
SolverLangItem::FutureOutput => LangItem::FutureOutput,
SolverLangItem::Metadata => LangItem::Metadata,
SolverLangItem::AsyncFnOnceOutput => lang_items.AsyncFnOnceOutput,
SolverLangItem::CallOnceFuture => lang_items.CallOnceFuture,
SolverLangItem::CallRefFuture => lang_items.CallRefFuture,
SolverLangItem::CoroutineReturn => lang_items.CoroutineReturn,
SolverLangItem::CoroutineYield => lang_items.CoroutineYield,
SolverLangItem::FutureOutput => lang_items.FutureOutput,
SolverLangItem::Metadata => lang_items.Metadata,
SolverLangItem::DynMetadata => {
return lang_items.DynMetadata.expect("Lang item required but not found.").into();
}
};
let target = hir_def::lang_item::lang_item(
self.db(),
self.krate.expect("Must have self.krate"),
lang_item,
)
.unwrap_or_else(|| panic!("Lang item {lang_item:?} required but not found."));
match target {
hir_def::lang_item::LangItemTarget::EnumId(enum_id) => enum_id.into(),
hir_def::lang_item::LangItemTarget::Function(function_id) => function_id.into(),
hir_def::lang_item::LangItemTarget::ImplDef(impl_id) => impl_id.into(),
hir_def::lang_item::LangItemTarget::Static(static_id) => static_id.into(),
hir_def::lang_item::LangItemTarget::Struct(struct_id) => struct_id.into(),
hir_def::lang_item::LangItemTarget::Union(union_id) => union_id.into(),
hir_def::lang_item::LangItemTarget::TypeAlias(type_alias_id) => type_alias_id.into(),
hir_def::lang_item::LangItemTarget::Trait(trait_id) => trait_id.into(),
hir_def::lang_item::LangItemTarget::EnumVariant(_) => unimplemented!(),
}
lang_item.expect("Lang item required but not found.").into()
}
fn require_trait_lang_item(self, lang_item: SolverTraitLangItem) -> TraitIdWrapper {
let lang_items = self.lang_items();
let lang_item = match lang_item {
SolverTraitLangItem::AsyncFn => LangItem::AsyncFn,
SolverTraitLangItem::AsyncFn => lang_items.AsyncFn,
SolverTraitLangItem::AsyncFnKindHelper => unimplemented!(),
SolverTraitLangItem::AsyncFnMut => LangItem::AsyncFnMut,
SolverTraitLangItem::AsyncFnOnce => LangItem::AsyncFnOnce,
SolverTraitLangItem::AsyncFnOnceOutput => LangItem::AsyncFnOnceOutput,
SolverTraitLangItem::AsyncFnMut => lang_items.AsyncFnMut,
SolverTraitLangItem::AsyncFnOnce => lang_items.AsyncFnOnce,
SolverTraitLangItem::AsyncFnOnceOutput => unimplemented!(
"This is incorrectly marked as `SolverTraitLangItem`, and is not used by the solver."
),
SolverTraitLangItem::AsyncIterator => unimplemented!(),
SolverTraitLangItem::Clone => LangItem::Clone,
SolverTraitLangItem::Copy => LangItem::Copy,
SolverTraitLangItem::Coroutine => LangItem::Coroutine,
SolverTraitLangItem::Destruct => LangItem::Destruct,
SolverTraitLangItem::DiscriminantKind => LangItem::DiscriminantKind,
SolverTraitLangItem::Drop => LangItem::Drop,
SolverTraitLangItem::Fn => LangItem::Fn,
SolverTraitLangItem::FnMut => LangItem::FnMut,
SolverTraitLangItem::FnOnce => LangItem::FnOnce,
SolverTraitLangItem::FnPtrTrait => LangItem::FnPtrTrait,
SolverTraitLangItem::Clone => lang_items.Clone,
SolverTraitLangItem::Copy => lang_items.Copy,
SolverTraitLangItem::Coroutine => lang_items.Coroutine,
SolverTraitLangItem::Destruct => lang_items.Destruct,
SolverTraitLangItem::DiscriminantKind => lang_items.DiscriminantKind,
SolverTraitLangItem::Drop => lang_items.Drop,
SolverTraitLangItem::Fn => lang_items.Fn,
SolverTraitLangItem::FnMut => lang_items.FnMut,
SolverTraitLangItem::FnOnce => lang_items.FnOnce,
SolverTraitLangItem::FnPtrTrait => lang_items.FnPtrTrait,
SolverTraitLangItem::FusedIterator => unimplemented!(),
SolverTraitLangItem::Future => LangItem::Future,
SolverTraitLangItem::Iterator => LangItem::Iterator,
SolverTraitLangItem::PointeeTrait => LangItem::PointeeTrait,
SolverTraitLangItem::Sized => LangItem::Sized,
SolverTraitLangItem::MetaSized => LangItem::MetaSized,
SolverTraitLangItem::PointeeSized => LangItem::PointeeSized,
SolverTraitLangItem::TransmuteTrait => LangItem::TransmuteTrait,
SolverTraitLangItem::Tuple => LangItem::Tuple,
SolverTraitLangItem::Unpin => LangItem::Unpin,
SolverTraitLangItem::Unsize => LangItem::Unsize,
SolverTraitLangItem::Future => lang_items.Future,
SolverTraitLangItem::Iterator => lang_items.Iterator,
SolverTraitLangItem::PointeeTrait => lang_items.PointeeTrait,
SolverTraitLangItem::Sized => lang_items.Sized,
SolverTraitLangItem::MetaSized => lang_items.MetaSized,
SolverTraitLangItem::PointeeSized => lang_items.PointeeSized,
SolverTraitLangItem::TransmuteTrait => lang_items.TransmuteTrait,
SolverTraitLangItem::Tuple => lang_items.Tuple,
SolverTraitLangItem::Unpin => lang_items.Unpin,
SolverTraitLangItem::Unsize => lang_items.Unsize,
SolverTraitLangItem::BikeshedGuaranteedNoDrop => {
unimplemented!()
}
};
lang_item
.resolve_trait(self.db(), self.krate.expect("Must have self.krate"))
.unwrap_or_else(|| panic!("Lang item {lang_item:?} required but not found."))
.into()
lang_item.expect("Lang item required but not found.").into()
}
fn require_adt_lang_item(self, lang_item: SolverAdtLangItem) -> AdtIdWrapper {
let lang_items = self.lang_items();
let lang_item = match lang_item {
SolverAdtLangItem::Option => LangItem::Option,
SolverAdtLangItem::Poll => LangItem::Poll,
SolverAdtLangItem::Option => lang_items.Option,
SolverAdtLangItem::Poll => lang_items.Poll,
};
lang_item
.resolve_adt(self.db(), self.krate.expect("Must have self.krate"))
.unwrap_or_else(|| panic!("Lang item {lang_item:?} required but not found."))
.into()
AdtIdWrapper(lang_item.expect("Lang item required but not found.").into())
}
fn is_lang_item(self, def_id: Self::DefId, lang_item: SolverLangItem) -> bool {
@ -1514,53 +1543,101 @@ impl<'db> Interner for DbInterner<'db> {
}
fn is_trait_lang_item(self, def_id: Self::TraitId, lang_item: SolverTraitLangItem) -> bool {
self.as_trait_lang_item(def_id)
.map_or(false, |l| std::mem::discriminant(&l) == std::mem::discriminant(&lang_item))
}
fn is_adt_lang_item(self, def_id: Self::AdtId, lang_item: SolverAdtLangItem) -> bool {
// FIXME: derive PartialEq on SolverTraitLangItem
self.as_adt_lang_item(def_id)
.map_or(false, |l| std::mem::discriminant(&l) == std::mem::discriminant(&lang_item))
}
fn as_lang_item(self, def_id: Self::DefId) -> Option<SolverLangItem> {
let def_id: AttrDefId = match def_id {
SolverDefId::TraitId(id) => id.into(),
SolverDefId::TypeAliasId(id) => id.into(),
SolverDefId::AdtId(id) => id.into(),
_ => panic!("Unexpected SolverDefId in as_lang_item"),
};
let lang_item = self.db().lang_attr(def_id)?;
as_lang_item!(
SolverLangItem, lang_item;
ignore = {
AsyncFnKindUpvars,
}
Metadata,
DynMetadata,
CoroutineReturn,
CoroutineYield,
FutureOutput,
CallRefFuture,
CallOnceFuture,
AsyncFnOnceOutput,
)
}
fn as_trait_lang_item(self, def_id: Self::TraitId) -> Option<SolverTraitLangItem> {
let def_id: AttrDefId = def_id.0.into();
let lang_item = self.db().lang_attr(def_id)?;
as_lang_item!(
SolverTraitLangItem, lang_item;
is_lang_item!(
SolverTraitLangItem, self, def_id.0, lang_item;
ignore = {
AsyncFnKindHelper,
AsyncIterator,
BikeshedGuaranteedNoDrop,
FusedIterator,
AsyncFnOnceOutput, // This is incorrectly marked as `SolverTraitLangItem`, and is not used by the solver.
}
Sized,
MetaSized,
PointeeSized,
Unsize,
Copy,
Clone,
DiscriminantKind,
PointeeTrait,
FnPtrTrait,
Drop,
Destruct,
TransmuteTrait,
Fn,
FnMut,
FnOnce,
Future,
Coroutine,
Unpin,
Tuple,
Iterator,
AsyncFn,
AsyncFnMut,
AsyncFnOnce,
)
}
fn is_adt_lang_item(self, def_id: Self::AdtId, lang_item: SolverAdtLangItem) -> bool {
// FIXME: derive PartialEq on SolverTraitLangItem
self.as_adt_lang_item(def_id)
.map_or(false, |l| std::mem::discriminant(&l) == std::mem::discriminant(&lang_item))
}
fn as_lang_item(self, def_id: Self::DefId) -> Option<SolverLangItem> {
match def_id {
SolverDefId::TypeAliasId(id) => {
as_lang_item!(
SolverLangItem, self, id;
ignore = {
AsyncFnKindUpvars,
DynMetadata,
}
Metadata,
CoroutineReturn,
CoroutineYield,
FutureOutput,
CallRefFuture,
CallOnceFuture,
AsyncFnOnceOutput,
)
}
SolverDefId::AdtId(AdtId::StructId(id)) => {
as_lang_item!(
SolverLangItem, self, id;
ignore = {
AsyncFnKindUpvars,
Metadata,
CoroutineReturn,
CoroutineYield,
FutureOutput,
CallRefFuture,
CallOnceFuture,
AsyncFnOnceOutput,
}
DynMetadata,
)
}
_ => panic!("Unexpected SolverDefId in as_lang_item"),
}
}
fn as_trait_lang_item(self, def_id: Self::TraitId) -> Option<SolverTraitLangItem> {
as_lang_item!(
SolverTraitLangItem, self, def_id.0;
ignore = {
AsyncFnKindHelper,
AsyncIterator,
BikeshedGuaranteedNoDrop,
FusedIterator,
AsyncFnOnceOutput, // This is incorrectly marked as `SolverTraitLangItem`, and is not used by the solver.
}
Sized,
@ -1586,15 +1663,15 @@ impl<'db> Interner for DbInterner<'db> {
AsyncFn,
AsyncFnMut,
AsyncFnOnce,
AsyncFnOnceOutput,
)
}
fn as_adt_lang_item(self, def_id: Self::AdtId) -> Option<SolverAdtLangItem> {
let def_id: AttrDefId = def_id.0.into();
let lang_item = self.db().lang_attr(def_id)?;
let AdtId::EnumId(def_id) = def_id.0 else {
panic!("Unexpected SolverDefId in as_adt_lang_item");
};
as_lang_item!(
SolverAdtLangItem, lang_item;
SolverAdtLangItem, self, def_id;
ignore = {}

View file

@ -5,7 +5,6 @@ use std::ops::ControlFlow;
use hir_def::{
AdtId, HasModule, TypeParamId,
hir::generics::{TypeOrConstParamData, TypeParamProvenance},
lang_item::LangItem,
};
use hir_def::{TraitId, type_ref::Rawness};
use rustc_abi::{Float, Integer, Size};
@ -620,7 +619,7 @@ impl<'db> Ty<'db> {
// FIXME: Should this be here?
pub fn impl_trait_bounds(self, db: &'db dyn HirDatabase) -> Option<Vec<Clause<'db>>> {
let interner = DbInterner::new_with(db, None, None);
let interner = DbInterner::new_no_crate(db);
match self.kind() {
TyKind::Alias(AliasTyKind::Opaque, opaque_ty) => Some(
@ -658,7 +657,7 @@ impl<'db> Ty<'db> {
TyKind::Coroutine(coroutine_id, _args) => {
let InternedCoroutine(owner, _) = coroutine_id.0.loc(db);
let krate = owner.module(db).krate();
if let Some(future_trait) = LangItem::Future.resolve_trait(db, krate) {
if let Some(future_trait) = hir_def::lang_item::lang_items(db, krate).Future {
// This is only used by type walking.
// Parameters will be walked outside, and projection predicate is not used.
// So just provide the Future trait.

View file

@ -9,7 +9,7 @@ use rustc_type_ir::inherent::Ty as _;
use syntax::ast;
use crate::{
ImplTraitId,
ImplTraitId, InferenceResult,
db::{HirDatabase, InternedOpaqueTyId},
lower::{ImplTraitIdx, ImplTraits},
next_solver::{
@ -94,7 +94,7 @@ pub(crate) fn rpit_hidden_types<'db>(
db: &'db dyn HirDatabase,
function: FunctionId,
) -> ArenaMap<ImplTraitIdx<'db>, EarlyBinder<'db, Ty<'db>>> {
let infer = db.infer(function.into());
let infer = InferenceResult::for_body(db, function.into());
let mut result = ArenaMap::new();
for (opaque, hidden_type) in infer.return_position_impl_trait_types(db) {
result.insert(opaque, EarlyBinder::bind(hidden_type));
@ -118,7 +118,7 @@ pub(crate) fn tait_hidden_types<'db>(
let loc = type_alias.loc(db);
let module = loc.module(db);
let interner = DbInterner::new_with(db, Some(module.krate()), module.containing_block());
let interner = DbInterner::new_with(db, module.krate());
let infcx = interner.infer_ctxt().build(TypingMode::non_body_analysis());
let mut ocx = ObligationCtxt::new(&infcx);
let cause = ObligationCause::dummy();
@ -128,7 +128,7 @@ pub(crate) fn tait_hidden_types<'db>(
let mut result = ArenaMap::with_capacity(taits_count);
for defining_body in defining_bodies {
let infer = db.infer(defining_body);
let infer = InferenceResult::for_body(db, defining_body);
for (&opaque, &hidden_type) in &infer.type_of_opaque {
let ImplTraitId::TypeAliasImplTrait(opaque_owner, opaque_idx) = opaque.loc(db) else {
continue;

View file

@ -46,7 +46,7 @@ fn specializes_query(
parent_impl_def_id: ImplId,
) -> bool {
let trait_env = db.trait_environment(specializing_impl_def_id.into());
let interner = DbInterner::new_with(db, Some(trait_env.krate), trait_env.block);
let interner = DbInterner::new_with(db, trait_env.krate);
let specializing_impl_signature = db.impl_signature(specializing_impl_def_id);
let parent_impl_signature = db.impl_signature(parent_impl_def_id);

View file

@ -1,31 +1,35 @@
//! Stuff for handling `#[target_feature]` (needed for unsafe check).
use std::borrow::Cow;
use std::sync::LazyLock;
use hir_def::attr::Attrs;
use hir_def::tt;
use intern::{Symbol, sym};
use hir_def::FunctionId;
use hir_def::attrs::AttrFlags;
use intern::Symbol;
use rustc_hash::{FxHashMap, FxHashSet};
use crate::db::HirDatabase;
#[derive(Debug, Default, Clone)]
pub struct TargetFeatures {
pub(crate) enabled: FxHashSet<Symbol>,
pub struct TargetFeatures<'db> {
pub(crate) enabled: Cow<'db, FxHashSet<Symbol>>,
}
impl TargetFeatures {
pub fn from_attrs(attrs: &Attrs) -> Self {
let mut result = TargetFeatures::from_attrs_no_implications(attrs);
impl<'db> TargetFeatures<'db> {
pub fn from_fn(db: &'db dyn HirDatabase, owner: FunctionId) -> Self {
let mut result = TargetFeatures::from_fn_no_implications(db, owner);
result.expand_implications();
result
}
fn expand_implications(&mut self) {
let all_implications = LazyLock::force(&TARGET_FEATURE_IMPLICATIONS);
let mut queue = self.enabled.iter().cloned().collect::<Vec<_>>();
let enabled = self.enabled.to_mut();
let mut queue = enabled.iter().cloned().collect::<Vec<_>>();
while let Some(feature) = queue.pop() {
if let Some(implications) = all_implications.get(&feature) {
for implication in implications {
if self.enabled.insert(implication.clone()) {
if enabled.insert(implication.clone()) {
queue.push(implication.clone());
}
}
@ -34,25 +38,9 @@ impl TargetFeatures {
}
/// Retrieves the target features from the attributes, and does not expand the target features implied by them.
pub(crate) fn from_attrs_no_implications(attrs: &Attrs) -> Self {
let enabled = attrs
.by_key(sym::target_feature)
.tt_values()
.filter_map(|tt| match tt.token_trees().flat_tokens() {
[
tt::TokenTree::Leaf(tt::Leaf::Ident(enable_ident)),
tt::TokenTree::Leaf(tt::Leaf::Punct(tt::Punct { char: '=', .. })),
tt::TokenTree::Leaf(tt::Leaf::Literal(tt::Literal {
kind: tt::LitKind::Str,
symbol: features,
..
})),
] if enable_ident.sym == sym::enable => Some(features),
_ => None,
})
.flat_map(|features| features.as_str().split(',').map(Symbol::intern))
.collect();
Self { enabled }
pub(crate) fn from_fn_no_implications(db: &'db dyn HirDatabase, owner: FunctionId) -> Self {
let enabled = AttrFlags::target_features(db, owner);
Self { enabled: Cow::Borrowed(enabled) }
}
}

View file

@ -38,7 +38,6 @@ use triomphe::Arc;
use crate::{
InferenceResult,
db::HirDatabase,
display::{DisplayTarget, HirDisplay},
infer::{Adjustment, TypeMismatch},
next_solver::Ty,
@ -148,7 +147,7 @@ fn check_impl(
for (def, krate) in defs {
let display_target = DisplayTarget::from_crate(&db, krate);
let (body, body_source_map) = db.body_with_source_map(def);
let inference_result = db.infer(def);
let inference_result = InferenceResult::for_body(&db, def);
for (pat, mut ty) in inference_result.type_of_pat.iter() {
if let Pat::Bind { id, .. } = body[pat] {
@ -319,7 +318,7 @@ fn infer_with_mismatches(content: &str, include_mismatches: bool) -> String {
crate::attach_db(&db, || {
let mut buf = String::new();
let mut infer_def = |inference_result: Arc<InferenceResult<'_>>,
let mut infer_def = |inference_result: &InferenceResult<'_>,
body: Arc<Body>,
body_source_map: Arc<BodySourceMap>,
krate: Crate| {
@ -443,7 +442,7 @@ fn infer_with_mismatches(content: &str, include_mismatches: bool) -> String {
});
for (def, krate) in defs {
let (body, source_map) = db.body_with_source_map(def);
let infer = db.infer(def);
let infer = InferenceResult::for_body(&db, def);
infer_def(infer, body, source_map, krate);
}
@ -595,13 +594,16 @@ fn salsa_bug() {
let module = db.module_for_file(pos.file_id.file_id(&db));
let crate_def_map = module.def_map(&db);
visit_module(&db, crate_def_map, module.local_id, &mut |def| {
db.infer(match def {
ModuleDefId::FunctionId(it) => it.into(),
ModuleDefId::EnumVariantId(it) => it.into(),
ModuleDefId::ConstId(it) => it.into(),
ModuleDefId::StaticId(it) => it.into(),
_ => return,
});
InferenceResult::for_body(
&db,
match def {
ModuleDefId::FunctionId(it) => it.into(),
ModuleDefId::EnumVariantId(it) => it.into(),
ModuleDefId::ConstId(it) => it.into(),
ModuleDefId::StaticId(it) => it.into(),
_ => return,
},
);
});
});
@ -636,13 +638,16 @@ fn salsa_bug() {
let module = db.module_for_file(pos.file_id.file_id(&db));
let crate_def_map = module.def_map(&db);
visit_module(&db, crate_def_map, module.local_id, &mut |def| {
db.infer(match def {
ModuleDefId::FunctionId(it) => it.into(),
ModuleDefId::EnumVariantId(it) => it.into(),
ModuleDefId::ConstId(it) => it.into(),
ModuleDefId::StaticId(it) => it.into(),
_ => return,
});
InferenceResult::for_body(
&db,
match def {
ModuleDefId::FunctionId(it) => it.into(),
ModuleDefId::EnumVariantId(it) => it.into(),
ModuleDefId::ConstId(it) => it.into(),
ModuleDefId::StaticId(it) => it.into(),
_ => return,
},
);
});
})
}

Some files were not shown because too many files have changed in this diff Show more