Rewrite attribute handling

Basically, we switch to expanding cfg_attr in AST form, filter irrelevant attributes from the item tree, and move hir-def attributes (non-item-tree) to be flag-based.

The main motivation is memory usage, although this also simplifies the code, and fixes some bugs around handling of `cfg_attr`s.
This commit is contained in:
Chayim Refael Friedman 2025-07-27 20:17:10 +03:00
parent bc662f19bb
commit a10ce57b2b
139 changed files with 5054 additions and 3901 deletions

View file

@ -845,6 +845,7 @@ dependencies = [
name = "hir-expand"
version = "0.0.0"
dependencies = [
"arrayvec",
"base-db",
"cfg",
"cov-mark",
@ -863,6 +864,7 @@ dependencies = [
"stdx",
"syntax",
"syntax-bridge",
"thin-vec",
"tracing",
"triomphe",
"tt",
@ -2272,9 +2274,9 @@ checksum = "7a2d987857b319362043e95f5353c0535c1f58eec5336fdfcf626430af7def58"
[[package]]
name = "rowan"
version = "0.15.15"
version = "0.15.17"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "32a58fa8a7ccff2aec4f39cc45bf5f985cec7125ab271cf681c279fd00192b49"
checksum = "d4f1e4a001f863f41ea8d0e6a0c34b356d5b733db50dadab3efef640bafb779b"
dependencies = [
"countme",
"hashbrown 0.14.5",

View file

@ -52,7 +52,7 @@ debug = 2
# local crates
macros = { path = "./crates/macros", version = "0.0.0" }
base-db = { path = "./crates/base-db", version = "0.0.0" }
cfg = { path = "./crates/cfg", version = "0.0.0", features = ["tt"] }
cfg = { path = "./crates/cfg", version = "0.0.0", features = ["tt", "syntax"] }
hir = { path = "./crates/hir", version = "0.0.0" }
hir-def = { path = "./crates/hir-def", version = "0.0.0" }
hir-expand = { path = "./crates/hir-expand", version = "0.0.0" }
@ -132,7 +132,7 @@ process-wrap = { version = "8.2.1", features = ["std"] }
pulldown-cmark-to-cmark = "10.0.4"
pulldown-cmark = { version = "0.9.6", default-features = false }
rayon = "1.10.0"
rowan = "=0.15.15"
rowan = "=0.15.17"
# Ideally we'd not enable the macros feature but unfortunately the `tracked` attribute does not work
# on impls without it
salsa = { version = "0.24.0", default-features = false, features = [
@ -170,6 +170,7 @@ tracing-subscriber = { version = "0.3.20", default-features = false, features =
triomphe = { version = "0.1.14", default-features = false, features = ["std"] }
url = "2.5.4"
xshell = "0.2.7"
thin-vec = "0.2.14"
petgraph = { version = "0.8.2", default-features = false }
# We need to freeze the version of the crate, as the raw-api feature is considered unstable

View file

@ -0,0 +1,291 @@
//! Defines [`EditionedFileId`], an interned wrapper around [`span::EditionedFileId`]
//! (interned so queries can take it) that also remembers its crate.
use core::fmt;
use std::hash::{Hash, Hasher};
use span::Edition;
use vfs::FileId;
use crate::{Crate, RootQueryDb};
/// An interned (file id + edition, crate) pair. Only the `salsa::Id` is stored;
/// the `PhantomData` merely ties the type to its interned value and carries no data.
#[derive(Copy, Clone, PartialEq, Eq, Hash, PartialOrd, Ord)]
pub struct EditionedFileId(
    salsa::Id,
    std::marker::PhantomData<&'static salsa::plumbing::interned::Value<EditionedFileId>>,
);
// Hand-rolled expansion of what `#[salsa::interned]` would generate, so that we can
// customize the `Hash`/equality behavior (see `WithoutCrate` below).
const _: () = {
    use salsa::plumbing as zalsa_;
    use zalsa_::interned as zalsa_struct_;
    type Configuration_ = EditionedFileId;

    /// The interned payload: the file id (with edition) plus its originating crate.
    #[derive(Debug, Clone, PartialEq, Eq)]
    pub struct EditionedFileIdData {
        editioned_file_id: span::EditionedFileId,
        krate: Crate,
    }

    /// We like to include the origin crate in an `EditionedFileId` (for use in the item tree),
    /// but this poses us a problem.
    ///
    /// Spans contain `EditionedFileId`s, and we don't want to make them store the crate too
    /// because that will increase their size, which will increase memory usage significantly.
    /// Furthermore, things using spans do not generally need the crate: they are using the
    /// file id for queries like `ast_id_map` or `parse`, which do not care about the crate.
    ///
    /// To solve this, we hash **only the `span::EditionedFileId`**, but still compare
    /// the crate in the equality check. This preserves the invariant of `Hash` and `Eq` -
    /// equal values always have equal hashes, even though values with equal hashes may
    /// differ; the same file id being used by multiple crates is a rare thing. Then,
    /// when we only have a `span::EditionedFileId`, we use the `intern()` method to
    /// reuse existing file ids, and create new one only if needed. See [`from_span_guess_origin`].
    ///
    /// See this for more info: https://rust-lang.zulipchat.com/#narrow/channel/185405-t-compiler.2Frust-analyzer/topic/Letting.20EditionedFileId.20know.20its.20crate/near/530189401
    ///
    /// [`from_span_guess_origin`]: EditionedFileId::from_span_guess_origin
    #[derive(Hash, PartialEq, Eq)]
    struct WithoutCrate {
        editioned_file_id: span::EditionedFileId,
    }

    impl Hash for EditionedFileIdData {
        // Deliberately hashes only the file id; see `WithoutCrate` above.
        #[inline]
        fn hash<H: Hasher>(&self, state: &mut H) {
            let EditionedFileIdData { editioned_file_id, krate: _ } = *self;
            editioned_file_id.hash(state);
        }
    }

    // Lets `intern()` look an entry up by `WithoutCrate`, i.e. by file id alone.
    impl zalsa_struct_::HashEqLike<WithoutCrate> for EditionedFileIdData {
        #[inline]
        fn hash<H: Hasher>(&self, state: &mut H) {
            Hash::hash(self, state);
        }
        #[inline]
        fn eq(&self, data: &WithoutCrate) -> bool {
            let EditionedFileIdData { editioned_file_id, krate: _ } = *self;
            editioned_file_id == data.editioned_file_id
        }
    }

    impl zalsa_::HasJar for EditionedFileId {
        type Jar = zalsa_struct_::JarImpl<EditionedFileId>;
        const KIND: zalsa_::JarKind = zalsa_::JarKind::Struct;
    }

    zalsa_::register_jar! {
        zalsa_::ErasedJar::erase::<EditionedFileId>()
    }

    impl zalsa_struct_::Configuration for EditionedFileId {
        const LOCATION: salsa::plumbing::Location =
            salsa::plumbing::Location { file: file!(), line: line!() };
        const DEBUG_NAME: &'static str = "EditionedFileId";
        const REVISIONS: std::num::NonZeroUsize = std::num::NonZeroUsize::MAX;
        // Never persisted to disk, hence the unimplemented serde hooks below.
        const PERSIST: bool = false;

        type Fields<'a> = EditionedFileIdData;
        type Struct<'db> = EditionedFileId;

        fn serialize<S>(_: &Self::Fields<'_>, _: S) -> Result<S::Ok, S::Error>
        where
            S: zalsa_::serde::Serializer,
        {
            unimplemented!("attempted to serialize value that set `PERSIST` to false")
        }

        fn deserialize<'de, D>(_: D) -> Result<Self::Fields<'static>, D::Error>
        where
            D: zalsa_::serde::Deserializer<'de>,
        {
            unimplemented!("attempted to deserialize value that cannot set `PERSIST` to false");
        }
    }

    impl Configuration_ {
        /// The salsa ingredient backing this interned struct, cached after first lookup.
        pub fn ingredient(zalsa: &zalsa_::Zalsa) -> &zalsa_struct_::IngredientImpl<Self> {
            static CACHE: zalsa_::IngredientCache<zalsa_struct_::IngredientImpl<EditionedFileId>> =
                zalsa_::IngredientCache::new();
            // SAFETY: `lookup_jar_by_type` returns a valid ingredient index, and the only
            // ingredient created by our jar is the struct ingredient.
            unsafe {
                CACHE.get_or_create(zalsa, || {
                    zalsa.lookup_jar_by_type::<zalsa_struct_::JarImpl<EditionedFileId>>()
                })
            }
        }
    }

    impl zalsa_::AsId for EditionedFileId {
        fn as_id(&self) -> salsa::Id {
            self.0.as_id()
        }
    }

    impl zalsa_::FromId for EditionedFileId {
        fn from_id(id: salsa::Id) -> Self {
            Self(<salsa::Id>::from_id(id), std::marker::PhantomData)
        }
    }

    // SAFETY: only the `salsa::Id` (a plain integer) is stored; the `PhantomData`
    // of a reference is what suppresses the auto impls, and no value is ever held.
    unsafe impl Send for EditionedFileId {}
    unsafe impl Sync for EditionedFileId {}

    impl std::fmt::Debug for EditionedFileId {
        fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
            Self::default_debug_fmt(*self, f)
        }
    }

    impl zalsa_::SalsaStructInDb for EditionedFileId {
        type MemoIngredientMap = salsa::plumbing::MemoIngredientSingletonIndex;

        fn lookup_ingredient_index(aux: &zalsa_::Zalsa) -> salsa::plumbing::IngredientIndices {
            aux.lookup_jar_by_type::<zalsa_struct_::JarImpl<EditionedFileId>>().into()
        }

        fn entries(zalsa: &zalsa_::Zalsa) -> impl Iterator<Item = zalsa_::DatabaseKeyIndex> + '_ {
            let _ingredient_index =
                zalsa.lookup_jar_by_type::<zalsa_struct_::JarImpl<EditionedFileId>>();
            <EditionedFileId>::ingredient(zalsa).entries(zalsa).map(|entry| entry.key())
        }

        #[inline]
        fn cast(id: salsa::Id, type_id: std::any::TypeId) -> Option<Self> {
            if type_id == std::any::TypeId::of::<EditionedFileId>() {
                Some(<Self as salsa::plumbing::FromId>::from_id(id))
            } else {
                None
            }
        }

        #[inline]
        unsafe fn memo_table(
            zalsa: &zalsa_::Zalsa,
            id: zalsa_::Id,
            current_revision: zalsa_::Revision,
        ) -> zalsa_::MemoTableWithTypes<'_> {
            // SAFETY: Guaranteed by caller.
            unsafe {
                zalsa.table().memos::<zalsa_struct_::Value<EditionedFileId>>(id, current_revision)
            }
        }
    }

    unsafe impl zalsa_::Update for EditionedFileId {
        unsafe fn maybe_update(old_pointer: *mut Self, new_value: Self) -> bool {
            if unsafe { *old_pointer } != new_value {
                unsafe { *old_pointer = new_value };
                true
            } else {
                false
            }
        }
    }

    impl EditionedFileId {
        /// Interns the file id with a precisely known origin crate.
        pub fn from_span(
            db: &(impl salsa::Database + ?Sized),
            editioned_file_id: span::EditionedFileId,
            krate: Crate,
        ) -> Self {
            let (zalsa, zalsa_local) = db.zalsas();
            Configuration_::ingredient(zalsa).intern(
                zalsa,
                zalsa_local,
                EditionedFileIdData { editioned_file_id, krate },
                |_, data| data,
            )
        }

        /// Guesses the crate for the file.
        ///
        /// Only use this if you cannot precisely determine the origin. This can happen in one of two cases:
        ///
        /// 1. The file is not in the module tree.
        /// 2. You are latency sensitive and cannot afford calling the def map to precisely compute the origin
        ///    (e.g. on enter feature, folding, etc.).
        pub fn from_span_guess_origin(
            db: &dyn RootQueryDb,
            editioned_file_id: span::EditionedFileId,
        ) -> Self {
            let (zalsa, zalsa_local) = db.zalsas();
            // Look up by file id alone (see `WithoutCrate`); the closure runs only when
            // no existing entry matches, i.e. only then do we need to pick a crate.
            Configuration_::ingredient(zalsa).intern(
                zalsa,
                zalsa_local,
                WithoutCrate { editioned_file_id },
                |_, _| {
                    // FileId not in the database.
                    let krate = db
                        .relevant_crates(editioned_file_id.file_id())
                        .first()
                        .copied()
                        .unwrap_or_else(|| db.all_crates()[0]);
                    EditionedFileIdData { editioned_file_id, krate }
                },
            )
        }

        /// The underlying `span::EditionedFileId` (file id + edition, no crate).
        pub fn editioned_file_id(self, db: &dyn salsa::Database) -> span::EditionedFileId {
            let zalsa = db.zalsa();
            let fields = Configuration_::ingredient(zalsa).fields(zalsa, self);
            fields.editioned_file_id
        }

        /// The crate this file id was interned for.
        pub fn krate(self, db: &dyn salsa::Database) -> Crate {
            let zalsa = db.zalsa();
            let fields = Configuration_::ingredient(zalsa).fields(zalsa, self);
            fields.krate
        }

        /// Default debug formatting for this struct (may be useful if you define your own `Debug` impl)
        pub fn default_debug_fmt(this: Self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
            zalsa_::with_attached_database(|db| {
                let zalsa = db.zalsa();
                let fields = Configuration_::ingredient(zalsa).fields(zalsa, this);
                fmt::Debug::fmt(fields, f)
            })
            .unwrap_or_else(|| {
                f.debug_tuple("EditionedFileId").field(&zalsa_::AsId::as_id(&this)).finish()
            })
        }
    }
};
impl EditionedFileId {
    /// Interns the given file with an explicit edition and origin crate.
    #[inline]
    pub fn new(db: &dyn salsa::Database, file_id: FileId, edition: Edition, krate: Crate) -> Self {
        Self::from_span(db, span::EditionedFileId::new(file_id, edition), krate)
    }

    /// Attaches the current edition and guesses the crate for the file.
    ///
    /// Only use this if you cannot precisely determine the origin. This can happen in one of two cases:
    ///
    /// 1. The file is not in the module tree.
    /// 2. You are latency sensitive and cannot afford calling the def map to precisely compute the origin
    ///    (e.g. on enter feature, folding, etc.).
    #[inline]
    pub fn current_edition_guess_origin(db: &dyn RootQueryDb, file_id: FileId) -> Self {
        let span_id = span::EditionedFileId::current_edition(file_id);
        Self::from_span_guess_origin(db, span_id)
    }

    /// The underlying VFS file id, with the edition stripped.
    #[inline]
    pub fn file_id(self, db: &dyn salsa::Database) -> vfs::FileId {
        self.editioned_file_id(db).file_id()
    }

    /// Splits this id into its file id and edition.
    #[inline]
    pub fn unpack(self, db: &dyn salsa::Database) -> (vfs::FileId, span::Edition) {
        let unpacked = self.editioned_file_id(db);
        (unpacked.file_id(), unpacked.edition())
    }

    /// The edition this file is parsed with.
    #[inline]
    pub fn edition(self, db: &dyn salsa::Database) -> Edition {
        let unpacked = self.editioned_file_id(db);
        unpacked.edition()
    }
}

View file

@ -857,9 +857,10 @@ impl CrateGraphBuilder {
}
}
impl BuiltCrateData {
pub fn root_file_id(&self, db: &dyn salsa::Database) -> EditionedFileId {
EditionedFileId::new(db, self.root_file_id, self.edition)
impl Crate {
pub fn root_file_id(self, db: &dyn salsa::Database) -> EditionedFileId {
let data = self.data(db);
EditionedFileId::new(db, data.root_file_id, data.edition, self)
}
}

View file

@ -5,6 +5,7 @@ pub use salsa_macros;
// FIXME: Rename this crate, base db is non descriptive
mod change;
mod editioned_file_id;
mod input;
pub mod target;
@ -17,6 +18,7 @@ use std::{
pub use crate::{
change::FileChange,
editioned_file_id::EditionedFileId,
input::{
BuiltCrateData, BuiltDependency, Crate, CrateBuilder, CrateBuilderId, CrateDataBuilder,
CrateDisplayName, CrateGraphBuilder, CrateName, CrateOrigin, CratesIdMap, CratesMap,
@ -29,7 +31,6 @@ pub use query_group::{self};
use rustc_hash::FxHasher;
use salsa::{Durability, Setter};
pub use semver::{BuildMetadata, Prerelease, Version, VersionReq};
use span::Edition;
use syntax::{Parse, SyntaxError, ast};
use triomphe::Arc;
pub use vfs::{AnchoredPath, AnchoredPathBuf, FileId, VfsPath, file_set::FileSet};
@ -175,42 +176,6 @@ impl Files {
}
}
#[salsa_macros::interned(no_lifetime, debug, constructor=from_span, revisions = usize::MAX)]
#[derive(PartialOrd, Ord)]
pub struct EditionedFileId {
pub editioned_file_id: span::EditionedFileId,
}
impl EditionedFileId {
// Salsa already uses the name `new`...
#[inline]
pub fn new(db: &dyn salsa::Database, file_id: FileId, edition: Edition) -> Self {
EditionedFileId::from_span(db, span::EditionedFileId::new(file_id, edition))
}
#[inline]
pub fn current_edition(db: &dyn salsa::Database, file_id: FileId) -> Self {
EditionedFileId::new(db, file_id, Edition::CURRENT)
}
#[inline]
pub fn file_id(self, db: &dyn salsa::Database) -> vfs::FileId {
let id = self.editioned_file_id(db);
id.file_id()
}
#[inline]
pub fn unpack(self, db: &dyn salsa::Database) -> (vfs::FileId, span::Edition) {
let id = self.editioned_file_id(db);
(id.file_id(), id.edition())
}
#[inline]
pub fn edition(self, db: &dyn SourceDatabase) -> Edition {
self.editioned_file_id(db).edition()
}
}
#[salsa_macros::input(debug)]
pub struct FileText {
#[returns(ref)]

View file

@ -18,6 +18,7 @@ tracing.workspace = true
# locals deps
tt = { workspace = true, optional = true }
syntax = { workspace = true, optional = true }
intern.workspace = true
[dev-dependencies]

View file

@ -63,6 +63,8 @@ impl From<CfgAtom> for CfgExpr {
}
impl CfgExpr {
// FIXME: Parsing from `tt` is only used in a handful of places, reconsider
// if we should switch them to AST.
#[cfg(feature = "tt")]
pub fn parse<S: Copy>(tt: &tt::TopSubtree<S>) -> CfgExpr {
next_cfg_expr(&mut tt.iter()).unwrap_or(CfgExpr::Invalid)
@ -73,6 +75,13 @@ impl CfgExpr {
next_cfg_expr(tt).unwrap_or(CfgExpr::Invalid)
}
/// Parses a `cfg` condition directly from AST token-tree children
/// (cheaper than going through a `tt` conversion first).
/// Malformed input yields [`CfgExpr::Invalid`] rather than an error.
#[cfg(feature = "syntax")]
pub fn parse_from_ast(
    ast: &mut std::iter::Peekable<syntax::ast::TokenTreeChildren>,
) -> CfgExpr {
    next_cfg_expr_from_ast(ast).unwrap_or(CfgExpr::Invalid)
}
/// Fold the cfg by querying all basic `Atom` and `KeyValue` predicates.
pub fn fold(&self, query: &dyn Fn(&CfgAtom) -> bool) -> Option<bool> {
match self {
@ -89,6 +98,56 @@ impl CfgExpr {
}
}
/// Parses one `cfg` predicate (`flag`, `key = "value"`, or `all/any/not(...)`)
/// from the AST token-tree children, consuming the trailing comma if present.
///
/// Returns `None` at end of input and `Some(CfgExpr::Invalid)` for anything
/// unrecognized (the caller decides how to surface that).
#[cfg(feature = "syntax")]
fn next_cfg_expr_from_ast(
    it: &mut std::iter::Peekable<syntax::ast::TokenTreeChildren>,
) -> Option<CfgExpr> {
    use intern::sym;
    use syntax::{NodeOrToken, SyntaxKind, T, ast};
    // A predicate always starts with an identifier (keywords count as identifiers here).
    let name = match it.next() {
        None => return None,
        Some(NodeOrToken::Token(ident)) if ident.kind().is_any_identifier() => {
            Symbol::intern(ident.text())
        }
        Some(_) => return Some(CfgExpr::Invalid),
    };
    let ret = match it.peek() {
        Some(NodeOrToken::Token(eq)) if eq.kind() == T![=] => {
            it.next();
            // Only string literal values are accepted, e.g. `feature = "foo"`.
            if let Some(NodeOrToken::Token(literal)) = it.peek()
                && matches!(literal.kind(), SyntaxKind::STRING)
            {
                // `token_to_literal` unquotes/unescapes the literal text.
                let literal = tt::token_to_literal(literal.text(), ()).symbol;
                it.next();
                // `literal` is already an owned `Symbol`; no need to clone it.
                CfgAtom::KeyValue { key: name, value: literal }.into()
            } else {
                return Some(CfgExpr::Invalid);
            }
        }
        Some(NodeOrToken::Node(subtree)) => {
            // `all(...)`, `any(...)` or `not(...)`: recurse into the subtree.
            let mut subtree_iter = ast::TokenTreeChildren::new(subtree).peekable();
            it.next();
            let mut subs = std::iter::from_fn(|| next_cfg_expr_from_ast(&mut subtree_iter));
            match name {
                s if s == sym::all => CfgExpr::All(subs.collect()),
                s if s == sym::any => CfgExpr::Any(subs.collect()),
                s if s == sym::not => {
                    CfgExpr::Not(Box::new(subs.next().unwrap_or(CfgExpr::Invalid)))
                }
                _ => CfgExpr::Invalid,
            }
        }
        // Bare flag such as `test` or `unix`.
        _ => CfgAtom::Flag(name).into(),
    };
    // Skip everything up to and including the comma separating this predicate
    // from the next one (or run off the end of the list).
    while it.next().is_some_and(|it| it.as_token().is_none_or(|it| it.kind() != T![,])) {}
    Some(ret)
}
#[cfg(feature = "tt")]
fn next_cfg_expr<S: Copy>(it: &mut tt::iter::TtIter<'_, S>) -> Option<CfgExpr> {
use intern::sym;

View file

@ -1,7 +1,10 @@
use arbitrary::{Arbitrary, Unstructured};
use expect_test::{Expect, expect};
use intern::Symbol;
use syntax::{AstNode, Edition, ast};
use syntax::{
AstNode, Edition,
ast::{self, TokenTreeChildren},
};
use syntax_bridge::{
DocCommentDesugarMode,
dummy_test_span_utils::{DUMMY, DummyTestSpanMap},
@ -10,24 +13,33 @@ use syntax_bridge::{
use crate::{CfgAtom, CfgExpr, CfgOptions, DnfExpr};
/// Parses a cfg expression from the AST token tree via the AST-based parser,
/// mirroring what `CfgExpr::parse` does for the `tt`-based representation.
#[track_caller]
fn parse_ast_cfg(tt: &ast::TokenTree) -> CfgExpr {
    CfgExpr::parse_from_ast(&mut TokenTreeChildren::new(tt).peekable())
}
/// Asserts that the first token tree in `input` parses to `expected`, through
/// both the `tt`-based parser and the AST-based parser.
#[track_caller]
fn assert_parse_result(input: &str, expected: CfgExpr) {
    let source_file = ast::SourceFile::parse(input, Edition::CURRENT).ok().unwrap();
    let ast_tt = source_file.syntax().descendants().find_map(ast::TokenTree::cast).unwrap();
    let token_tree = syntax_node_to_token_tree(
        ast_tt.syntax(),
        DummyTestSpanMap,
        DUMMY,
        DocCommentDesugarMode::ProcMacro,
    );
    assert_eq!(CfgExpr::parse(&token_tree), expected);
    assert_eq!(parse_ast_cfg(&ast_tt), expected);
}
#[track_caller]
fn check_dnf(input: &str, expect: Expect) {
let source_file = ast::SourceFile::parse(input, Edition::CURRENT).ok().unwrap();
let tt = source_file.syntax().descendants().find_map(ast::TokenTree::cast).unwrap();
let tt_ast = source_file.syntax().descendants().find_map(ast::TokenTree::cast).unwrap();
let tt = syntax_node_to_token_tree(
tt.syntax(),
tt_ast.syntax(),
DummyTestSpanMap,
DUMMY,
DocCommentDesugarMode::ProcMacro,
@ -35,13 +47,17 @@ fn check_dnf(input: &str, expect: Expect) {
let cfg = CfgExpr::parse(&tt);
let actual = format!("#![cfg({})]", DnfExpr::new(&cfg));
expect.assert_eq(&actual);
let cfg = parse_ast_cfg(&tt_ast);
let actual = format!("#![cfg({})]", DnfExpr::new(&cfg));
expect.assert_eq(&actual);
}
#[track_caller]
fn check_why_inactive(input: &str, opts: &CfgOptions, expect: Expect) {
let source_file = ast::SourceFile::parse(input, Edition::CURRENT).ok().unwrap();
let tt = source_file.syntax().descendants().find_map(ast::TokenTree::cast).unwrap();
let tt_ast = source_file.syntax().descendants().find_map(ast::TokenTree::cast).unwrap();
let tt = syntax_node_to_token_tree(
tt.syntax(),
tt_ast.syntax(),
DummyTestSpanMap,
DUMMY,
DocCommentDesugarMode::ProcMacro,
@ -50,14 +66,18 @@ fn check_why_inactive(input: &str, opts: &CfgOptions, expect: Expect) {
let dnf = DnfExpr::new(&cfg);
let why_inactive = dnf.why_inactive(opts).unwrap().to_string();
expect.assert_eq(&why_inactive);
let cfg = parse_ast_cfg(&tt_ast);
let dnf = DnfExpr::new(&cfg);
let why_inactive = dnf.why_inactive(opts).unwrap().to_string();
expect.assert_eq(&why_inactive);
}
#[track_caller]
fn check_enable_hints(input: &str, opts: &CfgOptions, expected_hints: &[&str]) {
let source_file = ast::SourceFile::parse(input, Edition::CURRENT).ok().unwrap();
let tt = source_file.syntax().descendants().find_map(ast::TokenTree::cast).unwrap();
let tt_ast = source_file.syntax().descendants().find_map(ast::TokenTree::cast).unwrap();
let tt = syntax_node_to_token_tree(
tt.syntax(),
tt_ast.syntax(),
DummyTestSpanMap,
DUMMY,
DocCommentDesugarMode::ProcMacro,
@ -66,6 +86,10 @@ fn check_enable_hints(input: &str, opts: &CfgOptions, expected_hints: &[&str]) {
let dnf = DnfExpr::new(&cfg);
let hints = dnf.compute_enable_hints(opts).map(|diff| diff.to_string()).collect::<Vec<_>>();
assert_eq!(hints, expected_hints);
let cfg = parse_ast_cfg(&tt_ast);
let dnf = DnfExpr::new(&cfg);
let hints = dnf.compute_enable_hints(opts).map(|diff| diff.to_string()).collect::<Vec<_>>();
assert_eq!(hints, expected_hints);
}
#[test]

View file

@ -44,7 +44,8 @@ mbe.workspace = true
cfg.workspace = true
tt.workspace = true
span.workspace = true
thin-vec = "0.2.14"
thin-vec.workspace = true
syntax-bridge.workspace = true
[dev-dependencies]
expect-test.workspace = true
@ -52,7 +53,6 @@ expect-test.workspace = true
# local deps
test-utils.workspace = true
test-fixture.workspace = true
syntax-bridge.workspace = true
[features]
in-rust-tree = ["hir-expand/in-rust-tree"]

View file

@ -1,900 +0,0 @@
//! A higher level attributes based on TokenTree, with also some shortcuts.
use std::{borrow::Cow, convert::identity, hash::Hash, ops};
use base_db::Crate;
use cfg::{CfgExpr, CfgOptions};
use either::Either;
use hir_expand::{
HirFileId, InFile,
attrs::{Attr, AttrId, RawAttrs, collect_attrs},
span_map::SpanMapRef,
};
use intern::{Symbol, sym};
use la_arena::{ArenaMap, Idx, RawIdx};
use mbe::DelimiterKind;
use rustc_abi::ReprOptions;
use span::AstIdNode;
use syntax::{
AstPtr,
ast::{self, HasAttrs},
};
use triomphe::Arc;
use tt::iter::{TtElement, TtIter};
use crate::{
AdtId, AstIdLoc, AttrDefId, GenericParamId, HasModule, LocalFieldId, Lookup, MacroId,
VariantId,
db::DefDatabase,
item_tree::block_item_tree_query,
nameres::{ModuleOrigin, ModuleSource},
src::{HasChildSource, HasSource},
};
/// Desugared attributes of an item post `cfg_attr` expansion.
#[derive(Default, Debug, Clone, PartialEq, Eq)]
pub struct Attrs(RawAttrs);

/// [`Attrs`] paired with the item definition they were collected from.
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct AttrsWithOwner {
    attrs: Attrs,
    owner: AttrDefId,
}

impl Attrs {
    /// Collects `owner`'s attributes, expanding `cfg_attr`s and filtering out
    /// cfg-disabled attributes according to `cfg_options`.
    pub fn new(
        db: &dyn DefDatabase,
        owner: &dyn ast::HasAttrs,
        span_map: SpanMapRef<'_>,
        cfg_options: &CfgOptions,
    ) -> Self {
        Attrs(RawAttrs::new_expanded(db, owner, span_map, cfg_options))
    }

    /// Linear search for the attribute with the given id.
    pub fn get(&self, id: AttrId) -> Option<&Attr> {
        (**self).iter().find(|attr| attr.id == id)
    }

    /// Expands `cfg_attr`s in `raw_attrs` using `krate`'s cfg options.
    pub(crate) fn expand_cfg_attr(
        db: &dyn DefDatabase,
        krate: Crate,
        raw_attrs: RawAttrs,
    ) -> Attrs {
        Attrs(raw_attrs.expand_cfg_attr(db, krate))
    }

    /// Returns `Err` with the first failing `cfg` predicate on `owner`, or `Ok(())`
    /// when all cfgs pass (unknown cfgs count as enabled: `check` returning `None`
    /// is treated as success).
    pub(crate) fn is_cfg_enabled_for(
        db: &dyn DefDatabase,
        owner: &dyn ast::HasAttrs,
        span_map: SpanMapRef<'_>,
        cfg_options: &CfgOptions,
    ) -> Result<(), CfgExpr> {
        RawAttrs::attrs_iter_expanded::<false>(db, owner, span_map, cfg_options)
            .filter_map(|attr| attr.cfg())
            .find_map(|cfg| match cfg_options.check(&cfg).is_none_or(identity) {
                true => None,
                false => Some(cfg),
            })
            .map_or(Ok(()), Err)
    }
}

impl ops::Deref for Attrs {
    type Target = [Attr];

    fn deref(&self) -> &[Attr] {
        &self.0
    }
}

impl ops::Deref for AttrsWithOwner {
    type Target = Attrs;

    fn deref(&self) -> &Attrs {
        &self.attrs
    }
}
impl Attrs {
    pub const EMPTY: Self = Self(RawAttrs::EMPTY);

    /// Query implementation: the expanded, cfg-filtered attributes of every field of `v`.
    ///
    /// Fields that are cfg'd out are skipped entirely and do not occupy an index, so
    /// the arena indices here correspond to the surviving fields only.
    pub(crate) fn fields_attrs_query(
        db: &dyn DefDatabase,
        v: VariantId,
    ) -> Arc<ArenaMap<LocalFieldId, Attrs>> {
        let _p = tracing::info_span!("fields_attrs_query").entered();
        let mut res = ArenaMap::default();
        // Resolve the field-list AST, the file it lives in, and the owning crate
        // (whose cfg options decide which fields survive).
        let (fields, file_id, krate) = match v {
            VariantId::EnumVariantId(it) => {
                let loc = it.lookup(db);
                let krate = loc.parent.lookup(db).container.krate;
                let source = loc.source(db);
                (source.value.field_list(), source.file_id, krate)
            }
            VariantId::StructId(it) => {
                let loc = it.lookup(db);
                let krate = loc.container.krate;
                let source = loc.source(db);
                (source.value.field_list(), source.file_id, krate)
            }
            VariantId::UnionId(it) => {
                let loc = it.lookup(db);
                let krate = loc.container.krate;
                let source = loc.source(db);
                (
                    // Unions only have record fields.
                    source.value.record_field_list().map(ast::FieldList::RecordFieldList),
                    source.file_id,
                    krate,
                )
            }
        };
        let Some(fields) = fields else {
            return Arc::new(res);
        };
        let cfg_options = krate.cfg_options(db);
        let span_map = db.span_map(file_id);
        // NOTE(review): the two arms below are identical except for the concrete
        // field type; they could share a generic helper.
        match fields {
            ast::FieldList::RecordFieldList(fields) => {
                let mut idx = 0;
                for field in fields.fields() {
                    let attrs =
                        Attrs(RawAttrs::new_expanded(db, &field, span_map.as_ref(), cfg_options));
                    if attrs.is_cfg_enabled(cfg_options).is_ok() {
                        res.insert(Idx::from_raw(RawIdx::from(idx)), attrs);
                        idx += 1;
                    }
                }
            }
            ast::FieldList::TupleFieldList(fields) => {
                let mut idx = 0;
                for field in fields.fields() {
                    let attrs =
                        Attrs(RawAttrs::new_expanded(db, &field, span_map.as_ref(), cfg_options));
                    if attrs.is_cfg_enabled(cfg_options).is_ok() {
                        res.insert(Idx::from_raw(RawIdx::from(idx)), attrs);
                        idx += 1;
                    }
                }
            }
        }
        res.shrink_to_fit();
        Arc::new(res)
    }
}
impl Attrs {
#[inline]
pub fn by_key(&self, key: Symbol) -> AttrQuery<'_> {
AttrQuery { attrs: self, key }
}
#[inline]
pub fn rust_analyzer_tool(&self) -> impl Iterator<Item = &Attr> {
self.iter()
.filter(|&attr| attr.path.segments().first().is_some_and(|s| *s == sym::rust_analyzer))
}
#[inline]
pub fn cfg(&self) -> Option<CfgExpr> {
let mut cfgs = self.by_key(sym::cfg).tt_values().map(CfgExpr::parse);
let first = cfgs.next()?;
match cfgs.next() {
Some(second) => {
let cfgs = [first, second].into_iter().chain(cfgs);
Some(CfgExpr::All(cfgs.collect()))
}
None => Some(first),
}
}
#[inline]
pub fn cfgs(&self) -> impl Iterator<Item = CfgExpr> + '_ {
self.by_key(sym::cfg).tt_values().map(CfgExpr::parse)
}
#[inline]
pub(crate) fn is_cfg_enabled(&self, cfg_options: &CfgOptions) -> Result<(), CfgExpr> {
self.cfgs().try_for_each(|cfg| {
if cfg_options.check(&cfg) != Some(false) { Ok(()) } else { Err(cfg) }
})
}
#[inline]
pub fn lang(&self) -> Option<&Symbol> {
self.by_key(sym::lang).string_value()
}
#[inline]
pub fn lang_item(&self) -> Option<&Symbol> {
self.by_key(sym::lang).string_value()
}
#[inline]
pub fn has_doc_hidden(&self) -> bool {
self.by_key(sym::doc).tt_values().any(|tt| {
tt.top_subtree().delimiter.kind == DelimiterKind::Parenthesis &&
matches!(tt.token_trees().flat_tokens(), [tt::TokenTree::Leaf(tt::Leaf::Ident(ident))] if ident.sym == sym::hidden)
})
}
#[inline]
pub fn has_doc_notable_trait(&self) -> bool {
self.by_key(sym::doc).tt_values().any(|tt| {
tt.top_subtree().delimiter.kind == DelimiterKind::Parenthesis &&
matches!(tt.token_trees().flat_tokens(), [tt::TokenTree::Leaf(tt::Leaf::Ident(ident))] if ident.sym == sym::notable_trait)
})
}
#[inline]
pub fn doc_exprs(&self) -> impl Iterator<Item = DocExpr> + '_ {
self.by_key(sym::doc).tt_values().map(DocExpr::parse)
}
#[inline]
pub fn doc_aliases(&self) -> impl Iterator<Item = Symbol> + '_ {
self.doc_exprs().flat_map(|doc_expr| doc_expr.aliases().to_vec())
}
#[inline]
pub fn export_name(&self) -> Option<&Symbol> {
self.by_key(sym::export_name).string_value()
}
#[inline]
pub fn is_proc_macro(&self) -> bool {
self.by_key(sym::proc_macro).exists()
}
#[inline]
pub fn is_proc_macro_attribute(&self) -> bool {
self.by_key(sym::proc_macro_attribute).exists()
}
#[inline]
pub fn is_proc_macro_derive(&self) -> bool {
self.by_key(sym::proc_macro_derive).exists()
}
#[inline]
pub fn is_test(&self) -> bool {
self.iter().any(|it| {
it.path()
.segments()
.iter()
.rev()
.zip([sym::core, sym::prelude, sym::v1, sym::test].iter().rev())
.all(|it| it.0 == it.1)
})
}
#[inline]
pub fn is_ignore(&self) -> bool {
self.by_key(sym::ignore).exists()
}
#[inline]
pub fn is_bench(&self) -> bool {
self.by_key(sym::bench).exists()
}
#[inline]
pub fn is_unstable(&self) -> bool {
self.by_key(sym::unstable).exists()
}
#[inline]
pub fn rustc_legacy_const_generics(&self) -> Option<Box<Box<[u32]>>> {
self.by_key(sym::rustc_legacy_const_generics)
.tt_values()
.next()
.map(parse_rustc_legacy_const_generics)
.filter(|it| !it.is_empty())
.map(Box::new)
}
#[inline]
pub fn repr(&self) -> Option<ReprOptions> {
self.by_key(sym::repr).tt_values().filter_map(parse_repr_tt).fold(None, |acc, repr| {
acc.map_or(Some(repr), |mut acc| {
merge_repr(&mut acc, repr);
Some(acc)
})
})
}
}
/// Parses the `#[rustc_legacy_const_generics(...)]` argument list: a
/// comma-separated sequence of integer literals. Parsing stops at the first
/// token that doesn't fit that shape, keeping whatever was collected so far.
fn parse_rustc_legacy_const_generics(tt: &crate::tt::TopSubtree) -> Box<[u32]> {
    let mut out = Vec::new();
    let mut elements = tt.iter();
    loop {
        let (element, separator) = (elements.next(), elements.next());
        // Expect an integer literal...
        let Some(TtElement::Leaf(tt::Leaf::Literal(lit))) = element else { break };
        let Ok(index) = lit.symbol.as_str().parse() else { break };
        out.push(index);
        // ...followed by a comma, or by the end of the list (which terminates
        // the loop on the next iteration).
        match separator {
            None | Some(TtElement::Leaf(tt::Leaf::Punct(tt::Punct { char: ',', .. }))) => {}
            Some(_) => break,
        }
    }
    out.into_boxed_slice()
}
fn merge_repr(this: &mut ReprOptions, other: ReprOptions) {
let ReprOptions { int, align, pack, flags, field_shuffle_seed: _ } = this;
flags.insert(other.flags);
*align = (*align).max(other.align);
*pack = match (*pack, other.pack) {
(Some(pack), None) | (None, Some(pack)) => Some(pack),
_ => (*pack).min(other.pack),
};
if other.int.is_some() {
*int = other.int;
}
}
/// Parses one `#[repr(...)]` token tree into [`ReprOptions`], merging every
/// recognized specifier: `C`, `transparent`, `simd`, `packed(N)`, `align(N)`,
/// and primitive integer types (`u8`, `isize`, ...).
///
/// Returns `None` if the attribute argument is not parenthesized. Unrecognized
/// identifiers contribute a default (empty) `ReprOptions`, i.e. are ignored.
fn parse_repr_tt(tt: &crate::tt::TopSubtree) -> Option<ReprOptions> {
    use crate::builtin_type::{BuiltinInt, BuiltinUint};
    use rustc_abi::{Align, Integer, IntegerType, ReprFlags, ReprOptions};
    match tt.top_subtree().delimiter {
        tt::Delimiter { kind: DelimiterKind::Parenthesis, .. } => {}
        _ => return None,
    }
    let mut acc = ReprOptions::default();
    let mut tts = tt.iter();
    while let Some(tt) = tts.next() {
        // Each specifier starts with an identifier; anything else (e.g. the
        // separating commas) is skipped.
        let TtElement::Leaf(tt::Leaf::Ident(ident)) = tt else {
            continue;
        };
        let repr = match &ident.sym {
            s if *s == sym::packed => {
                // `packed` or `packed(N)`; a missing or unparsable N becomes 0,
                // and invalid byte counts fall back to `Align::ONE` below.
                let pack = if let Some(TtElement::Subtree(_, mut tt_iter)) = tts.peek() {
                    tts.next();
                    if let Some(TtElement::Leaf(tt::Leaf::Literal(lit))) = tt_iter.next() {
                        lit.symbol.as_str().parse().unwrap_or_default()
                    } else {
                        0
                    }
                } else {
                    0
                };
                let pack = Some(Align::from_bytes(pack).unwrap_or(Align::ONE));
                ReprOptions { pack, ..Default::default() }
            }
            s if *s == sym::align => {
                // `align(N)`; invalid or missing N leaves the alignment unset.
                let mut align = None;
                if let Some(TtElement::Subtree(_, mut tt_iter)) = tts.peek() {
                    tts.next();
                    if let Some(TtElement::Leaf(tt::Leaf::Literal(lit))) = tt_iter.next()
                        && let Ok(a) = lit.symbol.as_str().parse()
                    {
                        align = Align::from_bytes(a).ok();
                    }
                }
                ReprOptions { align, ..Default::default() }
            }
            s if *s == sym::C => ReprOptions { flags: ReprFlags::IS_C, ..Default::default() },
            s if *s == sym::transparent => {
                ReprOptions { flags: ReprFlags::IS_TRANSPARENT, ..Default::default() }
            }
            s if *s == sym::simd => ReprOptions { flags: ReprFlags::IS_SIMD, ..Default::default() },
            repr => {
                // Try the identifier as a primitive integer type suffix
                // (`i8`..`i128`, `isize`, `u8`..`u128`, `usize`).
                let mut int = None;
                if let Some(builtin) = BuiltinInt::from_suffix_sym(repr)
                    .map(Either::Left)
                    .or_else(|| BuiltinUint::from_suffix_sym(repr).map(Either::Right))
                {
                    int = Some(match builtin {
                        Either::Left(bi) => match bi {
                            BuiltinInt::Isize => IntegerType::Pointer(true),
                            BuiltinInt::I8 => IntegerType::Fixed(Integer::I8, true),
                            BuiltinInt::I16 => IntegerType::Fixed(Integer::I16, true),
                            BuiltinInt::I32 => IntegerType::Fixed(Integer::I32, true),
                            BuiltinInt::I64 => IntegerType::Fixed(Integer::I64, true),
                            BuiltinInt::I128 => IntegerType::Fixed(Integer::I128, true),
                        },
                        Either::Right(bu) => match bu {
                            BuiltinUint::Usize => IntegerType::Pointer(false),
                            BuiltinUint::U8 => IntegerType::Fixed(Integer::I8, false),
                            BuiltinUint::U16 => IntegerType::Fixed(Integer::I16, false),
                            BuiltinUint::U32 => IntegerType::Fixed(Integer::I32, false),
                            BuiltinUint::U64 => IntegerType::Fixed(Integer::I64, false),
                            BuiltinUint::U128 => IntegerType::Fixed(Integer::I128, false),
                        },
                    });
                }
                ReprOptions { int, ..Default::default() }
            }
        };
        merge_repr(&mut acc, repr);
    }
    Some(acc)
}
/// A single recognized `#[doc(...)]` argument shape.
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub enum DocAtom {
    /// eg. `#[doc(hidden)]`
    Flag(Symbol),
    /// eg. `#[doc(alias = "it")]`
    ///
    /// Note that a key can have multiple values that are all considered "active" at the same time.
    /// For example, `#[doc(alias = "x")]` and `#[doc(alias = "y")]`.
    KeyValue { key: Symbol, value: Symbol },
}

/// A parsed `#[doc(...)]` argument.
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub enum DocExpr {
    /// Anything that doesn't fit the recognized shapes below.
    Invalid,
    /// eg. `#[doc(hidden)]`, `#[doc(alias = "x")]`
    Atom(DocAtom),
    /// eg. `#[doc(alias("x", "y"))]`
    Alias(Vec<Symbol>),
}

impl From<DocAtom> for DocExpr {
    fn from(atom: DocAtom) -> Self {
        DocExpr::Atom(atom)
    }
}
impl DocExpr {
    /// Parses one `doc(...)` argument list; malformed input becomes `Invalid`.
    fn parse<S: Copy>(tt: &tt::TopSubtree<S>) -> DocExpr {
        match next_doc_expr(tt.iter()) {
            Some(expr) => expr,
            None => DocExpr::Invalid,
        }
    }

    /// All `alias` values carried by this expression (empty for anything else).
    pub fn aliases(&self) -> &[Symbol] {
        if let DocExpr::Alias(aliases) = self {
            return aliases;
        }
        if let DocExpr::Atom(DocAtom::KeyValue { key, value }) = self {
            if *key == sym::alias {
                return std::slice::from_ref(value);
            }
        }
        &[]
    }
}
/// Parses a single `doc(...)` argument from `it`: a bare flag (`hidden`),
/// a key-value pair (`alias = "x"`), or the list form (`alias("x", "y")`).
///
/// Returns `None` when the iterator is exhausted and `Some(DocExpr::Invalid)`
/// for anything unrecognized.
fn next_doc_expr<S: Copy>(mut it: TtIter<'_, S>) -> Option<DocExpr> {
    // An argument always starts with an identifier.
    let name = match it.next() {
        None => return None,
        Some(TtElement::Leaf(tt::Leaf::Ident(ident))) => ident.sym.clone(),
        Some(_) => return Some(DocExpr::Invalid),
    };
    // Peek at the next token to decide which shape follows the identifier.
    let ret = match it.peek() {
        Some(TtElement::Leaf(tt::Leaf::Punct(punct))) if punct.char == '=' => {
            it.next();
            match it.next() {
                // Only string literal values are recognized.
                Some(TtElement::Leaf(tt::Leaf::Literal(tt::Literal {
                    symbol: text,
                    kind: tt::LitKind::Str,
                    ..
                }))) => DocAtom::KeyValue { key: name, value: text.clone() }.into(),
                _ => return Some(DocExpr::Invalid),
            }
        }
        Some(TtElement::Subtree(_, subtree_iter)) => {
            it.next();
            // Only `alias(...)` has a meaningful list form.
            let subs = parse_comma_sep(subtree_iter);
            match &name {
                s if *s == sym::alias => DocExpr::Alias(subs),
                _ => DocExpr::Invalid,
            }
        }
        // Identifier with nothing interesting after it: a bare flag.
        _ => DocAtom::Flag(name).into(),
    };
    Some(ret)
}
/// Collects every string literal from a comma-separated token stream, silently
/// ignoring all other tokens (including the comma punctuation itself).
fn parse_comma_sep<S>(iter: TtIter<'_, S>) -> Vec<Symbol> {
    let mut symbols = Vec::new();
    for element in iter {
        if let TtElement::Leaf(tt::Leaf::Literal(tt::Literal {
            kind: tt::LitKind::Str,
            symbol,
            ..
        })) = element
        {
            symbols.push(symbol.clone());
        }
    }
    symbols
}
impl AttrsWithOwner {
    /// Pairs the computed attributes of `owner` with the owner itself, so the
    /// result can later be mapped back to syntax via [`Self::source_map`].
    pub fn new(db: &dyn DefDatabase, owner: AttrDefId) -> Self {
        Self { attrs: db.attrs(owner), owner }
    }

    /// Salsa query: computes the expanded (`cfg_attr`-resolved) attributes of
    /// any attribute owner by dispatching on what kind of definition it is.
    pub(crate) fn attrs_query(db: &dyn DefDatabase, def: AttrDefId) -> Attrs {
        let _p = tracing::info_span!("attrs_query").entered();
        // FIXME: this should use `Trace` to avoid duplication in `source_map` below
        match def {
            AttrDefId::ModuleId(module) => {
                let def_map = module.def_map(db);
                let mod_data = &def_map[module.local_id];
                // Where a module's raw attributes live depends on how it was defined.
                let raw_attrs = match mod_data.origin {
                    // `mod foo;`: outer attributes on the declaration are merged
                    // with the inner (`#![...]`) attributes of the out-of-line file.
                    ModuleOrigin::File { definition, declaration_tree_id, declaration, .. } => {
                        let decl_attrs = declaration_tree_id
                            .item_tree(db)
                            .raw_attrs(declaration.upcast())
                            .clone();
                        let tree = db.file_item_tree(definition.into());
                        let def_attrs = tree.top_level_raw_attrs().clone();
                        decl_attrs.merge(def_attrs)
                    }
                    // Crate root: only the file's top-level attributes exist.
                    ModuleOrigin::CrateRoot { definition } => {
                        let tree = db.file_item_tree(definition.into());
                        tree.top_level_raw_attrs().clone()
                    }
                    // `mod foo { ... }`: attributes sit on the inline item itself.
                    ModuleOrigin::Inline { definition_tree_id, definition } => {
                        definition_tree_id.item_tree(db).raw_attrs(definition.upcast()).clone()
                    }
                    // Block expressions have their own item tree.
                    ModuleOrigin::BlockExpr { id, .. } => {
                        let tree = block_item_tree_query(db, id);
                        tree.top_level_raw_attrs().clone()
                    }
                };
                Attrs::expand_cfg_attr(db, module.krate, raw_attrs)
            }
            // Field attributes are computed per variant; pick this field's entry.
            AttrDefId::FieldId(it) => db.fields_attrs(it.parent)[it.local_id].clone(),
            AttrDefId::EnumVariantId(it) => attrs_from_ast_id_loc(db, it),
            AttrDefId::AdtId(it) => match it {
                AdtId::StructId(it) => attrs_from_ast_id_loc(db, it),
                AdtId::EnumId(it) => attrs_from_ast_id_loc(db, it),
                AdtId::UnionId(it) => attrs_from_ast_id_loc(db, it),
            },
            AttrDefId::TraitId(it) => attrs_from_ast_id_loc(db, it),
            AttrDefId::MacroId(it) => match it {
                MacroId::Macro2Id(it) => attrs_from_ast_id_loc(db, it),
                MacroId::MacroRulesId(it) => attrs_from_ast_id_loc(db, it),
                MacroId::ProcMacroId(it) => attrs_from_ast_id_loc(db, it),
            },
            AttrDefId::ImplId(it) => attrs_from_ast_id_loc(db, it),
            AttrDefId::ConstId(it) => attrs_from_ast_id_loc(db, it),
            AttrDefId::StaticId(it) => attrs_from_ast_id_loc(db, it),
            AttrDefId::FunctionId(it) => attrs_from_ast_id_loc(db, it),
            AttrDefId::TypeAliasId(it) => attrs_from_ast_id_loc(db, it),
            // Generic parameters carry no AST id of their own, so their
            // attributes are read straight from the parent's child source.
            AttrDefId::GenericParamId(it) => match it {
                GenericParamId::ConstParamId(it) => {
                    let src = it.parent().child_source(db);
                    // FIXME: We should be never getting `None` here.
                    Attrs(match src.value.get(it.local_id()) {
                        Some(val) => RawAttrs::new_expanded(
                            db,
                            val,
                            db.span_map(src.file_id).as_ref(),
                            def.krate(db).cfg_options(db),
                        ),
                        None => RawAttrs::EMPTY,
                    })
                }
                GenericParamId::TypeParamId(it) => {
                    let src = it.parent().child_source(db);
                    // FIXME: We should be never getting `None` here.
                    Attrs(match src.value.get(it.local_id()) {
                        Some(val) => RawAttrs::new_expanded(
                            db,
                            val,
                            db.span_map(src.file_id).as_ref(),
                            def.krate(db).cfg_options(db),
                        ),
                        None => RawAttrs::EMPTY,
                    })
                }
                GenericParamId::LifetimeParamId(it) => {
                    let src = it.parent.child_source(db);
                    // FIXME: We should be never getting `None` here.
                    Attrs(match src.value.get(it.local_id) {
                        Some(val) => RawAttrs::new_expanded(
                            db,
                            val,
                            db.span_map(src.file_id).as_ref(),
                            def.krate(db).cfg_options(db),
                        ),
                        None => RawAttrs::EMPTY,
                    })
                }
            },
            AttrDefId::ExternBlockId(it) => attrs_from_ast_id_loc(db, it),
            AttrDefId::ExternCrateId(it) => attrs_from_ast_id_loc(db, it),
            AttrDefId::UseId(it) => attrs_from_ast_id_loc(db, it),
        }
    }

    /// Builds the map from the lowered attributes back to their syntax nodes.
    ///
    /// This mirrors the per-owner dispatch in [`Self::attrs_query`] (see the
    /// FIXME there about the duplication).
    pub fn source_map(&self, db: &dyn DefDatabase) -> AttrSourceMap {
        let owner = match self.owner {
            AttrDefId::ModuleId(module) => {
                // Modules can have 2 attribute owners (the `mod x;` item, and the module file itself).
                let def_map = module.def_map(db);
                let mod_data = &def_map[module.local_id];
                match mod_data.declaration_source(db) {
                    Some(it) => {
                        let mut map = AttrSourceMap::new(InFile::new(it.file_id, &it.value));
                        if let InFile { file_id, value: ModuleSource::SourceFile(file) } =
                            mod_data.definition_source(db)
                        {
                            map.append_module_inline_attrs(AttrSourceMap::new(InFile::new(
                                file_id, &file,
                            )));
                        }
                        return map;
                    }
                    None => {
                        let InFile { file_id, value } = mod_data.definition_source(db);
                        let attrs_owner = match &value {
                            ModuleSource::SourceFile(file) => file as &dyn ast::HasAttrs,
                            ModuleSource::Module(module) => module as &dyn ast::HasAttrs,
                            ModuleSource::BlockExpr(block) => block as &dyn ast::HasAttrs,
                        };
                        return AttrSourceMap::new(InFile::new(file_id, attrs_owner));
                    }
                }
            }
            AttrDefId::FieldId(id) => {
                // Fields are resolved through the dedicated source-map query.
                let map = db.fields_attrs_source_map(id.parent);
                let file_id = id.parent.file_id(db);
                let root = db.parse_or_expand(file_id);
                let owner = ast::AnyHasAttrs::new(map[id.local_id].to_node(&root));
                InFile::new(file_id, owner)
            }
            AttrDefId::AdtId(adt) => match adt {
                AdtId::StructId(id) => any_has_attrs(db, id),
                AdtId::UnionId(id) => any_has_attrs(db, id),
                AdtId::EnumId(id) => any_has_attrs(db, id),
            },
            AttrDefId::FunctionId(id) => any_has_attrs(db, id),
            AttrDefId::EnumVariantId(id) => any_has_attrs(db, id),
            AttrDefId::StaticId(id) => any_has_attrs(db, id),
            AttrDefId::ConstId(id) => any_has_attrs(db, id),
            AttrDefId::TraitId(id) => any_has_attrs(db, id),
            AttrDefId::TypeAliasId(id) => any_has_attrs(db, id),
            AttrDefId::MacroId(id) => match id {
                MacroId::Macro2Id(id) => any_has_attrs(db, id),
                MacroId::MacroRulesId(id) => any_has_attrs(db, id),
                MacroId::ProcMacroId(id) => any_has_attrs(db, id),
            },
            AttrDefId::ImplId(id) => any_has_attrs(db, id),
            // Generic parameters: resolve through the parent's child source,
            // matching the lookup in `attrs_query`.
            AttrDefId::GenericParamId(id) => match id {
                GenericParamId::ConstParamId(id) => id
                    .parent()
                    .child_source(db)
                    .map(|source| ast::AnyHasAttrs::new(source[id.local_id()].clone())),
                GenericParamId::TypeParamId(id) => id
                    .parent()
                    .child_source(db)
                    .map(|source| ast::AnyHasAttrs::new(source[id.local_id()].clone())),
                GenericParamId::LifetimeParamId(id) => id
                    .parent
                    .child_source(db)
                    .map(|source| ast::AnyHasAttrs::new(source[id.local_id].clone())),
            },
            AttrDefId::ExternBlockId(id) => any_has_attrs(db, id),
            AttrDefId::ExternCrateId(id) => any_has_attrs(db, id),
            AttrDefId::UseId(id) => any_has_attrs(db, id),
        };

        AttrSourceMap::new(owner.as_ref().map(|node| node as &dyn HasAttrs))
    }
}
/// Maps lowered attribute ids back to the syntax they were lowered from —
/// either a real `#[...]` attribute or a doc comment.
#[derive(Debug)]
pub struct AttrSourceMap {
    /// All attribute owners in source order; `AttrId::ast_index` indexes into this.
    source: Vec<Either<ast::Attr, ast::Comment>>,
    file_id: HirFileId,
    /// If this map is for a module, this will be the [`HirFileId`] of the module's definition site,
    /// while `file_id` will be the one of the module declaration site.
    /// The usize is the index into `source` from which point on the entries reside in the def site
    /// file.
    mod_def_site_file_id: Option<(HirFileId, usize)>,
}
impl AttrSourceMap {
    /// Collects all attribute/doc-comment owners of `owner` in source order, so
    /// that an `AttrId`'s ast index can later be used to index into them.
    fn new(owner: InFile<&dyn ast::HasAttrs>) -> Self {
        Self {
            source: collect_attrs(owner.value).map(|(_, it)| it).collect(),
            file_id: owner.file_id,
            mod_def_site_file_id: None,
        }
    }

    /// Append a second source map to this one, this is required for modules, whose outline and inline
    /// attributes can reside in different files
    fn append_module_inline_attrs(&mut self, other: Self) {
        assert!(self.mod_def_site_file_id.is_none() && other.mod_def_site_file_id.is_none());
        let len = self.source.len();
        self.source.extend(other.source);
        if other.file_id != self.file_id {
            // Remember where the definition-site entries begin so `source_of_id`
            // can report the correct file for them.
            self.mod_def_site_file_id = Some((other.file_id, len));
        }
    }

    /// Maps the lowered `Attr` back to its original syntax node.
    ///
    /// `attr` must come from the `owner` used for AttrSourceMap
    ///
    /// Note that the returned syntax node might be a `#[cfg_attr]`, or a doc comment, instead of
    /// the attribute represented by `Attr`.
    pub fn source_of(&self, attr: &Attr) -> InFile<&Either<ast::Attr, ast::Comment>> {
        self.source_of_id(attr.id)
    }

    /// Like [`Self::source_of`], but starting from the raw [`AttrId`].
    ///
    /// Panics if `id` does not belong to the owner this map was built for.
    pub fn source_of_id(&self, id: AttrId) -> InFile<&Either<ast::Attr, ast::Comment>> {
        let ast_idx = id.ast_index();
        // Entries at or past the cut point come from the module definition file.
        let file_id = match self.mod_def_site_file_id {
            Some((file_id, def_site_cut)) if def_site_cut <= ast_idx => file_id,
            _ => self.file_id,
        };

        self.source
            .get(ast_idx)
            .map(|it| InFile::new(file_id, it))
            .unwrap_or_else(|| panic!("cannot find attr at index {id:?}"))
    }
}
/// A filtered view over [`Attrs`], yielding only the attributes whose path is
/// the single identifier `key` (e.g. every `#[doc(...)]` for `key == doc`).
#[derive(Debug, Clone)]
pub struct AttrQuery<'attr> {
    attrs: &'attr Attrs,
    key: Symbol,
}
impl<'attr> AttrQuery<'attr> {
    /// Token trees of every matching list-style attribute, i.e. the `(...)`
    /// part of each `#[key(...)]`.
    #[inline]
    pub fn tt_values(self) -> impl Iterator<Item = &'attr crate::tt::TopSubtree> {
        self.attrs().filter_map(|candidate| candidate.token_tree_value())
    }

    /// The first `#[key = "..."]` value, as the raw (still escaped) symbol.
    #[inline]
    pub fn string_value(self) -> Option<&'attr Symbol> {
        for candidate in self.attrs() {
            if let Some(value) = candidate.string_value() {
                return Some(value);
            }
        }
        None
    }

    /// The first `#[key = "..."]` value together with the span of its literal.
    #[inline]
    pub fn string_value_with_span(self) -> Option<(&'attr Symbol, span::Span)> {
        for candidate in self.attrs() {
            if let Some(found) = candidate.string_value_with_span() {
                return Some(found);
            }
        }
        None
    }

    /// The first `#[key = "..."]` value with escape sequences resolved.
    #[inline]
    pub fn string_value_unescape(self) -> Option<Cow<'attr, str>> {
        for candidate in self.attrs() {
            if let Some(found) = candidate.string_value_unescape() {
                return Some(found);
            }
        }
        None
    }

    /// Whether at least one matching attribute is present.
    #[inline]
    pub fn exists(self) -> bool {
        self.attrs().next().is_some()
    }

    /// All attributes whose path is exactly the queried identifier.
    #[inline]
    pub fn attrs(self) -> impl Iterator<Item = &'attr Attr> + Clone {
        let key = self.key;
        self.attrs
            .iter()
            .filter(move |candidate| candidate.path.as_ident().is_some_and(|ident| *ident == key))
    }

    /// Find string value for a specific key inside token tree
    ///
    /// ```ignore
    /// #[doc(html_root_url = "url")]
    ///       ^^^^^^^^^^^^^ key
    /// ```
    #[inline]
    pub fn find_string_value_in_tt(self, key: Symbol) -> Option<&'attr str> {
        self.tt_values().find_map(|tt| {
            // Fast-forward to the `key` ident, then step over it and the `=`.
            let mut elements = tt.iter().skip_while(|element| {
                !matches!(
                    element,
                    TtElement::Leaf(tt::Leaf::Ident(tt::Ident { sym, .. })) if *sym == key
                )
            });
            match elements.nth(2) {
                Some(TtElement::Leaf(tt::Leaf::Literal(tt::Literal {
                    symbol: text,
                    kind: tt::LitKind::Str | tt::LitKind::StrRaw(_),
                    ..
                }))) => Some(text.as_str()),
                _ => None,
            }
        })
    }
}
/// Resolves `id` to its source syntax node and erases the node type to
/// `ast::AnyHasAttrs`.
fn any_has_attrs<'db>(
    db: &(dyn DefDatabase + 'db),
    id: impl Lookup<Database = dyn DefDatabase, Data = impl HasSource<Value = impl ast::HasAttrs>>,
) -> InFile<ast::AnyHasAttrs> {
    let source = id.lookup(db).source(db);
    source.map(|node| ast::AnyHasAttrs::new(node))
}
/// Computes the expanded (`cfg_attr`-resolved, `cfg`-filtered) attributes of an
/// item identified by an AST id, straight from its source syntax.
fn attrs_from_ast_id_loc<'db, N: AstIdNode + HasAttrs>(
    db: &(dyn DefDatabase + 'db),
    lookup: impl Lookup<Database = dyn DefDatabase, Data = impl AstIdLoc<Ast = N> + HasModule>,
) -> Attrs {
    let loc = lookup.lookup(db);
    let cfg_options = loc.krate(db).cfg_options(db);
    let source = loc.source(db);
    let span_map = db.span_map(source.file_id);
    let raw = RawAttrs::new_expanded(db, &source.value, span_map.as_ref(), cfg_options);
    Attrs(raw)
}
pub(crate) fn fields_attrs_source_map(
db: &dyn DefDatabase,
def: VariantId,
) -> Arc<ArenaMap<LocalFieldId, AstPtr<Either<ast::TupleField, ast::RecordField>>>> {
let mut res = ArenaMap::default();
let child_source = def.child_source(db);
for (idx, variant) in child_source.value.iter() {
res.insert(
idx,
variant
.as_ref()
.either(|l| AstPtr::new(l).wrap_left(), |r| AstPtr::new(r).wrap_right()),
);
}
Arc::new(res)
}
#[cfg(test)]
mod tests {
    //! This module contains tests for doc-expression parsing.
    //! Currently, it tests `#[doc(hidden)]` and `#[doc(alias)]`.

    use intern::Symbol;
    use span::EditionedFileId;
    use triomphe::Arc;

    use hir_expand::span_map::{RealSpanMap, SpanMap};
    use span::FileId;
    use syntax::{AstNode, TextRange, ast};
    use syntax_bridge::{DocCommentDesugarMode, syntax_node_to_token_tree};

    use crate::attr::{DocAtom, DocExpr};

    // Parses the first token tree found in `input` as a doc-expression and
    // compares the result against `expected`.
    fn assert_parse_result(input: &str, expected: DocExpr) {
        let source_file = ast::SourceFile::parse(input, span::Edition::CURRENT).ok().unwrap();
        let tt = source_file.syntax().descendants().find_map(ast::TokenTree::cast).unwrap();
        // A dummy absolute span map suffices here: the parser only inspects
        // token text, not spans.
        let map = SpanMap::RealSpanMap(Arc::new(RealSpanMap::absolute(
            EditionedFileId::current_edition(FileId::from_raw(0)),
        )));
        let tt = syntax_node_to_token_tree(
            tt.syntax(),
            map.as_ref(),
            map.span_for_range(TextRange::empty(0.into())),
            DocCommentDesugarMode::ProcMacro,
        );
        let cfg = DocExpr::parse(&tt);
        assert_eq!(cfg, expected);
    }

    #[test]
    fn test_doc_expr_parser() {
        // Bare flag form.
        assert_parse_result("#![doc(hidden)]", DocAtom::Flag(Symbol::intern("hidden")).into());

        // Key-value form.
        assert_parse_result(
            r#"#![doc(alias = "foo")]"#,
            DocAtom::KeyValue { key: Symbol::intern("alias"), value: Symbol::intern("foo") }.into(),
        );

        // List form, single and multiple entries.
        assert_parse_result(
            r#"#![doc(alias("foo"))]"#,
            DocExpr::Alias([Symbol::intern("foo")].into()),
        );
        assert_parse_result(
            r#"#![doc(alias("foo", "bar", "baz"))]"#,
            DocExpr::Alias(
                [Symbol::intern("foo"), Symbol::intern("bar"), Symbol::intern("baz")].into(),
            ),
        );

        // Outer attribute on an item is parsed the same way.
        assert_parse_result(
            r#"
#[doc(alias("Bar", "Qux"))]
struct Foo;"#,
            DocExpr::Alias([Symbol::intern("Bar"), Symbol::intern("Qux")].into()),
        );
    }
}

File diff suppressed because it is too large Load diff

View file

@ -1,23 +1,21 @@
//! Defines database & queries for name resolution.
use base_db::{Crate, RootQueryDb, SourceDatabase};
use either::Either;
use hir_expand::{
EditionedFileId, HirFileId, InFile, Lookup, MacroCallId, MacroDefId, MacroDefKind,
db::ExpandDatabase,
};
use intern::sym;
use la_arena::ArenaMap;
use syntax::{AstPtr, ast};
use triomphe::Arc;
use crate::{
AssocItemId, AttrDefId, BlockId, BlockLoc, ConstId, ConstLoc, DefWithBodyId, EnumId, EnumLoc,
EnumVariantId, EnumVariantLoc, ExternBlockId, ExternBlockLoc, ExternCrateId, ExternCrateLoc,
FunctionId, FunctionLoc, GenericDefId, ImplId, ImplLoc, LocalFieldId, Macro2Id, Macro2Loc,
MacroExpander, MacroId, MacroRulesId, MacroRulesLoc, MacroRulesLocFlags, ProcMacroId,
ProcMacroLoc, StaticId, StaticLoc, StructId, StructLoc, TraitId, TraitLoc, TypeAliasId,
TypeAliasLoc, UnionId, UnionLoc, UseId, UseLoc, VariantId,
attr::{Attrs, AttrsWithOwner},
AssocItemId, AttrDefId, BlockId, BlockLoc, ConstId, ConstLoc, CrateRootModuleId, DefWithBodyId,
EnumId, EnumLoc, EnumVariantId, EnumVariantLoc, ExternBlockId, ExternBlockLoc, ExternCrateId,
ExternCrateLoc, FunctionId, FunctionLoc, GenericDefId, HasModule, ImplId, ImplLoc,
InternedModuleId, LocalFieldId, Macro2Id, Macro2Loc, MacroExpander, MacroId, MacroRulesId,
MacroRulesLoc, MacroRulesLocFlags, ProcMacroId, ProcMacroLoc, StaticId, StaticLoc, StructId,
StructLoc, TraitId, TraitLoc, TypeAliasId, TypeAliasLoc, UnionId, UnionLoc, UseId, UseLoc,
VariantId,
attrs::AttrFlags,
expr_store::{
Body, BodySourceMap, ExpressionStore, ExpressionStoreSourceMap, scope::ExprScopes,
},
@ -29,7 +27,6 @@ use crate::{
ConstSignature, EnumSignature, FunctionSignature, ImplSignature, StaticSignature,
StructSignature, TraitSignature, TypeAliasSignature, UnionSignature,
},
tt,
visibility::{self, Visibility},
};
@ -237,24 +234,6 @@ pub trait DefDatabase: InternDatabase + ExpandDatabase + SourceDatabase {
def: GenericDefId,
) -> (Arc<GenericParams>, Arc<ExpressionStore>, Arc<ExpressionStoreSourceMap>);
// region:attrs
#[salsa::invoke(Attrs::fields_attrs_query)]
fn fields_attrs(&self, def: VariantId) -> Arc<ArenaMap<LocalFieldId, Attrs>>;
// should this really be a query?
#[salsa::invoke(crate::attr::fields_attrs_source_map)]
fn fields_attrs_source_map(
&self,
def: VariantId,
) -> Arc<ArenaMap<LocalFieldId, AstPtr<Either<ast::TupleField, ast::RecordField>>>>;
// FIXME: Make this a non-interned query.
#[salsa::invoke_interned(AttrsWithOwner::attrs_query)]
fn attrs(&self, def: AttrDefId) -> Attrs;
// endregion:attrs
#[salsa::invoke(ImportMap::import_map_query)]
fn import_map(&self, krate: Crate) -> Arc<ImportMap>;
@ -297,36 +276,9 @@ fn include_macro_invoc(
}
fn crate_supports_no_std(db: &dyn DefDatabase, crate_id: Crate) -> bool {
let file = crate_id.data(db).root_file_id(db);
let item_tree = db.file_item_tree(file.into());
let attrs = item_tree.top_level_raw_attrs();
for attr in &**attrs {
match attr.path().as_ident() {
Some(ident) if *ident == sym::no_std => return true,
Some(ident) if *ident == sym::cfg_attr => {}
_ => continue,
}
// This is a `cfg_attr`; check if it could possibly expand to `no_std`.
// Syntax is: `#[cfg_attr(condition(cfg, style), attr0, attr1, <...>)]`
let tt = match attr.token_tree_value() {
Some(tt) => tt.token_trees(),
None => continue,
};
let segments =
tt.split(|tt| matches!(tt, tt::TtElement::Leaf(tt::Leaf::Punct(p)) if p.char == ','));
for output in segments.skip(1) {
match output.flat_tokens() {
[tt::TokenTree::Leaf(tt::Leaf::Ident(ident))] if ident.sym == sym::no_std => {
return true;
}
_ => {}
}
}
}
false
let root_module = CrateRootModuleId::from(crate_id).module(db);
let attrs = AttrFlags::query(db, AttrDefId::ModuleId(InternedModuleId::new(db, root_module)));
attrs.contains(AttrFlags::IS_NO_STD)
}
fn macro_def(db: &dyn DefDatabase, id: MacroId) -> MacroDefId {

View file

@ -17,11 +17,10 @@ use syntax::{AstNode, Parse, ast};
use triomphe::Arc;
use tt::TextRange;
use crate::attr::Attrs;
use crate::expr_store::HygieneId;
use crate::macro_call_as_call_id;
use crate::nameres::DefMap;
use crate::{MacroId, UnresolvedMacro, db::DefDatabase};
use crate::{
MacroId, UnresolvedMacro, attrs::AttrFlags, db::DefDatabase, expr_store::HygieneId,
macro_call_as_call_id, nameres::DefMap,
};
#[derive(Debug)]
pub(super) struct Expander {
@ -70,11 +69,10 @@ impl Expander {
pub(super) fn is_cfg_enabled(
&self,
db: &dyn DefDatabase,
has_attrs: &dyn HasAttrs,
owner: &dyn HasAttrs,
cfg_options: &CfgOptions,
) -> Result<(), cfg::CfgExpr> {
Attrs::is_cfg_enabled_for(db, has_attrs, self.span_map.as_ref(), cfg_options)
AttrFlags::is_cfg_enabled_for(owner, cfg_options)
}
pub(super) fn call_syntax_ctx(&self) -> SyntaxContext {

View file

@ -12,7 +12,6 @@ use cfg::CfgOptions;
use either::Either;
use hir_expand::{
HirFileId, InFile, MacroDefId,
mod_path::tool_path,
name::{AsName, Name},
span_map::SpanMapRef,
};
@ -34,6 +33,7 @@ use tt::TextRange;
use crate::{
AdtId, BlockId, BlockLoc, DefWithBodyId, FunctionId, GenericDefId, ImplId, MacroId,
ModuleDefId, ModuleId, TraitId, TypeAliasId, UnresolvedMacro,
attrs::AttrFlags,
builtin_type::BuiltinUint,
db::DefDatabase,
expr_store::{
@ -87,14 +87,16 @@ pub(super) fn lower_body(
let mut params = vec![];
let mut collector = ExprCollector::new(db, module, current_file_id);
let skip_body = match owner {
DefWithBodyId::FunctionId(it) => db.attrs(it.into()),
DefWithBodyId::StaticId(it) => db.attrs(it.into()),
DefWithBodyId::ConstId(it) => db.attrs(it.into()),
DefWithBodyId::VariantId(it) => db.attrs(it.into()),
}
.rust_analyzer_tool()
.any(|attr| *attr.path() == tool_path![skip]);
let skip_body = AttrFlags::query(
db,
match owner {
DefWithBodyId::FunctionId(it) => it.into(),
DefWithBodyId::StaticId(it) => it.into(),
DefWithBodyId::ConstId(it) => it.into(),
DefWithBodyId::VariantId(it) => it.into(),
},
)
.contains(AttrFlags::RUST_ANALYZER_SKIP);
// If #[rust_analyzer::skip] annotated, only construct enough information for the signature
// and skip the body.
if skip_body {
@ -2498,7 +2500,7 @@ impl<'db> ExprCollector<'db> {
/// Returns `None` (and emits diagnostics) when `owner` if `#[cfg]`d out, and `Some(())` when
/// not.
fn check_cfg(&mut self, owner: &dyn ast::HasAttrs) -> bool {
let enabled = self.expander.is_cfg_enabled(self.db, owner, self.cfg_options);
let enabled = self.expander.is_cfg_enabled(owner, self.cfg_options);
match enabled {
Ok(()) => true,
Err(cfg) => {

View file

@ -12,7 +12,8 @@ use span::Edition;
use syntax::ast::{HasName, RangeOp};
use crate::{
AdtId, DefWithBodyId, GenericDefId, TypeParamId, VariantId,
AdtId, DefWithBodyId, FunctionId, GenericDefId, StructId, TypeParamId, VariantId,
attrs::AttrFlags,
expr_store::path::{GenericArg, GenericArgs},
hir::{
Array, BindingAnnotation, CaptureBy, ClosureKind, Literal, Movability, Statement,
@ -167,7 +168,7 @@ pub fn print_signature(db: &dyn DefDatabase, owner: GenericDefId, edition: Editi
GenericDefId::AdtId(id) => match id {
AdtId::StructId(id) => {
let signature = db.struct_signature(id);
print_struct(db, &signature, edition)
print_struct(db, id, &signature, edition)
}
AdtId::UnionId(id) => {
format!("unimplemented {id:?}")
@ -179,7 +180,7 @@ pub fn print_signature(db: &dyn DefDatabase, owner: GenericDefId, edition: Editi
GenericDefId::ConstId(id) => format!("unimplemented {id:?}"),
GenericDefId::FunctionId(id) => {
let signature = db.function_signature(id);
print_function(db, &signature, edition)
print_function(db, id, &signature, edition)
}
GenericDefId::ImplId(id) => format!("unimplemented {id:?}"),
GenericDefId::StaticId(id) => format!("unimplemented {id:?}"),
@ -208,7 +209,8 @@ pub fn print_path(
pub fn print_struct(
db: &dyn DefDatabase,
StructSignature { name, generic_params, store, flags, shape, repr }: &StructSignature,
id: StructId,
StructSignature { name, generic_params, store, flags, shape }: &StructSignature,
edition: Edition,
) -> String {
let mut p = Printer {
@ -219,7 +221,7 @@ pub fn print_struct(
line_format: LineFormat::Newline,
edition,
};
if let Some(repr) = repr {
if let Some(repr) = AttrFlags::repr(db, id.into()) {
if repr.c() {
wln!(p, "#[repr(C)]");
}
@ -255,7 +257,8 @@ pub fn print_struct(
pub fn print_function(
db: &dyn DefDatabase,
FunctionSignature {
id: FunctionId,
signature @ FunctionSignature {
name,
generic_params,
store,
@ -263,10 +266,10 @@ pub fn print_function(
ret_type,
abi,
flags,
legacy_const_generics_indices,
}: &FunctionSignature,
edition: Edition,
) -> String {
let legacy_const_generics_indices = signature.legacy_const_generics_indices(db, id);
let mut p = Printer {
db,
store,
@ -298,7 +301,7 @@ pub fn print_function(
if i != 0 {
w!(p, ", ");
}
if legacy_const_generics_indices.as_ref().is_some_and(|idx| idx.contains(&(i as u32))) {
if legacy_const_generics_indices.is_some_and(|idx| idx.contains(&(i as u32))) {
w!(p, "const: ");
}
p.print_type_ref(*param);

View file

@ -189,8 +189,8 @@ fn f() {
}
"#,
expect![[r#"
BlockId(3c01) in BlockRelativeModuleId { block: Some(BlockId(3c00)), local_id: Idx::<ModuleData>(1) }
BlockId(3c00) in BlockRelativeModuleId { block: None, local_id: Idx::<ModuleData>(0) }
BlockId(3801) in BlockRelativeModuleId { block: Some(BlockId(3800)), local_id: Idx::<ModuleData>(1) }
BlockId(3800) in BlockRelativeModuleId { block: None, local_id: Idx::<ModuleData>(0) }
crate scope
"#]],
);

View file

@ -38,14 +38,24 @@ fn lower_and_print(#[rust_analyzer::rust_fixture] ra_fixture: &str, expect: Expe
match def {
GenericDefId::AdtId(adt_id) => match adt_id {
crate::AdtId::StructId(struct_id) => {
out += &print_struct(&db, &db.struct_signature(struct_id), Edition::CURRENT);
out += &print_struct(
&db,
struct_id,
&db.struct_signature(struct_id),
Edition::CURRENT,
);
}
crate::AdtId::UnionId(_id) => (),
crate::AdtId::EnumId(_id) => (),
},
GenericDefId::ConstId(_id) => (),
GenericDefId::FunctionId(function_id) => {
out += &print_function(&db, &db.function_signature(function_id), Edition::CURRENT)
out += &print_function(
&db,
function_id,
&db.function_signature(function_id),
Edition::CURRENT,
)
}
GenericDefId::ImplId(_id) => (),

View file

@ -13,7 +13,8 @@ use stdx::format_to;
use triomphe::Arc;
use crate::{
AssocItemId, AttrDefId, Complete, FxIndexMap, ModuleDefId, ModuleId, TraitId,
AssocItemId, AttrDefId, Complete, FxIndexMap, InternedModuleId, ModuleDefId, ModuleId, TraitId,
attrs::AttrFlags,
db::DefDatabase,
item_scope::{ImportOrExternCrate, ItemInNs},
nameres::{DefMap, assoc::TraitItems, crate_def_map},
@ -165,17 +166,34 @@ impl ImportMap {
}
} else {
match item {
ItemInNs::Types(id) | ItemInNs::Values(id) => id.try_into().ok(),
ItemInNs::Types(id) | ItemInNs::Values(id) => match id {
ModuleDefId::ModuleId(it) => {
Some(AttrDefId::ModuleId(InternedModuleId::new(db, it)))
}
ModuleDefId::FunctionId(it) => Some(it.into()),
ModuleDefId::AdtId(it) => Some(it.into()),
ModuleDefId::EnumVariantId(it) => Some(it.into()),
ModuleDefId::ConstId(it) => Some(it.into()),
ModuleDefId::StaticId(it) => Some(it.into()),
ModuleDefId::TraitId(it) => Some(it.into()),
ModuleDefId::TypeAliasId(it) => Some(it.into()),
ModuleDefId::MacroId(it) => Some(it.into()),
ModuleDefId::BuiltinType(_) => None,
},
ItemInNs::Macros(id) => Some(id.into()),
}
};
let (is_doc_hidden, is_unstable, do_not_complete) = match attr_id {
None => (false, false, Complete::Yes),
Some(attr_id) => {
let attrs = db.attrs(attr_id);
let attrs = AttrFlags::query(db, attr_id);
let do_not_complete =
Complete::extract(matches!(attr_id, AttrDefId::TraitId(_)), &attrs);
(attrs.has_doc_hidden(), attrs.is_unstable(), do_not_complete)
Complete::extract(matches!(attr_id, AttrDefId::TraitId(_)), attrs);
(
attrs.contains(AttrFlags::IS_DOC_HIDDEN),
attrs.contains(AttrFlags::IS_UNSTABLE),
do_not_complete,
)
}
};
@ -239,15 +257,15 @@ impl ImportMap {
};
let attr_id = item.into();
let attrs = &db.attrs(attr_id);
let attrs = AttrFlags::query(db, attr_id);
let item_do_not_complete = Complete::extract(false, attrs);
let do_not_complete =
Complete::for_trait_item(trait_import_info.complete, item_do_not_complete);
let assoc_item_info = ImportInfo {
container: trait_import_info.container,
name: assoc_item_name.clone(),
is_doc_hidden: attrs.has_doc_hidden(),
is_unstable: attrs.is_unstable(),
is_doc_hidden: attrs.contains(AttrFlags::IS_DOC_HIDDEN),
is_unstable: attrs.contains(AttrFlags::IS_UNSTABLE),
complete: do_not_complete,
};

View file

@ -30,6 +30,7 @@
//! In general, any item in the `ItemTree` stores its `AstId`, which allows mapping it back to its
//! surface syntax.
mod attrs;
mod lower;
mod pretty;
#[cfg(test)]
@ -43,10 +44,8 @@ use std::{
};
use ast::{AstNode, StructKind};
use base_db::Crate;
use hir_expand::{
ExpandTo, HirFileId,
attrs::RawAttrs,
mod_path::{ModPath, PathKind},
name::Name,
};
@ -59,9 +58,12 @@ use syntax::{SyntaxKind, ast, match_ast};
use thin_vec::ThinVec;
use triomphe::Arc;
use crate::{BlockId, Lookup, attr::Attrs, db::DefDatabase};
use crate::{BlockId, Lookup, db::DefDatabase};
pub(crate) use crate::item_tree::lower::{lower_use_tree, visibility_from_ast};
pub(crate) use crate::item_tree::{
attrs::*,
lower::{lower_use_tree, visibility_from_ast},
};
#[derive(Copy, Clone, Eq, PartialEq)]
pub(crate) struct RawVisibilityId(u32);
@ -96,7 +98,7 @@ pub(crate) fn file_item_tree_query(db: &dyn DefDatabase, file_id: HirFileId) ->
let mut item_tree = match_ast! {
match syntax {
ast::SourceFile(file) => {
let top_attrs = RawAttrs::new(db, &file, ctx.span_map());
let top_attrs = ctx.lower_attrs(&file);
let mut item_tree = ctx.lower_module_items(&file);
item_tree.top_attrs = top_attrs;
item_tree
@ -132,7 +134,7 @@ pub(crate) fn file_item_tree_query(db: &dyn DefDatabase, file_id: HirFileId) ->
attrs: FxHashMap::default(),
small_data: FxHashMap::default(),
big_data: FxHashMap::default(),
top_attrs: RawAttrs::EMPTY,
top_attrs: AttrsOrCfg::empty(),
vis: ItemVisibilities { arena: ThinVec::new() },
})
})
@ -168,7 +170,7 @@ pub(crate) fn block_item_tree_query(db: &dyn DefDatabase, block: BlockId) -> Arc
attrs: FxHashMap::default(),
small_data: FxHashMap::default(),
big_data: FxHashMap::default(),
top_attrs: RawAttrs::EMPTY,
top_attrs: AttrsOrCfg::empty(),
vis: ItemVisibilities { arena: ThinVec::new() },
})
})
@ -182,8 +184,8 @@ pub(crate) fn block_item_tree_query(db: &dyn DefDatabase, block: BlockId) -> Arc
#[derive(Debug, Default, Eq, PartialEq)]
pub struct ItemTree {
top_level: Box<[ModItemId]>,
top_attrs: RawAttrs,
attrs: FxHashMap<FileAstId<ast::Item>, RawAttrs>,
top_attrs: AttrsOrCfg,
attrs: FxHashMap<FileAstId<ast::Item>, AttrsOrCfg>,
vis: ItemVisibilities,
big_data: FxHashMap<FileAstId<ast::Item>, BigModItem>,
small_data: FxHashMap<FileAstId<ast::Item>, SmallModItem>,
@ -197,26 +199,12 @@ impl ItemTree {
}
/// Returns the inner attributes of the source file.
pub(crate) fn top_level_raw_attrs(&self) -> &RawAttrs {
pub(crate) fn top_level_attrs(&self) -> &AttrsOrCfg {
&self.top_attrs
}
/// Returns the inner attributes of the source file.
pub(crate) fn top_level_attrs(&self, db: &dyn DefDatabase, krate: Crate) -> Attrs {
Attrs::expand_cfg_attr(db, krate, self.top_attrs.clone())
}
pub(crate) fn raw_attrs(&self, of: FileAstId<ast::Item>) -> &RawAttrs {
self.attrs.get(&of).unwrap_or(&RawAttrs::EMPTY)
}
pub(crate) fn attrs(
&self,
db: &dyn DefDatabase,
krate: Crate,
of: FileAstId<ast::Item>,
) -> Attrs {
Attrs::expand_cfg_attr(db, krate, self.raw_attrs(of).clone())
pub(crate) fn attrs(&self, of: FileAstId<ast::Item>) -> Option<&AttrsOrCfg> {
self.attrs.get(&of)
}
/// Returns a count of a few, expensive items.

View file

@ -0,0 +1,220 @@
//! Defines attribute helpers for name resolution.
//!
//! Notice we don't preserve all attributes for name resolution, to save space:
//! for example, we skip doc comments (desugared to `#[doc = "..."]` attributes)
//! and `#[inline]`. The filtered attributes are listed in [`hir_expand::attrs`].
use std::{
borrow::Cow,
convert::Infallible,
ops::{self, ControlFlow},
};
use cfg::{CfgExpr, CfgOptions};
use either::Either;
use hir_expand::{
attrs::{Attr, AttrId, AttrInput, Meta, collect_item_tree_attrs},
mod_path::ModPath,
name::Name,
span_map::SpanMapRef,
};
use intern::{Interned, Symbol, sym};
use syntax::{AstNode, T, ast};
use syntax_bridge::DocCommentDesugarMode;
use tt::token_to_literal;
use crate::{db::DefDatabase, item_tree::lower::Ctx};
/// The lowered item-tree attributes of an item, or the reason it is disabled.
#[derive(Debug, PartialEq, Eq)]
pub(crate) enum AttrsOrCfg {
    Enabled {
        attrs: AttrsOwned,
    },
    /// This only collects the attributes up to the disabled `cfg` (this is what is needed for crate-level attributes.)
    CfgDisabled(Box<(CfgExpr, AttrsOwned)>),
}
impl Default for AttrsOrCfg {
    /// The default is an enabled item with no attributes. Delegates to
    /// [`AttrsOrCfg::empty`] so the "empty" value has a single source of truth
    /// instead of duplicating the `Enabled { attrs: .. }` construction.
    #[inline]
    fn default() -> Self {
        AttrsOrCfg::empty()
    }
}
impl AttrsOrCfg {
    /// Lowers all attributes of `owner` into item-tree form, expanding
    /// `cfg_attr` and stopping early if a disabled `cfg` is encountered
    /// (in which case the attributes collected so far are kept).
    ///
    /// `cfg_options` is a thunk so the (possibly expensive) lookup only happens
    /// when a `cfg` actually needs evaluating.
    pub(crate) fn lower<'a>(
        db: &dyn DefDatabase,
        owner: &dyn ast::HasAttrs,
        cfg_options: &dyn Fn() -> &'a CfgOptions,
        span_map: SpanMapRef<'_>,
    ) -> AttrsOrCfg {
        let mut attrs = Vec::new();
        let result =
            collect_item_tree_attrs::<Infallible>(owner, cfg_options, |meta, container, _, _| {
                // NOTE: We cannot early return from this function, *every* attribute must be pushed, otherwise we'll mess the `AttrId`
                // tracking.
                let (span, path_range, input) = match meta {
                    // `#[name = value]` — the value is kept as a literal.
                    Meta::NamedKeyValue { path_range, name: _, value } => {
                        let span = span_map.span_for_range(path_range);
                        let input = value.map(|value| {
                            Box::new(AttrInput::Literal(token_to_literal(
                                value.text(),
                                span_map.span_for_range(value.text_range()),
                            )))
                        });
                        (span, path_range, input)
                    }
                    // `#[name(...)]` — the argument list is kept as a token tree.
                    Meta::TokenTree { path, tt } => {
                        let span = span_map.span_for_range(path.range);
                        let tt = syntax_bridge::syntax_node_to_token_tree(
                            tt.syntax(),
                            span_map,
                            span,
                            DocCommentDesugarMode::ProcMacro,
                        );
                        let input = Some(Box::new(AttrInput::TokenTree(tt)));
                        (span, path.range, input)
                    }
                    // Bare `#[name]`.
                    Meta::Path { path } => {
                        let span = span_map.span_for_range(path.range);
                        (span, path.range, None)
                    }
                };
                // Reconstruct the attribute path from the tokens covering `path_range`.
                let path = container.token_at_offset(path_range.start()).right_biased().and_then(
                    |first_path_token| {
                        let is_abs = matches!(first_path_token.kind(), T![:] | T![::]);
                        let segments =
                            std::iter::successors(Some(first_path_token), |it| it.next_token())
                                .take_while(|it| it.text_range().end() <= path_range.end())
                                .filter(|it| it.kind().is_any_identifier());
                        ModPath::from_tokens(
                            db,
                            &mut |range| span_map.span_for_range(range).ctx,
                            is_abs,
                            segments,
                        )
                    },
                );
                let path = path.unwrap_or_else(|| Name::missing().into());
                attrs.push(Attr { path: Interned::new(path), input, ctxt: span.ctx });
                ControlFlow::Continue(())
            });
        let attrs = AttrsOwned(attrs.into_boxed_slice());
        match result {
            // A disabled `cfg` was hit: remember it along with the prefix of
            // attributes that was collected before it.
            Some(Either::Right(cfg)) => AttrsOrCfg::CfgDisabled(Box::new((cfg, attrs))),
            None => AttrsOrCfg::Enabled { attrs },
        }
    }
}
/// Owning counterpart of [`Attrs`]: the attribute list as stored in the item tree.
#[derive(Debug, PartialEq, Eq)]
pub(crate) struct AttrsOwned(Box<[Attr]>);
/// A cheap, copyable borrowed view over an item's lowered attributes.
#[derive(Debug, Clone, Copy)]
pub(crate) struct Attrs<'a>(&'a [Attr]);
impl ops::Deref for Attrs<'_> {
type Target = [Attr];
#[inline]
fn deref(&self) -> &Self::Target {
self.0
}
}
impl Ctx<'_> {
    /// Convenience wrapper: lowers `owner`'s attributes with this lowering
    /// context's database, cfg options, and span map.
    #[inline]
    pub(super) fn lower_attrs(&self, owner: &dyn ast::HasAttrs) -> AttrsOrCfg {
        AttrsOrCfg::lower(self.db, owner, &|| self.cfg_options(), self.span_map())
    }
}
impl AttrsOwned {
#[inline]
pub(crate) fn as_ref(&self) -> Attrs<'_> {
Attrs(&self.0)
}
}
impl<'a> Attrs<'a> {
    /// A view over no attributes at all.
    pub(crate) const EMPTY: Self = Attrs(&[]);

    /// Start a query for attributes whose path is exactly the identifier `key`.
    #[inline]
    pub(crate) fn by_key(self, key: Symbol) -> AttrQuery<'a> {
        AttrQuery { attrs: self, key }
    }

    /// Iterate over every attribute, pairing each with the item-tree-derived
    /// `AttrId` of its position.
    #[inline]
    pub(crate) fn iter(self) -> impl Iterator<Item = (AttrId, &'a Attr)> {
        (0u32..).map(AttrId::from_item_tree_index).zip(self.0)
    }

    /// Iterate over the attributes strictly after `after` (all of them when
    /// `after` is `None`), preserving their original `AttrId`s.
    #[inline]
    pub(crate) fn iter_after(
        self,
        after: Option<AttrId>,
    ) -> impl Iterator<Item = (AttrId, &'a Attr)> {
        // Resume one past the given id; `None` means "from the beginning".
        let start = match after {
            Some(id) => id.item_tree_index() + 1,
            None => 0,
        };
        let remaining = &self.0[start as usize..];
        (start..).map(AttrId::from_item_tree_index).zip(remaining)
    }

    /// Whether a `#[proc_macro]` attribute is present.
    #[inline]
    pub(crate) fn is_proc_macro(&self) -> bool {
        self.by_key(sym::proc_macro).exists()
    }

    /// Whether a `#[proc_macro_attribute]` attribute is present.
    #[inline]
    pub(crate) fn is_proc_macro_attribute(&self) -> bool {
        self.by_key(sym::proc_macro_attribute).exists()
    }
}
/// A lazy query over a set of attributes, filtering for those whose path is
/// the single identifier `key` (see `Attrs::by_key`).
#[derive(Debug, Clone)]
pub(crate) struct AttrQuery<'attr> {
    // The attribute set being queried.
    attrs: Attrs<'attr>,
    // The identifier the attribute path must equal for an attribute to match.
    key: Symbol,
}
impl<'attr> AttrQuery<'attr> {
#[inline]
pub(crate) fn tt_values(self) -> impl Iterator<Item = &'attr crate::tt::TopSubtree> {
self.attrs().filter_map(|attr| attr.token_tree_value())
}
#[inline]
pub(crate) fn string_value_with_span(self) -> Option<(&'attr Symbol, span::Span)> {
self.attrs().find_map(|attr| attr.string_value_with_span())
}
#[inline]
pub(crate) fn string_value_unescape(self) -> Option<Cow<'attr, str>> {
self.attrs().find_map(|attr| attr.string_value_unescape())
}
#[inline]
pub(crate) fn exists(self) -> bool {
self.attrs().next().is_some()
}
#[inline]
pub(crate) fn attrs(self) -> impl Iterator<Item = &'attr Attr> + Clone {
let key = self.key;
self.attrs.0.iter().filter(move |attr| attr.path.as_ident().is_some_and(|s| *s == key))
}
}
impl AttrsOrCfg {
    /// An enabled attribute set containing no attributes.
    #[inline]
    pub(super) fn empty() -> Self {
        let no_attrs = AttrsOwned(Box::new([]));
        AttrsOrCfg::Enabled { attrs: no_attrs }
    }

    /// `true` only for an enabled set with zero attributes; a cfg-disabled
    /// set is never considered empty.
    #[inline]
    pub(super) fn is_empty(&self) -> bool {
        match self {
            AttrsOrCfg::Enabled { attrs } => attrs.as_ref().is_empty(),
            AttrsOrCfg::CfgDisabled(_) => false,
        }
    }
}

View file

@ -1,8 +1,9 @@
//! AST -> `ItemTree` lowering code.
use std::{cell::OnceCell, collections::hash_map::Entry};
use std::cell::OnceCell;
use base_db::FxIndexSet;
use cfg::CfgOptions;
use hir_expand::{
HirFileId,
mod_path::PathKind,
@ -22,18 +23,19 @@ use crate::{
item_tree::{
BigModItem, Const, Enum, ExternBlock, ExternCrate, FieldsShape, Function, Impl,
ImportAlias, Interned, ItemTree, ItemTreeAstId, Macro2, MacroCall, MacroRules, Mod,
ModItemId, ModKind, ModPath, RawAttrs, RawVisibility, RawVisibilityId, SmallModItem,
Static, Struct, StructKind, Trait, TypeAlias, Union, Use, UseTree, UseTreeKind,
VisibilityExplicitness,
ModItemId, ModKind, ModPath, RawVisibility, RawVisibilityId, SmallModItem, Static, Struct,
StructKind, Trait, TypeAlias, Union, Use, UseTree, UseTreeKind, VisibilityExplicitness,
attrs::AttrsOrCfg,
},
};
pub(super) struct Ctx<'a> {
db: &'a dyn DefDatabase,
pub(super) db: &'a dyn DefDatabase,
tree: ItemTree,
source_ast_id_map: Arc<AstIdMap>,
span_map: OnceCell<SpanMap>,
file: HirFileId,
cfg_options: OnceCell<&'a CfgOptions>,
top_level: Vec<ModItemId>,
visibilities: FxIndexSet<RawVisibility>,
}
@ -45,12 +47,18 @@ impl<'a> Ctx<'a> {
tree: ItemTree::default(),
source_ast_id_map: db.ast_id_map(file),
file,
cfg_options: OnceCell::new(),
span_map: OnceCell::new(),
visibilities: FxIndexSet::default(),
top_level: Vec::new(),
}
}
#[inline]
pub(super) fn cfg_options(&self) -> &'a CfgOptions {
self.cfg_options.get_or_init(|| self.file.krate(self.db).cfg_options(self.db))
}
pub(super) fn span_map(&self) -> SpanMapRef<'_> {
self.span_map.get_or_init(|| self.db.span_map(self.file)).as_ref()
}
@ -98,7 +106,7 @@ impl<'a> Ctx<'a> {
}
pub(super) fn lower_block(mut self, block: &ast::BlockExpr) -> ItemTree {
self.tree.top_attrs = RawAttrs::new(self.db, block, self.span_map());
self.tree.top_attrs = self.lower_attrs(block);
self.top_level = block
.statements()
.filter_map(|stmt| match stmt {
@ -144,22 +152,15 @@ impl<'a> Ctx<'a> {
// FIXME: Handle `global_asm!()`.
ast::Item::AsmExpr(_) => return None,
};
let attrs = RawAttrs::new(self.db, item, self.span_map());
let attrs = self.lower_attrs(item);
self.add_attrs(mod_item.ast_id(), attrs);
Some(mod_item)
}
fn add_attrs(&mut self, item: FileAstId<ast::Item>, attrs: RawAttrs) {
fn add_attrs(&mut self, item: FileAstId<ast::Item>, attrs: AttrsOrCfg) {
if !attrs.is_empty() {
match self.tree.attrs.entry(item) {
Entry::Occupied(mut entry) => {
*entry.get_mut() = entry.get().merge(attrs);
}
Entry::Vacant(entry) => {
entry.insert(attrs);
}
}
self.tree.attrs.insert(item, attrs);
}
}
@ -352,7 +353,7 @@ impl<'a> Ctx<'a> {
ast::ExternItem::TypeAlias(ty) => self.lower_type_alias(ty)?.into(),
ast::ExternItem::MacroCall(call) => self.lower_macro_call(call)?.into(),
};
let attrs = RawAttrs::new(self.db, &item, self.span_map());
let attrs = self.lower_attrs(&item);
self.add_attrs(mod_item.ast_id(), attrs);
Some(mod_item)
})

View file

@ -7,8 +7,8 @@ use span::{Edition, ErasedFileAstId};
use crate::{
item_tree::{
Const, DefDatabase, Enum, ExternBlock, ExternCrate, FieldsShape, Function, Impl, ItemTree,
Macro2, MacroCall, MacroRules, Mod, ModItemId, ModKind, RawAttrs, RawVisibilityId, Static,
Struct, Trait, TypeAlias, Union, Use, UseTree, UseTreeKind,
Macro2, MacroCall, MacroRules, Mod, ModItemId, ModKind, RawVisibilityId, Static, Struct,
Trait, TypeAlias, Union, Use, UseTree, UseTreeKind, attrs::AttrsOrCfg,
},
visibility::RawVisibility,
};
@ -85,9 +85,13 @@ impl Printer<'_> {
}
}
fn print_attrs(&mut self, attrs: &RawAttrs, inner: bool, separated_by: &str) {
fn print_attrs(&mut self, attrs: &AttrsOrCfg, inner: bool, separated_by: &str) {
let AttrsOrCfg::Enabled { attrs } = attrs else {
w!(self, "#[cfg(false)]{separated_by}");
return;
};
let inner = if inner { "!" } else { "" };
for attr in &**attrs {
for attr in &*attrs.as_ref() {
w!(
self,
"#{}[{}{}]{}",

View file

@ -30,10 +30,8 @@ use crate::{A, B};
use a::{c, d::{e}};
"#,
expect![[r##"
#![doc = " file comment"]
expect![[r#"
#![no_std]
#![doc = " another file comment"]
// AstId: ExternCrate[070B, 0]
pub(self) extern crate self as renamed;
@ -47,13 +45,12 @@ use a::{c, d::{e}};
// AstId: Use[0000, 1]
pub(self) use globs::*;
#[doc = " docs on import"]
// AstId: Use[0000, 2]
pub(self) use crate::{A, B};
// AstId: Use[0000, 3]
pub(self) use a::{c, d::{e}};
"##]],
"#]],
);
}
@ -195,8 +192,6 @@ mod inline {
mod outline;
"#,
expect![[r##"
#[doc = " outer"]
#[doc = " inner"]
// AstId: Module[03AE, 0]
pub(self) mod inline {
// AstId: Use[0000, 0]

View file

@ -8,6 +8,7 @@ use stdx::impl_from;
use crate::{
AdtId, AssocItemId, AttrDefId, Crate, EnumId, EnumVariantId, FunctionId, ImplId, ModuleDefId,
StaticId, StructId, TraitId, TypeAliasId, UnionId,
attrs::AttrFlags,
db::DefDatabase,
nameres::{assoc::TraitItems, crate_def_map, crate_local_def_map},
};
@ -127,7 +128,7 @@ impl LangItems {
T: Into<AttrDefId> + Into<LangItemTarget> + Copy,
{
let _p = tracing::info_span!("collect_lang_item").entered();
if let Some(lang_item) = db.attrs(item.into()).lang_item() {
if let Some(lang_item) = AttrFlags::lang_item(db, item.into()) {
self.assign_lang_item(lang_item, item.into());
}
}
@ -142,7 +143,7 @@ pub(crate) fn crate_notable_traits(db: &dyn DefDatabase, krate: Crate) -> Option
for (_, module_data) in crate_def_map.modules() {
for def in module_data.scope.declarations() {
if let ModuleDefId::TraitId(trait_) = def
&& db.attrs(trait_.into()).has_doc_notable_trait()
&& AttrFlags::query(db, trait_.into()).contains(AttrFlags::IS_DOC_NOTABLE_TRAIT)
{
traits.push(trait_);
}
@ -177,10 +178,10 @@ macro_rules! language_item_table {
$( self.$lang_item = self.$lang_item.or(other.$lang_item); )*
}
fn assign_lang_item(&mut self, name: &Symbol, target: LangItemTarget) {
fn assign_lang_item(&mut self, name: Symbol, target: LangItemTarget) {
match name {
$(
_ if *name == $module::$name => {
_ if name == $module::$name => {
if let LangItemTarget::$target(target) = target {
self.$lang_item = Some(target);
}
@ -206,6 +207,14 @@ macro_rules! language_item_table {
$( LangItemEnum::$lang_item => lang_items.$lang_item.map(Into::into), )*
}
}
#[inline]
pub fn from_symbol(symbol: &Symbol) -> Option<Self> {
match symbol {
$( _ if *symbol == $module::$name => Some(Self::$lang_item), )*
_ => None,
}
}
}
}
}

View file

@ -19,7 +19,7 @@ extern crate ra_ap_rustc_abi as rustc_abi;
pub mod db;
pub mod attr;
pub mod attrs;
pub mod builtin_type;
pub mod item_scope;
pub mod per_ns;
@ -45,7 +45,7 @@ pub mod find_path;
pub mod import_map;
pub mod visibility;
use intern::{Interned, Symbol, sym};
use intern::{Interned, Symbol};
pub use rustc_abi as layout;
use thin_vec::ThinVec;
use triomphe::Arc;
@ -80,7 +80,7 @@ use syntax::{AstNode, ast};
pub use hir_expand::{Intern, Lookup, tt};
use crate::{
attr::Attrs,
attrs::AttrFlags,
builtin_type::BuiltinType,
db::DefDatabase,
expr_store::ExpressionStoreSourceMap,
@ -956,10 +956,16 @@ impl CallableDefId {
}
}
#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
// FIXME: We probably should use this in more places.
/// This is used to avoid interning the whole `AttrDefId`, so we intern just modules and not everything.
#[salsa_macros::interned(debug, no_lifetime)]
pub struct InternedModuleId {
pub loc: ModuleId,
}
#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, salsa_macros::Supertype)]
pub enum AttrDefId {
ModuleId(ModuleId),
FieldId(FieldId),
ModuleId(InternedModuleId),
AdtId(AdtId),
FunctionId(FunctionId),
EnumVariantId(EnumVariantId),
@ -969,15 +975,12 @@ pub enum AttrDefId {
TypeAliasId(TypeAliasId),
MacroId(MacroId),
ImplId(ImplId),
GenericParamId(GenericParamId),
ExternBlockId(ExternBlockId),
ExternCrateId(ExternCrateId),
UseId(UseId),
}
impl_from!(
ModuleId,
FieldId,
AdtId(StructId, EnumId, UnionId),
EnumVariantId,
StaticId,
@ -987,41 +990,11 @@ impl_from!(
TypeAliasId,
MacroId(Macro2Id, MacroRulesId, ProcMacroId),
ImplId,
GenericParamId,
ExternCrateId,
UseId
for AttrDefId
);
impl TryFrom<ModuleDefId> for AttrDefId {
type Error = ();
fn try_from(value: ModuleDefId) -> Result<Self, Self::Error> {
match value {
ModuleDefId::ModuleId(it) => Ok(it.into()),
ModuleDefId::FunctionId(it) => Ok(it.into()),
ModuleDefId::AdtId(it) => Ok(it.into()),
ModuleDefId::EnumVariantId(it) => Ok(it.into()),
ModuleDefId::ConstId(it) => Ok(it.into()),
ModuleDefId::StaticId(it) => Ok(it.into()),
ModuleDefId::TraitId(it) => Ok(it.into()),
ModuleDefId::TypeAliasId(it) => Ok(it.into()),
ModuleDefId::MacroId(id) => Ok(id.into()),
ModuleDefId::BuiltinType(_) => Err(()),
}
}
}
impl From<ItemContainerId> for AttrDefId {
fn from(acid: ItemContainerId) -> Self {
match acid {
ItemContainerId::ModuleId(mid) => AttrDefId::ModuleId(mid),
ItemContainerId::ImplId(iid) => AttrDefId::ImplId(iid),
ItemContainerId::TraitId(tid) => AttrDefId::TraitId(tid),
ItemContainerId::ExternBlockId(id) => AttrDefId::ExternBlockId(id),
}
}
}
impl From<AssocItemId> for AttrDefId {
fn from(assoc: AssocItemId) -> Self {
match assoc {
@ -1262,8 +1235,7 @@ impl HasModule for GenericDefId {
impl HasModule for AttrDefId {
fn module(&self, db: &dyn DefDatabase) -> ModuleId {
match self {
AttrDefId::ModuleId(it) => *it,
AttrDefId::FieldId(it) => it.parent.module(db),
AttrDefId::ModuleId(it) => it.loc(db),
AttrDefId::AdtId(it) => it.module(db),
AttrDefId::FunctionId(it) => it.module(db),
AttrDefId::EnumVariantId(it) => it.module(db),
@ -1273,12 +1245,6 @@ impl HasModule for AttrDefId {
AttrDefId::TypeAliasId(it) => it.module(db),
AttrDefId::ImplId(it) => it.module(db),
AttrDefId::ExternBlockId(it) => it.module(db),
AttrDefId::GenericParamId(it) => match it {
GenericParamId::TypeParamId(it) => it.parent(),
GenericParamId::ConstParamId(it) => it.parent(),
GenericParamId::LifetimeParamId(it) => it.parent,
}
.module(db),
AttrDefId::MacroId(it) => it.module(db),
AttrDefId::ExternCrateId(it) => it.module(db),
AttrDefId::UseId(it) => it.module(db),
@ -1402,32 +1368,18 @@ pub enum Complete {
}
impl Complete {
pub fn extract(is_trait: bool, attrs: &Attrs) -> Complete {
let mut do_not_complete = Complete::Yes;
for ra_attr in attrs.rust_analyzer_tool() {
let segments = ra_attr.path.segments();
if segments.len() != 2 {
continue;
}
let action = segments[1].symbol();
if *action == sym::completions {
match ra_attr.token_tree_value().map(|tt| tt.token_trees().flat_tokens()) {
Some([tt::TokenTree::Leaf(tt::Leaf::Ident(ident))]) => {
if ident.sym == sym::ignore_flyimport {
do_not_complete = Complete::IgnoreFlyimport;
} else if is_trait {
if ident.sym == sym::ignore_methods {
do_not_complete = Complete::IgnoreMethods;
} else if ident.sym == sym::ignore_flyimport_methods {
do_not_complete = Complete::IgnoreFlyimportMethods;
}
}
}
_ => {}
}
#[inline]
pub fn extract(is_trait: bool, attrs: AttrFlags) -> Complete {
if attrs.contains(AttrFlags::COMPLETE_IGNORE_FLYIMPORT) {
return Complete::IgnoreFlyimport;
} else if is_trait {
if attrs.contains(AttrFlags::COMPLETE_IGNORE_METHODS) {
return Complete::IgnoreMethods;
} else if attrs.contains(AttrFlags::COMPLETE_IGNORE_FLYIMPORT_METHODS) {
return Complete::IgnoreFlyimportMethods;
}
}
do_not_complete
Complete::Yes
}
#[inline]

View file

@ -300,21 +300,21 @@ fn match_by_first_token_literally() {
check(
r#"
macro_rules! m {
($i:ident) => ( mod $i {} );
($i:ident) => ( enum $i {} );
(= $i:ident) => ( fn $i() {} );
(+ $i:ident) => ( struct $i; )
}
m! { foo }
m! { Foo }
m! { = bar }
m! { + Baz }
"#,
expect![[r#"
macro_rules! m {
($i:ident) => ( mod $i {} );
($i:ident) => ( enum $i {} );
(= $i:ident) => ( fn $i() {} );
(+ $i:ident) => ( struct $i; )
}
mod foo {}
enum Foo {}
fn bar() {}
struct Baz;
"#]],
@ -326,21 +326,21 @@ fn match_by_last_token_literally() {
check(
r#"
macro_rules! m {
($i:ident) => ( mod $i {} );
($i:ident) => ( enum $i {} );
($i:ident =) => ( fn $i() {} );
($i:ident +) => ( struct $i; )
}
m! { foo }
m! { Foo }
m! { bar = }
m! { Baz + }
"#,
expect![[r#"
macro_rules! m {
($i:ident) => ( mod $i {} );
($i:ident) => ( enum $i {} );
($i:ident =) => ( fn $i() {} );
($i:ident +) => ( struct $i; )
}
mod foo {}
enum Foo {}
fn bar() {}
struct Baz;
"#]],
@ -352,21 +352,21 @@ fn match_by_ident() {
check(
r#"
macro_rules! m {
($i:ident) => ( mod $i {} );
($i:ident) => ( enum $i {} );
(spam $i:ident) => ( fn $i() {} );
(eggs $i:ident) => ( struct $i; )
}
m! { foo }
m! { Foo }
m! { spam bar }
m! { eggs Baz }
"#,
expect![[r#"
macro_rules! m {
($i:ident) => ( mod $i {} );
($i:ident) => ( enum $i {} );
(spam $i:ident) => ( fn $i() {} );
(eggs $i:ident) => ( struct $i; )
}
mod foo {}
enum Foo {}
fn bar() {}
struct Baz;
"#]],
@ -378,12 +378,12 @@ fn match_by_separator_token() {
check(
r#"
macro_rules! m {
($($i:ident),*) => ($(mod $i {} )*);
($($i:ident),*) => ($(enum $i {} )*);
($($i:ident)#*) => ($(fn $i() {} )*);
($i:ident ,# $ j:ident) => ( struct $i; struct $ j; )
}
m! { foo, bar }
m! { Baz, Qux }
m! { foo# bar }
@ -391,13 +391,13 @@ m! { Foo,# Bar }
"#,
expect![[r#"
macro_rules! m {
($($i:ident),*) => ($(mod $i {} )*);
($($i:ident),*) => ($(enum $i {} )*);
($($i:ident)#*) => ($(fn $i() {} )*);
($i:ident ,# $ j:ident) => ( struct $i; struct $ j; )
}
mod foo {}
mod bar {}
enum Baz {}
enum Qux {}
fn foo() {}
fn bar() {}
@ -1114,11 +1114,11 @@ fn test_single_item() {
check(
r#"
macro_rules! m { ($i:item) => ( $i ) }
m! { mod c {} }
m! { struct C {} }
"#,
expect![[r#"
macro_rules! m { ($i:item) => ( $i ) }
mod c {}
struct C {}
"#]],
)
}
@ -1144,6 +1144,7 @@ m! {
type T = u8;
}
"#,
// The modules are counted twice, once because of the module and once because of the macro call.
expect![[r#"
macro_rules! m { ($($i:item)*) => ($($i )*) }
extern crate a;
@ -1161,7 +1162,9 @@ trait J {}
fn h() {}
extern {}
type T = u8;
"#]],
mod b;
mod c {}"#]],
);
}

View file

@ -245,6 +245,21 @@ pub fn identity_when_valid(_attr: TokenStream, item: TokenStream) -> TokenStream
}
}
for (_, module) in def_map.modules() {
let Some(src) = module.declaration_source(&db) else {
continue;
};
if let Some(macro_file) = src.file_id.macro_file() {
let pp = pretty_print_macro_expansion(
src.value.syntax().clone(),
db.span_map(macro_file.into()).as_ref(),
false,
false,
);
format_to!(expanded_text, "\n{}", pp)
}
}
for impl_id in def_map[local_id].scope.impls() {
let src = impl_id.lookup(&db).source(&db);
if let Some(macro_file) = src.file_id.macro_file()

View file

@ -9,37 +9,65 @@ use crate::macro_expansion_tests::{check, check_errors};
#[test]
fn attribute_macro_attr_censoring() {
cov_mark::check!(attribute_macro_attr_censoring);
check(
r#"
//- proc_macros: identity
#[attr1] #[proc_macros::identity] #[attr2]
struct S;
"#,
expect![[r#"
#[attr1] #[proc_macros::identity] #[attr2]
//- minicore: derive
#[attr1] #[derive()] #[proc_macros::identity] #[attr2]
struct S;
/// Foo
#[cfg_attr(false, doc = "abc...", attr1)]
mod foo {
#![cfg_attr(true, cfg_attr(true, foo, cfg_attr(false, bar), proc_macros::identity))]
#![cfg_attr(true, doc = "123...", attr2)]
#![attr3]
#[cfg_attr(true, cfg(false))]
fn foo() {}
#[cfg(true)]
fn bar() {}
}
"#,
expect![[r##"
#[attr1] #[derive()] #[proc_macros::identity] #[attr2]
struct S;
/// Foo
#[cfg_attr(false, doc = "abc...", attr1)]
mod foo {
#![cfg_attr(true, cfg_attr(true, foo, cfg_attr(false, bar), proc_macros::identity))]
#![cfg_attr(true, doc = "123...", attr2)]
#![attr3]
#[cfg_attr(true, cfg(false))]
fn foo() {}
#[cfg(true)]
fn bar() {}
}
#[attr1]
#[attr2] struct S;"#]],
#[attr2] struct S;
#[doc = " Foo"] mod foo {
# ![foo]
# ![doc = "123..."]
# ![attr2]
# ![attr3]
#[cfg_attr(true , cfg(false ))] fn foo() {}
#[cfg(true )] fn bar() {}
}"##]],
);
}
#[test]
fn derive_censoring() {
cov_mark::check!(derive_censoring);
check(
r#"
//- proc_macros: derive_identity
//- minicore:derive
#[attr1]
#[derive(Foo)]
#[derive(proc_macros::DeriveIdentity)]
#[derive(Bar)]
#[attr2]
struct S;
"#,
expect![[r#"
use derive as my_cool_derive;
#[attr1]
#[derive(Foo)]
#[derive(proc_macros::DeriveIdentity)]
@ -47,6 +75,60 @@ struct S;
#[attr2]
struct S;
#[my_cool_derive()]
#[cfg_attr(true, derive(), attr1, derive(proc_macros::DeriveIdentity))]
#[my_cool_derive()]
struct Foo {
#[cfg_attr(false, cfg(false), attr2)]
v1: i32,
#[cfg_attr(true, cfg(false), attr2)]
v1: i32,
#[cfg_attr(true, attr3)]
v2: fn(#[cfg(false)] param: i32, #[cfg_attr(true, attr4)] param2: u32),
v3: Foo<{
#[cfg(false)]
let foo = 123;
456
}>,
#[cfg(false)]
v4: bool // No comma here
}
"#,
expect![[r#"
use derive as my_cool_derive;
#[attr1]
#[derive(Foo)]
#[derive(proc_macros::DeriveIdentity)]
#[derive(Bar)]
#[attr2]
struct S;
#[my_cool_derive()]
#[cfg_attr(true, derive(), attr1, derive(proc_macros::DeriveIdentity))]
#[my_cool_derive()]
struct Foo {
#[cfg_attr(false, cfg(false), attr2)]
v1: i32,
#[cfg_attr(true, cfg(false), attr2)]
v1: i32,
#[cfg_attr(true, attr3)]
v2: fn(#[cfg(false)] param: i32, #[cfg_attr(true, attr4)] param2: u32),
v3: Foo<{
#[cfg(false)]
let foo = 123;
456
}>,
#[cfg(false)]
v4: bool // No comma here
}
#[attr1]
#[my_cool_derive()] struct Foo {
v1: i32, #[attr3]v2: fn(#[attr4]param2: u32), v3: Foo< {
456
}
>,
}
#[attr1]
#[derive(Bar)]
#[attr2] struct S;"#]],
@ -87,7 +169,7 @@ fn foo() { bar.; blub }
fn foo() { bar.; blub }
fn foo() {
bar. ;
bar.;
blub
}"#]],
);

View file

@ -391,19 +391,14 @@ pub(crate) fn crate_local_def_map(db: &dyn DefDatabase, crate_id: Crate) -> DefM
)
.entered();
let module_data = ModuleData::new(
ModuleOrigin::CrateRoot { definition: krate.root_file_id(db) },
Visibility::Public,
);
let root_file_id = crate_id.root_file_id(db);
let module_data =
ModuleData::new(ModuleOrigin::CrateRoot { definition: root_file_id }, Visibility::Public);
let def_map =
DefMap::empty(crate_id, Arc::new(DefMapCrateData::new(krate.edition)), module_data, None);
let (def_map, local_def_map) = collector::collect_defs(
db,
def_map,
TreeId::new(krate.root_file_id(db).into(), None),
None,
);
let (def_map, local_def_map) =
collector::collect_defs(db, def_map, TreeId::new(root_file_id.into(), None), None);
DefMapPair::new(db, def_map, local_def_map)
}

View file

@ -4,7 +4,8 @@ use std::mem;
use cfg::CfgOptions;
use hir_expand::{
AstId, ExpandTo, HirFileId, InFile, Intern, Lookup, MacroCallKind, MacroDefKind,
AstId, AttrMacroAttrIds, ExpandTo, HirFileId, InFile, Intern, Lookup, MacroCallKind,
MacroDefKind,
mod_path::ModPath,
name::{AsName, Name},
span_map::SpanMap,
@ -21,8 +22,8 @@ use triomphe::Arc;
use crate::{
AssocItemId, AstIdWithPath, ConstLoc, FunctionId, FunctionLoc, ImplId, ItemContainerId,
ItemLoc, MacroCallId, ModuleId, TraitId, TypeAliasId, TypeAliasLoc,
attr::Attrs,
db::DefDatabase,
item_tree::AttrsOrCfg,
macro_call_as_call_id,
nameres::{
DefMap, LocalDefMap, MacroSubNs,
@ -191,19 +192,22 @@ impl<'a> AssocItemCollector<'a> {
fn collect_item(&mut self, item: ast::AssocItem) {
let ast_id = self.ast_id_map.ast_id(&item);
let attrs = Attrs::new(self.db, &item, self.span_map.as_ref(), self.cfg_options);
if let Err(cfg) = attrs.is_cfg_enabled(self.cfg_options) {
self.diagnostics.push(DefDiagnostic::unconfigured_code(
self.module_id.local_id,
InFile::new(self.file_id, ast_id.erase()),
cfg,
self.cfg_options.clone(),
));
return;
}
let attrs =
match AttrsOrCfg::lower(self.db, &item, &|| self.cfg_options, self.span_map.as_ref()) {
AttrsOrCfg::Enabled { attrs } => attrs,
AttrsOrCfg::CfgDisabled(cfg) => {
self.diagnostics.push(DefDiagnostic::unconfigured_code(
self.module_id.local_id,
InFile::new(self.file_id, ast_id.erase()),
cfg.0,
self.cfg_options.clone(),
));
return;
}
};
let ast_id = InFile::new(self.file_id, ast_id.upcast());
'attrs: for attr in &*attrs {
'attrs: for (attr_id, attr) in attrs.as_ref().iter() {
let ast_id_with_path = AstIdWithPath { path: attr.path.clone(), ast_id };
match self.def_map.resolve_attr_macro(
@ -212,6 +216,7 @@ impl<'a> AssocItemCollector<'a> {
self.module_id.local_id,
ast_id_with_path,
attr,
attr_id,
) {
Ok(ResolvedAttr::Macro(call_id)) => {
let loc = self.db.lookup_intern_macro_call(call_id);
@ -240,8 +245,12 @@ impl<'a> AssocItemCollector<'a> {
Err(_) => {
self.diagnostics.push(DefDiagnostic::unresolved_macro_call(
self.module_id.local_id,
MacroCallKind::Attr { ast_id, attr_args: None, invoc_attr_index: attr.id },
attr.path().clone(),
MacroCallKind::Attr {
ast_id,
attr_args: None,
censored_attr_ids: AttrMacroAttrIds::from_one(attr_id),
},
(*attr.path).clone(),
));
}
}

View file

@ -2,7 +2,7 @@
use base_db::Crate;
use hir_expand::{
MacroCallId, MacroCallKind, MacroDefId,
AttrMacroAttrIds, MacroCallId, MacroCallKind, MacroDefId,
attrs::{Attr, AttrId, AttrInput},
inert_attr_macro::find_builtin_attr_idx,
mod_path::{ModPath, PathKind},
@ -28,6 +28,7 @@ pub enum ResolvedAttr {
}
impl DefMap {
/// This cannot be used to resolve items that allow derives.
pub(crate) fn resolve_attr_macro(
&self,
local_def_map: &LocalDefMap,
@ -35,6 +36,7 @@ impl DefMap {
original_module: LocalModuleId,
ast_id: AstIdWithPath<ast::Item>,
attr: &Attr,
attr_id: AttrId,
) -> Result<ResolvedAttr, UnresolvedMacro> {
// NB: does not currently work for derive helpers as they aren't recorded in the `DefMap`
@ -68,6 +70,9 @@ impl DefMap {
db,
&ast_id,
attr,
// There aren't any active attributes before this one, because attribute macros
// replace their input, and derive macros are not allowed in this function.
AttrMacroAttrIds::from_one(attr_id),
self.krate,
db.macro_def(def),
)))
@ -102,6 +107,7 @@ pub(super) fn attr_macro_as_call_id(
db: &dyn DefDatabase,
item_attr: &AstIdWithPath<ast::Item>,
macro_attr: &Attr,
censored_attr_ids: AttrMacroAttrIds,
krate: Crate,
def: MacroDefId,
) -> MacroCallId {
@ -121,7 +127,7 @@ pub(super) fn attr_macro_as_call_id(
MacroCallKind::Attr {
ast_id: item_attr.ast_id,
attr_args: arg.map(Arc::new),
invoc_attr_index: macro_attr.id,
censored_attr_ids,
},
macro_attr.ctxt,
)

View file

@ -3,14 +3,14 @@
//! `DefCollector::collect` contains the fixed-point iteration loop which
//! resolves imports and expands macros.
use std::{cmp::Ordering, iter, mem, ops::Not};
use std::{cmp::Ordering, iter, mem};
use base_db::{BuiltDependency, Crate, CrateOrigin, LangCrateOrigin};
use cfg::{CfgAtom, CfgExpr, CfgOptions};
use either::Either;
use hir_expand::{
EditionedFileId, ErasedAstId, ExpandTo, HirFileId, InFile, MacroCallId, MacroCallKind,
MacroDefId, MacroDefKind,
AttrMacroAttrIds, EditionedFileId, ErasedAstId, ExpandTo, HirFileId, InFile, MacroCallId,
MacroCallKind, MacroDefId, MacroDefKind,
attrs::{Attr, AttrId},
builtin::{find_builtin_attr, find_builtin_derive, find_builtin_macro},
mod_path::{ModPath, PathKind},
@ -18,9 +18,10 @@ use hir_expand::{
proc_macro::CustomProcMacroExpander,
};
use intern::{Interned, sym};
use itertools::{Itertools, izip};
use itertools::izip;
use la_arena::Idx;
use rustc_hash::{FxHashMap, FxHashSet};
use smallvec::SmallVec;
use span::{Edition, FileAstId, SyntaxContext};
use syntax::ast;
use triomphe::Arc;
@ -32,12 +33,11 @@ use crate::{
MacroRulesId, MacroRulesLoc, MacroRulesLocFlags, ModuleDefId, ModuleId, ProcMacroId,
ProcMacroLoc, StaticLoc, StructLoc, TraitLoc, TypeAliasLoc, UnionLoc, UnresolvedMacro, UseId,
UseLoc,
attr::Attrs,
db::DefDatabase,
item_scope::{GlobId, ImportId, ImportOrExternCrate, PerNsGlobImports},
item_tree::{
self, FieldsShape, ImportAlias, ImportKind, ItemTree, ItemTreeAstId, Macro2, MacroCall,
MacroRules, Mod, ModItemId, ModKind, TreeId,
self, Attrs, AttrsOrCfg, FieldsShape, ImportAlias, ImportKind, ItemTree, ItemTreeAstId,
Macro2, MacroCall, MacroRules, Mod, ModItemId, ModKind, TreeId,
},
macro_call_as_call_id,
nameres::{
@ -102,6 +102,7 @@ pub(super) fn collect_defs(
proc_macros,
from_glob_import: Default::default(),
skip_attrs: Default::default(),
prev_active_attrs: Default::default(),
unresolved_extern_crates: Default::default(),
is_proc_macro: krate.is_proc_macro,
};
@ -206,6 +207,7 @@ enum MacroDirectiveKind<'db> {
},
Attr {
ast_id: AstIdWithPath<ast::Item>,
attr_id: AttrId,
attr: Attr,
mod_item: ModItemId,
/* is this needed? */ tree: TreeId,
@ -246,28 +248,27 @@ struct DefCollector<'db> {
/// This also stores the attributes to skip when we resolve derive helpers and non-macro
/// non-builtin attributes in general.
// FIXME: There has to be a better way to do this
skip_attrs: FxHashMap<InFile<FileAstId<ast::Item>>, AttrId>,
skip_attrs: FxHashMap<AstId<ast::Item>, AttrId>,
/// When we expand attributes, we need to censor all previous active attributes
/// on the same item. Therefore, this holds all active attributes that we already
/// expanded.
prev_active_attrs: FxHashMap<AstId<ast::Item>, SmallVec<[AttrId; 1]>>,
}
impl<'db> DefCollector<'db> {
fn seed_with_top_level(&mut self) {
let _p = tracing::info_span!("seed_with_top_level").entered();
let file_id = self.def_map.krate.data(self.db).root_file_id(self.db);
let file_id = self.def_map.krate.root_file_id(self.db);
let item_tree = self.db.file_item_tree(file_id.into());
let attrs = item_tree.top_level_attrs(self.db, self.def_map.krate);
let attrs = match item_tree.top_level_attrs() {
AttrsOrCfg::Enabled { attrs } => attrs.as_ref(),
AttrsOrCfg::CfgDisabled(it) => it.1.as_ref(),
};
let crate_data = Arc::get_mut(&mut self.def_map.data).unwrap();
let mut process = true;
// Process other crate-level attributes.
for attr in &*attrs {
if let Some(cfg) = attr.cfg()
&& self.cfg_options.check(&cfg) == Some(false)
{
process = false;
break;
}
let Some(attr_name) = attr.path.as_ident() else { continue };
match () {
@ -291,7 +292,7 @@ impl<'db> DefCollector<'db> {
() if *attr_name == sym::feature => {
let features =
attr.parse_path_comma_token_tree(self.db).into_iter().flatten().filter_map(
|(feat, _)| match feat.segments() {
|(feat, _, _)| match feat.segments() {
[name] => Some(name.symbol().clone()),
_ => None,
},
@ -344,7 +345,7 @@ impl<'db> DefCollector<'db> {
self.inject_prelude();
if !process {
if matches!(item_tree.top_level_attrs(), AttrsOrCfg::CfgDisabled(_)) {
return;
}
@ -362,10 +363,7 @@ impl<'db> DefCollector<'db> {
fn seed_with_inner(&mut self, tree_id: TreeId) {
let item_tree = tree_id.item_tree(self.db);
let is_cfg_enabled = item_tree
.top_level_attrs(self.db, self.def_map.krate)
.cfg()
.is_none_or(|cfg| self.cfg_options.check(&cfg) != Some(false));
let is_cfg_enabled = matches!(item_tree.top_level_attrs(), AttrsOrCfg::Enabled { .. });
if is_cfg_enabled {
self.inject_prelude();
@ -456,18 +454,18 @@ impl<'db> DefCollector<'db> {
self.unresolved_macros.iter().enumerate().find_map(|(idx, directive)| match &directive
.kind
{
MacroDirectiveKind::Attr { ast_id, mod_item, attr, tree, item_tree } => {
MacroDirectiveKind::Attr { ast_id, mod_item, attr_id, attr, tree, item_tree } => {
self.def_map.diagnostics.push(DefDiagnostic::unresolved_macro_call(
directive.module_id,
MacroCallKind::Attr {
ast_id: ast_id.ast_id,
attr_args: None,
invoc_attr_index: attr.id,
censored_attr_ids: AttrMacroAttrIds::from_one(*attr_id),
},
attr.path().clone(),
(*attr.path).clone(),
));
self.skip_attrs.insert(ast_id.ast_id.with_value(mod_item.ast_id()), attr.id);
self.skip_attrs.insert(ast_id.ast_id.with_value(mod_item.ast_id()), *attr_id);
Some((idx, directive, *mod_item, *tree, *item_tree))
}
@ -1350,6 +1348,7 @@ impl<'db> DefCollector<'db> {
MacroDirectiveKind::Attr {
ast_id: file_ast_id,
mod_item,
attr_id,
attr,
tree,
item_tree,
@ -1362,7 +1361,7 @@ impl<'db> DefCollector<'db> {
let mod_dir = collector.mod_dirs[&directive.module_id].clone();
collector
.skip_attrs
.insert(InFile::new(file_id, mod_item.ast_id()), attr.id);
.insert(InFile::new(file_id, mod_item.ast_id()), *attr_id);
ModCollector {
def_collector: collector,
@ -1398,7 +1397,6 @@ impl<'db> DefCollector<'db> {
// being cfg'ed out).
// Ideally we will just expand them to nothing here. But we are only collecting macro calls,
// not expanding them, so we have no way to do that.
// If you add an ignored attribute here, also add it to `Semantics::might_be_inside_macro_call()`.
if matches!(
def.kind,
MacroDefKind::BuiltInAttr(_, expander)
@ -1410,8 +1408,18 @@ impl<'db> DefCollector<'db> {
}
}
let call_id = || {
attr_macro_as_call_id(self.db, file_ast_id, attr, self.def_map.krate, def)
let mut call_id = || {
let active_attrs = self.prev_active_attrs.entry(ast_id).or_default();
active_attrs.push(*attr_id);
attr_macro_as_call_id(
self.db,
file_ast_id,
attr,
AttrMacroAttrIds::from_many(active_attrs),
self.def_map.krate,
def,
)
};
if matches!(def,
MacroDefId { kind: MacroDefKind::BuiltInAttr(_, exp), .. }
@ -1429,7 +1437,7 @@ impl<'db> DefCollector<'db> {
let diag = DefDiagnostic::invalid_derive_target(
directive.module_id,
ast_id,
attr.id,
*attr_id,
);
self.def_map.diagnostics.push(diag);
return recollect_without(self);
@ -1442,7 +1450,7 @@ impl<'db> DefCollector<'db> {
Some(derive_macros) => {
let call_id = call_id();
let mut len = 0;
for (idx, (path, call_site)) in derive_macros.enumerate() {
for (idx, (path, call_site, _)) in derive_macros.enumerate() {
let ast_id = AstIdWithPath::new(
file_id,
ast_id.value,
@ -1453,7 +1461,7 @@ impl<'db> DefCollector<'db> {
depth: directive.depth + 1,
kind: MacroDirectiveKind::Derive {
ast_id,
derive_attr: attr.id,
derive_attr: *attr_id,
derive_pos: idx,
ctxt: call_site.ctx,
derive_macro_id: call_id,
@ -1469,13 +1477,13 @@ impl<'db> DefCollector<'db> {
// Check the comment in [`builtin_attr_macro`].
self.def_map.modules[directive.module_id]
.scope
.init_derive_attribute(ast_id, attr.id, call_id, len + 1);
.init_derive_attribute(ast_id, *attr_id, call_id, len + 1);
}
None => {
let diag = DefDiagnostic::malformed_derive(
directive.module_id,
ast_id,
attr.id,
*attr_id,
);
self.def_map.diagnostics.push(diag);
}
@ -1712,16 +1720,17 @@ impl ModCollector<'_, '_> {
};
let mut process_mod_item = |item: ModItemId| {
let attrs = self.item_tree.attrs(db, krate, item.ast_id());
if let Some(cfg) = attrs.cfg()
&& !self.is_cfg_enabled(&cfg)
{
let ast_id = item.ast_id().erase();
self.emit_unconfigured_diagnostic(InFile::new(self.file_id(), ast_id), &cfg);
return;
}
let attrs = match self.item_tree.attrs(item.ast_id()) {
Some(AttrsOrCfg::Enabled { attrs }) => attrs.as_ref(),
None => Attrs::EMPTY,
Some(AttrsOrCfg::CfgDisabled(cfg)) => {
let ast_id = item.ast_id().erase();
self.emit_unconfigured_diagnostic(InFile::new(self.file_id(), ast_id), &cfg.0);
return;
}
};
if let Err(()) = self.resolve_attributes(&attrs, item, container) {
if let Err(()) = self.resolve_attributes(attrs, item, container) {
// Do not process the item. It has at least one non-builtin attribute, so the
// fixed-point algorithm is required to resolve the rest of them.
return;
@ -1733,7 +1742,7 @@ impl ModCollector<'_, '_> {
self.def_collector.crate_local_def_map.unwrap_or(&self.def_collector.local_def_map);
match item {
ModItemId::Mod(m) => self.collect_module(m, &attrs),
ModItemId::Mod(m) => self.collect_module(m, attrs),
ModItemId::Use(item_tree_id) => {
let id =
UseLoc { container: module, id: InFile::new(self.file_id(), item_tree_id) }
@ -2006,7 +2015,7 @@ impl ModCollector<'_, '_> {
);
return;
};
for (path, _) in paths {
for (path, _, _) in paths {
if let Some(name) = path.as_ident() {
single_imports.push(name.clone());
}
@ -2020,7 +2029,7 @@ impl ModCollector<'_, '_> {
);
}
fn collect_module(&mut self, module_ast_id: ItemTreeAstId<Mod>, attrs: &Attrs) {
fn collect_module(&mut self, module_ast_id: ItemTreeAstId<Mod>, attrs: Attrs<'_>) {
let path_attr = attrs.by_key(sym::path).string_value_unescape();
let is_macro_use = attrs.by_key(sym::macro_use).exists();
let module = &self.item_tree[module_ast_id];
@ -2061,23 +2070,18 @@ impl ModCollector<'_, '_> {
self.file_id(),
&module.name,
path_attr.as_deref(),
self.def_collector.def_map.krate,
) {
Ok((file_id, is_mod_rs, mod_dir)) => {
let item_tree = db.file_item_tree(file_id.into());
let krate = self.def_collector.def_map.krate;
let is_enabled = item_tree
.top_level_attrs(db, krate)
.cfg()
.and_then(|cfg| self.is_cfg_enabled(&cfg).not().then_some(cfg))
.map_or(Ok(()), Err);
match is_enabled {
Err(cfg) => {
match item_tree.top_level_attrs() {
AttrsOrCfg::CfgDisabled(cfg) => {
self.emit_unconfigured_diagnostic(
InFile::new(self.file_id(), module_ast_id.erase()),
&cfg,
&cfg.0,
);
}
Ok(()) => {
AttrsOrCfg::Enabled { attrs } => {
let module_id = self.push_child_module(
module.name.clone(),
ast_id.value,
@ -2093,11 +2097,8 @@ impl ModCollector<'_, '_> {
mod_dir,
}
.collect_in_top_module(item_tree.top_level_items());
let is_macro_use = is_macro_use
|| item_tree
.top_level_attrs(db, krate)
.by_key(sym::macro_use)
.exists();
let is_macro_use =
is_macro_use || attrs.as_ref().by_key(sym::macro_use).exists();
if is_macro_use {
self.import_all_legacy_macros(module_id);
}
@ -2185,36 +2186,16 @@ impl ModCollector<'_, '_> {
/// assumed to be resolved already.
fn resolve_attributes(
&mut self,
attrs: &Attrs,
attrs: Attrs<'_>,
mod_item: ModItemId,
container: ItemContainerId,
) -> Result<(), ()> {
let mut ignore_up_to = self
let ignore_up_to = self
.def_collector
.skip_attrs
.get(&InFile::new(self.file_id(), mod_item.ast_id()))
.copied();
let iter = attrs
.iter()
.dedup_by(|a, b| {
// FIXME: this should not be required, all attributes on an item should have a
// unique ID!
// Still, this occurs because `#[cfg_attr]` can "expand" to multiple attributes:
// #[cfg_attr(not(off), unresolved, unresolved)]
// struct S;
// We should come up with a different way to ID attributes.
a.id == b.id
})
.skip_while(|attr| match ignore_up_to {
Some(id) if attr.id == id => {
ignore_up_to = None;
true
}
Some(_) => true,
None => false,
});
for attr in iter {
for (attr_id, attr) in attrs.iter_after(ignore_up_to) {
if self.def_collector.def_map.is_builtin_or_registered_attr(&attr.path) {
continue;
}
@ -2229,6 +2210,7 @@ impl ModCollector<'_, '_> {
depth: self.macro_depth + 1,
kind: MacroDirectiveKind::Attr {
ast_id,
attr_id,
attr: attr.clone(),
mod_item,
tree: self.tree_id,
@ -2246,7 +2228,13 @@ impl ModCollector<'_, '_> {
fn collect_macro_rules(&mut self, ast_id: ItemTreeAstId<MacroRules>, module: ModuleId) {
let krate = self.def_collector.def_map.krate;
let mac = &self.item_tree[ast_id];
let attrs = self.item_tree.attrs(self.def_collector.db, krate, ast_id.upcast());
let attrs = match self.item_tree.attrs(ast_id.upcast()) {
Some(AttrsOrCfg::Enabled { attrs }) => attrs.as_ref(),
None => Attrs::EMPTY,
Some(AttrsOrCfg::CfgDisabled(_)) => {
unreachable!("we only get here if the macro is not cfg'ed out")
}
};
let f_ast_id = InFile::new(self.file_id(), ast_id.upcast());
let export_attr = || attrs.by_key(sym::macro_export);
@ -2331,7 +2319,13 @@ impl ModCollector<'_, '_> {
fn collect_macro_def(&mut self, ast_id: ItemTreeAstId<Macro2>, module: ModuleId) {
let krate = self.def_collector.def_map.krate;
let mac = &self.item_tree[ast_id];
let attrs = self.item_tree.attrs(self.def_collector.db, krate, ast_id.upcast());
let attrs = match self.item_tree.attrs(ast_id.upcast()) {
Some(AttrsOrCfg::Enabled { attrs }) => attrs.as_ref(),
None => Attrs::EMPTY,
Some(AttrsOrCfg::CfgDisabled(_)) => {
unreachable!("we only get here if the macro is not cfg'ed out")
}
};
let f_ast_id = InFile::new(self.file_id(), ast_id.upcast());
// Case 1: builtin macros
@ -2515,10 +2509,6 @@ impl ModCollector<'_, '_> {
Some((a, b))
}
fn is_cfg_enabled(&self, cfg: &CfgExpr) -> bool {
self.def_collector.cfg_options.check(cfg) != Some(false)
}
fn emit_unconfigured_diagnostic(&mut self, ast_id: ErasedAstId, cfg: &CfgExpr) {
self.def_collector.def_map.diagnostics.push(DefDiagnostic::unconfigured_code(
self.module_id,
@ -2558,6 +2548,7 @@ mod tests {
proc_macros: Default::default(),
from_glob_import: Default::default(),
skip_attrs: Default::default(),
prev_active_attrs: Default::default(),
is_proc_macro: false,
unresolved_extern_crates: Default::default(),
};

View file

@ -17,8 +17,8 @@ pub enum DefDiagnosticKind {
UnconfiguredCode { ast_id: ErasedAstId, cfg: CfgExpr, opts: CfgOptions },
UnresolvedMacroCall { ast: MacroCallKind, path: ModPath },
UnimplementedBuiltinMacro { ast: AstId<ast::Macro> },
InvalidDeriveTarget { ast: AstId<ast::Item>, id: usize },
MalformedDerive { ast: AstId<ast::Adt>, id: usize },
InvalidDeriveTarget { ast: AstId<ast::Item>, id: AttrId },
MalformedDerive { ast: AstId<ast::Adt>, id: AttrId },
MacroDefError { ast: AstId<ast::Macro>, message: String },
MacroError { ast: AstId<ast::Item>, path: ModPath, err: ExpandErrorKind },
}
@ -119,10 +119,7 @@ impl DefDiagnostic {
ast: AstId<ast::Item>,
id: AttrId,
) -> Self {
Self {
in_module: container,
kind: DefDiagnosticKind::InvalidDeriveTarget { ast, id: id.ast_index() },
}
Self { in_module: container, kind: DefDiagnosticKind::InvalidDeriveTarget { ast, id } }
}
pub(super) fn malformed_derive(
@ -130,9 +127,6 @@ impl DefDiagnostic {
ast: AstId<ast::Adt>,
id: AttrId,
) -> Self {
Self {
in_module: container,
kind: DefDiagnosticKind::MalformedDerive { ast, id: id.ast_index() },
}
Self { in_module: container, kind: DefDiagnosticKind::MalformedDerive { ast, id } }
}
}

View file

@ -1,6 +1,6 @@
//! This module resolves `mod foo;` declaration to file.
use arrayvec::ArrayVec;
use base_db::AnchoredPath;
use base_db::{AnchoredPath, Crate};
use hir_expand::{EditionedFileId, name::Name};
use crate::{HirFileId, db::DefDatabase};
@ -62,6 +62,7 @@ impl ModDir {
file_id: HirFileId,
name: &Name,
attr_path: Option<&str>,
krate: Crate,
) -> Result<(EditionedFileId, bool, ModDir), Box<[String]>> {
let name = name.as_str();
@ -91,7 +92,7 @@ impl ModDir {
if let Some(mod_dir) = self.child(dir_path, !root_dir_owner) {
return Ok((
// FIXME: Edition, is this right?
EditionedFileId::new(db, file_id, orig_file_id.edition(db)),
EditionedFileId::new(db, file_id, orig_file_id.edition(db), krate),
is_mod_rs,
mod_dir,
));

View file

@ -3,8 +3,10 @@
use hir_expand::name::{AsName, Name};
use intern::sym;
use crate::attr::Attrs;
use crate::tt::{Leaf, TokenTree, TopSubtree, TtElement};
use crate::{
item_tree::Attrs,
tt::{Leaf, TokenTree, TopSubtree, TtElement},
};
#[derive(Debug, PartialEq, Eq)]
pub struct ProcMacroDef {
@ -29,8 +31,8 @@ impl ProcMacroKind {
}
}
impl Attrs {
pub fn parse_proc_macro_decl(&self, func_name: &Name) -> Option<ProcMacroDef> {
impl Attrs<'_> {
pub(crate) fn parse_proc_macro_decl(&self, func_name: &Name) -> Option<ProcMacroDef> {
if self.is_proc_macro() {
Some(ProcMacroDef { name: func_name.clone(), kind: ProcMacroKind::Bang })
} else if self.is_proc_macro_attribute() {
@ -51,15 +53,10 @@ impl Attrs {
}
}
pub fn parse_proc_macro_derive(&self) -> Option<(Name, Box<[Name]>)> {
pub(crate) fn parse_proc_macro_derive(&self) -> Option<(Name, Box<[Name]>)> {
let derive = self.by_key(sym::proc_macro_derive).tt_values().next()?;
parse_macro_name_and_helper_attrs(derive)
}
pub fn parse_rustc_builtin_macro(&self) -> Option<(Name, Box<[Name]>)> {
let derive = self.by_key(sym::rustc_builtin_macro).tt_values().next()?;
parse_macro_name_and_helper_attrs(derive)
}
}
// This fn is intended for `#[proc_macro_derive(..)]` and `#[rustc_builtin_macro(..)]`, which have
@ -84,14 +81,11 @@ pub(crate) fn parse_macro_name_and_helper_attrs(tt: &TopSubtree) -> Option<(Name
let helpers = tt::TokenTreesView::new(&tt.token_trees().flat_tokens()[3..]).try_into_subtree()?;
let helpers = helpers
.iter()
.filter(
|tt| !matches!(tt, TtElement::Leaf(Leaf::Punct(comma)) if comma.char == ','),
)
.map(|tt| match tt {
.filter_map(|tt| match tt {
TtElement::Leaf(Leaf::Ident(helper)) => Some(helper.as_name()),
_ => None,
})
.collect::<Option<Box<[_]>>>()?;
.collect::<Box<[_]>>();
Some((trait_name.as_name(), helpers))
}

View file

@ -21,7 +21,7 @@ use triomphe::Arc;
use crate::{
ConstId, EnumId, EnumVariantId, EnumVariantLoc, ExternBlockId, FunctionId, HasModule, ImplId,
ItemContainerId, ModuleId, StaticId, StructId, TraitId, TypeAliasId, UnionId, VariantId,
attr::Attrs,
attrs::AttrFlags,
db::DefDatabase,
expr_store::{
ExpressionStore, ExpressionStoreSourceMap,
@ -47,12 +47,13 @@ pub struct StructSignature {
pub store: Arc<ExpressionStore>,
pub flags: StructFlags,
pub shape: FieldsShape,
pub repr: Option<ReprOptions>,
}
bitflags! {
#[derive(Debug, Copy, Clone, PartialEq, Eq)]
pub struct StructFlags: u8 {
/// Indicates whether this struct has `#[repr]`.
const HAS_REPR = 1 << 0;
/// Indicates whether the struct has a `#[rustc_has_incoherent_inherent_impls]` attribute.
const RUSTC_HAS_INCOHERENT_INHERENT_IMPLS = 1 << 1;
/// Indicates whether the struct has a `#[fundamental]` attribute.
@ -74,26 +75,28 @@ impl StructSignature {
pub fn query(db: &dyn DefDatabase, id: StructId) -> (Arc<Self>, Arc<ExpressionStoreSourceMap>) {
let loc = id.lookup(db);
let InFile { file_id, value: source } = loc.source(db);
let attrs = db.attrs(id.into());
let attrs = AttrFlags::query(db, id.into());
let mut flags = StructFlags::empty();
if attrs.by_key(sym::rustc_has_incoherent_inherent_impls).exists() {
if attrs.contains(AttrFlags::RUSTC_HAS_INCOHERENT_INHERENT_IMPLS) {
flags |= StructFlags::RUSTC_HAS_INCOHERENT_INHERENT_IMPLS;
}
if attrs.by_key(sym::fundamental).exists() {
if attrs.contains(AttrFlags::FUNDAMENTAL) {
flags |= StructFlags::FUNDAMENTAL;
}
if let Some(lang) = attrs.lang_item() {
if attrs.contains(AttrFlags::HAS_REPR) {
flags |= StructFlags::HAS_REPR;
}
if let Some(lang) = attrs.lang_item_with_attrs(db, id.into()) {
match lang {
_ if *lang == sym::phantom_data => flags |= StructFlags::IS_PHANTOM_DATA,
_ if *lang == sym::owned_box => flags |= StructFlags::IS_BOX,
_ if *lang == sym::manually_drop => flags |= StructFlags::IS_MANUALLY_DROP,
_ if *lang == sym::unsafe_cell => flags |= StructFlags::IS_UNSAFE_CELL,
_ if *lang == sym::unsafe_pinned => flags |= StructFlags::IS_UNSAFE_PINNED,
_ if lang == sym::phantom_data => flags |= StructFlags::IS_PHANTOM_DATA,
_ if lang == sym::owned_box => flags |= StructFlags::IS_BOX,
_ if lang == sym::manually_drop => flags |= StructFlags::IS_MANUALLY_DROP,
_ if lang == sym::unsafe_cell => flags |= StructFlags::IS_UNSAFE_CELL,
_ if lang == sym::unsafe_pinned => flags |= StructFlags::IS_UNSAFE_PINNED,
_ => (),
}
}
let repr = attrs.repr();
let shape = adt_shape(source.kind());
let (store, generic_params, source_map) = lower_generic_params(
@ -111,11 +114,19 @@ impl StructSignature {
flags,
shape,
name: as_name_opt(source.name()),
repr,
}),
Arc::new(source_map),
)
}
#[inline]
pub fn repr(&self, db: &dyn DefDatabase, id: StructId) -> Option<ReprOptions> {
if self.flags.contains(StructFlags::HAS_REPR) {
AttrFlags::repr(db, id.into())
} else {
None
}
}
}
#[inline]
@ -133,22 +144,22 @@ pub struct UnionSignature {
pub generic_params: Arc<GenericParams>,
pub store: Arc<ExpressionStore>,
pub flags: StructFlags,
pub repr: Option<ReprOptions>,
}
impl UnionSignature {
pub fn query(db: &dyn DefDatabase, id: UnionId) -> (Arc<Self>, Arc<ExpressionStoreSourceMap>) {
let loc = id.lookup(db);
let attrs = db.attrs(id.into());
let attrs = AttrFlags::query(db, id.into());
let mut flags = StructFlags::empty();
if attrs.by_key(sym::rustc_has_incoherent_inherent_impls).exists() {
if attrs.contains(AttrFlags::RUSTC_HAS_INCOHERENT_INHERENT_IMPLS) {
flags |= StructFlags::RUSTC_HAS_INCOHERENT_INHERENT_IMPLS;
}
if attrs.by_key(sym::fundamental).exists() {
if attrs.contains(AttrFlags::FUNDAMENTAL) {
flags |= StructFlags::FUNDAMENTAL;
}
let repr = attrs.repr();
if attrs.contains(AttrFlags::HAS_REPR) {
flags |= StructFlags::HAS_REPR;
}
let InFile { file_id, value: source } = loc.source(db);
let (store, generic_params, source_map) = lower_generic_params(
@ -164,7 +175,6 @@ impl UnionSignature {
generic_params,
store,
flags,
repr,
name: as_name_opt(source.name()),
}),
Arc::new(source_map),
@ -185,20 +195,17 @@ pub struct EnumSignature {
pub generic_params: Arc<GenericParams>,
pub store: Arc<ExpressionStore>,
pub flags: EnumFlags,
pub repr: Option<ReprOptions>,
}
impl EnumSignature {
pub fn query(db: &dyn DefDatabase, id: EnumId) -> (Arc<Self>, Arc<ExpressionStoreSourceMap>) {
let loc = id.lookup(db);
let attrs = db.attrs(id.into());
let attrs = AttrFlags::query(db, id.into());
let mut flags = EnumFlags::empty();
if attrs.by_key(sym::rustc_has_incoherent_inherent_impls).exists() {
if attrs.contains(AttrFlags::RUSTC_HAS_INCOHERENT_INHERENT_IMPLS) {
flags |= EnumFlags::RUSTC_HAS_INCOHERENT_INHERENT_IMPLS;
}
let repr = attrs.repr();
let InFile { file_id, value: source } = loc.source(db);
let (store, generic_params, source_map) = lower_generic_params(
db,
@ -214,15 +221,14 @@ impl EnumSignature {
generic_params,
store,
flags,
repr,
name: as_name_opt(source.name()),
}),
Arc::new(source_map),
)
}
pub fn variant_body_type(&self) -> IntegerType {
match self.repr {
pub fn variant_body_type(db: &dyn DefDatabase, id: EnumId) -> IntegerType {
match AttrFlags::repr(db, id.into()) {
Some(ReprOptions { int: Some(builtin), .. }) => builtin,
_ => IntegerType::Pointer(true),
}
@ -250,9 +256,9 @@ impl ConstSignature {
let loc = id.lookup(db);
let module = loc.container.module(db);
let attrs = db.attrs(id.into());
let attrs = AttrFlags::query(db, id.into());
let mut flags = ConstFlags::empty();
if attrs.by_key(sym::rustc_allow_incoherent_impl).exists() {
if attrs.contains(AttrFlags::RUSTC_ALLOW_INCOHERENT_IMPL) {
flags |= ConstFlags::RUSTC_ALLOW_INCOHERENT_IMPL;
}
let source = loc.source(db);
@ -305,9 +311,9 @@ impl StaticSignature {
let loc = id.lookup(db);
let module = loc.container.module(db);
let attrs = db.attrs(id.into());
let attrs = AttrFlags::query(db, id.into());
let mut flags = StaticFlags::empty();
if attrs.by_key(sym::rustc_allow_incoherent_impl).exists() {
if attrs.contains(AttrFlags::RUSTC_ALLOW_INCOHERENT_IMPL) {
flags |= StaticFlags::RUSTC_ALLOW_INCOHERENT_IMPL;
}
@ -432,7 +438,7 @@ impl TraitSignature {
let loc = id.lookup(db);
let mut flags = TraitFlags::empty();
let attrs = db.attrs(id.into());
let attrs = AttrFlags::query(db, id.into());
let source = loc.source(db);
if source.value.auto_token().is_some() {
flags.insert(TraitFlags::AUTO);
@ -443,34 +449,23 @@ impl TraitSignature {
if source.value.eq_token().is_some() {
flags.insert(TraitFlags::ALIAS);
}
if attrs.by_key(sym::fundamental).exists() {
if attrs.contains(AttrFlags::FUNDAMENTAL) {
flags |= TraitFlags::FUNDAMENTAL;
}
if attrs.by_key(sym::rustc_has_incoherent_inherent_impls).exists() {
if attrs.contains(AttrFlags::RUSTC_HAS_INCOHERENT_INHERENT_IMPLS) {
flags |= TraitFlags::RUSTC_HAS_INCOHERENT_INHERENT_IMPLS;
}
if attrs.by_key(sym::rustc_paren_sugar).exists() {
if attrs.contains(AttrFlags::RUSTC_PAREN_SUGAR) {
flags |= TraitFlags::RUSTC_PAREN_SUGAR;
}
if attrs.by_key(sym::rustc_coinductive).exists() {
if attrs.contains(AttrFlags::RUSTC_COINDUCTIVE) {
flags |= TraitFlags::COINDUCTIVE;
}
let mut skip_array_during_method_dispatch =
attrs.by_key(sym::rustc_skip_array_during_method_dispatch).exists();
let mut skip_boxed_slice_during_method_dispatch = false;
for tt in attrs.by_key(sym::rustc_skip_during_method_dispatch).tt_values() {
for tt in tt.iter() {
if let tt::iter::TtElement::Leaf(tt::Leaf::Ident(ident)) = tt {
skip_array_during_method_dispatch |= ident.sym == sym::array;
skip_boxed_slice_during_method_dispatch |= ident.sym == sym::boxed_slice;
}
}
}
if skip_array_during_method_dispatch {
if attrs.contains(AttrFlags::RUSTC_SKIP_ARRAY_DURING_METHOD_DISPATCH) {
flags |= TraitFlags::SKIP_ARRAY_DURING_METHOD_DISPATCH;
}
if skip_boxed_slice_during_method_dispatch {
if attrs.contains(AttrFlags::RUSTC_SKIP_BOXED_SLICE_DURING_METHOD_DISPATCH) {
flags |= TraitFlags::SKIP_BOXED_SLICE_DURING_METHOD_DISPATCH;
}
@ -502,7 +497,8 @@ bitflags! {
const HAS_TARGET_FEATURE = 1 << 9;
const DEPRECATED_SAFE_2024 = 1 << 10;
const EXPLICIT_SAFE = 1 << 11;
const RUSTC_INTRINSIC = 1 << 12;
const HAS_LEGACY_CONST_GENERICS = 1 << 12;
const RUSTC_INTRINSIC = 1 << 13;
}
}
@ -515,8 +511,6 @@ pub struct FunctionSignature {
pub ret_type: Option<TypeRefId>,
pub abi: Option<Symbol>,
pub flags: FnFlags,
// FIXME: we should put this behind a fn flags + query to avoid bloating the struct
pub legacy_const_generics_indices: Option<Box<Box<[u32]>>>,
}
impl FunctionSignature {
@ -528,23 +522,26 @@ impl FunctionSignature {
let module = loc.container.module(db);
let mut flags = FnFlags::empty();
let attrs = db.attrs(id.into());
if attrs.by_key(sym::rustc_allow_incoherent_impl).exists() {
let attrs = AttrFlags::query(db, id.into());
if attrs.contains(AttrFlags::RUSTC_ALLOW_INCOHERENT_IMPL) {
flags.insert(FnFlags::RUSTC_ALLOW_INCOHERENT_IMPL);
}
if attrs.by_key(sym::target_feature).exists() {
if attrs.contains(AttrFlags::HAS_TARGET_FEATURE) {
flags.insert(FnFlags::HAS_TARGET_FEATURE);
}
if attrs.by_key(sym::rustc_intrinsic).exists() {
if attrs.contains(AttrFlags::RUSTC_INTRINSIC) {
flags.insert(FnFlags::RUSTC_INTRINSIC);
}
let legacy_const_generics_indices = attrs.rustc_legacy_const_generics();
if attrs.contains(AttrFlags::HAS_LEGACY_CONST_GENERICS) {
flags.insert(FnFlags::HAS_LEGACY_CONST_GENERICS);
}
let source = loc.source(db);
if source.value.unsafe_token().is_some() {
if attrs.by_key(sym::rustc_deprecated_safe_2024).exists() {
if attrs.contains(AttrFlags::RUSTC_DEPRECATED_SAFE_2024) {
flags.insert(FnFlags::DEPRECATED_SAFE_2024);
} else {
flags.insert(FnFlags::UNSAFE);
@ -586,7 +583,6 @@ impl FunctionSignature {
ret_type,
abi,
flags,
legacy_const_generics_indices,
name,
}),
Arc::new(source_map),
@ -635,6 +631,19 @@ impl FunctionSignature {
self.flags.contains(FnFlags::HAS_TARGET_FEATURE)
}
#[inline]
pub fn legacy_const_generics_indices<'db>(
&self,
db: &'db dyn DefDatabase,
id: FunctionId,
) -> Option<&'db [u32]> {
if !self.flags.contains(FnFlags::HAS_LEGACY_CONST_GENERICS) {
return None;
}
AttrFlags::legacy_const_generic_indices(db, id).as_deref()
}
pub fn is_intrinsic(db: &dyn DefDatabase, id: FunctionId) -> bool {
let data = db.function_signature(id);
data.flags.contains(FnFlags::RUSTC_INTRINSIC)
@ -678,11 +687,11 @@ impl TypeAliasSignature {
let loc = id.lookup(db);
let mut flags = TypeAliasFlags::empty();
let attrs = db.attrs(id.into());
if attrs.by_key(sym::rustc_has_incoherent_inherent_impls).exists() {
let attrs = AttrFlags::query(db, id.into());
if attrs.contains(AttrFlags::RUSTC_HAS_INCOHERENT_INHERENT_IMPLS) {
flags.insert(TypeAliasFlags::RUSTC_HAS_INCOHERENT_INHERENT_IMPL);
}
if attrs.by_key(sym::rustc_allow_incoherent_impl).exists() {
if attrs.contains(AttrFlags::RUSTC_ALLOW_INCOHERENT_IMPL) {
flags.insert(TypeAliasFlags::RUSTC_ALLOW_INCOHERENT_IMPL);
}
if matches!(loc.container, ItemContainerId::ExternBlockId(_)) {
@ -865,7 +874,7 @@ fn lower_fields<Field: ast::HasAttrs + ast::HasVisibility>(
let mut has_fields = false;
for (ty, field) in fields.value {
has_fields = true;
match Attrs::is_cfg_enabled_for(db, &field, col.span_map(), cfg_options) {
match AttrFlags::is_cfg_enabled_for(&field, cfg_options) {
Ok(()) => {
let type_ref =
col.lower_type_ref_opt(ty, &mut ExprCollector::impl_trait_error_allocator);
@ -927,7 +936,6 @@ impl EnumVariants {
let loc = e.lookup(db);
let source = loc.source(db);
let ast_id_map = db.ast_id_map(source.file_id);
let span_map = db.span_map(source.file_id);
let mut diagnostics = ThinVec::new();
let cfg_options = loc.container.krate.cfg_options(db);
@ -939,7 +947,7 @@ impl EnumVariants {
.variants()
.filter_map(|variant| {
let ast_id = ast_id_map.ast_id(&variant);
match Attrs::is_cfg_enabled_for(db, &variant, span_map.as_ref(), cfg_options) {
match AttrFlags::is_cfg_enabled_for(&variant, cfg_options) {
Ok(()) => {
let enum_variant =
EnumVariantLoc { id: source.with_value(ast_id), parent: e, index }

View file

@ -7,7 +7,7 @@ use syntax::{AstNode, AstPtr, ast};
use crate::{
AstIdLoc, GenericDefId, LocalFieldId, LocalLifetimeParamId, LocalTypeOrConstParamId, Lookup,
UseId, VariantId, attr::Attrs, db::DefDatabase,
UseId, VariantId, attrs::AttrFlags, db::DefDatabase,
};
pub trait HasSource {
@ -145,15 +145,13 @@ impl HasChildSource<LocalFieldId> for VariantId {
(lookup.source(db).map(|it| it.kind()), lookup.container)
}
};
let span_map = db.span_map(src.file_id);
let mut map = ArenaMap::new();
match &src.value {
ast::StructKind::Tuple(fl) => {
let cfg_options = container.krate.cfg_options(db);
let mut idx = 0;
for fd in fl.fields() {
let enabled =
Attrs::is_cfg_enabled_for(db, &fd, span_map.as_ref(), cfg_options).is_ok();
let enabled = AttrFlags::is_cfg_enabled_for(&fd, cfg_options).is_ok();
if !enabled {
continue;
}
@ -168,8 +166,7 @@ impl HasChildSource<LocalFieldId> for VariantId {
let cfg_options = container.krate.cfg_options(db);
let mut idx = 0;
for fd in fl.fields() {
let enabled =
Attrs::is_cfg_enabled_for(db, &fd, span_map.as_ref(), cfg_options).is_ok();
let enabled = AttrFlags::is_cfg_enabled_for(&fd, cfg_options).is_ok();
if !enabled {
continue;
}

View file

@ -190,7 +190,15 @@ impl TestDB {
let mut res = DefMap::ROOT;
for (module, data) in def_map.modules() {
let src = data.definition_source(self);
if src.file_id != position.file_id {
// We're not comparing the `base_db::EditionedFileId`, but rather the VFS `FileId`, because
// `position.file_id` is created before the def map, causing it to have the wrong crate
// attached often, which means it won't compare equal. This should not be a problem in a real
// r-a session, only in tests, because in real r-a we only guess the crate on syntactic-only
// (e.g. on-enter) handlers. The rest pick the `EditionedFileId` from the def map.
let Some(file_id) = src.file_id.file_id() else {
continue;
};
if file_id.file_id(self) != position.file_id.file_id(self) {
continue;
}
@ -230,7 +238,15 @@ impl TestDB {
let mut fn_def = None;
for (_, module) in def_map.modules() {
let file_id = module.definition_source(self).file_id;
if file_id != position.file_id {
// We're not comparing the `base_db::EditionedFileId`, but rather the VFS `FileId`, because
// `position.file_id` is created before the def map, causing it to have the wrong crate
// attached often, which means it won't compare equal. This should not be a problem in a real
// r-a session, only in tests, because in real r-a we only guess the crate on syntactic-only
// (e.g. on-enter) handlers. The rest pick the `EditionedFileId` from the def map.
let Some(file_id) = file_id.file_id() else {
continue;
};
if file_id.file_id(self) != position.file_id.file_id(self) {
continue;
}
for decl in module.scope.declarations() {
@ -253,26 +269,25 @@ impl TestDB {
};
if size != Some(new_size) {
size = Some(new_size);
fn_def = Some(it);
fn_def = Some((it, file_id));
}
}
}
}
// Find the innermost block expression that has a `DefMap`.
let def_with_body = fn_def?.into();
let (def_with_body, file_id) = fn_def?;
let def_with_body = def_with_body.into();
let source_map = self.body_with_source_map(def_with_body).1;
let scopes = self.expr_scopes(def_with_body);
let root_syntax_node = self.parse(position.file_id).syntax_node();
let root_syntax_node = self.parse(file_id).syntax_node();
let scope_iter =
algo::ancestors_at_offset(&root_syntax_node, position.offset).filter_map(|node| {
let block = ast::BlockExpr::cast(node)?;
let expr = ast::Expr::from(block);
let expr_id = source_map
.node_expr(InFile::new(position.file_id.into(), &expr))?
.as_expr()
.unwrap();
let expr_id =
source_map.node_expr(InFile::new(file_id.into(), &expr))?.as_expr().unwrap();
let scope = scopes.scope_for(expr_id).unwrap();
Some(scope)
});

View file

@ -23,6 +23,8 @@ triomphe.workspace = true
query-group.workspace = true
salsa.workspace = true
salsa-macros.workspace = true
arrayvec.workspace = true
thin-vec.workspace = true
# local deps
stdx.workspace = true

View file

@ -1,200 +1,397 @@
//! A higher level attributes based on TokenTree, with also some shortcuts.
use std::iter;
use std::{borrow::Cow, fmt, ops};
//! Defines the basics of attributes lowering.
//!
//! The heart and soul of this module is [`expand_cfg_attr()`], alongside its sibling
//! [`expand_cfg_attr_with_doc_comments()`]. It is used to implement all attribute lowering
//! in r-a. Its basic job is to list attributes; however, attributes do not necessarily map
//! into [`ast::Attr`], because `cfg_attr` can map to zero, one, or more attributes
//! (`#[cfg_attr(predicate, attr1, attr2, ...)]`). To bridge this gap, this module defines
//! [`Meta`], which represents a desugared attribute. Various bits of r-a need different
//! things from [`Meta`], therefore it contains many parts. The basic idea is:
//!
//! - There are three kinds of attributes, `path = value`, `path`, and `path(token_tree)`.
//! - Most bits of rust-analyzer only need to deal with some paths. Therefore, we keep
//! the path only if it has up to 2 segments, or one segment for `path = value`.
//! We also only keep the value in `path = value` if it is a literal. However, we always
//! save all the relevant ranges of attributes (the path range, and the full attribute range)
//! for parts of r-a (e.g. name resolution) that need a faithful representation of the
//! attribute.
//!
//! [`expand_cfg_attr()`] expands `cfg_attr`s as it goes (as its name implies), to list
//! all attributes.
//!
//! Another thing to note is that we need to be able to map an attribute back to a range
//! (for diagnostic purposes etc.). This is only ever needed for attributes that participate
//! in name resolution. An attribute is mapped back by its [`AttrId`], which is just an
//! index into the item tree attributes list. To minimize the risk of bugs, we have one
//! place (here) and one function ([`is_item_tree_filtered_attr()`]) that decides whether
//! an attribute participates in name resolution.
use std::{
borrow::Cow, cell::OnceCell, convert::Infallible, fmt, iter::Peekable, ops::ControlFlow,
};
use ::tt::{TextRange, TextSize};
use arrayvec::ArrayVec;
use base_db::Crate;
use cfg::{CfgExpr, CfgOptions};
use either::Either;
use intern::{Interned, Symbol, sym};
use intern::{Interned, Symbol};
use mbe::{DelimiterKind, Punct};
use smallvec::{SmallVec, smallvec};
use span::{Span, SyntaxContext};
use syntax::unescape;
use syntax::{AstNode, AstToken, SyntaxNode, ast, match_ast};
use syntax_bridge::{DocCommentDesugarMode, desugar_doc_comment_text, syntax_node_to_token_tree};
use triomphe::ThinArc;
use parser::T;
use smallvec::SmallVec;
use span::{RealSpanMap, Span, SyntaxContext};
use syntax::{
AstNode, NodeOrToken, SyntaxNode, SyntaxToken,
ast::{self, TokenTreeChildren},
unescape,
};
use syntax_bridge::DocCommentDesugarMode;
use crate::{
AstId,
db::ExpandDatabase,
mod_path::ModPath,
name::Name,
span_map::SpanMapRef,
tt::{self, TopSubtree, token_to_literal},
tt::{self, TopSubtree},
};
/// Syntactical attributes, without filtering of `cfg_attr`s.
#[derive(Default, Debug, Clone, PartialEq, Eq)]
pub struct RawAttrs {
// FIXME: This can become `Box<[Attr]>` if https://internals.rust-lang.org/t/layout-of-dst-box/21728?u=chrefr is accepted.
entries: Option<ThinArc<(), Attr>>,
#[derive(Debug)]
pub struct AttrPath {
/// This can be empty if the path is not of 1 or 2 segments exactly.
pub segments: ArrayVec<SyntaxToken, 2>,
pub range: TextRange,
// FIXME: This shouldn't be textual, `#[test]` needs name resolution.
// And if textual, it shouldn't be here, it should be in hir-def/src/attrs.rs. But some macros
// fully qualify `test` as `core::prelude::vX::test`, and this is more than 2 segments, so hir-def
// attrs can't find it. But this will mean we have to push every up-to-4-segments path, which
// may impact perf. So it was easier to just hack it here.
pub is_test: bool,
}
impl ops::Deref for RawAttrs {
type Target = [Attr];
fn deref(&self) -> &[Attr] {
match &self.entries {
Some(it) => &it.slice,
None => &[],
}
}
}
impl RawAttrs {
pub const EMPTY: Self = Self { entries: None };
pub fn new(
db: &dyn ExpandDatabase,
owner: &dyn ast::HasAttrs,
span_map: SpanMapRef<'_>,
) -> Self {
let entries: Vec<_> = Self::attrs_iter::<true>(db, owner, span_map).collect();
let entries = if entries.is_empty() {
None
} else {
Some(ThinArc::from_header_and_iter((), entries.into_iter()))
};
RawAttrs { entries }
}
/// A [`RawAttrs`] that has its `#[cfg_attr(...)]` attributes expanded.
pub fn new_expanded(
db: &dyn ExpandDatabase,
owner: &dyn ast::HasAttrs,
span_map: SpanMapRef<'_>,
cfg_options: &CfgOptions,
) -> Self {
let entries: Vec<_> =
Self::attrs_iter_expanded::<true>(db, owner, span_map, cfg_options).collect();
let entries = if entries.is_empty() {
None
} else {
Some(ThinArc::from_header_and_iter((), entries.into_iter()))
};
RawAttrs { entries }
}
pub fn attrs_iter<const DESUGAR_COMMENTS: bool>(
db: &dyn ExpandDatabase,
owner: &dyn ast::HasAttrs,
span_map: SpanMapRef<'_>,
) -> impl Iterator<Item = Attr> {
collect_attrs(owner).filter_map(move |(id, attr)| match attr {
Either::Left(attr) => {
attr.meta().and_then(|meta| Attr::from_src(db, meta, span_map, id))
impl AttrPath {
#[inline]
fn extract(path: &ast::Path) -> Self {
let mut is_test = false;
let segments = (|| {
let mut segments = ArrayVec::new();
let segment2 = path.segment()?.name_ref()?.syntax().first_token()?;
if segment2.text() == "test" {
// `#[test]` or `#[core::prelude::vX::test]`.
is_test = true;
}
Either::Right(comment) if DESUGAR_COMMENTS => comment.doc_comment().map(|doc| {
let span = span_map.span_for_range(comment.syntax().text_range());
let (text, kind) = desugar_doc_comment_text(doc, DocCommentDesugarMode::ProcMacro);
Attr {
id,
input: Some(Box::new(AttrInput::Literal(tt::Literal {
symbol: text,
span,
kind,
suffix: None,
}))),
path: Interned::new(ModPath::from(Name::new_symbol(sym::doc, span.ctx))),
ctxt: span.ctx,
let segment1 = path.qualifier();
if let Some(segment1) = segment1 {
if segment1.qualifier().is_some() {
None
} else {
let segment1 = segment1.segment()?.name_ref()?.syntax().first_token()?;
segments.push(segment1);
segments.push(segment2);
Some(segments)
}
}),
Either::Right(_) => None,
})
}
pub fn attrs_iter_expanded<const DESUGAR_COMMENTS: bool>(
db: &dyn ExpandDatabase,
owner: &dyn ast::HasAttrs,
span_map: SpanMapRef<'_>,
cfg_options: &CfgOptions,
) -> impl Iterator<Item = Attr> {
Self::attrs_iter::<DESUGAR_COMMENTS>(db, owner, span_map)
.flat_map(|attr| attr.expand_cfg_attr(db, cfg_options))
}
pub fn merge(&self, other: Self) -> Self {
match (&self.entries, other.entries) {
(None, None) => Self::EMPTY,
(None, entries @ Some(_)) => Self { entries },
(Some(entries), None) => Self { entries: Some(entries.clone()) },
(Some(a), Some(b)) => {
let last_ast_index = a.slice.last().map_or(0, |it| it.id.ast_index() + 1);
let items = a
.slice
.iter()
.cloned()
.chain(b.slice.iter().map(|it| {
let mut it = it.clone();
let id = it.id.ast_index() + last_ast_index;
it.id = AttrId::new(id, it.id.is_inner_attr());
it
}))
.collect::<Vec<_>>();
Self { entries: Some(ThinArc::from_header_and_iter((), items.into_iter())) }
} else {
segments.push(segment2);
Some(segments)
}
})();
AttrPath {
segments: segments.unwrap_or(ArrayVec::new()),
range: path.syntax().text_range(),
is_test,
}
}
/// Processes `cfg_attr`s
pub fn expand_cfg_attr(self, db: &dyn ExpandDatabase, krate: Crate) -> RawAttrs {
let has_cfg_attrs =
self.iter().any(|attr| attr.path.as_ident().is_some_and(|name| *name == sym::cfg_attr));
if !has_cfg_attrs {
return self;
#[inline]
pub fn is1(&self, segment: &str) -> bool {
self.segments.len() == 1 && self.segments[0].text() == segment
}
}
/// A desugared, lightweight view of a single attribute's contents, produced while
/// expanding `cfg_attr`s in AST form (no token trees are built).
#[derive(Debug)]
pub enum Meta {
    /// A `name = value` attribute, e.g. `#[doc = "…"]`.
    ///
    /// `name` is `None` if not a single token. `value` is a literal or `None`.
    NamedKeyValue {
        path_range: TextRange,
        name: Option<SyntaxToken>,
        value: Option<SyntaxToken>,
    },
    /// A call-like attribute, e.g. `#[derive(Debug)]`: a path followed by a token tree.
    TokenTree {
        path: AttrPath,
        tt: ast::TokenTree,
    },
    /// A bare path attribute, e.g. `#[inline]`.
    Path {
        path: AttrPath,
    },
}
impl Meta {
#[inline]
pub fn path_range(&self) -> TextRange {
match self {
Meta::NamedKeyValue { path_range, .. } => *path_range,
Meta::TokenTree { path, .. } | Meta::Path { path } => path.range,
}
}
fn extract(iter: &mut Peekable<TokenTreeChildren>) -> Option<(Self, TextSize)> {
let mut start_offset = None;
if let Some(NodeOrToken::Token(colon1)) = iter.peek()
&& colon1.kind() == T![:]
{
start_offset = Some(colon1.text_range().start());
iter.next();
iter.next_if(|it| it.as_token().is_some_and(|it| it.kind() == T![:]));
}
let first_segment = iter
.next_if(|it| it.as_token().is_some_and(|it| it.kind().is_any_identifier()))?
.into_token()?;
let mut is_test = first_segment.text() == "test";
let start_offset = start_offset.unwrap_or_else(|| first_segment.text_range().start());
let mut segments_len = 1;
let mut second_segment = None;
let mut path_range = first_segment.text_range();
while iter.peek().and_then(NodeOrToken::as_token).is_some_and(|it| it.kind() == T![:])
&& let _ = iter.next()
&& iter.peek().and_then(NodeOrToken::as_token).is_some_and(|it| it.kind() == T![:])
&& let _ = iter.next()
&& let Some(NodeOrToken::Token(segment)) = iter.peek()
&& segment.kind().is_any_identifier()
{
segments_len += 1;
is_test = segment.text() == "test";
second_segment = Some(segment.clone());
path_range = TextRange::new(path_range.start(), segment.text_range().end());
iter.next();
}
let cfg_options = krate.cfg_options(db);
let new_attrs = self
.iter()
.cloned()
.flat_map(|attr| attr.expand_cfg_attr(db, cfg_options))
.collect::<Vec<_>>();
let entries = if new_attrs.is_empty() {
None
} else {
Some(ThinArc::from_header_and_iter((), new_attrs.into_iter()))
let segments = |first, second| {
let mut segments = ArrayVec::new();
if segments_len <= 2 {
segments.push(first);
if let Some(second) = second {
segments.push(second);
}
}
segments
};
RawAttrs { entries }
let meta = match iter.peek() {
Some(NodeOrToken::Token(eq)) if eq.kind() == T![=] => {
iter.next();
let value = match iter.peek() {
Some(NodeOrToken::Token(token)) if token.kind().is_literal() => {
// No need to consume it, it will be consumed by `extract_and_eat_comma()`.
Some(token.clone())
}
_ => None,
};
let name = if second_segment.is_none() { Some(first_segment) } else { None };
Meta::NamedKeyValue { path_range, name, value }
}
Some(NodeOrToken::Node(tt)) => Meta::TokenTree {
path: AttrPath {
segments: segments(first_segment, second_segment),
range: path_range,
is_test,
},
tt: tt.clone(),
},
_ => Meta::Path {
path: AttrPath {
segments: segments(first_segment, second_segment),
range: path_range,
is_test,
},
},
};
Some((meta, start_offset))
}
pub fn is_empty(&self) -> bool {
self.entries.is_none()
    /// Like [`Meta::extract`], but also handles the `unsafe(...)` attribute wrapper
    /// (e.g. `#[unsafe(no_mangle)]`) and computes the full [`TextRange`] of the
    /// attribute, up to (but not including) the separating comma.
    ///
    /// `iter` is positioned inside `container`; on return the attribute and its
    /// trailing comma have been consumed, ready for the next attribute in the list.
    fn extract_possibly_unsafe(
        iter: &mut Peekable<TokenTreeChildren>,
        container: &ast::TokenTree,
    ) -> Option<(Self, TextRange)> {
        if iter.peek().is_some_and(|it| it.as_token().is_some_and(|it| it.kind() == T![unsafe])) {
            // `unsafe(...)`: recurse into the parenthesized token tree for the real meta.
            iter.next();
            let tt = iter.next()?.into_node()?;
            let result = Self::extract(&mut TokenTreeChildren::new(&tt).peekable()).map(
                |(meta, start_offset)| (meta, TextRange::new(start_offset, tt_end_offset(&tt))),
            );
            // Advance the outer iterator past everything up to and including the next comma.
            while iter.next().is_some_and(|it| it.as_token().is_none_or(|it| it.kind() != T![,])) {}
            result
        } else {
            Self::extract(iter).map(|(meta, start_offset)| {
                // The attribute ends at the next comma, or — if none follows — at the end
                // of the container (excluding its closing delimiter, see `tt_end_offset`).
                let end_offset = 'find_end_offset: {
                    for it in iter {
                        if let NodeOrToken::Token(it) = it
                            && it.kind() == T![,]
                        {
                            break 'find_end_offset it.text_range().start();
                        }
                    }
                    tt_end_offset(container)
                };
                (meta, TextRange::new(start_offset, end_offset))
            })
        }
    }
}
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub struct AttrId {
id: u32,
/// Offset just before a token tree's closing delimiter — i.e. the *start* of its
/// last token. Used as the exclusive end offset of the final attribute inside a
/// `cfg_attr(...)` list, so the closing `)` is not included in the attribute range.
fn tt_end_offset(tt: &ast::TokenTree) -> TextSize {
    tt.syntax().last_token().unwrap().text_range().start()
}
// FIXME: This only handles a single level of cfg_attr nesting
// that is `#[cfg_attr(all(), cfg_attr(all(), cfg(any())))]` breaks again
impl AttrId {
const INNER_ATTR_SET_BIT: u32 = 1 << 31;
/// The callback is passed a desugared form of the attribute ([`Meta`]), a [`SyntaxNode`] fully containing it
/// (note: it may not be the direct parent), the range within the [`SyntaxNode`] bounding the attribute,
/// and the outermost `ast::Attr`. Note that one node may map to multiple [`Meta`]s due to `cfg_attr`.
///
/// Returns `Some` with the callback's break value if it broke early, `None` otherwise.
#[inline]
pub fn expand_cfg_attr<'a, BreakValue>(
    attrs: impl Iterator<Item = ast::Attr>,
    cfg_options: impl FnMut() -> &'a CfgOptions,
    mut callback: impl FnMut(Meta, &SyntaxNode, TextRange, &ast::Attr) -> ControlFlow<BreakValue>,
) -> Option<BreakValue> {
    expand_cfg_attr_with_doc_comments::<Infallible, _>(
        attrs.map(Either::Left),
        cfg_options,
        // This closure pattern is irrefutable: `DocComment = Infallible`, so the
        // `Either::Right` variant is uninhabited and only `Either::Left` can occur.
        move |Either::Left((meta, container, range, top_attr))| {
            callback(meta, container, range, top_attr)
        },
    )
}
pub fn new(id: usize, is_inner: bool) -> Self {
assert!(id <= !Self::INNER_ATTR_SET_BIT as usize);
let id = id as u32;
Self { id: if is_inner { id | Self::INNER_ATTR_SET_BIT } else { id } }
}
/// Like [`expand_cfg_attr`], but the attribute stream may also carry doc comments
/// (`DocComment` items), which are forwarded to the callback unchanged.
///
/// `cfg_attr`s are expanded in AST form: for an active `#[cfg_attr(pred, attrs...)]`
/// every contained attribute is reported individually; inactive ones are dropped
/// entirely. Nested `cfg_attr`s are handled via an explicit stack.
#[inline]
pub fn expand_cfg_attr_with_doc_comments<'a, DocComment, BreakValue>(
    mut attrs: impl Iterator<Item = Either<ast::Attr, DocComment>>,
    mut cfg_options: impl FnMut() -> &'a CfgOptions,
    mut callback: impl FnMut(
        Either<(Meta, &SyntaxNode, TextRange, &ast::Attr), DocComment>,
    ) -> ControlFlow<BreakValue>,
) -> Option<BreakValue> {
    // Stack of `cfg_attr` token-tree iterators currently being expanded (handles nesting).
    let mut stack = SmallVec::<[_; 1]>::new();
    let result = attrs.try_for_each(|top_attr| {
        let top_attr = match top_attr {
            Either::Left(it) => it,
            Either::Right(comment) => return callback(Either::Right(comment)),
        };
        if let Some((attr_name, tt)) = top_attr.as_simple_call()
            && attr_name == "cfg_attr"
        {
            let mut tt_iter = TokenTreeChildren::new(&tt).peekable();
            let cfg = cfg::CfgExpr::parse_from_ast(&mut tt_iter);
            // `Some(false)` means definitely disabled; unknown cfgs count as enabled.
            if cfg_options().check(&cfg) != Some(false) {
                stack.push((tt_iter, tt));
                while let Some((tt_iter, tt)) = stack.last_mut() {
                    let Some((attr, range)) = Meta::extract_possibly_unsafe(tt_iter, tt) else {
                        // Exhausted this `cfg_attr`'s attribute list; pop back out.
                        stack.pop();
                        continue;
                    };
                    if let Meta::TokenTree { path, tt: nested_tt } = &attr
                        && path.is1("cfg_attr")
                    {
                        // Nested `cfg_attr`: check its predicate and descend if active.
                        let mut nested_tt_iter = TokenTreeChildren::new(nested_tt).peekable();
                        let cfg = cfg::CfgExpr::parse_from_ast(&mut nested_tt_iter);
                        if cfg_options().check(&cfg) != Some(false) {
                            stack.push((nested_tt_iter, nested_tt.clone()));
                        }
                    } else {
                        callback(Either::Left((attr, tt.syntax(), range, &top_attr)))?;
                    }
                }
            }
        } else if let Some(ast_meta) = top_attr.meta()
            && let Some(path) = ast_meta.path()
        {
            // A plain (non-`cfg_attr`) attribute: desugar its meta straight from the AST.
            let path = AttrPath::extract(&path);
            let meta = if let Some(tt) = ast_meta.token_tree() {
                Meta::TokenTree { path, tt }
            } else if let Some(value) = ast_meta.expr() {
                let value =
                    if let ast::Expr::Literal(value) = value { Some(value.token()) } else { None };
                let name =
                    if path.segments.len() == 1 { Some(path.segments[0].clone()) } else { None };
                Meta::NamedKeyValue { name, value, path_range: path.range }
            } else {
                Meta::Path { path }
            };
            callback(Either::Left((
                meta,
                ast_meta.syntax(),
                ast_meta.syntax().text_range(),
                &top_attr,
            )))?;
        }
        ControlFlow::Continue(())
    });
    result.break_value()
}
pub fn ast_index(&self) -> usize {
(self.id & !Self::INNER_ATTR_SET_BIT) as usize
}
/// Whether `name` is one of the builtin attributes the item tree deliberately drops.
///
/// These are filtered out because nameres never needs them, which saves memory.
/// An attribute may only appear here if it cannot be shadowed by a macro.
#[inline]
pub(crate) fn is_item_tree_filtered_attr(name: &str) -> bool {
    const FILTERED: &[&str] = &[
        "doc",
        "stable",
        "unstable",
        "target_feature",
        "allow",
        "expect",
        "warn",
        "deny",
        "forbid",
        "repr",
        "inline",
        "track_caller",
        "must_use",
    ];
    FILTERED.contains(&name)
}
    /// Whether this id refers to an inner attribute (`#![...]`), encoded in the top bit.
    pub fn is_inner_attr(&self) -> bool {
        self.id & Self::INNER_ATTR_SET_BIT != 0
    }
/// This collects attributes exactly as the item tree needs them. This is used for the item tree,
/// as well as for resolving [`AttrId`]s.
///
/// Returns `Some(Either::Left(_))` if `on_attr` broke, `Some(Either::Right(cfg))` if the
/// item itself is disabled by a failing `#[cfg(...)]`, and `None` otherwise.
pub fn collect_item_tree_attrs<'a, BreakValue>(
    owner: &dyn ast::HasAttrs,
    cfg_options: impl Fn() -> &'a CfgOptions,
    mut on_attr: impl FnMut(Meta, &SyntaxNode, &ast::Attr, TextRange) -> ControlFlow<BreakValue>,
) -> Option<Either<BreakValue, CfgExpr>> {
    let attrs = ast::attrs_including_inner(owner);
    expand_cfg_attr(
        attrs,
        || cfg_options(),
        |attr, container, range, top_attr| {
            // We filter builtin attributes that we don't need for nameres, because this saves memory.
            // I only put the most common attributes, but if some attribute becomes common feel free to add it.
            // Notice, however: for an attribute to be filtered out, it *must* not be shadowable with a macro!
            let filter = match &attr {
                Meta::NamedKeyValue { name: Some(name), .. } => {
                    is_item_tree_filtered_attr(name.text())
                }
                Meta::TokenTree { path, tt } if path.segments.len() == 1 => {
                    let name = path.segments[0].text();
                    if name == "cfg" {
                        // A failing `cfg` disables the whole item — report it and stop.
                        let cfg =
                            CfgExpr::parse_from_ast(&mut TokenTreeChildren::new(tt).peekable());
                        if cfg_options().check(&cfg) == Some(false) {
                            return ControlFlow::Break(Either::Right(cfg));
                        }
                        true
                    } else {
                        is_item_tree_filtered_attr(name)
                    }
                }
                Meta::Path { path } => {
                    path.segments.len() == 1 && is_item_tree_filtered_attr(path.segments[0].text())
                }
                _ => false,
            };
            if !filter && let ControlFlow::Break(v) = on_attr(attr, container, top_attr, range) {
                return ControlFlow::Break(Either::Left(v));
            }
            ControlFlow::Continue(())
        },
    )
}
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct Attr {
pub id: AttrId,
pub path: Interned<ModPath>,
pub input: Option<Box<AttrInput>>,
pub ctxt: SyntaxContext,
@ -217,131 +414,6 @@ impl fmt::Display for AttrInput {
}
}
impl Attr {
    /// Lowers an `ast::Meta` into an [`Attr`], interning its path and converting its
    /// input (a literal value or a token tree) into hir-level form.
    ///
    /// Returns `None` if the meta has no path or the path fails to lower.
    fn from_src(
        db: &dyn ExpandDatabase,
        ast: ast::Meta,
        span_map: SpanMapRef<'_>,
        id: AttrId,
    ) -> Option<Attr> {
        let path = ast.path()?;
        let range = path.syntax().text_range();
        let path = Interned::new(ModPath::from_src(db, path, &mut |range| {
            span_map.span_for_range(range).ctx
        })?);
        let span = span_map.span_for_range(range);
        let input = if let Some(ast::Expr::Literal(lit)) = ast.expr() {
            // `#[attr = "value"]` form.
            let token = lit.token();
            Some(Box::new(AttrInput::Literal(token_to_literal(token.text(), span))))
        } else if let Some(tt) = ast.token_tree() {
            // `#[attr(...)]` form.
            let tree = syntax_node_to_token_tree(
                tt.syntax(),
                span_map,
                span,
                DocCommentDesugarMode::ProcMacro,
            );
            Some(Box::new(AttrInput::TokenTree(tree)))
        } else {
            // Bare `#[attr]` — no input.
            None
        };
        Some(Attr { id, path, input, ctxt: span.ctx })
    }
    /// Parses an [`Attr`] out of a raw token stream (as found e.g. inside a `cfg_attr`),
    /// splitting it into a leading path part and an optional input part.
    fn from_tt(
        db: &dyn ExpandDatabase,
        mut tt: tt::TokenTreesView<'_>,
        id: AttrId,
    ) -> Option<Attr> {
        if matches!(tt.flat_tokens(),
            [tt::TokenTree::Leaf(tt::Leaf::Ident(tt::Ident { sym, .. })), ..]
            if *sym == sym::unsafe_
        ) {
            // `unsafe(...)` wrapper: descend into the parenthesized subtree.
            match tt.iter().nth(1) {
                Some(tt::TtElement::Subtree(_, iter)) => tt = iter.remaining(),
                _ => return None,
            }
        }
        let first = tt.flat_tokens().first()?;
        let ctxt = first.first_span().ctx;
        let (path, input) = {
            // Walk tokens while they still look like path components (idents, `:`, `$`);
            // everything from the first non-path token onward becomes the input.
            let mut iter = tt.iter();
            let start = iter.savepoint();
            let mut input = tt::TokenTreesView::new(&[]);
            let mut path = iter.from_savepoint(start);
            let mut path_split_savepoint = iter.savepoint();
            while let Some(tt) = iter.next() {
                path = iter.from_savepoint(start);
                if !matches!(
                    tt,
                    tt::TtElement::Leaf(
                        tt::Leaf::Punct(tt::Punct { char: ':' | '$', .. }) | tt::Leaf::Ident(_),
                    )
                ) {
                    input = path_split_savepoint.remaining();
                    break;
                }
                path_split_savepoint = iter.savepoint();
            }
            (path, input)
        };
        let path = Interned::new(ModPath::from_tt(db, path)?);
        let input = match (input.flat_tokens().first(), input.try_into_subtree()) {
            (_, Some(tree)) => {
                // `attr(...)`-style input: a single subtree.
                Some(Box::new(AttrInput::TokenTree(tt::TopSubtree::from_subtree(tree))))
            }
            (Some(tt::TokenTree::Leaf(tt::Leaf::Punct(tt::Punct { char: '=', .. }))), _) => {
                // `attr = lit`-style input; anything other than a literal yields no input.
                match input.flat_tokens().get(1) {
                    Some(tt::TokenTree::Leaf(tt::Leaf::Literal(lit))) => {
                        Some(Box::new(AttrInput::Literal(lit.clone())))
                    }
                    _ => None,
                }
            }
            _ => None,
        };
        Some(Attr { id, path, input, ctxt })
    }
    /// The attribute's path, e.g. the `serde` in `#[serde(rename = "x")]`.
    pub fn path(&self) -> &ModPath {
        &self.path
    }
    /// If this attribute is a `#[cfg_attr(pred, attrs...)]`, checks `pred` against
    /// `cfg_options` and yields the contained attributes (or nothing if disabled).
    /// Any other attribute — including a malformed `cfg_attr` — is yielded unchanged.
    pub fn expand_cfg_attr(
        self,
        db: &dyn ExpandDatabase,
        cfg_options: &CfgOptions,
    ) -> impl IntoIterator<Item = Self> {
        let is_cfg_attr = self.path.as_ident().is_some_and(|name| *name == sym::cfg_attr);
        if !is_cfg_attr {
            return smallvec![self];
        }
        let subtree = match self.token_tree_value() {
            Some(it) => it,
            _ => return smallvec![self.clone()],
        };
        let (cfg, parts) = match parse_cfg_attr_input(subtree) {
            Some(it) => it,
            None => return smallvec![self.clone()],
        };
        // Every attribute produced by the expansion inherits this `cfg_attr`'s id.
        let index = self.id;
        let attrs = parts.filter_map(|attr| Attr::from_tt(db, attr, index));
        let cfg = TopSubtree::from_token_trees(subtree.top_subtree().delimiter, cfg);
        let cfg = CfgExpr::parse(&cfg);
        if cfg_options.check(&cfg) == Some(false) {
            smallvec![]
        } else {
            cov_mark::hit!(cfg_attr_active);
            attrs.collect::<SmallVec<[_; 1]>>()
        }
    }
}
impl Attr {
/// #[path = "string"]
pub fn string_value(&self) -> Option<&Symbol> {
@ -403,30 +475,26 @@ impl Attr {
pub fn parse_path_comma_token_tree<'a>(
&'a self,
db: &'a dyn ExpandDatabase,
) -> Option<impl Iterator<Item = (ModPath, Span)> + 'a> {
) -> Option<impl Iterator<Item = (ModPath, Span, tt::TokenTreesView<'a>)> + 'a> {
let args = self.token_tree_value()?;
if args.top_subtree().delimiter.kind != DelimiterKind::Parenthesis {
return None;
}
let paths = args
.token_trees()
.split(|tt| matches!(tt, tt::TtElement::Leaf(tt::Leaf::Punct(Punct { char: ',', .. }))))
.filter_map(move |tts| {
let span = tts.flat_tokens().first()?.first_span();
Some((ModPath::from_tt(db, tts)?, span))
});
Some(paths)
Some(parse_path_comma_token_tree(db, args))
}
}
pub fn cfg(&self) -> Option<CfgExpr> {
if *self.path.as_ident()? == sym::cfg {
self.token_tree_value().map(CfgExpr::parse)
} else {
None
}
}
/// Splits a comma-separated token tree like `a::b, c, d::e` and lowers each part
/// into a [`ModPath`], yielding the path, its first token's span, and its raw tokens.
/// Parts that are empty or fail to lower are skipped.
fn parse_path_comma_token_tree<'a>(
    db: &'a dyn ExpandDatabase,
    args: &'a tt::TopSubtree,
) -> impl Iterator<Item = (ModPath, Span, tt::TokenTreesView<'a>)> {
    args.token_trees()
        .split(|tt| matches!(tt, tt::TtElement::Leaf(tt::Leaf::Punct(Punct { char: ',', .. }))))
        .filter_map(move |tts| {
            let span = tts.flat_tokens().first()?.first_span();
            Some((ModPath::from_tt(db, tts)?, span, tts))
        })
}
fn unescape(s: &str) -> Option<Cow<'_, str>> {
@ -455,58 +523,104 @@ fn unescape(s: &str) -> Option<Cow<'_, str>> {
}
}
pub fn collect_attrs(
owner: &dyn ast::HasAttrs,
) -> impl Iterator<Item = (AttrId, Either<ast::Attr, ast::Comment>)> {
let inner_attrs =
inner_attributes(owner.syntax()).into_iter().flatten().zip(iter::repeat(true));
let outer_attrs = ast::AttrDocCommentIter::from_syntax_node(owner.syntax())
.filter(|el| match el {
Either::Left(attr) => attr.kind().is_outer(),
Either::Right(comment) => comment.is_outer(),
})
.zip(iter::repeat(false));
outer_attrs
.chain(inner_attrs)
.enumerate()
.map(|(id, (attr, is_inner))| (AttrId::new(id, is_inner), attr))
/// This is an index of an attribute *that always points to the item tree attributes*.
///
/// Outer attributes are counted first, then inner attributes. This does not support
/// out-of-line modules, which may have attributes spread across 2 files!
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub struct AttrId {
    // Index into the item tree's attribute list; see `from_item_tree_index()`.
    id: u32,
}
fn inner_attributes(
syntax: &SyntaxNode,
) -> Option<impl Iterator<Item = Either<ast::Attr, ast::Comment>>> {
let node = match_ast! {
match syntax {
ast::SourceFile(_) => syntax.clone(),
ast::ExternBlock(it) => it.extern_item_list()?.syntax().clone(),
ast::Fn(it) => it.body()?.stmt_list()?.syntax().clone(),
ast::Impl(it) => it.assoc_item_list()?.syntax().clone(),
ast::Module(it) => it.item_list()?.syntax().clone(),
ast::BlockExpr(it) => {
if !it.may_carry_attributes() {
return None
impl AttrId {
    /// Creates an [`AttrId`] from an index into the item tree's attribute list.
    #[inline]
    pub fn from_item_tree_index(id: u32) -> Self {
        Self { id }
    }
    /// The index into the item tree's attribute list this id was created from.
    #[inline]
    pub fn item_tree_index(self) -> u32 {
        self.id
    }
    /// Returns the containing `ast::Attr` (note that it may contain other attributes as well due
    /// to `cfg_attr`), a `SyntaxNode` guaranteed to contain the attribute, the full range of the
    /// attribute, and its desugared [`Meta`].
    ///
    /// Convenience wrapper around [`Self::find_attr_range_with_source`] that resolves the
    /// `AstId` to its node first.
    pub fn find_attr_range<N: ast::HasAttrs>(
        self,
        db: &dyn ExpandDatabase,
        krate: Crate,
        owner: AstId<N>,
    ) -> (ast::Attr, SyntaxNode, TextRange, Meta) {
        self.find_attr_range_with_source(db, krate, &owner.to_node(db))
    }
/// Returns the containing `ast::Attr` (note that it may contain other attributes as well due
/// to `cfg_attr`), a `SyntaxNode` guaranteed to contain the attribute, the full range of the
/// attribute, and its desugared [`Meta`].
pub fn find_attr_range_with_source(
self,
db: &dyn ExpandDatabase,
krate: Crate,
owner: &dyn ast::HasAttrs,
) -> (ast::Attr, SyntaxNode, TextRange, Meta) {
let cfg_options = OnceCell::new();
let mut index = 0;
let result = collect_item_tree_attrs(
owner,
|| cfg_options.get_or_init(|| krate.cfg_options(db)),
|meta, container, top_attr, range| {
if index == self.id {
return ControlFlow::Break((top_attr.clone(), container.clone(), range, meta));
}
syntax.clone()
index += 1;
ControlFlow::Continue(())
},
_ => return None,
);
match result {
Some(Either::Left(it)) => it,
_ => {
panic!("used an incorrect `AttrId`; crate={krate:?}, attr_id={self:?}");
}
}
};
}
let attrs = ast::AttrDocCommentIter::from_syntax_node(&node).filter(|el| match el {
Either::Left(attr) => attr.kind().is_inner(),
Either::Right(comment) => comment.is_inner(),
});
Some(attrs)
}
// Input subtree is: `(cfg, $(attr),+)`
// Split it up into a `cfg` subtree and the `attr` subtrees.
fn parse_cfg_attr_input(
subtree: &TopSubtree,
) -> Option<(tt::TokenTreesView<'_>, impl Iterator<Item = tt::TokenTreesView<'_>>)> {
let mut parts = subtree
.token_trees()
.split(|tt| matches!(tt, tt::TtElement::Leaf(tt::Leaf::Punct(Punct { char: ',', .. }))));
let cfg = parts.next()?;
Some((cfg, parts.filter(|it| !it.is_empty())))
    /// Computes the text range of a single derive path (e.g. `Debug` in
    /// `#[derive(Clone, Debug)]`) within the derive attribute identified by `self`.
    ///
    /// Falls back to the range of the whole derive attribute if the attribute has no
    /// token tree or `derive_index` is out of bounds.
    pub fn find_derive_range(
        self,
        db: &dyn ExpandDatabase,
        krate: Crate,
        owner: AstId<ast::Adt>,
        derive_index: u32,
    ) -> TextRange {
        let (_, _, derive_attr_range, derive_attr) = self.find_attr_range(db, krate, owner);
        let Meta::TokenTree { tt, .. } = derive_attr else {
            return derive_attr_range;
        };
        // Fake the span map, as we don't really need spans here, just the offsets of the node in the file.
        let span_map = RealSpanMap::absolute(span::EditionedFileId::current_edition(
            span::FileId::from_raw(0),
        ));
        let tt = syntax_bridge::syntax_node_to_token_tree(
            tt.syntax(),
            SpanMapRef::RealSpanMap(&span_map),
            span_map.span_for_range(tt.syntax().text_range()),
            DocCommentDesugarMode::ProcMacro,
        );
        let Some((_, _, derive_tts)) =
            parse_path_comma_token_tree(db, &tt).nth(derive_index as usize)
        else {
            return derive_attr_range;
        };
        let (Some(first_tt), Some(last_tt)) =
            (derive_tts.flat_tokens().first(), derive_tts.flat_tokens().last())
        else {
            return derive_attr_range;
        };
        // The derive's range spans from its first token to the end of its last token.
        let start = first_tt.first_span().range.start();
        let end = match last_tt {
            tt::TokenTree::Leaf(it) => it.span().range.end(),
            tt::TokenTree::Subtree(it) => it.delimiter.close.range.end(),
        };
        TextRange::new(start, end)
    }
}

View file

@ -772,7 +772,7 @@ fn relative_file(
if res == call_site && !allow_recursion {
Err(ExpandError::other(err_span, format!("recursive inclusion of `{path_str}`")))
} else {
Ok(EditionedFileId::new(db, res, lookup.krate.data(db).edition))
Ok(EditionedFileId::new(db, res, lookup.krate.data(db).edition, lookup.krate))
}
}

View file

@ -1,373 +1,343 @@
//! Processes out #[cfg] and #[cfg_attr] attributes from the input for the derive macro
use std::iter::Peekable;
use std::{cell::OnceCell, ops::ControlFlow};
use ::tt::TextRange;
use base_db::Crate;
use cfg::{CfgAtom, CfgExpr};
use intern::{Symbol, sym};
use rustc_hash::FxHashSet;
use cfg::CfgExpr;
use parser::T;
use smallvec::SmallVec;
use syntax::{
AstNode, NodeOrToken, SyntaxElement, SyntaxKind, SyntaxNode, T,
ast::{self, Attr, HasAttrs, Meta, TokenTree, VariantList},
AstNode, PreorderWithTokens, SyntaxElement, SyntaxNode, SyntaxToken, WalkEvent,
ast::{self, HasAttrs, TokenTreeChildren},
};
use tracing::{debug, warn};
use syntax_bridge::DocCommentDesugarMode;
use crate::{MacroCallLoc, MacroDefKind, db::ExpandDatabase, proc_macro::ProcMacroKind};
use crate::{
attrs::{AttrId, Meta, expand_cfg_attr, is_item_tree_filtered_attr},
db::ExpandDatabase,
fixup::{self, SyntaxFixupUndoInfo},
span_map::SpanMapRef,
tt::{self, DelimSpan, Span},
};
fn check_cfg(db: &dyn ExpandDatabase, attr: &Attr, krate: Crate) -> Option<bool> {
if !attr.simple_name().as_deref().map(|v| v == "cfg")? {
return None;
}
let cfg = parse_from_attr_token_tree(&attr.meta()?.token_tree()?)?;
let enabled = krate.cfg_options(db).check(&cfg) != Some(false);
Some(enabled)
struct ItemIsCfgedOut;
/// The text range of one attribute that survived `cfg_attr` expansion within an
/// `ast::Attr`.
#[derive(Debug)]
struct ExpandedAttrToProcess {
    range: TextRange,
}
fn check_cfg_attr(db: &dyn ExpandDatabase, attr: &Attr, krate: Crate) -> Option<bool> {
if !attr.simple_name().as_deref().map(|v| v == "cfg_attr")? {
return None;
/// Tracks whether token emission is currently inside an expanded attribute's range.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
enum NextExpandedAttrState {
    /// We have not yet reached the next expanded attribute's tokens.
    NotStarted,
    /// We are inside the next expanded attribute, keeping its tokens.
    InTheMiddle,
}
/// Per-`ast::Attr` bookkeeping used while re-emitting attributes during `cfg_attr`
/// expansion of macro input.
#[derive(Debug)]
struct AstAttrToProcess {
    // Text range of the whole `ast::Attr` in the source.
    range: TextRange,
    // Attributes (after `cfg_attr` expansion) to be kept from this `ast::Attr`.
    expanded_attrs: SmallVec<[ExpandedAttrToProcess; 1]>,
    // Index of the expanded attribute currently being emitted.
    expanded_attrs_idx: usize,
    next_expanded_attr: NextExpandedAttrState,
    // Spans used when synthesizing the `#`, `[`/`]` (and `!`) tokens around each attr.
    pound_span: Span,
    brackets_span: DelimSpan,
    /// If `Some`, this is an inner attribute.
    excl_span: Option<Span>,
}
fn macro_input_callback(
db: &dyn ExpandDatabase,
is_derive: bool,
censor_item_tree_attr_ids: &[AttrId],
krate: Crate,
default_span: Span,
span_map: SpanMapRef<'_>,
) -> impl FnMut(&mut PreorderWithTokens, &WalkEvent<SyntaxElement>) -> (bool, Vec<tt::Leaf>) {
let cfg_options = OnceCell::new();
let cfg_options = move || *cfg_options.get_or_init(|| krate.cfg_options(db));
let mut should_strip_attr = {
let mut item_tree_attr_id = 0;
let mut censor_item_tree_attr_ids_index = 0;
move || {
let mut result = false;
if let Some(&next_censor_attr_id) =
censor_item_tree_attr_ids.get(censor_item_tree_attr_ids_index)
&& next_censor_attr_id.item_tree_index() == item_tree_attr_id
{
censor_item_tree_attr_ids_index += 1;
result = true;
}
item_tree_attr_id += 1;
result
}
};
let mut attrs = Vec::new();
let mut attrs_idx = 0;
let mut has_inner_attrs_owner = false;
let mut in_attr = false;
let mut done_with_attrs = false;
let mut did_top_attrs = false;
move |preorder, event| {
match event {
WalkEvent::Enter(SyntaxElement::Node(node)) => {
if done_with_attrs {
return (true, Vec::new());
}
if ast::Attr::can_cast(node.kind()) {
in_attr = true;
let node_range = node.text_range();
while attrs
.get(attrs_idx)
.is_some_and(|it: &AstAttrToProcess| it.range != node_range)
{
attrs_idx += 1;
}
} else if let Some(has_attrs) = ast::AnyHasAttrs::cast(node.clone()) {
if has_inner_attrs_owner {
has_inner_attrs_owner = false;
return (true, Vec::new());
}
if did_top_attrs && !is_derive {
// Derives need all attributes handled, but attribute macros need only the top attributes handled.
done_with_attrs = true;
return (true, Vec::new());
}
did_top_attrs = true;
if let Some(inner_attrs_node) = has_attrs.inner_attributes_node()
&& inner_attrs_node != *node
{
has_inner_attrs_owner = true;
}
let node_attrs = ast::attrs_including_inner(&has_attrs);
attrs.clear();
node_attrs.clone().for_each(|attr| {
let span_for = |token: Option<SyntaxToken>| {
token
.map(|token| span_map.span_for_range(token.text_range()))
.unwrap_or(default_span)
};
attrs.push(AstAttrToProcess {
range: attr.syntax().text_range(),
pound_span: span_for(attr.pound_token()),
brackets_span: DelimSpan {
open: span_for(attr.l_brack_token()),
close: span_for(attr.r_brack_token()),
},
excl_span: attr
.excl_token()
.map(|token| span_map.span_for_range(token.text_range())),
expanded_attrs: SmallVec::new(),
expanded_attrs_idx: 0,
next_expanded_attr: NextExpandedAttrState::NotStarted,
});
});
attrs_idx = 0;
let strip_current_item = expand_cfg_attr(
node_attrs,
&cfg_options,
|attr, _container, range, top_attr| {
// Find the attr.
while attrs[attrs_idx].range != top_attr.syntax().text_range() {
attrs_idx += 1;
}
let mut strip_current_attr = false;
match attr {
Meta::NamedKeyValue { name, .. } => {
if name
.is_none_or(|name| !is_item_tree_filtered_attr(name.text()))
{
strip_current_attr = should_strip_attr();
}
}
Meta::TokenTree { path, tt } => {
if path.segments.len() != 1
|| !is_item_tree_filtered_attr(path.segments[0].text())
{
strip_current_attr = should_strip_attr();
}
if path.segments.len() == 1 {
let name = path.segments[0].text();
if name == "cfg" {
let cfg_expr = CfgExpr::parse_from_ast(
&mut TokenTreeChildren::new(&tt).peekable(),
);
if cfg_options().check(&cfg_expr) == Some(false) {
return ControlFlow::Break(ItemIsCfgedOut);
}
strip_current_attr = true;
}
}
}
Meta::Path { path } => {
if path.segments.len() != 1
|| !is_item_tree_filtered_attr(path.segments[0].text())
{
strip_current_attr = should_strip_attr();
}
}
}
if !strip_current_attr {
attrs[attrs_idx]
.expanded_attrs
.push(ExpandedAttrToProcess { range });
}
ControlFlow::Continue(())
},
);
attrs_idx = 0;
if strip_current_item.is_some() {
preorder.skip_subtree();
attrs.clear();
'eat_comma: {
// If there is a comma after this node, eat it too.
let mut events_until_comma = 0;
for event in preorder.clone() {
match event {
WalkEvent::Enter(SyntaxElement::Node(_))
| WalkEvent::Leave(_) => {}
WalkEvent::Enter(SyntaxElement::Token(token)) => {
let kind = token.kind();
if kind == T![,] {
break;
} else if !kind.is_trivia() {
break 'eat_comma;
}
}
}
events_until_comma += 1;
}
preorder.nth(events_until_comma);
}
return (false, Vec::new());
}
}
}
WalkEvent::Leave(SyntaxElement::Node(node)) => {
if ast::Attr::can_cast(node.kind()) {
in_attr = false;
attrs_idx += 1;
}
}
WalkEvent::Enter(SyntaxElement::Token(token)) => {
if !in_attr {
return (true, Vec::new());
}
let Some(ast_attr) = attrs.get_mut(attrs_idx) else {
return (true, Vec::new());
};
let token_range = token.text_range();
let Some(expanded_attr) = ast_attr.expanded_attrs.get(ast_attr.expanded_attrs_idx)
else {
// No expanded attributes in this `ast::Attr`, or we finished them all already, either way
// the remaining tokens should be discarded.
return (false, Vec::new());
};
match ast_attr.next_expanded_attr {
NextExpandedAttrState::NotStarted => {
if token_range.start() >= expanded_attr.range.start() {
// We started the next attribute.
let mut insert_tokens = Vec::with_capacity(3);
insert_tokens.push(tt::Leaf::Punct(tt::Punct {
char: '#',
spacing: tt::Spacing::Alone,
span: ast_attr.pound_span,
}));
if let Some(span) = ast_attr.excl_span {
insert_tokens.push(tt::Leaf::Punct(tt::Punct {
char: '!',
spacing: tt::Spacing::Alone,
span,
}));
}
insert_tokens.push(tt::Leaf::Punct(tt::Punct {
char: '[',
spacing: tt::Spacing::Alone,
span: ast_attr.brackets_span.open,
}));
ast_attr.next_expanded_attr = NextExpandedAttrState::InTheMiddle;
return (true, insert_tokens);
} else {
// Before any attribute or between the attributes.
return (false, Vec::new());
}
}
NextExpandedAttrState::InTheMiddle => {
if token_range.start() >= expanded_attr.range.end() {
// Finished the current attribute.
let insert_tokens = vec![tt::Leaf::Punct(tt::Punct {
char: ']',
spacing: tt::Spacing::Alone,
span: ast_attr.brackets_span.close,
})];
ast_attr.next_expanded_attr = NextExpandedAttrState::NotStarted;
ast_attr.expanded_attrs_idx += 1;
// It's safe to ignore the current token because between attributes
// there is always at least one token we skip - either the closing bracket
// in `#[]` or the comma in case of multiple attrs in `cfg_attr` expansion.
return (false, insert_tokens);
} else {
// Still in the middle.
return (true, Vec::new());
}
}
}
}
WalkEvent::Leave(SyntaxElement::Token(_)) => {}
}
(true, Vec::new())
}
check_cfg_attr_value(db, &attr.token_tree()?, krate)
}
/// Converts a macro input syntax node into a token tree, applying syntax fixups and
/// rewriting attributes via [`macro_input_callback`] (censoring the given item-tree
/// attribute ids and expanding `cfg_attr`s).
///
/// Returns the token tree together with the undo info needed to reverse the fixups
/// after expansion.
pub(crate) fn attr_macro_input_to_token_tree(
    db: &dyn ExpandDatabase,
    node: &SyntaxNode,
    span_map: SpanMapRef<'_>,
    span: Span,
    is_derive: bool,
    censor_item_tree_attr_ids: &[AttrId],
    krate: Crate,
) -> (tt::TopSubtree, SyntaxFixupUndoInfo) {
    let fixups = fixup::fixup_syntax(span_map, node, span, DocCommentDesugarMode::ProcMacro);
    (
        syntax_bridge::syntax_node_to_token_tree_modified(
            node,
            span_map,
            fixups.append,
            fixups.remove,
            span,
            DocCommentDesugarMode::ProcMacro,
            macro_input_callback(db, is_derive, censor_item_tree_attr_ids, krate, span, span_map),
        ),
        fixups.undo_info,
    )
}
pub fn check_cfg_attr_value(
db: &dyn ExpandDatabase,
attr: &TokenTree,
attr: &ast::TokenTree,
krate: Crate,
) -> Option<bool> {
let cfg_expr = parse_from_attr_token_tree(attr)?;
let enabled = krate.cfg_options(db).check(&cfg_expr) != Some(false);
Some(enabled)
}
fn process_has_attrs_with_possible_comma<I: HasAttrs>(
db: &dyn ExpandDatabase,
items: impl Iterator<Item = I>,
krate: Crate,
remove: &mut FxHashSet<SyntaxElement>,
) -> Option<()> {
for item in items {
let field_attrs = item.attrs();
'attrs: for attr in field_attrs {
if let Some(enabled) = check_cfg(db, &attr, krate) {
if enabled {
debug!("censoring {:?}", attr.syntax());
remove.insert(attr.syntax().clone().into());
} else {
debug!("censoring {:?}", item.syntax());
remove.insert(item.syntax().clone().into());
// We need to remove the , as well
remove_possible_comma(&item, remove);
break 'attrs;
}
}
if let Some(enabled) = check_cfg_attr(db, &attr, krate) {
if enabled {
debug!("Removing cfg_attr tokens {:?}", attr);
let meta = attr.meta()?;
let removes_from_cfg_attr = remove_tokens_within_cfg_attr(meta)?;
remove.extend(removes_from_cfg_attr);
} else {
debug!("censoring type cfg_attr {:?}", item.syntax());
remove.insert(attr.syntax().clone().into());
}
}
}
}
Some(())
}
/// State machine for stripping the `cfg(...)` predicate out of a `cfg_attr(...)`
/// token tree while keeping (the first of) its attributes.
// NOTE(review): the first variant's name has a typo ("Strippig"); renaming it would
// touch every use site, so it is left as-is here.
#[derive(Debug, PartialEq, Eq, Clone, Copy)]
enum CfgExprStage {
    /// Stripping the CFGExpr part of the attribute
    StrippigCfgExpr,
    /// Found the comma after the CFGExpr. Will keep all tokens until the next comma or the end of the attribute
    FoundComma,
    /// Everything following the attribute. This could be another attribute or the end of the attribute.
    // FIXME: cfg_attr with multiple attributes will not be handled correctly. We will only keep the first attribute
    // Related Issue: https://github.com/rust-lang/rust-analyzer/issues/10110
    EverythingElse,
}
/// This function creates its own set of tokens to remove. To help prevent malformed syntax as input.
fn remove_tokens_within_cfg_attr(meta: Meta) -> Option<FxHashSet<SyntaxElement>> {
let mut remove: FxHashSet<SyntaxElement> = FxHashSet::default();
debug!("Enabling attribute {}", meta);
let meta_path = meta.path()?;
debug!("Removing {:?}", meta_path.syntax());
remove.insert(meta_path.syntax().clone().into());
let meta_tt = meta.token_tree()?;
debug!("meta_tt {}", meta_tt);
let mut stage = CfgExprStage::StrippigCfgExpr;
for tt in meta_tt.token_trees_and_tokens() {
debug!("Checking {:?}. Stage: {:?}", tt, stage);
match (stage, tt) {
(CfgExprStage::StrippigCfgExpr, syntax::NodeOrToken::Node(node)) => {
remove.insert(node.syntax().clone().into());
}
(CfgExprStage::StrippigCfgExpr, syntax::NodeOrToken::Token(token)) => {
if token.kind() == T![,] {
stage = CfgExprStage::FoundComma;
}
remove.insert(token.into());
}
(CfgExprStage::FoundComma, syntax::NodeOrToken::Token(token))
if (token.kind() == T![,] || token.kind() == T![')']) =>
{
// The end of the attribute or separator for the next attribute
stage = CfgExprStage::EverythingElse;
remove.insert(token.into());
}
(CfgExprStage::EverythingElse, syntax::NodeOrToken::Node(node)) => {
remove.insert(node.syntax().clone().into());
}
(CfgExprStage::EverythingElse, syntax::NodeOrToken::Token(token)) => {
remove.insert(token.into());
}
// This is an actual attribute
_ => {}
}
}
if stage != CfgExprStage::EverythingElse {
warn!("Invalid cfg_attr attribute. {:?}", meta_tt);
return None;
}
Some(remove)
}
/// Removes a possible comma after the [AstNode]
fn remove_possible_comma(item: &impl AstNode, res: &mut FxHashSet<SyntaxElement>) {
    // Look at the immediate following sibling; only a trailing `,` is censored.
    match item.syntax().next_sibling_or_token() {
        Some(element) if element.kind() == T![,] => {
            res.insert(element);
        }
        _ => {}
    }
}
/// Walks an enum's variants, recording in `remove` the syntax elements that must be
/// censored before derive expansion: disabled variants (plus their trailing comma),
/// enabled `#[cfg(..)]` attributes themselves, the `cfg_attr` scaffolding of enabled
/// `#[cfg_attr(..)]`s, and disabled `cfg_attr` attributes. Recurses into each kept
/// variant's fields. Returns `None` if a malformed attribute is encountered.
fn process_enum(
    db: &dyn ExpandDatabase,
    variants: VariantList,
    krate: Crate,
    remove: &mut FxHashSet<SyntaxElement>,
) -> Option<()> {
    'variant: for variant in variants.variants() {
        for attr in variant.attrs() {
            // `Some` means this is a `#[cfg(..)]` attribute; `enabled` is its evaluation.
            if let Some(enabled) = check_cfg(db, &attr, krate) {
                if enabled {
                    debug!("censoring {:?}", attr.syntax());
                    remove.insert(attr.syntax().clone().into());
                } else {
                    // Rustc does not strip the attribute if it is enabled. So we will leave it
                    debug!("censoring type {:?}", variant.syntax());
                    remove.insert(variant.syntax().clone().into());
                    // We need to remove the , as well
                    remove_possible_comma(&variant, remove);
                    // The whole variant is gone; no need to inspect its other attributes.
                    continue 'variant;
                }
            }

            // `Some` means this is a `#[cfg_attr(..)]` attribute.
            if let Some(enabled) = check_cfg_attr(db, &attr, krate) {
                if enabled {
                    // Keep the inner attribute but strip the `cfg_attr(<expr>,` wrapper.
                    debug!("Removing cfg_attr tokens {:?}", attr);
                    let meta = attr.meta()?;
                    let removes_from_cfg_attr = remove_tokens_within_cfg_attr(meta)?;
                    remove.extend(removes_from_cfg_attr);
                } else {
                    debug!("censoring type cfg_attr {:?}", variant.syntax());
                    remove.insert(attr.syntax().clone().into());
                }
            }
        }
        // Variant survived; cfg-process its fields too.
        if let Some(fields) = variant.field_list() {
            match fields {
                ast::FieldList::RecordFieldList(fields) => {
                    process_has_attrs_with_possible_comma(db, fields.fields(), krate, remove)?;
                }
                ast::FieldList::TupleFieldList(fields) => {
                    process_has_attrs_with_possible_comma(db, fields.fields(), krate, remove)?;
                }
            }
        }
    }
    Some(())
}
/// Collects the syntax elements that must be removed from `node` before handing it to
/// the macro expander, based on its `cfg`/`cfg_attr` attributes.
///
/// `cfg_attr` on the item itself is processed for every macro kind. The item's
/// *interior* (struct/union fields, enum variants) is only cfg-stripped for derives,
/// since attribute macros are expected to see their input unstripped.
/// Returns `None` if `node` is not an item or an attribute is malformed.
pub(crate) fn process_cfg_attrs(
    db: &dyn ExpandDatabase,
    node: &SyntaxNode,
    loc: &MacroCallLoc,
) -> Option<FxHashSet<SyntaxElement>> {
    // FIXME: #[cfg_eval] is not implemented. But it is not stable yet
    // Derives include proc-macro custom derives, builtin derives, and builtin
    // attributes that behave like derives (e.g. `#[derive_const]`-style expanders).
    let is_derive = match loc.def.kind {
        MacroDefKind::BuiltInDerive(..)
        | MacroDefKind::ProcMacro(_, _, ProcMacroKind::CustomDerive) => true,
        MacroDefKind::BuiltInAttr(_, expander) => expander.is_derive(),
        _ => false,
    };
    let mut remove = FxHashSet::default();

    let item = ast::Item::cast(node.clone())?;
    for attr in item.attrs() {
        // `Some` means this is a `#[cfg_attr(..)]`; `enabled` is the cfg evaluation.
        if let Some(enabled) = check_cfg_attr(db, &attr, loc.krate) {
            if enabled {
                // Keep the wrapped attribute, strip the `cfg_attr(<expr>,` scaffolding.
                debug!("Removing cfg_attr tokens {:?}", attr);
                let meta = attr.meta()?;
                let removes_from_cfg_attr = remove_tokens_within_cfg_attr(meta)?;
                remove.extend(removes_from_cfg_attr);
            } else {
                debug!("Removing type cfg_attr {:?}", item.syntax());
                remove.insert(attr.syntax().clone().into());
            }
        }
    }
    if is_derive {
        // Only derives get their code cfg-clean, normal attribute macros process only the cfg at their level
        // (cfg_attr is handled above, cfg is handled in the def map).
        match item {
            ast::Item::Struct(it) => match it.field_list()? {
                ast::FieldList::RecordFieldList(fields) => {
                    process_has_attrs_with_possible_comma(
                        db,
                        fields.fields(),
                        loc.krate,
                        &mut remove,
                    )?;
                }
                ast::FieldList::TupleFieldList(fields) => {
                    process_has_attrs_with_possible_comma(
                        db,
                        fields.fields(),
                        loc.krate,
                        &mut remove,
                    )?;
                }
            },
            ast::Item::Enum(it) => {
                process_enum(db, it.variant_list()?, loc.krate, &mut remove)?;
            }
            ast::Item::Union(it) => {
                process_has_attrs_with_possible_comma(
                    db,
                    it.record_field_list()?.fields(),
                    loc.krate,
                    &mut remove,
                )?;
            }
            // FIXME: Implement for other items if necessary. As we do not support #[cfg_eval] yet, we do not need to implement it for now
            _ => {}
        }
    }
    Some(remove)
}
/// Parses a `cfg` attribute from the meta
fn parse_from_attr_token_tree(tt: &TokenTree) -> Option<CfgExpr> {
    // Ignore whitespace, skip the opening `(`, and stop at the closing `)` so the
    // recursive parser only ever sees the interior of the token tree.
    let interior = tt.token_trees_and_tokens().filter(is_not_whitespace);
    let mut iter = interior.skip(1).take_while(is_not_closing_paren).peekable();
    next_cfg_expr_from_syntax(&mut iter)
}
/// Predicate used with `take_while`: `false` only for a `)` token.
fn is_not_closing_paren(element: &NodeOrToken<ast::TokenTree, syntax::SyntaxToken>) -> bool {
    match element {
        NodeOrToken::Token(token) => token.kind() != syntax::T![')'],
        NodeOrToken::Node(_) => true,
    }
}
/// Predicate used with `filter`: `false` only for whitespace tokens.
fn is_not_whitespace(element: &NodeOrToken<ast::TokenTree, syntax::SyntaxToken>) -> bool {
    match element {
        NodeOrToken::Token(token) => token.kind() != SyntaxKind::WHITESPACE,
        NodeOrToken::Node(_) => true,
    }
}
/// Parses one cfg predicate from a pre-filtered (whitespace-stripped, delimiter-trimmed)
/// stream of token-tree elements, consuming a trailing `,` if present.
///
/// Returns `None` when the stream is exhausted or a key-value predicate has a
/// non-string value; returns `Some(CfgExpr::Invalid)` for other malformed input.
fn next_cfg_expr_from_syntax<I>(iter: &mut Peekable<I>) -> Option<CfgExpr>
where
    I: Iterator<Item = NodeOrToken<ast::TokenTree, syntax::SyntaxToken>>,
{
    // A predicate always starts with an identifier (`all`, `any`, `not`, or an atom name).
    let name = match iter.next() {
        None => return None,
        Some(NodeOrToken::Token(element)) => match element.kind() {
            syntax::T![ident] => Symbol::intern(element.text()),
            _ => return Some(CfgExpr::Invalid),
        },
        Some(_) => return Some(CfgExpr::Invalid),
    };
    let result = match &name {
        s if [&sym::all, &sym::any, &sym::not].contains(&s) => {
            // Combinator: the next element must be a parenthesized token tree holding
            // the comma-separated sub-predicates.
            let mut preds = Vec::new();
            let Some(NodeOrToken::Node(tree)) = iter.next() else {
                return Some(CfgExpr::Invalid);
            };
            // Same pre-filtering as the top level: drop whitespace, skip `(`, stop at `)`.
            let mut tree_iter = tree
                .token_trees_and_tokens()
                .filter(is_not_whitespace)
                .skip(1)
                .take_while(is_not_closing_paren)
                .peekable();
            while tree_iter.peek().is_some() {
                let pred = next_cfg_expr_from_syntax(&mut tree_iter);
                if let Some(pred) = pred {
                    preds.push(pred);
                }
            }
            let group = match &name {
                s if *s == sym::all => CfgExpr::All(preds.into_boxed_slice()),
                s if *s == sym::any => CfgExpr::Any(preds.into_boxed_slice()),
                s if *s == sym::not => {
                    // `not` takes a single operand; an empty body degrades to Invalid.
                    CfgExpr::Not(Box::new(preds.pop().unwrap_or(CfgExpr::Invalid)))
                }
                _ => unreachable!(),
            };
            Some(group)
        }
        _ => match iter.peek() {
            // Key-value atom: `name = "value"`.
            Some(NodeOrToken::Token(element)) if (element.kind() == syntax::T![=]) => {
                iter.next();
                match iter.next() {
                    Some(NodeOrToken::Token(value_token))
                        if (value_token.kind() == syntax::SyntaxKind::STRING) =>
                    {
                        let value = value_token.text();
                        Some(CfgExpr::Atom(CfgAtom::KeyValue {
                            key: name,
                            // Strip the surrounding quotes from the string literal.
                            value: Symbol::intern(value.trim_matches('"')),
                        }))
                    }
                    _ => None,
                }
            }
            // Bare flag atom: just `name`.
            _ => Some(CfgExpr::Atom(CfgAtom::Flag(name))),
        },
    };

    // Consume the separator so the caller's loop starts at the next predicate.
    if let Some(NodeOrToken::Token(element)) = iter.peek()
        && element.kind() == syntax::T![,]
    {
        iter.next();
    }
    result
}
#[cfg(test)]
mod tests {
use cfg::DnfExpr;
use expect_test::{Expect, expect};
use syntax::{AstNode, SourceFile, ast::Attr};
use crate::cfg_process::parse_from_attr_token_tree;
/// Parses the first attribute in `input`, converts its cfg expression to DNF,
/// and compares the rendered form against `expect`.
fn check_dnf_from_syntax(input: &str, expect: Expect) {
    let parse = SourceFile::parse(input, span::Edition::CURRENT);
    let attr = parse
        .tree()
        .syntax()
        .descendants()
        .find_map(Attr::cast)
        .unwrap_or_else(|| {
            let node = std::any::type_name::<Attr>();
            panic!("Failed to make ast node `{node}` from text {input}")
        });

    // Re-root the attribute so its text range starts at zero.
    let attr = attr.clone_subtree();
    assert_eq!(attr.syntax().text_range().start(), 0.into());

    let cfg = parse_from_attr_token_tree(&attr.meta().unwrap().token_tree().unwrap()).unwrap();
    let actual = format!("#![cfg({})]", DnfExpr::new(&cfg));
    expect.assert_eq(&actual);
}
#[test]
fn cfg_from_attr() {
    // A bare flag atom and a `not(..)` combinator survive the syntax->DNF round trip.
    check_dnf_from_syntax(r#"#[cfg(test)]"#, expect![[r#"#![cfg(test)]"#]]);
    check_dnf_from_syntax(r#"#[cfg(not(never))]"#, expect![[r#"#![cfg(not(never))]"#]]);
}
let cfg_expr = CfgExpr::parse_from_ast(&mut TokenTreeChildren::new(attr).peekable());
krate.cfg_options(db).check(&cfg_expr)
}

View file

@ -1,11 +1,9 @@
//! Defines database & queries for macro expansion.
use base_db::{Crate, RootQueryDb};
use either::Either;
use mbe::MatchedArmIndex;
use rustc_hash::FxHashSet;
use span::{AstIdMap, Edition, Span, SyntaxContext};
use syntax::{AstNode, Parse, SyntaxElement, SyntaxError, SyntaxNode, SyntaxToken, T, ast};
use syntax::{AstNode, Parse, SyntaxError, SyntaxNode, SyntaxToken, T, ast};
use syntax_bridge::{DocCommentDesugarMode, syntax_node_to_token_tree};
use triomphe::Arc;
@ -13,9 +11,9 @@ use crate::{
AstId, BuiltinAttrExpander, BuiltinDeriveExpander, BuiltinFnLikeExpander, EagerCallInfo,
EagerExpander, EditionedFileId, ExpandError, ExpandResult, ExpandTo, HirFileId, MacroCallId,
MacroCallKind, MacroCallLoc, MacroDefId, MacroDefKind,
attrs::{AttrId, AttrInput, RawAttrs, collect_attrs},
attrs::Meta,
builtin::pseudo_derive_attr_expansion,
cfg_process,
cfg_process::attr_macro_input_to_token_tree,
declarative::DeclarativeMacroExpander,
fixup::{self, SyntaxFixupUndoInfo},
hygiene::{span_with_call_site_ctxt, span_with_def_site_ctxt, span_with_mixed_site_ctxt},
@ -177,7 +175,7 @@ pub fn expand_speculative(
let span_map = SpanMapRef::RealSpanMap(&span_map);
// Build the subtree and token mapping for the speculative args
let (mut tt, undo_info) = match loc.kind {
let (mut tt, undo_info) = match &loc.kind {
MacroCallKind::FnLike { .. } => (
syntax_bridge::syntax_node_to_token_tree(
speculative_args,
@ -200,48 +198,35 @@ pub fn expand_speculative(
),
SyntaxFixupUndoInfo::NONE,
),
MacroCallKind::Derive { derive_attr_index: index, .. }
| MacroCallKind::Attr { invoc_attr_index: index, .. } => {
let censor = if let MacroCallKind::Derive { .. } = loc.kind {
censor_derive_input(index, &ast::Adt::cast(speculative_args.clone())?)
} else {
attr_source(index, &ast::Item::cast(speculative_args.clone())?)
.into_iter()
.map(|it| it.syntax().clone().into())
.collect()
MacroCallKind::Derive { derive_macro_id, .. } => {
let MacroCallKind::Attr { censored_attr_ids: attr_ids, .. } =
&derive_macro_id.loc(db).kind
else {
unreachable!("`derive_macro_id` should be `MacroCallKind::Attr`");
};
let censor_cfg =
cfg_process::process_cfg_attrs(db, speculative_args, &loc).unwrap_or_default();
let mut fixups = fixup::fixup_syntax(
span_map,
attr_macro_input_to_token_tree(
db,
speculative_args,
span_map,
span,
DocCommentDesugarMode::ProcMacro,
);
fixups.append.retain(|it, _| match it {
syntax::NodeOrToken::Token(_) => true,
it => !censor.contains(it) && !censor_cfg.contains(it),
});
fixups.remove.extend(censor);
fixups.remove.extend(censor_cfg);
(
syntax_bridge::syntax_node_to_token_tree_modified(
speculative_args,
span_map,
fixups.append,
fixups.remove,
span,
DocCommentDesugarMode::ProcMacro,
),
fixups.undo_info,
true,
attr_ids,
loc.krate,
)
}
MacroCallKind::Attr { censored_attr_ids: attr_ids, .. } => attr_macro_input_to_token_tree(
db,
speculative_args,
span_map,
span,
false,
attr_ids,
loc.krate,
),
};
let attr_arg = match loc.kind {
MacroCallKind::Attr { invoc_attr_index, .. } => {
let attr_arg = match &loc.kind {
MacroCallKind::Attr { censored_attr_ids: attr_ids, .. } => {
if loc.def.is_attribute_derive() {
// for pseudo-derive expansion we actually pass the attribute itself only
ast::Attr::cast(speculative_args.clone()).and_then(|attr| attr.token_tree()).map(
@ -260,18 +245,21 @@ pub fn expand_speculative(
// Attributes may have an input token tree, build the subtree and map for this as well
// then try finding a token id for our token if it is inside this input subtree.
let item = ast::Item::cast(speculative_args.clone())?;
let attrs = RawAttrs::new_expanded(db, &item, span_map, loc.krate.cfg_options(db));
attrs.iter().find(|attr| attr.id == invoc_attr_index).and_then(|attr| {
match attr.input.as_deref()? {
AttrInput::TokenTree(tt) => {
let mut attr_arg = tt.clone();
attr_arg.top_subtree_delimiter_mut().kind =
tt::DelimiterKind::Invisible;
Some(attr_arg)
}
AttrInput::Literal(_) => None,
let (_, _, _, meta) =
attr_ids.invoc_attr().find_attr_range_with_source(db, loc.krate, &item);
match meta {
Meta::TokenTree { tt, .. } => {
let mut attr_arg = syntax_bridge::syntax_node_to_token_tree(
tt.syntax(),
span_map,
span,
DocCommentDesugarMode::ProcMacro,
);
attr_arg.top_subtree_delimiter_mut().kind = tt::DelimiterKind::Invisible;
Some(attr_arg)
}
})
_ => None,
}
}
}
_ => None,
@ -430,7 +418,7 @@ fn macro_arg(db: &dyn ExpandDatabase, id: MacroCallId) -> MacroArgResult {
let (parse, map) = parse_with_map(db, loc.kind.file_id());
let root = parse.syntax_node();
let (censor, item_node, span) = match loc.kind {
let (is_derive, censor_item_tree_attr_ids, item_node, span) = match &loc.kind {
MacroCallKind::FnLike { ast_id, .. } => {
let node = &ast_id.to_ptr(db).to_node(&root);
let path_range = node
@ -498,53 +486,29 @@ fn macro_arg(db: &dyn ExpandDatabase, id: MacroCallId) -> MacroArgResult {
MacroCallKind::Derive { .. } => {
unreachable!("`ExpandDatabase::macro_arg` called with `MacroCallKind::Derive`")
}
MacroCallKind::Attr { ast_id, invoc_attr_index, .. } => {
MacroCallKind::Attr { ast_id, censored_attr_ids: attr_ids, .. } => {
let node = ast_id.to_ptr(db).to_node(&root);
let attr_source = attr_source(invoc_attr_index, &node);
let range = attr_ids
.invoc_attr()
.find_attr_range_with_source(db, loc.krate, &node)
.3
.path_range();
let span = map.span_for_range(range);
let span = map.span_for_range(
attr_source
.as_ref()
.and_then(|it| it.path())
.map_or_else(|| node.syntax().text_range(), |it| it.syntax().text_range()),
);
// If derive attribute we need to censor the derive input
if matches!(loc.def.kind, MacroDefKind::BuiltInAttr(_, expander) if expander.is_derive())
&& ast::Adt::can_cast(node.syntax().kind())
{
let adt = ast::Adt::cast(node.syntax().clone()).unwrap();
let censor_derive_input = censor_derive_input(invoc_attr_index, &adt);
(censor_derive_input, node, span)
} else {
(attr_source.into_iter().map(|it| it.syntax().clone().into()).collect(), node, span)
}
let is_derive = matches!(loc.def.kind, MacroDefKind::BuiltInAttr(_, expander) if expander.is_derive());
(is_derive, &**attr_ids, node, span)
}
};
let (mut tt, undo_info) = {
let syntax = item_node.syntax();
let censor_cfg = cfg_process::process_cfg_attrs(db, syntax, &loc).unwrap_or_default();
let mut fixups =
fixup::fixup_syntax(map.as_ref(), syntax, span, DocCommentDesugarMode::ProcMacro);
fixups.append.retain(|it, _| match it {
syntax::NodeOrToken::Token(_) => true,
it => !censor.contains(it) && !censor_cfg.contains(it),
});
fixups.remove.extend(censor);
fixups.remove.extend(censor_cfg);
(
syntax_bridge::syntax_node_to_token_tree_modified(
syntax,
map,
fixups.append,
fixups.remove,
span,
DocCommentDesugarMode::ProcMacro,
),
fixups.undo_info,
)
};
let (mut tt, undo_info) = attr_macro_input_to_token_tree(
db,
item_node.syntax(),
map.as_ref(),
span,
is_derive,
censor_item_tree_attr_ids,
loc.krate,
);
if loc.def.is_proc_macro() {
// proc macros expect their inputs without parentheses, MBEs expect it with them included
@ -554,31 +518,6 @@ fn macro_arg(db: &dyn ExpandDatabase, id: MacroCallId) -> MacroArgResult {
(Arc::new(tt), undo_info, span)
}
// FIXME: Censoring info should be calculated by the caller! Namely by name resolution
/// Derives expect all `#[derive(..)]` invocations up to (and including) the currently invoked one to be stripped
fn censor_derive_input(derive_attr_index: AttrId, node: &ast::Adt) -> FxHashSet<SyntaxElement> {
// FIXME: handle `cfg_attr`
cov_mark::hit!(derive_censoring);
collect_attrs(node)
.take(derive_attr_index.ast_index() + 1)
.filter_map(|(_, attr)| Either::left(attr))
// FIXME, this resolution should not be done syntactically
// derive is a proper macro now, no longer builtin
// But we do not have resolution at this stage, this means
// we need to know about all macro calls for the given ast item here
// so we require some kind of mapping...
.filter(|attr| attr.simple_name().as_deref() == Some("derive"))
.map(|it| it.syntax().clone().into())
.collect()
}
/// Attributes expect the invoking attribute to be stripped
fn attr_source(invoc_attr_index: AttrId, node: &ast::Item) -> Option<ast::Attr> {
// FIXME: handle `cfg_attr`
cov_mark::hit!(attribute_macro_attr_censoring);
collect_attrs(node).nth(invoc_attr_index.ast_index()).and_then(|(_, attr)| Either::left(attr))
}
impl TokenExpander {
fn macro_expander(db: &dyn ExpandDatabase, id: MacroDefId) -> TokenExpander {
match id.kind {

View file

@ -1,17 +1,21 @@
//! Compiled declarative macro expanders (`macro_rules!` and `macro`)
use std::{cell::OnceCell, ops::ControlFlow};
use base_db::Crate;
use intern::sym;
use span::{Edition, Span, SyntaxContext};
use stdx::TupleExt;
use syntax::{AstNode, ast};
use syntax::{
AstNode, AstToken,
ast::{self, HasAttrs},
};
use syntax_bridge::DocCommentDesugarMode;
use triomphe::Arc;
use crate::{
AstId, ExpandError, ExpandErrorKind, ExpandResult, HirFileId, Lookup, MacroCallId,
MacroCallStyle,
attrs::RawAttrs,
attrs::{Meta, expand_cfg_attr},
db::ExpandDatabase,
hygiene::{Transparency, apply_mark},
tt,
@ -83,29 +87,28 @@ impl DeclarativeMacroExpander {
let (root, map) = crate::db::parse_with_map(db, id.file_id);
let root = root.syntax_node();
let transparency = |node| {
// ... would be nice to have the item tree here
let attrs = RawAttrs::new_expanded(db, node, map.as_ref(), def_crate.cfg_options(db));
match attrs
.iter()
.find(|it| {
it.path
.as_ident()
.map(|it| *it == sym::rustc_macro_transparency)
.unwrap_or(false)
})?
.token_tree_value()?
.token_trees()
.flat_tokens()
{
[tt::TokenTree::Leaf(tt::Leaf::Ident(i)), ..] => match &i.sym {
s if *s == sym::transparent => Some(Transparency::Transparent),
s if *s == sym::semitransparent => Some(Transparency::SemiTransparent),
s if *s == sym::opaque => Some(Transparency::Opaque),
_ => None,
let transparency = |node: ast::AnyHasAttrs| {
let cfg_options = OnceCell::new();
expand_cfg_attr(
node.attrs(),
|| cfg_options.get_or_init(|| def_crate.cfg_options(db)),
|attr, _, _, _| {
if let Meta::NamedKeyValue { name: Some(name), value, .. } = attr
&& name.text() == "rustc_macro_transparency"
&& let Some(value) = value.and_then(ast::String::cast)
&& let Ok(value) = value.value()
{
match &*value {
"transparent" => ControlFlow::Break(Transparency::Transparent),
"semitransparent" => ControlFlow::Break(Transparency::SemiTransparent),
"opaque" => ControlFlow::Break(Transparency::Opaque),
_ => ControlFlow::Continue(()),
}
} else {
ControlFlow::Continue(())
}
},
_ => None,
}
)
};
let ctx_edition = |ctx: SyntaxContext| {
if ctx.is_root() {
@ -136,7 +139,8 @@ impl DeclarativeMacroExpander {
"expected a token tree".into(),
)),
},
transparency(&macro_rules).unwrap_or(Transparency::SemiTransparent),
transparency(ast::AnyHasAttrs::from(macro_rules))
.unwrap_or(Transparency::SemiTransparent),
),
ast::Macro::MacroDef(macro_def) => (
match macro_def.body() {
@ -164,7 +168,7 @@ impl DeclarativeMacroExpander {
"expected a token tree".into(),
)),
},
transparency(&macro_def).unwrap_or(Transparency::Opaque),
transparency(macro_def.into()).unwrap_or(Transparency::Opaque),
),
};
let edition = ctx_edition(match id.file_id {

View file

@ -55,30 +55,6 @@ impl From<FilePosition> for HirFilePosition {
}
}
impl FilePositionWrapper<span::FileId> {
pub fn with_edition(self, db: &dyn ExpandDatabase, edition: span::Edition) -> FilePosition {
FilePositionWrapper {
file_id: EditionedFileId::new(db, self.file_id, edition),
offset: self.offset,
}
}
}
impl FileRangeWrapper<span::FileId> {
pub fn with_edition(self, db: &dyn ExpandDatabase, edition: span::Edition) -> FileRange {
FileRangeWrapper {
file_id: EditionedFileId::new(db, self.file_id, edition),
range: self.range,
}
}
}
impl<T> InFileWrapper<span::FileId, T> {
pub fn with_edition(self, db: &dyn ExpandDatabase, edition: span::Edition) -> InRealFile<T> {
InRealFile { file_id: EditionedFileId::new(db, self.file_id, edition), value: self.value }
}
}
impl HirFileRange {
pub fn file_range(self) -> Option<FileRange> {
Some(FileRange { file_id: self.file_id.file_id()?, range: self.range })
@ -407,7 +383,7 @@ impl InFile<SyntaxToken> {
// Fall back to whole macro call.
let loc = db.lookup_intern_macro_call(mac_file);
loc.kind.original_call_range(db)
loc.kind.original_call_range(db, loc.krate)
}
}
}
@ -453,7 +429,10 @@ impl InFile<TextRange> {
Some(it) => it,
None => {
let loc = db.lookup_intern_macro_call(mac_file);
(loc.kind.original_call_range(db), SyntaxContext::root(loc.def.edition))
(
loc.kind.original_call_range(db, loc.krate),
SyntaxContext::root(loc.def.edition),
)
}
}
}
@ -468,7 +447,7 @@ impl InFile<TextRange> {
Some(it) => it,
_ => {
let loc = db.lookup_intern_macro_call(mac_file);
loc.kind.original_call_range(db)
loc.kind.original_call_range(db, loc.krate)
}
}
}

View file

@ -523,6 +523,7 @@ mod tests {
fixups.remove,
span_map.span_for_range(TextRange::empty(0.into())),
DocCommentDesugarMode::Mbe,
|_, _| (true, Vec::new()),
);
let actual = format!("{tt}\n");
@ -697,7 +698,7 @@ fn foo() {
}
"#,
expect![[r#"
fn foo () {a . __ra_fixup ;}
fn foo () {a .__ra_fixup ;}
"#]],
)
}
@ -712,7 +713,7 @@ fn foo() {
}
"#,
expect![[r#"
fn foo () {a . __ra_fixup ; bar () ;}
fn foo () {a .__ra_fixup ; bar () ;}
"#]],
)
}

View file

@ -25,18 +25,17 @@ mod cfg_process;
mod fixup;
mod prettify_macro_expansion_;
use attrs::collect_attrs;
use rustc_hash::FxHashMap;
use salsa::plumbing::{AsId, FromId};
use stdx::TupleExt;
use thin_vec::ThinVec;
use triomphe::Arc;
use core::fmt;
use std::hash::Hash;
use std::{hash::Hash, ops};
use base_db::Crate;
use either::Either;
use span::{Edition, ErasedFileAstId, FileAstId, Span, SpanAnchor, SyntaxContext};
use span::{Edition, ErasedFileAstId, FileAstId, Span, SyntaxContext};
use syntax::{
SyntaxNode, SyntaxToken, TextRange, TextSize,
ast::{self, AstNode},
@ -317,9 +316,6 @@ pub enum MacroCallKind {
Derive {
ast_id: AstId<ast::Adt>,
/// Syntactical index of the invoking `#[derive]` attribute.
///
/// Outer attributes are counted first, then inner attributes. This does not support
/// out-of-line modules, which may have attributes spread across 2 files!
derive_attr_index: AttrId,
/// Index of the derive macro in the derive attribute
derive_index: u32,
@ -329,17 +325,68 @@ pub enum MacroCallKind {
},
Attr {
ast_id: AstId<ast::Item>,
// FIXME: This shouldn't be here, we can derive this from `invoc_attr_index`
// but we need to fix the `cfg_attr` handling first.
// FIXME: This shouldn't be here, we can derive this from `invoc_attr_index`.
attr_args: Option<Arc<tt::TopSubtree>>,
/// Syntactical index of the invoking `#[attribute]`.
/// This contains the list of all *active* attributes (derives and attr macros) preceding this
/// attribute, including this attribute. You can retrieve the [`AttrId`] of the current attribute
/// by calling [`invoc_attr()`] on this.
///
/// Outer attributes are counted first, then inner attributes. This does not support
/// out-of-line modules, which may have attributes spread across 2 files!
invoc_attr_index: AttrId,
/// The macro should not see the attributes here.
///
/// [`invoc_attr()`]: AttrMacroAttrIds::invoc_attr
censored_attr_ids: AttrMacroAttrIds,
},
}
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub struct AttrMacroAttrIds(AttrMacroAttrIdsRepr);
impl AttrMacroAttrIds {
#[inline]
pub fn from_one(id: AttrId) -> Self {
Self(AttrMacroAttrIdsRepr::One(id))
}
#[inline]
pub fn from_many(ids: &[AttrId]) -> Self {
if let &[id] = ids {
Self(AttrMacroAttrIdsRepr::One(id))
} else {
Self(AttrMacroAttrIdsRepr::ManyDerives(ids.iter().copied().collect()))
}
}
}
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
enum AttrMacroAttrIdsRepr {
One(AttrId),
ManyDerives(ThinVec<AttrId>),
}
impl ops::Deref for AttrMacroAttrIds {
type Target = [AttrId];
#[inline]
fn deref(&self) -> &Self::Target {
match &self.0 {
AttrMacroAttrIdsRepr::One(one) => std::slice::from_ref(one),
AttrMacroAttrIdsRepr::ManyDerives(many) => many,
}
}
}
impl AttrMacroAttrIds {
#[inline]
pub fn invoc_attr(&self) -> AttrId {
match &self.0 {
AttrMacroAttrIdsRepr::One(it) => *it,
AttrMacroAttrIdsRepr::ManyDerives(it) => {
*it.last().expect("should always have at least one `AttrId`")
}
}
}
}
impl MacroCallKind {
pub(crate) fn call_style(&self) -> MacroCallStyle {
match self {
@ -597,34 +644,20 @@ impl MacroDefId {
impl MacroCallLoc {
pub fn to_node(&self, db: &dyn ExpandDatabase) -> InFile<SyntaxNode> {
match self.kind {
match &self.kind {
MacroCallKind::FnLike { ast_id, .. } => {
ast_id.with_value(ast_id.to_node(db).syntax().clone())
}
MacroCallKind::Derive { ast_id, derive_attr_index, .. } => {
// FIXME: handle `cfg_attr`
ast_id.with_value(ast_id.to_node(db)).map(|it| {
collect_attrs(&it)
.nth(derive_attr_index.ast_index())
.and_then(|it| match it.1 {
Either::Left(attr) => Some(attr.syntax().clone()),
Either::Right(_) => None,
})
.unwrap_or_else(|| it.syntax().clone())
})
let (attr, _, _, _) = derive_attr_index.find_attr_range(db, self.krate, *ast_id);
ast_id.with_value(attr.syntax().clone())
}
MacroCallKind::Attr { ast_id, invoc_attr_index, .. } => {
MacroCallKind::Attr { ast_id, censored_attr_ids: attr_ids, .. } => {
if self.def.is_attribute_derive() {
// FIXME: handle `cfg_attr`
ast_id.with_value(ast_id.to_node(db)).map(|it| {
collect_attrs(&it)
.nth(invoc_attr_index.ast_index())
.and_then(|it| match it.1 {
Either::Left(attr) => Some(attr.syntax().clone()),
Either::Right(_) => None,
})
.unwrap_or_else(|| it.syntax().clone())
})
let (attr, _, _, _) =
attr_ids.invoc_attr().find_attr_range(db, self.krate, *ast_id);
ast_id.with_value(attr.syntax().clone())
} else {
ast_id.with_value(ast_id.to_node(db).syntax().clone())
}
@ -729,7 +762,7 @@ impl MacroCallKind {
/// Here we try to roughly match what rustc does to improve diagnostics: fn-like macros
/// get the macro path (rustc shows the whole `ast::MacroCall`), attribute macros get the
/// attribute's range, and derives get only the specific derive that is being referred to.
pub fn original_call_range(self, db: &dyn ExpandDatabase) -> FileRange {
pub fn original_call_range(self, db: &dyn ExpandDatabase, krate: Crate) -> FileRange {
let mut kind = self;
let file_id = loop {
match kind.file_id() {
@ -751,24 +784,11 @@ impl MacroCallKind {
}
MacroCallKind::Derive { ast_id, derive_attr_index, .. } => {
// FIXME: should be the range of the macro name, not the whole derive
// FIXME: handle `cfg_attr`
collect_attrs(&ast_id.to_node(db))
.nth(derive_attr_index.ast_index())
.expect("missing derive")
.1
.expect_left("derive is a doc comment?")
.syntax()
.text_range()
derive_attr_index.find_attr_range(db, krate, ast_id).2
}
// FIXME: handle `cfg_attr`
MacroCallKind::Attr { ast_id, invoc_attr_index, .. } => {
collect_attrs(&ast_id.to_node(db))
.nth(invoc_attr_index.ast_index())
.expect("missing attribute")
.1
.expect_left("attribute macro is a doc comment?")
.syntax()
.text_range()
MacroCallKind::Attr { ast_id, censored_attr_ids: attr_ids, .. } => {
attr_ids.invoc_attr().find_attr_range(db, krate, ast_id).2
}
};
@ -887,7 +907,8 @@ impl ExpansionInfo {
let span = self.exp_map.span_at(token.start());
match &self.arg_map {
SpanMap::RealSpanMap(_) => {
let file_id = EditionedFileId::from_span(db, span.anchor.file_id).into();
let file_id =
EditionedFileId::from_span_guess_origin(db, span.anchor.file_id).into();
let anchor_offset =
db.ast_id_map(file_id).get_erased(span.anchor.ast_id).text_range().start();
InFile { file_id, value: smallvec::smallvec![span.range + anchor_offset] }
@ -943,7 +964,7 @@ pub fn map_node_range_up_rooted(
start = start.min(span.range.start());
end = end.max(span.range.end());
}
let file_id = EditionedFileId::from_span(db, anchor.file_id);
let file_id = EditionedFileId::from_span_guess_origin(db, anchor.file_id);
let anchor_offset =
db.ast_id_map(file_id.into()).get_erased(anchor.ast_id).text_range().start();
Some(FileRange { file_id, range: TextRange::new(start, end) + anchor_offset })
@ -969,36 +990,12 @@ pub fn map_node_range_up(
start = start.min(span.range.start());
end = end.max(span.range.end());
}
let file_id = EditionedFileId::from_span(db, anchor.file_id);
let file_id = EditionedFileId::from_span_guess_origin(db, anchor.file_id);
let anchor_offset =
db.ast_id_map(file_id.into()).get_erased(anchor.ast_id).text_range().start();
Some((FileRange { file_id, range: TextRange::new(start, end) + anchor_offset }, ctx))
}
/// Maps up the text range out of the expansion hierarchy back into the original file its from.
/// This version will aggregate the ranges of all spans with the same anchor and syntax context.
pub fn map_node_range_up_aggregated(
db: &dyn ExpandDatabase,
exp_map: &ExpansionSpanMap,
range: TextRange,
) -> FxHashMap<(SpanAnchor, SyntaxContext), TextRange> {
let mut map = FxHashMap::default();
for span in exp_map.spans_for_range(range) {
let range = map.entry((span.anchor, span.ctx)).or_insert_with(|| span.range);
*range = TextRange::new(
range.start().min(span.range.start()),
range.end().max(span.range.end()),
);
}
for ((anchor, _), range) in &mut map {
let file_id = EditionedFileId::from_span(db, anchor.file_id);
let anchor_offset =
db.ast_id_map(file_id.into()).get_erased(anchor.ast_id).text_range().start();
*range += anchor_offset;
}
map
}
/// Looks up the span at the given offset.
pub fn span_for_offset(
db: &dyn ExpandDatabase,
@ -1006,7 +1003,7 @@ pub fn span_for_offset(
offset: TextSize,
) -> (FileRange, SyntaxContext) {
let span = exp_map.span_at(offset);
let file_id = EditionedFileId::from_span(db, span.anchor.file_id);
let file_id = EditionedFileId::from_span_guess_origin(db, span.anchor.file_id);
let anchor_offset =
db.ast_id_map(file_id.into()).get_erased(span.anchor.ast_id).text_range().start();
(FileRange { file_id, range: span.range + anchor_offset }, span.ctx)
@ -1076,7 +1073,7 @@ impl ExpandTo {
}
}
intern::impl_internable!(ModPath, attrs::AttrInput);
intern::impl_internable!(ModPath);
#[salsa_macros::interned(no_lifetime, debug, revisions = usize::MAX)]
#[doc(alias = "MacroFileId")]
@ -1139,6 +1136,14 @@ impl HirFileId {
HirFileId::MacroFile(_) => None,
}
}
#[inline]
pub fn krate(self, db: &dyn ExpandDatabase) -> Crate {
match self {
HirFileId::FileId(it) => it.krate(db),
HirFileId::MacroFile(it) => it.loc(db).krate,
}
}
}
impl PartialEq<EditionedFileId> for HirFileId {

View file

@ -2,7 +2,7 @@
use std::{
fmt::{self, Display as _},
iter,
iter::{self, Peekable},
};
use crate::{
@ -12,10 +12,11 @@ use crate::{
tt,
};
use base_db::Crate;
use intern::sym;
use intern::{Symbol, sym};
use parser::T;
use smallvec::SmallVec;
use span::{Edition, SyntaxContext};
use syntax::{AstNode, ast};
use syntax::{AstNode, SyntaxToken, ast};
#[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)]
pub struct ModPath {
@ -64,6 +65,58 @@ impl ModPath {
ModPath { kind, segments: SmallVec::new_const() }
}
pub fn from_tokens(
db: &dyn ExpandDatabase,
span_for_range: &mut dyn FnMut(::tt::TextRange) -> SyntaxContext,
is_abs: bool,
segments: impl Iterator<Item = SyntaxToken>,
) -> Option<ModPath> {
let mut segments = segments.peekable();
let mut result = SmallVec::new_const();
let path_kind = if is_abs {
PathKind::Abs
} else {
let first = segments.next()?;
match first.kind() {
T![crate] => PathKind::Crate,
T![self] => PathKind::Super(handle_super(&mut segments)),
T![super] => PathKind::Super(1 + handle_super(&mut segments)),
T![ident] => {
let first_text = first.text();
if first_text == "$crate" {
let ctxt = span_for_range(first.text_range());
resolve_crate_root(db, ctxt)
.map(PathKind::DollarCrate)
.unwrap_or(PathKind::Crate)
} else {
result.push(Name::new_symbol_root(Symbol::intern(first_text)));
PathKind::Plain
}
}
_ => return None,
}
};
for segment in segments {
if segment.kind() != T![ident] {
return None;
}
result.push(Name::new_symbol_root(Symbol::intern(segment.text())));
}
if result.is_empty() {
return None;
}
result.shrink_to_fit();
return Some(ModPath { kind: path_kind, segments: result });
fn handle_super(segments: &mut Peekable<impl Iterator<Item = SyntaxToken>>) -> u8 {
let mut result = 0;
while segments.next_if(|it| it.kind() == T![super]).is_some() {
result += 1;
}
result
}
}
pub fn segments(&self) -> &[Name] {
&self.segments
}

View file

@ -1,13 +1,12 @@
//! Span maps for real files and macro expansions.
use span::{Span, SyntaxContext};
use stdx::TupleExt;
use syntax::{AstNode, TextRange, ast};
use triomphe::Arc;
pub use span::RealSpanMap;
use crate::{HirFileId, MacroCallId, attrs::collect_attrs, db::ExpandDatabase};
use crate::{HirFileId, MacroCallId, db::ExpandDatabase};
pub type ExpansionSpanMap = span::SpanMap<SyntaxContext>;
@ -110,26 +109,24 @@ pub(crate) fn real_span_map(
// them anchors too, but only if they have no attributes attached, as those might be proc-macros
// and using different anchors inside of them will prevent spans from being joinable.
tree.items().for_each(|item| match &item {
ast::Item::ExternBlock(it)
if !collect_attrs(it).map(TupleExt::tail).any(|it| it.is_left()) =>
{
ast::Item::ExternBlock(it) if ast::attrs_including_inner(it).next().is_none() => {
if let Some(extern_item_list) = it.extern_item_list() {
pairs.extend(
extern_item_list.extern_items().map(ast::Item::from).map(item_to_entry),
);
}
}
ast::Item::Impl(it) if !collect_attrs(it).map(TupleExt::tail).any(|it| it.is_left()) => {
ast::Item::Impl(it) if ast::attrs_including_inner(it).next().is_none() => {
if let Some(assoc_item_list) = it.assoc_item_list() {
pairs.extend(assoc_item_list.assoc_items().map(ast::Item::from).map(item_to_entry));
}
}
ast::Item::Module(it) if !collect_attrs(it).map(TupleExt::tail).any(|it| it.is_left()) => {
ast::Item::Module(it) if ast::attrs_including_inner(it).next().is_none() => {
if let Some(item_list) = it.item_list() {
pairs.extend(item_list.items().map(item_to_entry));
}
}
ast::Item::Trait(it) if !collect_attrs(it).map(TupleExt::tail).any(|it| it.is_left()) => {
ast::Item::Trait(it) if ast::attrs_including_inner(it).next().is_none() => {
if let Some(assoc_item_list) = it.assoc_item_list() {
pairs.extend(assoc_item_list.assoc_items().map(ast::Item::from).map(item_to_entry));
}

View file

@ -6,6 +6,7 @@ mod tests;
use base_db::Crate;
use hir_def::{
ConstId, EnumVariantId, GeneralConstId, StaticId,
attrs::AttrFlags,
expr_store::Body,
hir::{Expr, ExprId},
type_ref::LiteralConstRef,
@ -200,7 +201,7 @@ pub(crate) fn const_eval_discriminant_variant<'db>(
return Ok(value);
}
let repr = db.enum_signature(loc.parent).repr;
let repr = AttrFlags::repr(db, loc.parent.into());
let is_signed = repr.and_then(|repr| repr.int).is_none_or(|int| int.is_signed());
let mir_body = db.monomorphized_mir_body(

View file

@ -17,8 +17,8 @@ use std::fmt;
use hir_def::{
AdtId, ConstId, EnumId, EnumVariantId, FunctionId, HasModule, ItemContainerId, Lookup,
ModuleDefId, ModuleId, StaticId, StructId, TraitId, TypeAliasId, db::DefDatabase, hir::Pat,
item_tree::FieldsShape, signatures::StaticFlags, src::HasSource,
ModuleDefId, ModuleId, StaticId, StructId, TraitId, TypeAliasId, attrs::AttrFlags,
db::DefDatabase, hir::Pat, item_tree::FieldsShape, signatures::StaticFlags, src::HasSource,
};
use hir_expand::{
HirFileId,
@ -201,7 +201,7 @@ impl<'a> DeclValidator<'a> {
// Don't run the lint on extern "[not Rust]" fn items with the
// #[no_mangle] attribute.
let no_mangle = self.db.attrs(func.into()).by_key(sym::no_mangle).exists();
let no_mangle = AttrFlags::query(self.db, func.into()).contains(AttrFlags::NO_MANGLE);
if no_mangle && data.abi.as_ref().is_some_and(|abi| *abi != sym::Rust) {
cov_mark::hit!(extern_func_no_mangle_ignored);
} else {
@ -563,7 +563,7 @@ impl<'a> DeclValidator<'a> {
cov_mark::hit!(extern_static_incorrect_case_ignored);
return;
}
if self.db.attrs(static_id.into()).by_key(sym::no_mangle).exists() {
if AttrFlags::query(self.db, static_id.into()).contains(AttrFlags::NO_MANGLE) {
cov_mark::hit!(no_mangle_static_incorrect_case_ignored);
return;
}

View file

@ -2,7 +2,9 @@
use std::{cell::LazyCell, fmt};
use hir_def::{EnumId, EnumVariantId, HasModule, LocalFieldId, ModuleId, VariantId};
use hir_def::{
EnumId, EnumVariantId, HasModule, LocalFieldId, ModuleId, VariantId, attrs::AttrFlags,
};
use intern::sym;
use rustc_pattern_analysis::{
IndexVec, PatCx, PrivateUninhabitedField,
@ -118,7 +120,7 @@ impl<'a, 'db> MatchCheckCtx<'a, 'db> {
/// Returns whether the given ADT is from another crate declared `#[non_exhaustive]`.
fn is_foreign_non_exhaustive(&self, adt: hir_def::AdtId) -> bool {
let is_local = adt.krate(self.db) == self.module.krate();
!is_local && self.db.attrs(adt.into()).by_key(sym::non_exhaustive).exists()
!is_local && AttrFlags::query(self.db, adt.into()).contains(AttrFlags::NON_EXHAUSTIVE)
}
fn variant_id_for_adt(

View file

@ -144,7 +144,7 @@ struct UnsafeVisitor<'db> {
inside_assignment: bool,
inside_union_destructure: bool,
callback: &'db mut dyn FnMut(UnsafeDiagnostic),
def_target_features: TargetFeatures,
def_target_features: TargetFeatures<'db>,
// FIXME: This needs to be the edition of the span of each call.
edition: Edition,
/// On some targets (WASM), calling safe functions with `#[target_feature]` is always safe, even when
@ -162,7 +162,7 @@ impl<'db> UnsafeVisitor<'db> {
) -> Self {
let resolver = def.resolver(db);
let def_target_features = match def {
DefWithBodyId::FunctionId(func) => TargetFeatures::from_attrs(&db.attrs(func.into())),
DefWithBodyId::FunctionId(func) => TargetFeatures::from_fn(db, func),
_ => TargetFeatures::default(),
};
let krate = resolver.module().krate();

View file

@ -40,7 +40,7 @@ use hir_def::{
lang_item::LangItems,
layout::Integer,
resolver::{HasResolver, ResolveValueResult, Resolver, TypeNs, ValueNs},
signatures::{ConstSignature, StaticSignature},
signatures::{ConstSignature, EnumSignature, StaticSignature},
type_ref::{ConstRef, LifetimeRefId, TypeRef, TypeRefId},
};
use hir_expand::{mod_path::ModPath, name::Name};
@ -108,7 +108,7 @@ pub(crate) fn infer_query(db: &dyn HirDatabase, def: DefWithBodyId) -> Arc<Infer
DefWithBodyId::ConstId(c) => ctx.collect_const(c, &db.const_signature(c)),
DefWithBodyId::StaticId(s) => ctx.collect_static(&db.static_signature(s)),
DefWithBodyId::VariantId(v) => {
ctx.return_ty = match db.enum_signature(v.lookup(db).parent).variant_body_type() {
ctx.return_ty = match EnumSignature::variant_body_type(db, v.lookup(db).parent) {
hir_def::layout::IntegerType::Pointer(signed) => match signed {
true => ctx.types.isize,
false => ctx.types.usize,
@ -829,7 +829,7 @@ pub(crate) struct InferenceContext<'body, 'db> {
/// Generally you should not resolve things via this resolver. Instead create a TyLoweringContext
/// and resolve the path via its methods. This will ensure proper error reporting.
pub(crate) resolver: Resolver<'db>,
target_features: OnceCell<(TargetFeatures, TargetFeatureIsSafeInTarget)>,
target_features: OnceCell<(TargetFeatures<'db>, TargetFeatureIsSafeInTarget)>,
pub(crate) unstable_features: MethodResolutionUnstableFeatures,
pub(crate) edition: Edition,
pub(crate) generic_def: GenericDefId,
@ -975,12 +975,10 @@ impl<'body, 'db> InferenceContext<'body, 'db> {
self.resolver.krate()
}
fn target_features(&self) -> (&TargetFeatures, TargetFeatureIsSafeInTarget) {
fn target_features(&self) -> (&TargetFeatures<'db>, TargetFeatureIsSafeInTarget) {
let (target_features, target_feature_is_safe) = self.target_features.get_or_init(|| {
let target_features = match self.owner {
DefWithBodyId::FunctionId(id) => {
TargetFeatures::from_attrs(&self.db.attrs(id.into()))
}
DefWithBodyId::FunctionId(id) => TargetFeatures::from_fn(self.db, id),
_ => TargetFeatures::default(),
};
let target_feature_is_safe = match &self.krate().workspace_data(self.db).target {

View file

@ -37,10 +37,10 @@
use hir_def::{
CallableDefId,
attrs::AttrFlags,
hir::{ExprId, ExprOrPatId},
signatures::FunctionSignature,
};
use intern::sym;
use rustc_ast_ir::Mutability;
use rustc_type_ir::{
BoundVar, DebruijnIndex, TyVid, TypeAndMut, TypeFoldable, TypeFolder, TypeSuperFoldable,
@ -78,7 +78,7 @@ use crate::{
trait CoerceDelegate<'db> {
fn infcx(&self) -> &InferCtxt<'db>;
fn env(&self) -> &TraitEnvironment<'db>;
fn target_features(&self) -> (&TargetFeatures, TargetFeatureIsSafeInTarget);
fn target_features(&self) -> (&TargetFeatures<'db>, TargetFeatureIsSafeInTarget);
fn set_diverging(&mut self, diverging_ty: Ty<'db>);
@ -852,14 +852,14 @@ where
return Err(TypeError::IntrinsicCast);
}
let attrs = self.db().attrs(def_id.into());
if attrs.by_key(sym::rustc_force_inline).exists() {
let attrs = AttrFlags::query(self.db(), def_id.into());
if attrs.contains(AttrFlags::RUSTC_FORCE_INLINE) {
return Err(TypeError::ForceInlineCast);
}
if b_hdr.safety.is_safe() && attrs.by_key(sym::target_feature).exists() {
if b_hdr.safety.is_safe() && attrs.contains(AttrFlags::HAS_TARGET_FEATURE) {
let fn_target_features =
TargetFeatures::from_attrs_no_implications(&attrs);
TargetFeatures::from_fn_no_implications(self.db(), def_id);
// Allow the coercion if the current function has all the features that would be
// needed to call the coercee safely.
let (target_features, target_feature_is_safe) =
@ -978,8 +978,9 @@ impl<'db> CoerceDelegate<'db> for InferenceCoercionDelegate<'_, '_, 'db> {
fn env(&self) -> &TraitEnvironment<'db> {
&self.0.table.trait_env
}
#[inline]
fn target_features(&self) -> (&TargetFeatures, TargetFeatureIsSafeInTarget) {
fn target_features(&self) -> (&TargetFeatures<'db>, TargetFeatureIsSafeInTarget) {
self.0.target_features()
}
@ -1072,7 +1073,7 @@ impl<'db> InferenceContext<'_, 'db> {
let is_force_inline = |ty: Ty<'db>| {
if let TyKind::FnDef(CallableIdWrapper(CallableDefId::FunctionId(did)), _) = ty.kind() {
self.db.attrs(did.into()).by_key(sym::rustc_force_inline).exists()
AttrFlags::query(self.db, did.into()).contains(AttrFlags::RUSTC_FORCE_INLINE)
} else {
false
}
@ -1548,7 +1549,7 @@ pub fn could_coerce<'db>(
struct HirCoercionDelegate<'a, 'db> {
infcx: &'a InferCtxt<'db>,
env: &'a TraitEnvironment<'db>,
target_features: &'a TargetFeatures,
target_features: &'a TargetFeatures<'db>,
}
impl<'db> CoerceDelegate<'db> for HirCoercionDelegate<'_, 'db> {
@ -1560,7 +1561,7 @@ impl<'db> CoerceDelegate<'db> for HirCoercionDelegate<'_, 'db> {
fn env(&self) -> &TraitEnvironment<'db> {
self.env
}
fn target_features(&self) -> (&TargetFeatures, TargetFeatureIsSafeInTarget) {
fn target_features(&self) -> (&TargetFeatures<'db>, TargetFeatureIsSafeInTarget) {
(self.target_features, TargetFeatureIsSafeInTarget::No)
}
fn set_diverging(&mut self, _diverging_ty: Ty<'db>) {}

View file

@ -2188,9 +2188,11 @@ impl<'db> InferenceContext<'_, 'db> {
};
let data = self.db.function_signature(func);
let Some(legacy_const_generics_indices) = &data.legacy_const_generics_indices else {
let Some(legacy_const_generics_indices) = data.legacy_const_generics_indices(self.db, func)
else {
return Default::default();
};
let mut legacy_const_generics_indices = Box::<[u32]>::from(legacy_const_generics_indices);
// only use legacy const generics if the param count matches with them
if data.params.len() + legacy_const_generics_indices.len() != args.len() {
@ -2199,9 +2201,8 @@ impl<'db> InferenceContext<'_, 'db> {
} else {
// there are more parameters than there should be without legacy
// const params; use them
let mut indices = legacy_const_generics_indices.as_ref().clone();
indices.sort();
return indices;
legacy_const_generics_indices.sort_unstable();
return legacy_const_generics_indices;
}
}
@ -2214,9 +2215,8 @@ impl<'db> InferenceContext<'_, 'db> {
self.infer_expr(args[arg_idx as usize], &expected, ExprIsRead::Yes);
// FIXME: evaluate and unify with the const
}
let mut indices = legacy_const_generics_indices.as_ref().clone();
indices.sort();
indices
legacy_const_generics_indices.sort_unstable();
legacy_const_generics_indices
}
pub(super) fn with_breakable_ctx<T>(

View file

@ -4,6 +4,7 @@ use std::fmt;
use hir_def::{
AdtId, LocalFieldId, StructId,
attrs::AttrFlags,
layout::{LayoutCalculatorError, LayoutData},
};
use la_arena::{Idx, RawIdx};
@ -174,8 +175,7 @@ pub fn layout_of_ty_query<'db>(
TyKind::Adt(def, args) => {
match def.inner().id {
hir_def::AdtId::StructId(s) => {
let data = db.struct_signature(s);
let repr = data.repr.unwrap_or_default();
let repr = AttrFlags::repr(db, s.into()).unwrap_or_default();
if repr.simd() {
return layout_of_simd_ty(db, s, repr.packed(), &args, trait_env, &target);
}

View file

@ -4,9 +4,9 @@ use std::{cmp, ops::Bound};
use hir_def::{
AdtId, VariantId,
attrs::AttrFlags,
signatures::{StructFlags, VariantFields},
};
use intern::sym;
use rustc_abi::{Integer, ReprOptions, TargetDataLayout};
use rustc_index::IndexVec;
use smallvec::SmallVec;
@ -44,15 +44,15 @@ pub fn layout_of_adt_query<'db>(
r.push(handle_variant(s.into(), s.fields(db))?);
(
r,
sig.repr.unwrap_or_default(),
AttrFlags::repr(db, s.into()).unwrap_or_default(),
sig.flags.intersects(StructFlags::IS_UNSAFE_CELL | StructFlags::IS_UNSAFE_PINNED),
)
}
AdtId::UnionId(id) => {
let data = db.union_signature(id);
let repr = AttrFlags::repr(db, id.into());
let mut r = SmallVec::new();
r.push(handle_variant(id.into(), id.fields(db))?);
(r, data.repr.unwrap_or_default(), false)
(r, repr.unwrap_or_default(), false)
}
AdtId::EnumId(e) => {
let variants = e.enum_variants(db);
@ -61,7 +61,7 @@ pub fn layout_of_adt_query<'db>(
.iter()
.map(|&(v, _, _)| handle_variant(v.into(), v.fields(db)))
.collect::<Result<SmallVec<_>, _>>()?;
(r, db.enum_signature(e).repr.unwrap_or_default(), false)
(r, AttrFlags::repr(db, e.into()).unwrap_or_default(), false)
}
};
let variants = variants
@ -105,27 +105,12 @@ pub(crate) fn layout_of_adt_cycle_result<'db>(
}
fn layout_scalar_valid_range(db: &dyn HirDatabase, def: AdtId) -> (Bound<u128>, Bound<u128>) {
let attrs = db.attrs(def.into());
let get = |name| {
let attr = attrs.by_key(name).tt_values();
for tree in attr {
if let Some(it) = tree.iter().next_as_view() {
let text = it.to_string().replace('_', "");
let (text, base) = match text.as_bytes() {
[b'0', b'x', ..] => (&text[2..], 16),
[b'0', b'o', ..] => (&text[2..], 8),
[b'0', b'b', ..] => (&text[2..], 2),
_ => (&*text, 10),
};
if let Ok(it) = u128::from_str_radix(text, base) {
return Bound::Included(it);
}
}
}
Bound::Unbounded
let range = AttrFlags::rustc_layout_scalar_valid_range(db, def);
let get = |value| match value {
Some(it) => Bound::Included(it),
None => Bound::Unbounded,
};
(get(sym::rustc_layout_scalar_valid_range_start), get(sym::rustc_layout_scalar_valid_range_end))
(get(range.start), get(range.end))
}
/// Finds the appropriate Integer type and signedness for the given

View file

@ -15,6 +15,7 @@ use base_db::Crate;
use hir_def::{
AssocItemId, BlockId, ConstId, FunctionId, GenericParamId, HasModule, ImplId, ItemContainerId,
ModuleId, TraitId,
attrs::AttrFlags,
expr_store::path::GenericArgs as HirGenericArgs,
hir::ExprId,
nameres::{DefMap, block_def_map, crate_def_map},
@ -509,9 +510,8 @@ fn crates_containing_incoherent_inherent_impls(db: &dyn HirDatabase) -> Box<[Cra
pub fn incoherent_inherent_impls(db: &dyn HirDatabase, self_ty: SimplifiedType) -> &[ImplId] {
let has_incoherent_impls = match self_ty.def() {
Some(def_id) => match def_id.try_into() {
Ok(def_id) => {
db.attrs(def_id).by_key(sym::rustc_has_incoherent_inherent_impls).exists()
}
Ok(def_id) => AttrFlags::query(db, def_id)
.contains(AttrFlags::RUSTC_HAS_INCOHERENT_INHERENT_IMPLS),
Err(()) => true,
},
_ => true,
@ -715,7 +715,9 @@ impl TraitImpls {
// FIXME: Reservation impls should be considered during coherence checks. If we are
// (ever) to implement coherence checks, this filtering should be done by the trait
// solver.
if db.attrs(impl_id.into()).by_key(sym::rustc_reservation_impl).exists() {
if AttrFlags::query(db, impl_id.into())
.contains(AttrFlags::RUSTC_RESERVATION_IMPL)
{
continue;
}
let trait_ref = match db.impl_trait(impl_id) {

View file

@ -3,9 +3,9 @@
//!
use std::cmp::{self, Ordering};
use hir_def::signatures::FunctionSignature;
use hir_def::{attrs::AttrFlags, signatures::FunctionSignature};
use hir_expand::name::Name;
use intern::{Symbol, sym};
use intern::sym;
use rustc_type_ir::inherent::{AdtDef, IntoKind, SliceLike, Ty as _};
use stdx::never;
@ -60,7 +60,7 @@ impl<'db> Evaluator<'db> {
}
let function_data = self.db.function_signature(def);
let attrs = self.db.attrs(def.into());
let attrs = AttrFlags::query(self.db, def.into());
let is_intrinsic = FunctionSignature::is_intrinsic(self.db, def);
if is_intrinsic {
@ -72,7 +72,7 @@ impl<'db> Evaluator<'db> {
locals,
span,
!function_data.has_body()
|| attrs.by_key(sym::rustc_intrinsic_must_be_overridden).exists(),
|| attrs.contains(AttrFlags::RUSTC_INTRINSIC_MUST_BE_OVERRIDDEN),
);
}
let is_extern_c = match def.lookup(self.db).container {
@ -92,18 +92,13 @@ impl<'db> Evaluator<'db> {
.map(|()| true);
}
let alloc_fn =
attrs.iter().filter_map(|it| it.path().as_ident()).map(|it| it.symbol()).find(|it| {
[
&sym::rustc_allocator,
&sym::rustc_deallocator,
&sym::rustc_reallocator,
&sym::rustc_allocator_zeroed,
]
.contains(it)
});
if let Some(alloc_fn) = alloc_fn {
self.exec_alloc_fn(alloc_fn, args, destination)?;
if attrs.intersects(
AttrFlags::RUSTC_ALLOCATOR
| AttrFlags::RUSTC_DEALLOCATOR
| AttrFlags::RUSTC_REALLOCATOR
| AttrFlags::RUSTC_ALLOCATOR_ZEROED,
) {
self.exec_alloc_fn(attrs, args, destination)?;
return Ok(true);
}
if let Some(it) = self.detect_lang_function(def) {
@ -248,12 +243,14 @@ impl<'db> Evaluator<'db> {
fn exec_alloc_fn(
&mut self,
alloc_fn: &Symbol,
alloc_fn: AttrFlags,
args: &[IntervalAndTy<'db>],
destination: Interval,
) -> Result<'db, ()> {
match alloc_fn {
_ if *alloc_fn == sym::rustc_allocator_zeroed || *alloc_fn == sym::rustc_allocator => {
_ if alloc_fn
.intersects(AttrFlags::RUSTC_ALLOCATOR_ZEROED | AttrFlags::RUSTC_ALLOCATOR) =>
{
let [size, align] = args else {
return Err(MirEvalError::InternalError(
"rustc_allocator args are not provided".into(),
@ -264,8 +261,8 @@ impl<'db> Evaluator<'db> {
let result = self.heap_allocate(size, align)?;
destination.write_from_bytes(self, &result.to_bytes())?;
}
_ if *alloc_fn == sym::rustc_deallocator => { /* no-op for now */ }
_ if *alloc_fn == sym::rustc_reallocator => {
_ if alloc_fn.contains(AttrFlags::RUSTC_DEALLOCATOR) => { /* no-op for now */ }
_ if alloc_fn.contains(AttrFlags::RUSTC_REALLOCATOR) => {
let [ptr, old_size, align, new_size] = args else {
return Err(MirEvalError::InternalError(
"rustc_allocator args are not provided".into(),
@ -292,9 +289,9 @@ impl<'db> Evaluator<'db> {
fn detect_lang_function(&self, def: FunctionId) -> Option<EvalLangItem> {
use EvalLangItem::*;
let lang_items = self.lang_items();
let attrs = self.db.attrs(def.into());
let attrs = AttrFlags::query(self.db, def.into());
if attrs.by_key(sym::rustc_const_panic_str).exists() {
if attrs.contains(AttrFlags::RUSTC_CONST_PANIC_STR) {
// `#[rustc_const_panic_str]` is treated like `lang = "begin_panic"` by rustc CTFE.
return Some(BeginPanic);
}

View file

@ -10,6 +10,7 @@ use base_db::Crate;
use hir_def::{
AdtId, BlockId, CallableDefId, DefWithBodyId, EnumVariantId, HasModule, ItemContainerId,
StructId, UnionId, VariantId,
attrs::AttrFlags,
lang_item::LangItems,
signatures::{FieldData, FnFlags, ImplFlags, StructFlags, TraitFlags},
};
@ -507,28 +508,28 @@ impl AdtDef {
let variants = vec![(VariantIdx(0), VariantDef::Struct(struct_id))];
let mut repr = ReprOptions::default();
repr.align = data.repr.and_then(|r| r.align);
repr.pack = data.repr.and_then(|r| r.pack);
repr.int = data.repr.and_then(|r| r.int);
let data_repr = data.repr(db, struct_id);
let mut repr_flags = ReprFlags::empty();
if flags.is_box {
repr_flags.insert(ReprFlags::IS_LINEAR);
}
if data.repr.is_some_and(|r| r.c()) {
if data_repr.is_some_and(|r| r.c()) {
repr_flags.insert(ReprFlags::IS_C);
}
if data.repr.is_some_and(|r| r.simd()) {
if data_repr.is_some_and(|r| r.simd()) {
repr_flags.insert(ReprFlags::IS_SIMD);
}
repr.flags = repr_flags;
let repr = ReprOptions {
align: data_repr.and_then(|r| r.align),
pack: data_repr.and_then(|r| r.pack),
int: data_repr.and_then(|r| r.int),
flags: repr_flags,
..ReprOptions::default()
};
(flags, variants, repr)
}
AdtId::UnionId(union_id) => {
let data = db.union_signature(union_id);
let flags = AdtFlags {
is_enum: false,
is_union: true,
@ -541,22 +542,24 @@ impl AdtDef {
let variants = vec![(VariantIdx(0), VariantDef::Union(union_id))];
let mut repr = ReprOptions::default();
repr.align = data.repr.and_then(|r| r.align);
repr.pack = data.repr.and_then(|r| r.pack);
repr.int = data.repr.and_then(|r| r.int);
let data_repr = AttrFlags::repr(db, union_id.into());
let mut repr_flags = ReprFlags::empty();
if flags.is_box {
repr_flags.insert(ReprFlags::IS_LINEAR);
}
if data.repr.is_some_and(|r| r.c()) {
if data_repr.is_some_and(|r| r.c()) {
repr_flags.insert(ReprFlags::IS_C);
}
if data.repr.is_some_and(|r| r.simd()) {
if data_repr.is_some_and(|r| r.simd()) {
repr_flags.insert(ReprFlags::IS_SIMD);
}
repr.flags = repr_flags;
let repr = ReprOptions {
align: data_repr.and_then(|r| r.align),
pack: data_repr.and_then(|r| r.pack),
int: data_repr.and_then(|r| r.int),
flags: repr_flags,
..ReprOptions::default()
};
(flags, variants, repr)
}
@ -580,24 +583,26 @@ impl AdtDef {
.map(|(idx, v)| (idx, VariantDef::Enum(v.0)))
.collect();
let data = db.enum_signature(enum_id);
let mut repr = ReprOptions::default();
repr.align = data.repr.and_then(|r| r.align);
repr.pack = data.repr.and_then(|r| r.pack);
repr.int = data.repr.and_then(|r| r.int);
let data_repr = AttrFlags::repr(db, enum_id.into());
let mut repr_flags = ReprFlags::empty();
if flags.is_box {
repr_flags.insert(ReprFlags::IS_LINEAR);
}
if data.repr.is_some_and(|r| r.c()) {
if data_repr.is_some_and(|r| r.c()) {
repr_flags.insert(ReprFlags::IS_C);
}
if data.repr.is_some_and(|r| r.simd()) {
if data_repr.is_some_and(|r| r.simd()) {
repr_flags.insert(ReprFlags::IS_SIMD);
}
repr.flags = repr_flags;
let repr = ReprOptions {
align: data_repr.and_then(|r| r.align),
pack: data_repr.and_then(|r| r.pack),
int: data_repr.and_then(|r| r.int),
flags: repr_flags,
..ReprOptions::default()
};
(flags, variants, repr)
}

View file

@ -1,31 +1,35 @@
//! Stuff for handling `#[target_feature]` (needed for unsafe check).
use std::borrow::Cow;
use std::sync::LazyLock;
use hir_def::attr::Attrs;
use hir_def::tt;
use intern::{Symbol, sym};
use hir_def::FunctionId;
use hir_def::attrs::AttrFlags;
use intern::Symbol;
use rustc_hash::{FxHashMap, FxHashSet};
use crate::db::HirDatabase;
#[derive(Debug, Default, Clone)]
pub struct TargetFeatures {
pub(crate) enabled: FxHashSet<Symbol>,
pub struct TargetFeatures<'db> {
pub(crate) enabled: Cow<'db, FxHashSet<Symbol>>,
}
impl TargetFeatures {
pub fn from_attrs(attrs: &Attrs) -> Self {
let mut result = TargetFeatures::from_attrs_no_implications(attrs);
impl<'db> TargetFeatures<'db> {
pub fn from_fn(db: &'db dyn HirDatabase, owner: FunctionId) -> Self {
let mut result = TargetFeatures::from_fn_no_implications(db, owner);
result.expand_implications();
result
}
fn expand_implications(&mut self) {
let all_implications = LazyLock::force(&TARGET_FEATURE_IMPLICATIONS);
let mut queue = self.enabled.iter().cloned().collect::<Vec<_>>();
let enabled = self.enabled.to_mut();
let mut queue = enabled.iter().cloned().collect::<Vec<_>>();
while let Some(feature) = queue.pop() {
if let Some(implications) = all_implications.get(&feature) {
for implication in implications {
if self.enabled.insert(implication.clone()) {
if enabled.insert(implication.clone()) {
queue.push(implication.clone());
}
}
@ -34,25 +38,9 @@ impl TargetFeatures {
}
/// Retrieves the target features from the attributes, and does not expand the target features implied by them.
pub(crate) fn from_attrs_no_implications(attrs: &Attrs) -> Self {
let enabled = attrs
.by_key(sym::target_feature)
.tt_values()
.filter_map(|tt| match tt.token_trees().flat_tokens() {
[
tt::TokenTree::Leaf(tt::Leaf::Ident(enable_ident)),
tt::TokenTree::Leaf(tt::Leaf::Punct(tt::Punct { char: '=', .. })),
tt::TokenTree::Leaf(tt::Leaf::Literal(tt::Literal {
kind: tt::LitKind::Str,
symbol: features,
..
})),
] if enable_ident.sym == sym::enable => Some(features),
_ => None,
})
.flat_map(|features| features.as_str().split(',').map(Symbol::intern))
.collect();
Self { enabled }
pub(crate) fn from_fn_no_implications(db: &'db dyn HirDatabase, owner: FunctionId) -> Self {
let enabled = AttrFlags::target_features(db, owner);
Self { enabled: Cow::Borrowed(enabled) }
}
}

View file

@ -31,7 +31,6 @@ fn foo() -> i32 {
&[("infer_shim", 1)],
expect_test::expect![[r#"
[
"source_root_crates_shim",
"crate_local_def_map",
"file_item_tree_query",
"ast_id_map_shim",
@ -40,7 +39,7 @@ fn foo() -> i32 {
"infer_shim",
"function_signature_shim",
"function_signature_with_source_map_shim",
"attrs_shim",
"AttrFlags::query_",
"body_shim",
"body_with_source_map_shim",
"trait_environment_shim",
@ -80,7 +79,7 @@ fn foo() -> i32 {
"ast_id_map_shim",
"file_item_tree_query",
"real_span_map_shim",
"attrs_shim",
"AttrFlags::query_",
"function_signature_with_source_map_shim",
"function_signature_shim",
"body_with_source_map_shim",
@ -119,7 +118,6 @@ fn baz() -> i32 {
&[("infer_shim", 3)],
expect_test::expect![[r#"
[
"source_root_crates_shim",
"crate_local_def_map",
"file_item_tree_query",
"ast_id_map_shim",
@ -128,14 +126,14 @@ fn baz() -> i32 {
"infer_shim",
"function_signature_shim",
"function_signature_with_source_map_shim",
"attrs_shim",
"AttrFlags::query_",
"body_shim",
"body_with_source_map_shim",
"trait_environment_shim",
"lang_items",
"crate_lang_items",
"attrs_shim",
"attrs_shim",
"AttrFlags::query_",
"AttrFlags::query_",
"GenericPredicates < 'db >::query_with_diagnostics_",
"ImplTraits < 'db >::return_type_impl_traits_",
"expr_scopes_shim",
@ -193,13 +191,13 @@ fn baz() -> i32 {
"ast_id_map_shim",
"file_item_tree_query",
"real_span_map_shim",
"attrs_shim",
"AttrFlags::query_",
"function_signature_with_source_map_shim",
"function_signature_shim",
"body_with_source_map_shim",
"body_shim",
"attrs_shim",
"attrs_shim",
"AttrFlags::query_",
"AttrFlags::query_",
"function_signature_with_source_map_shim",
"function_signature_shim",
"body_with_source_map_shim",
@ -239,7 +237,6 @@ $0",
&[("TraitImpls::for_crate_", 1)],
expect_test::expect![[r#"
[
"source_root_crates_shim",
"crate_local_def_map",
"file_item_tree_query",
"ast_id_map_shim",
@ -311,7 +308,6 @@ $0",
&[("TraitImpls::for_crate_", 1)],
expect_test::expect![[r#"
[
"source_root_crates_shim",
"crate_local_def_map",
"file_item_tree_query",
"ast_id_map_shim",
@ -384,7 +380,6 @@ $0",
&[("TraitImpls::for_crate_", 1)],
expect_test::expect![[r#"
[
"source_root_crates_shim",
"crate_local_def_map",
"file_item_tree_query",
"ast_id_map_shim",
@ -458,7 +453,6 @@ $0",
&[("TraitImpls::for_crate_", 1)],
expect_test::expect![[r#"
[
"source_root_crates_shim",
"crate_local_def_map",
"file_item_tree_query",
"ast_id_map_shim",
@ -507,17 +501,17 @@ impl SomeStruct {
"real_span_map_shim",
"crate_local_def_map",
"TraitImpls::for_crate_",
"attrs_shim",
"AttrFlags::query_",
"impl_trait_with_diagnostics_shim",
"impl_signature_shim",
"impl_signature_with_source_map_shim",
"lang_items",
"crate_lang_items",
"ImplItems::of_",
"attrs_shim",
"attrs_shim",
"attrs_shim",
"attrs_shim",
"AttrFlags::query_",
"AttrFlags::query_",
"AttrFlags::query_",
"AttrFlags::query_",
"impl_self_ty_with_diagnostics_shim",
"struct_signature_shim",
"struct_signature_with_source_map_shim",
@ -570,7 +564,6 @@ fn main() {
&[("trait_solve_shim", 0)],
expect_test::expect![[r#"
[
"source_root_crates_shim",
"crate_local_def_map",
"file_item_tree_query",
"ast_id_map_shim",
@ -579,22 +572,22 @@ fn main() {
"TraitItems::query_with_diagnostics_",
"body_shim",
"body_with_source_map_shim",
"attrs_shim",
"AttrFlags::query_",
"ImplItems::of_",
"infer_shim",
"trait_signature_shim",
"trait_signature_with_source_map_shim",
"attrs_shim",
"AttrFlags::query_",
"function_signature_shim",
"function_signature_with_source_map_shim",
"attrs_shim",
"AttrFlags::query_",
"body_shim",
"body_with_source_map_shim",
"trait_environment_shim",
"lang_items",
"crate_lang_items",
"attrs_shim",
"attrs_shim",
"AttrFlags::query_",
"AttrFlags::query_",
"GenericPredicates < 'db >::query_with_diagnostics_",
"GenericPredicates < 'db >::query_with_diagnostics_",
"ImplTraits < 'db >::return_type_impl_traits_",
@ -675,21 +668,21 @@ fn main() {
"crate_local_def_map",
"TraitItems::query_with_diagnostics_",
"body_with_source_map_shim",
"attrs_shim",
"AttrFlags::query_",
"body_shim",
"ImplItems::of_",
"infer_shim",
"attrs_shim",
"AttrFlags::query_",
"trait_signature_with_source_map_shim",
"attrs_shim",
"AttrFlags::query_",
"function_signature_with_source_map_shim",
"function_signature_shim",
"body_with_source_map_shim",
"body_shim",
"crate_lang_items",
"attrs_shim",
"attrs_shim",
"attrs_shim",
"AttrFlags::query_",
"AttrFlags::query_",
"AttrFlags::query_",
"GenericPredicates < 'db >::query_with_diagnostics_",
"GenericPredicates < 'db >::query_with_diagnostics_",
"ImplTraits < 'db >::return_type_impl_traits_",

View file

@ -6,6 +6,7 @@ use std::cell::LazyCell;
use base_db::target::{self, TargetData};
use hir_def::{
EnumId, EnumVariantId, FunctionId, Lookup, TraitId,
attrs::AttrFlags,
db::DefDatabase,
hir::generics::WherePredicate,
lang_item::LangItems,
@ -114,7 +115,7 @@ pub fn target_feature_is_safe_in_target(target: &TargetData) -> TargetFeatureIsS
pub fn is_fn_unsafe_to_call(
db: &dyn HirDatabase,
func: FunctionId,
caller_target_features: &TargetFeatures,
caller_target_features: &TargetFeatures<'_>,
call_edition: Edition,
target_feature_is_safe: TargetFeatureIsSafeInTarget,
) -> Unsafety {
@ -125,8 +126,7 @@ pub fn is_fn_unsafe_to_call(
if data.has_target_feature() && target_feature_is_safe == TargetFeatureIsSafeInTarget::No {
// RFC 2396 <https://rust-lang.github.io/rfcs/2396-target-feature-1.1.html>.
let callee_target_features =
TargetFeatures::from_attrs_no_implications(&db.attrs(func.into()));
let callee_target_features = TargetFeatures::from_fn_no_implications(db, func);
if !caller_target_features.enabled.is_superset(&callee_target_features.enabled) {
return Unsafety::Unsafe;
}
@ -147,7 +147,7 @@ pub fn is_fn_unsafe_to_call(
if is_intrinsic_block {
// legacy intrinsics
// extern "rust-intrinsic" intrinsics are unsafe unless they have the rustc_safe_intrinsic attribute
if db.attrs(func.into()).by_key(sym::rustc_safe_intrinsic).exists() {
if AttrFlags::query(db, func.into()).contains(AttrFlags::RUSTC_SAFE_INTRINSIC) {
Unsafety::Safe
} else {
Unsafety::Unsafe

View file

@ -1,8 +1,11 @@
//! Attributes & documentation for hir types.
use cfg::CfgExpr;
use either::Either;
use hir_def::{
AssocItemId, AttrDefId, ModuleDefId,
attr::AttrsWithOwner,
AssocItemId, AttrDefId, FieldId, InternedModuleId, LifetimeParamId, ModuleDefId,
TypeOrConstParamId,
attrs::{AttrFlags, Docs, IsInnerDoc},
expr_store::path::Path,
item_scope::ItemInNs,
per_ns::Namespace,
@ -19,35 +22,169 @@ use hir_ty::{
},
next_solver::{DbInterner, TypingMode, infer::DbInternerInferExt},
};
use intern::Symbol;
use crate::{
Adt, AsAssocItem, AssocItem, BuiltinType, Const, ConstParam, DocLinkDef, Enum, ExternCrateDecl,
Field, Function, GenericParam, HasCrate, Impl, LifetimeParam, Macro, Module, ModuleDef, Static,
Struct, Trait, Type, TypeAlias, TypeParam, Union, Variant, VariantDef,
Field, Function, GenericParam, HasCrate, Impl, LangItem, LifetimeParam, Macro, Module,
ModuleDef, Static, Struct, Trait, Type, TypeAlias, TypeParam, Union, Variant, VariantDef,
};
pub trait HasAttrs {
fn attrs(self, db: &dyn HirDatabase) -> AttrsWithOwner;
#[derive(Debug, Clone, Copy)]
pub enum AttrsOwner {
AttrDef(AttrDefId),
Field(FieldId),
LifetimeParam(LifetimeParamId),
TypeOrConstParam(TypeOrConstParamId),
}
impl AttrsOwner {
#[inline]
fn attr_def(&self) -> Option<AttrDefId> {
match self {
AttrsOwner::AttrDef(it) => Some(*it),
_ => None,
}
}
}
#[derive(Debug, Clone)]
pub struct AttrsWithOwner {
pub(crate) attrs: AttrFlags,
owner: AttrsOwner,
}
impl AttrsWithOwner {
fn new(db: &dyn HirDatabase, owner: AttrDefId) -> Self {
Self { attrs: AttrFlags::query(db, owner), owner: AttrsOwner::AttrDef(owner) }
}
fn new_field(db: &dyn HirDatabase, owner: FieldId) -> Self {
Self { attrs: AttrFlags::query_field(db, owner), owner: AttrsOwner::Field(owner) }
}
fn new_lifetime_param(db: &dyn HirDatabase, owner: LifetimeParamId) -> Self {
Self {
attrs: AttrFlags::query_lifetime_param(db, owner),
owner: AttrsOwner::LifetimeParam(owner),
}
}
fn new_type_or_const_param(db: &dyn HirDatabase, owner: TypeOrConstParamId) -> Self {
Self {
attrs: AttrFlags::query_type_or_const_param(db, owner),
owner: AttrsOwner::TypeOrConstParam(owner),
}
}
#[inline]
pub fn is_unstable(&self) -> bool {
self.attrs.contains(AttrFlags::IS_UNSTABLE)
}
#[inline]
pub fn is_macro_export(&self) -> bool {
self.attrs.contains(AttrFlags::IS_MACRO_EXPORT)
}
#[inline]
pub fn is_doc_notable_trait(&self) -> bool {
self.attrs.contains(AttrFlags::IS_DOC_NOTABLE_TRAIT)
}
#[inline]
pub fn is_doc_hidden(&self) -> bool {
self.attrs.contains(AttrFlags::IS_DOC_HIDDEN)
}
#[inline]
pub fn is_deprecated(&self) -> bool {
self.attrs.contains(AttrFlags::IS_DEPRECATED)
}
#[inline]
pub fn is_non_exhaustive(&self) -> bool {
self.attrs.contains(AttrFlags::NON_EXHAUSTIVE)
}
#[inline]
pub fn is_test(&self) -> bool {
self.attrs.contains(AttrFlags::IS_TEST)
}
#[inline]
pub fn lang(&self, db: &dyn HirDatabase) -> Option<LangItem> {
self.owner
.attr_def()
.and_then(|owner| self.attrs.lang_item_with_attrs(db, owner))
.and_then(|lang| LangItem::from_symbol(&lang))
}
#[inline]
pub fn doc_aliases<'db>(&self, db: &'db dyn HirDatabase) -> &'db [Symbol] {
let owner = match self.owner {
AttrsOwner::AttrDef(it) => Either::Left(it),
AttrsOwner::Field(it) => Either::Right(it),
AttrsOwner::LifetimeParam(_) | AttrsOwner::TypeOrConstParam(_) => return &[],
};
self.attrs.doc_aliases(db, owner)
}
#[inline]
pub fn cfgs<'db>(&self, db: &'db dyn HirDatabase) -> Option<&'db CfgExpr> {
let owner = match self.owner {
AttrsOwner::AttrDef(it) => Either::Left(it),
AttrsOwner::Field(it) => Either::Right(it),
AttrsOwner::LifetimeParam(_) | AttrsOwner::TypeOrConstParam(_) => return None,
};
self.attrs.cfgs(db, owner)
}
#[inline]
pub fn hir_docs<'db>(&self, db: &'db dyn HirDatabase) -> Option<&'db Docs> {
match self.owner {
AttrsOwner::AttrDef(it) => AttrFlags::docs(db, it).as_deref(),
AttrsOwner::Field(it) => AttrFlags::field_docs(db, it),
AttrsOwner::LifetimeParam(_) | AttrsOwner::TypeOrConstParam(_) => None,
}
}
}
pub trait HasAttrs: Sized {
#[inline]
fn attrs(self, db: &dyn HirDatabase) -> AttrsWithOwner {
match self.attr_id(db) {
AttrsOwner::AttrDef(it) => AttrsWithOwner::new(db, it),
AttrsOwner::Field(it) => AttrsWithOwner::new_field(db, it),
AttrsOwner::LifetimeParam(it) => AttrsWithOwner::new_lifetime_param(db, it),
AttrsOwner::TypeOrConstParam(it) => AttrsWithOwner::new_type_or_const_param(db, it),
}
}
#[doc(hidden)]
fn attr_id(self) -> AttrDefId;
fn attr_id(self, db: &dyn HirDatabase) -> AttrsOwner;
#[inline]
fn hir_docs(self, db: &dyn HirDatabase) -> Option<&Docs> {
match self.attr_id(db) {
AttrsOwner::AttrDef(it) => AttrFlags::docs(db, it).as_deref(),
AttrsOwner::Field(it) => AttrFlags::field_docs(db, it),
AttrsOwner::LifetimeParam(_) | AttrsOwner::TypeOrConstParam(_) => None,
}
}
}
macro_rules! impl_has_attrs {
($(($def:ident, $def_id:ident),)*) => {$(
impl HasAttrs for $def {
fn attrs(self, db: &dyn HirDatabase) -> AttrsWithOwner {
let def = AttrDefId::$def_id(self.into());
AttrsWithOwner::new(db, def)
}
fn attr_id(self) -> AttrDefId {
AttrDefId::$def_id(self.into())
#[inline]
fn attr_id(self, _db: &dyn HirDatabase) -> AttrsOwner {
AttrsOwner::AttrDef(AttrDefId::$def_id(self.into()))
}
}
)*};
}
impl_has_attrs![
(Field, FieldId),
(Variant, EnumVariantId),
(Static, StaticId),
(Const, ConstId),
@ -56,8 +193,6 @@ impl_has_attrs![
(Macro, MacroId),
(Function, FunctionId),
(Adt, AdtId),
(Module, ModuleId),
(GenericParam, GenericParamId),
(Impl, ImplId),
(ExternCrateDecl, ExternCrateId),
];
@ -65,11 +200,9 @@ impl_has_attrs![
macro_rules! impl_has_attrs_enum {
($($variant:ident),* for $enum:ident) => {$(
impl HasAttrs for $variant {
fn attrs(self, db: &dyn HirDatabase) -> AttrsWithOwner {
$enum::$variant(self).attrs(db)
}
fn attr_id(self) -> AttrDefId {
$enum::$variant(self).attr_id()
#[inline]
fn attr_id(self, db: &dyn HirDatabase) -> AttrsOwner {
$enum::$variant(self).attr_id(db)
}
}
)*};
@ -78,30 +211,46 @@ macro_rules! impl_has_attrs_enum {
impl_has_attrs_enum![Struct, Union, Enum for Adt];
impl_has_attrs_enum![TypeParam, ConstParam, LifetimeParam for GenericParam];
impl HasAttrs for AssocItem {
fn attrs(self, db: &dyn HirDatabase) -> AttrsWithOwner {
impl HasAttrs for Module {
#[inline]
fn attr_id(self, db: &dyn HirDatabase) -> AttrsOwner {
AttrsOwner::AttrDef(AttrDefId::ModuleId(InternedModuleId::new(db, self.id)))
}
}
impl HasAttrs for GenericParam {
#[inline]
fn attr_id(self, _db: &dyn HirDatabase) -> AttrsOwner {
match self {
AssocItem::Function(it) => it.attrs(db),
AssocItem::Const(it) => it.attrs(db),
AssocItem::TypeAlias(it) => it.attrs(db),
GenericParam::TypeParam(it) => AttrsOwner::TypeOrConstParam(it.merge().into()),
GenericParam::ConstParam(it) => AttrsOwner::TypeOrConstParam(it.merge().into()),
GenericParam::LifetimeParam(it) => AttrsOwner::LifetimeParam(it.into()),
}
}
fn attr_id(self) -> AttrDefId {
}
impl HasAttrs for AssocItem {
#[inline]
fn attr_id(self, db: &dyn HirDatabase) -> AttrsOwner {
match self {
AssocItem::Function(it) => it.attr_id(),
AssocItem::Const(it) => it.attr_id(),
AssocItem::TypeAlias(it) => it.attr_id(),
AssocItem::Function(it) => it.attr_id(db),
AssocItem::Const(it) => it.attr_id(db),
AssocItem::TypeAlias(it) => it.attr_id(db),
}
}
}
impl HasAttrs for crate::Crate {
fn attrs(self, db: &dyn HirDatabase) -> AttrsWithOwner {
let def = AttrDefId::ModuleId(self.root_module().id);
AttrsWithOwner::new(db, def)
#[inline]
fn attr_id(self, db: &dyn HirDatabase) -> AttrsOwner {
self.root_module().attr_id(db)
}
fn attr_id(self) -> AttrDefId {
AttrDefId::ModuleId(self.root_module().id)
}
impl HasAttrs for Field {
#[inline]
fn attr_id(self, _db: &dyn HirDatabase) -> AttrsOwner {
AttrsOwner::Field(self.into())
}
}
@ -111,21 +260,22 @@ pub fn resolve_doc_path_on(
def: impl HasAttrs + Copy,
link: &str,
ns: Option<Namespace>,
is_inner_doc: bool,
is_inner_doc: IsInnerDoc,
) -> Option<DocLinkDef> {
resolve_doc_path_on_(db, link, def.attr_id(), ns, is_inner_doc)
resolve_doc_path_on_(db, link, def.attr_id(db), ns, is_inner_doc)
}
fn resolve_doc_path_on_(
db: &dyn HirDatabase,
link: &str,
attr_id: AttrDefId,
attr_id: AttrsOwner,
ns: Option<Namespace>,
is_inner_doc: bool,
is_inner_doc: IsInnerDoc,
) -> Option<DocLinkDef> {
let resolver = match attr_id {
AttrDefId::ModuleId(it) => {
if is_inner_doc {
AttrsOwner::AttrDef(AttrDefId::ModuleId(it)) => {
let it = it.loc(db);
if is_inner_doc.yes() {
it.resolver(db)
} else if let Some(parent) = Module::from(it).parent(db) {
parent.id.resolver(db)
@ -133,20 +283,20 @@ fn resolve_doc_path_on_(
it.resolver(db)
}
}
AttrDefId::FieldId(it) => it.parent.resolver(db),
AttrDefId::AdtId(it) => it.resolver(db),
AttrDefId::FunctionId(it) => it.resolver(db),
AttrDefId::EnumVariantId(it) => it.resolver(db),
AttrDefId::StaticId(it) => it.resolver(db),
AttrDefId::ConstId(it) => it.resolver(db),
AttrDefId::TraitId(it) => it.resolver(db),
AttrDefId::TypeAliasId(it) => it.resolver(db),
AttrDefId::ImplId(it) => it.resolver(db),
AttrDefId::ExternBlockId(it) => it.resolver(db),
AttrDefId::UseId(it) => it.resolver(db),
AttrDefId::MacroId(it) => it.resolver(db),
AttrDefId::ExternCrateId(it) => it.resolver(db),
AttrDefId::GenericParamId(_) => return None,
AttrsOwner::AttrDef(AttrDefId::AdtId(it)) => it.resolver(db),
AttrsOwner::AttrDef(AttrDefId::FunctionId(it)) => it.resolver(db),
AttrsOwner::AttrDef(AttrDefId::EnumVariantId(it)) => it.resolver(db),
AttrsOwner::AttrDef(AttrDefId::StaticId(it)) => it.resolver(db),
AttrsOwner::AttrDef(AttrDefId::ConstId(it)) => it.resolver(db),
AttrsOwner::AttrDef(AttrDefId::TraitId(it)) => it.resolver(db),
AttrsOwner::AttrDef(AttrDefId::TypeAliasId(it)) => it.resolver(db),
AttrsOwner::AttrDef(AttrDefId::ImplId(it)) => it.resolver(db),
AttrsOwner::AttrDef(AttrDefId::ExternBlockId(it)) => it.resolver(db),
AttrsOwner::AttrDef(AttrDefId::UseId(it)) => it.resolver(db),
AttrsOwner::AttrDef(AttrDefId::MacroId(it)) => it.resolver(db),
AttrsOwner::AttrDef(AttrDefId::ExternCrateId(it)) => it.resolver(db),
AttrsOwner::Field(it) => it.parent.resolver(db),
AttrsOwner::LifetimeParam(_) | AttrsOwner::TypeOrConstParam(_) => return None,
};
let mut modpath = doc_modpath_from_str(link)?;

View file

@ -153,8 +153,7 @@ pub struct UnresolvedImport {
#[derive(Debug, Clone, Eq, PartialEq)]
pub struct UnresolvedMacroCall {
pub macro_call: InFile<SyntaxNodePtr>,
pub precise_location: Option<TextRange>,
pub range: InFile<TextRange>,
pub path: ModPath,
pub is_bang: bool,
}
@ -185,8 +184,7 @@ pub struct InactiveCode {
#[derive(Debug, Clone, Eq, PartialEq)]
pub struct MacroError {
pub node: InFile<SyntaxNodePtr>,
pub precise_location: Option<TextRange>,
pub range: InFile<TextRange>,
pub message: String,
pub error: bool,
pub kind: &'static str,
@ -194,8 +192,7 @@ pub struct MacroError {
#[derive(Debug, Clone, Eq, PartialEq)]
pub struct MacroExpansionParseError {
pub node: InFile<SyntaxNodePtr>,
pub precise_location: Option<TextRange>,
pub range: InFile<TextRange>,
pub errors: Arc<[SyntaxError]>,
}
@ -213,12 +210,12 @@ pub struct UnimplementedBuiltinMacro {
#[derive(Debug)]
pub struct InvalidDeriveTarget {
pub node: InFile<SyntaxNodePtr>,
pub range: InFile<TextRange>,
}
#[derive(Debug)]
pub struct MalformedDerive {
pub node: InFile<SyntaxNodePtr>,
pub range: InFile<TextRange>,
}
#[derive(Debug)]

View file

@ -48,11 +48,12 @@ use arrayvec::ArrayVec;
use base_db::{CrateDisplayName, CrateOrigin, LangCrateOrigin};
use either::Either;
use hir_def::{
AdtId, AssocItemId, AssocItemLoc, AttrDefId, CallableDefId, ConstId, ConstParamId,
CrateRootModuleId, DefWithBodyId, EnumId, EnumVariantId, ExternBlockId, ExternCrateId,
FunctionId, GenericDefId, GenericParamId, HasModule, ImplId, ItemContainerId, LifetimeParamId,
AdtId, AssocItemId, AssocItemLoc, CallableDefId, ConstId, ConstParamId, CrateRootModuleId,
DefWithBodyId, EnumId, EnumVariantId, ExternBlockId, ExternCrateId, FunctionId, GenericDefId,
GenericParamId, HasModule, ImplId, InternedModuleId, ItemContainerId, LifetimeParamId,
LocalFieldId, Lookup, MacroExpander, MacroId, StaticId, StructId, SyntheticSyntax, TupleId,
TypeAliasId, TypeOrConstParamId, TypeParamId, UnionId,
attrs::AttrFlags,
expr_store::{ExpressionStoreDiagnostics, ExpressionStoreSourceMap},
hir::{
BindingAnnotation, BindingId, Expr, ExprId, ExprOrPatId, LabelId, Pat,
@ -67,13 +68,12 @@ use hir_def::{
},
per_ns::PerNs,
resolver::{HasResolver, Resolver},
signatures::{ImplFlags, StaticFlags, StructFlags, TraitFlags, VariantFields},
signatures::{EnumSignature, ImplFlags, StaticFlags, StructFlags, TraitFlags, VariantFields},
src::HasSource as _,
visibility::visibility_from_ast,
};
use hir_expand::{
AstId, MacroCallKind, RenderedExpandError, ValueResult, attrs::collect_attrs,
proc_macro::ProcMacroKind,
AstId, MacroCallKind, RenderedExpandError, ValueResult, proc_macro::ProcMacroKind,
};
use hir_ty::{
GenericPredicates, TraitEnvironment, TyDefId, TyLoweringDiagnostic, ValueTyDefId,
@ -104,8 +104,8 @@ use smallvec::SmallVec;
use span::{AstIdNode, Edition, FileId};
use stdx::{format_to, impl_from, never};
use syntax::{
AstNode, AstPtr, SmolStr, SyntaxNode, SyntaxNodePtr, T, TextRange, ToSmolStr,
ast::{self, HasAttrs as _, HasName, HasVisibility as _},
AstNode, AstPtr, SmolStr, SyntaxNode, SyntaxNodePtr, TextRange, ToSmolStr,
ast::{self, HasName, HasVisibility as _},
format_smolstr,
};
use triomphe::{Arc, ThinArc};
@ -113,7 +113,7 @@ use triomphe::{Arc, ThinArc};
use crate::db::{DefDatabase, HirDatabase};
pub use crate::{
attrs::{HasAttrs, resolve_doc_path_on},
attrs::{AttrsWithOwner, HasAttrs, resolve_doc_path_on},
diagnostics::*,
has_source::HasSource,
semantics::{
@ -136,7 +136,7 @@ pub use {
hir_def::{
Complete,
FindPathConfig,
attr::{AttrSourceMap, Attrs, AttrsWithOwner},
attrs::{Docs, IsInnerDoc},
find_path::PrefixKind,
import_map,
lang_item::{LangItemEnum as LangItem, crate_lang_items},
@ -150,7 +150,6 @@ pub use {
},
hir_expand::{
EditionedFileId, ExpandResult, HirFileId, MacroCallId, MacroKind,
attrs::{Attr, AttrId},
change::ChangeWithProcMacros,
files::{
FilePosition, FilePositionWrapper, FileRange, FileRangeWrapper, HirFilePosition,
@ -306,11 +305,10 @@ impl Crate {
}
/// Try to get the root URL of the documentation of a crate.
pub fn get_html_root_url(self: &Crate, db: &dyn HirDatabase) -> Option<String> {
pub fn get_html_root_url(self, db: &dyn HirDatabase) -> Option<String> {
// Look for #![doc(html_root_url = "...")]
let attrs = db.attrs(AttrDefId::ModuleId(self.root_module().into()));
let doc_url = attrs.by_key(sym::doc).find_string_value_in_tt(sym::html_root_url);
doc_url.map(|s| s.trim_matches('"').trim_end_matches('/').to_owned() + "/")
let doc_url = AttrFlags::doc_html_root_url(db, self.id);
doc_url.as_ref().map(|s| s.trim_matches('"').trim_end_matches('/').to_owned() + "/")
}
pub fn cfg<'db>(&self, db: &'db dyn HirDatabase) -> &'db CfgOptions {
@ -655,7 +653,7 @@ impl Module {
// FIXME: This is accidentally quadratic.
continue;
}
emit_def_diagnostic(db, acc, diag, edition);
emit_def_diagnostic(db, acc, diag, edition, def_map.krate());
}
if !self.id.is_block_module() {
@ -674,8 +672,9 @@ impl Module {
acc.extend(def.diagnostics(db, style_lints))
}
ModuleDef::Trait(t) => {
let krate = t.krate(db);
for diag in TraitItems::query_with_diagnostics(db, t.id).1.iter() {
emit_def_diagnostic(db, acc, diag, edition);
emit_def_diagnostic(db, acc, diag, edition, krate.id);
}
for item in t.items(db) {
@ -791,7 +790,7 @@ impl Module {
let ast_id_map = db.ast_id_map(file_id);
for diag in impl_def.id.impl_items_with_diagnostics(db).1.iter() {
emit_def_diagnostic(db, acc, diag, edition);
emit_def_diagnostic(db, acc, diag, edition, loc.container.krate());
}
if impl_signature.target_trait.is_none()
@ -824,21 +823,10 @@ impl Module {
return None;
}
let parent = impl_def.id.into();
let generic_params = db.generic_params(parent);
let lifetime_params = generic_params.iter_lt().map(|(local_id, _)| {
GenericParamId::LifetimeParamId(LifetimeParamId { parent, local_id })
});
let type_params = generic_params
.iter_type_or_consts()
.filter(|(_, it)| it.type_param().is_some())
.map(|(local_id, _)| {
GenericParamId::TypeParamId(TypeParamId::from_unchecked(
TypeOrConstParamId { parent, local_id },
))
});
let res = type_params.chain(lifetime_params).any(|p| {
db.attrs(AttrDefId::GenericParamId(p)).by_key(sym::may_dangle).exists()
});
let (lifetimes_attrs, type_and_consts_attrs) =
AttrFlags::query_generic_params(db, parent);
let res = lifetimes_attrs.values().any(|it| it.contains(AttrFlags::MAY_DANGLE))
|| type_and_consts_attrs.values().any(|it| it.contains(AttrFlags::MAY_DANGLE));
Some(res)
})()
.unwrap_or(false);
@ -999,6 +987,17 @@ impl Module {
) -> Option<ModPath> {
hir_def::find_path::find_path(db, item.into().into(), self.into(), prefix_kind, true, cfg)
}
#[inline]
pub fn doc_keyword(self, db: &dyn HirDatabase) -> Option<Symbol> {
AttrFlags::doc_keyword(db, InternedModuleId::new(db, self.id))
}
/// Whether it has `#[path = "..."]` attribute.
#[inline]
pub fn has_path(&self, db: &dyn HirDatabase) -> bool {
self.attrs(db).attrs.contains(AttrFlags::HAS_PATH)
}
}
fn macro_call_diagnostics<'db>(
@ -1013,31 +1012,19 @@ fn macro_call_diagnostics<'db>(
if let Some(err) = err {
let loc = db.lookup_intern_macro_call(macro_call_id);
let file_id = loc.kind.file_id();
let node =
InFile::new(file_id, db.ast_id_map(file_id).get_erased(loc.kind.erased_ast_id()));
let mut range = precise_macro_call_location(&loc.kind, db, loc.krate);
let RenderedExpandError { message, error, kind } = err.render_to_string(db);
let editioned_file_id = EditionedFileId::from_span(db, err.span().anchor.file_id);
let precise_location = if editioned_file_id == file_id {
Some(
err.span().range
+ db.ast_id_map(editioned_file_id.into())
.get_erased(err.span().anchor.ast_id)
.text_range()
.start(),
)
} else {
None
};
acc.push(MacroError { node, precise_location, message, error, kind }.into());
if Some(err.span().anchor.file_id) == file_id.file_id().map(|it| it.editioned_file_id(db)) {
range.value = err.span().range
+ db.ast_id_map(file_id).get_erased(err.span().anchor.ast_id).text_range().start();
}
acc.push(MacroError { range, message, error, kind }.into());
}
if !parse_errors.is_empty() {
let loc = db.lookup_intern_macro_call(macro_call_id);
let (node, precise_location) = precise_macro_call_location(&loc.kind, db);
acc.push(
MacroExpansionParseError { node, precise_location, errors: parse_errors.clone() }
.into(),
)
let range = precise_macro_call_location(&loc.kind, db, loc.krate);
acc.push(MacroExpansionParseError { range, errors: parse_errors.clone() }.into())
}
}
@ -1061,6 +1048,7 @@ fn emit_macro_def_diagnostics<'db>(
acc,
&DefDiagnosticKind::MacroDefError { ast, message: e.to_string() },
edition,
m.krate(db).id,
);
}
}
@ -1070,8 +1058,9 @@ fn emit_def_diagnostic<'db>(
acc: &mut Vec<AnyDiagnostic<'db>>,
diag: &DefDiagnostic,
edition: Edition,
krate: base_db::Crate,
) {
emit_def_diagnostic_(db, acc, &diag.kind, edition)
emit_def_diagnostic_(db, acc, &diag.kind, edition, krate)
}
fn emit_def_diagnostic_<'db>(
@ -1079,6 +1068,7 @@ fn emit_def_diagnostic_<'db>(
acc: &mut Vec<AnyDiagnostic<'db>>,
diag: &DefDiagnosticKind,
edition: Edition,
krate: base_db::Crate,
) {
match diag {
DefDiagnosticKind::UnresolvedModule { ast: declaration, candidates } => {
@ -1101,8 +1091,7 @@ fn emit_def_diagnostic_<'db>(
let RenderedExpandError { message, error, kind } = err.render_to_string(db);
acc.push(
MacroError {
node: InFile::new(ast.file_id, item.syntax_node_ptr()),
precise_location: None,
range: InFile::new(ast.file_id, item.text_range()),
message: format!("{}: {message}", path.display(db, edition)),
error,
kind,
@ -1132,11 +1121,10 @@ fn emit_def_diagnostic_<'db>(
);
}
DefDiagnosticKind::UnresolvedMacroCall { ast, path } => {
let (node, precise_location) = precise_macro_call_location(ast, db);
let location = precise_macro_call_location(ast, db, krate);
acc.push(
UnresolvedMacroCall {
macro_call: node,
precise_location,
range: location,
path: path.clone(),
is_bang: matches!(ast, MacroCallKind::FnLike { .. }),
}
@ -1155,34 +1143,12 @@ fn emit_def_diagnostic_<'db>(
);
}
DefDiagnosticKind::InvalidDeriveTarget { ast, id } => {
let node = ast.to_node(db);
let derive = node.attrs().nth(*id);
match derive {
Some(derive) => {
acc.push(
InvalidDeriveTarget {
node: ast.with_value(SyntaxNodePtr::from(AstPtr::new(&derive))),
}
.into(),
);
}
None => stdx::never!("derive diagnostic on item without derive attribute"),
}
let derive = id.find_attr_range(db, krate, *ast).3.path_range();
acc.push(InvalidDeriveTarget { range: ast.with_value(derive) }.into());
}
DefDiagnosticKind::MalformedDerive { ast, id } => {
let node = ast.to_node(db);
let derive = node.attrs().nth(*id);
match derive {
Some(derive) => {
acc.push(
MalformedDerive {
node: ast.with_value(SyntaxNodePtr::from(AstPtr::new(&derive))),
}
.into(),
);
}
None => stdx::never!("derive diagnostic on item without derive attribute"),
}
let derive = id.find_attr_range(db, krate, *ast).2;
acc.push(MalformedDerive { range: ast.with_value(derive) }.into());
}
DefDiagnosticKind::MacroDefError { ast, message } => {
let node = ast.to_node(db);
@ -1201,61 +1167,28 @@ fn emit_def_diagnostic_<'db>(
fn precise_macro_call_location(
ast: &MacroCallKind,
db: &dyn HirDatabase,
) -> (InFile<SyntaxNodePtr>, Option<TextRange>) {
krate: base_db::Crate,
) -> InFile<TextRange> {
// FIXME: maybe we actually want slightly different ranges for the different macro diagnostics
// - e.g. the full attribute for macro errors, but only the name for name resolution
match ast {
MacroCallKind::FnLike { ast_id, .. } => {
let node = ast_id.to_node(db);
(
ast_id.with_value(SyntaxNodePtr::from(AstPtr::new(&node))),
node.path()
.and_then(|it| it.segment())
.and_then(|it| it.name_ref())
.map(|it| it.syntax().text_range()),
)
let range = node
.path()
.and_then(|it| it.segment())
.and_then(|it| it.name_ref())
.map(|it| it.syntax().text_range());
let range = range.unwrap_or_else(|| node.syntax().text_range());
ast_id.with_value(range)
}
MacroCallKind::Derive { ast_id, derive_attr_index, derive_index, .. } => {
let node = ast_id.to_node(db);
// Compute the precise location of the macro name's token in the derive
// list.
let token = (|| {
let derive_attr = collect_attrs(&node)
.nth(derive_attr_index.ast_index())
.and_then(|x| Either::left(x.1))?;
let token_tree = derive_attr.meta()?.token_tree()?;
let chunk_by = token_tree
.syntax()
.children_with_tokens()
.filter_map(|elem| match elem {
syntax::NodeOrToken::Token(tok) => Some(tok),
_ => None,
})
.chunk_by(|t| t.kind() == T![,]);
let (_, mut group) = chunk_by
.into_iter()
.filter(|&(comma, _)| !comma)
.nth(*derive_index as usize)?;
group.find(|t| t.kind() == T![ident])
})();
(
ast_id.with_value(SyntaxNodePtr::from(AstPtr::new(&node))),
token.as_ref().map(|tok| tok.text_range()),
)
let range = derive_attr_index.find_derive_range(db, krate, *ast_id, *derive_index);
ast_id.with_value(range)
}
MacroCallKind::Attr { ast_id, invoc_attr_index, .. } => {
let node = ast_id.to_node(db);
let attr = collect_attrs(&node)
.nth(invoc_attr_index.ast_index())
.and_then(|x| Either::left(x.1))
.unwrap_or_else(|| {
panic!("cannot find attribute #{}", invoc_attr_index.ast_index())
});
(
ast_id.with_value(SyntaxNodePtr::from(AstPtr::new(&attr))),
Some(attr.syntax().text_range()),
)
MacroCallKind::Attr { ast_id, censored_attr_ids: attr_ids, .. } => {
let attr_range = attr_ids.invoc_attr().find_attr_range(db, krate, *ast_id).2;
ast_id.with_value(attr_range)
}
}
}
@ -1452,7 +1385,7 @@ impl Struct {
}
pub fn repr(self, db: &dyn HirDatabase) -> Option<ReprOptions> {
db.struct_signature(self.id).repr
AttrFlags::repr(db, self.id.into())
}
pub fn kind(self, db: &dyn HirDatabase) -> StructKind {
@ -1468,7 +1401,7 @@ impl Struct {
}
pub fn is_unstable(self, db: &dyn HirDatabase) -> bool {
db.attrs(self.id.into()).is_unstable()
AttrFlags::query(db, self.id.into()).contains(AttrFlags::IS_UNSTABLE)
}
pub fn instantiate_infer<'db>(self, infer_ctxt: &InferCtxt<'db>) -> InstantiatedStruct<'db> {
@ -1556,7 +1489,7 @@ impl Union {
.collect()
}
pub fn is_unstable(self, db: &dyn HirDatabase) -> bool {
db.attrs(self.id.into()).is_unstable()
AttrFlags::query(db, self.id.into()).contains(AttrFlags::IS_UNSTABLE)
}
}
@ -1591,7 +1524,7 @@ impl Enum {
}
pub fn repr(self, db: &dyn HirDatabase) -> Option<ReprOptions> {
db.enum_signature(self.id).repr
AttrFlags::repr(db, self.id.into())
}
pub fn ty<'db>(self, db: &'db dyn HirDatabase) -> Type<'db> {
@ -1607,7 +1540,7 @@ impl Enum {
let interner = DbInterner::new_no_crate(db);
Type::new_for_crate(
self.id.lookup(db).container.krate(),
match db.enum_signature(self.id).variant_body_type() {
match EnumSignature::variant_body_type(db, self.id) {
layout::IntegerType::Pointer(sign) => match sign {
true => Ty::new_int(interner, rustc_type_ir::IntTy::Isize),
false => Ty::new_uint(interner, rustc_type_ir::UintTy::Usize),
@ -1648,7 +1581,7 @@ impl Enum {
}
pub fn is_unstable(self, db: &dyn HirDatabase) -> bool {
db.attrs(self.id.into()).is_unstable()
AttrFlags::query(db, self.id.into()).contains(AttrFlags::IS_UNSTABLE)
}
}
@ -1748,7 +1681,7 @@ impl Variant {
}
pub fn is_unstable(self, db: &dyn HirDatabase) -> bool {
db.attrs(self.id.into()).is_unstable()
AttrFlags::query(db, self.id.into()).contains(AttrFlags::IS_UNSTABLE)
}
pub fn instantiate_infer<'db>(self, infer_ctxt: &InferCtxt<'db>) -> InstantiatedVariant<'db> {
@ -2233,8 +2166,7 @@ fn expr_store_diagnostics<'db>(
InactiveCode { node: *node, cfg: cfg.clone(), opts: opts.clone() }.into()
}
ExpressionStoreDiagnostics::UnresolvedMacroCall { node, path } => UnresolvedMacroCall {
macro_call: (*node).map(|ast_ptr| ast_ptr.into()),
precise_location: None,
range: node.map(|ptr| ptr.text_range()),
path: path.clone(),
is_bang: true,
}
@ -2459,33 +2391,33 @@ impl Function {
/// Does this function have `#[test]` attribute?
pub fn is_test(self, db: &dyn HirDatabase) -> bool {
db.attrs(self.id.into()).is_test()
self.attrs(db).is_test()
}
/// is this a `fn main` or a function with an `export_name` of `main`?
pub fn is_main(self, db: &dyn HirDatabase) -> bool {
db.attrs(self.id.into()).export_name() == Some(&sym::main)
self.exported_main(db)
|| self.module(db).is_crate_root() && db.function_signature(self.id).name == sym::main
}
/// Is this a function with an `export_name` of `main`?
pub fn exported_main(self, db: &dyn HirDatabase) -> bool {
db.attrs(self.id.into()).export_name() == Some(&sym::main)
AttrFlags::query(db, self.id.into()).contains(AttrFlags::IS_EXPORT_NAME_MAIN)
}
/// Does this function have the ignore attribute?
pub fn is_ignore(self, db: &dyn HirDatabase) -> bool {
db.attrs(self.id.into()).is_ignore()
AttrFlags::query(db, self.id.into()).contains(AttrFlags::IS_IGNORE)
}
/// Does this function have `#[bench]` attribute?
pub fn is_bench(self, db: &dyn HirDatabase) -> bool {
db.attrs(self.id.into()).is_bench()
AttrFlags::query(db, self.id.into()).contains(AttrFlags::IS_BENCH)
}
/// Is this function marked as unstable with `#[feature]` attribute?
pub fn is_unstable(self, db: &dyn HirDatabase) -> bool {
db.attrs(self.id.into()).is_unstable()
AttrFlags::query(db, self.id.into()).contains(AttrFlags::IS_UNSTABLE)
}
pub fn is_unsafe_to_call(
@ -2496,8 +2428,7 @@ impl Function {
) -> bool {
let (target_features, target_feature_is_safe_in_target) = caller
.map(|caller| {
let target_features =
hir_ty::TargetFeatures::from_attrs(&db.attrs(caller.id.into()));
let target_features = hir_ty::TargetFeatures::from_fn(db, caller.id);
let target_feature_is_safe_in_target =
match &caller.krate(db).id.workspace_data(db).target {
Ok(target) => hir_ty::target_feature_is_safe_in_target(target),
@ -2528,14 +2459,6 @@ impl Function {
}
pub fn as_proc_macro(self, db: &dyn HirDatabase) -> Option<Macro> {
let attrs = db.attrs(self.id.into());
// FIXME: Store this in FunctionData flags?
if !(attrs.is_proc_macro()
|| attrs.is_proc_macro_attribute()
|| attrs.is_proc_macro_derive())
{
return None;
}
let def_map = crate_def_map(db, HasModule::krate(&self.id, db));
def_map.fn_as_proc_macro(self.id).map(|id| Macro { id: id.into() })
}
@ -2994,7 +2917,7 @@ impl Trait {
/// `#[rust_analyzer::completions(...)]` mode.
pub fn complete(self, db: &dyn HirDatabase) -> Complete {
Complete::extract(true, &self.attrs(db))
Complete::extract(true, self.attrs(db).attrs)
}
}
@ -3165,10 +3088,10 @@ impl Macro {
let loc = id.lookup(db);
let source = loc.source(db);
match loc.kind {
ProcMacroKind::CustomDerive => db
.attrs(id.into())
.parse_proc_macro_derive()
.map_or_else(|| as_name_opt(source.value.name()), |(it, _)| it),
ProcMacroKind::CustomDerive => AttrFlags::derive_info(db, self.id).map_or_else(
|| as_name_opt(source.value.name()),
|info| Name::new_symbol_root(info.trait_name.clone()),
),
ProcMacroKind::Bang | ProcMacroKind::Attr => as_name_opt(source.value.name()),
}
}
@ -3176,7 +3099,7 @@ impl Macro {
}
pub fn is_macro_export(self, db: &dyn HirDatabase) -> bool {
matches!(self.id, MacroId::MacroRulesId(_) if db.attrs(self.id.into()).by_key(sym::macro_export).exists())
matches!(self.id, MacroId::MacroRulesId(_) if AttrFlags::query(db, self.id.into()).contains(AttrFlags::IS_MACRO_EXPORT))
}
pub fn is_proc_macro(self) -> bool {
@ -4009,18 +3932,10 @@ impl DeriveHelper {
}
pub fn name(&self, db: &dyn HirDatabase) -> Name {
match self.derive {
makro @ MacroId::Macro2Id(_) => db
.attrs(makro.into())
.parse_rustc_builtin_macro()
.and_then(|(_, helpers)| helpers.get(self.idx as usize).cloned()),
MacroId::MacroRulesId(_) => None,
makro @ MacroId::ProcMacroId(_) => db
.attrs(makro.into())
.parse_proc_macro_derive()
.and_then(|(_, helpers)| helpers.get(self.idx as usize).cloned()),
}
.unwrap_or_else(Name::missing)
AttrFlags::derive_info(db, self.derive)
.and_then(|it| it.helpers.get(self.idx as usize))
.map(|helper| Name::new_symbol_root(helper.clone()))
.unwrap_or_else(Name::missing)
}
}
@ -4244,7 +4159,7 @@ impl TypeParam {
}
pub fn is_unstable(self, db: &dyn HirDatabase) -> bool {
db.attrs(GenericParamId::from(self.id).into()).is_unstable()
self.attrs(db).is_unstable()
}
}

View file

@ -21,7 +21,6 @@ use hir_def::{
};
use hir_expand::{
EditionedFileId, ExpandResult, FileRange, HirFileId, InMacroFile, MacroCallId,
attrs::collect_attrs,
builtin::{BuiltinFnLikeExpander, EagerExpander},
db::ExpandDatabase,
files::{FileRangeWrapper, HirFileRange, InRealFile},
@ -36,7 +35,7 @@ use intern::{Interned, Symbol, sym};
use itertools::Itertools;
use rustc_hash::{FxHashMap, FxHashSet};
use smallvec::{SmallVec, smallvec};
use span::{Edition, FileId, SyntaxContext};
use span::{FileId, SyntaxContext};
use stdx::{TupleExt, always};
use syntax::{
AstNode, AstToken, Direction, SyntaxKind, SyntaxNode, SyntaxNodePtr, SyntaxToken, TextRange,
@ -386,17 +385,14 @@ impl<'db> SemanticsImpl<'db> {
}
pub fn attach_first_edition(&self, file: FileId) -> Option<EditionedFileId> {
Some(EditionedFileId::new(
self.db,
file,
self.file_to_module_defs(file).next()?.krate().edition(self.db),
))
let krate = self.file_to_module_defs(file).next()?.krate();
Some(EditionedFileId::new(self.db, file, krate.edition(self.db), krate.id))
}
pub fn parse_guess_edition(&self, file_id: FileId) -> ast::SourceFile {
let file_id = self
.attach_first_edition(file_id)
.unwrap_or_else(|| EditionedFileId::new(self.db, file_id, Edition::CURRENT));
.unwrap_or_else(|| EditionedFileId::current_edition_guess_origin(self.db, file_id));
let tree = self.db.parse(file_id).tree();
self.cache(tree.syntax().clone(), file_id.into());
@ -1197,33 +1193,34 @@ impl<'db> SemanticsImpl<'db> {
.zip(Some(item))
})
.map(|(call_id, item)| {
let attr_id = match db.lookup_intern_macro_call(call_id).kind {
let item_range = item.syntax().text_range();
let loc = db.lookup_intern_macro_call(call_id);
let text_range = match loc.kind {
hir_expand::MacroCallKind::Attr {
invoc_attr_index, ..
} => invoc_attr_index.ast_index(),
_ => 0,
censored_attr_ids: attr_ids,
..
} => {
// FIXME: here, the attribute's text range is used to strip away all
// entries from the start of the attribute "list" up the invoking
// attribute. But in
// ```
// mod foo {
// #![inner]
// }
// ```
// we don't wanna strip away stuff in the `mod foo {` range, that is
// here if the id corresponds to an inner attribute we got strip all
// text ranges of the outer ones, and then all of the inner ones up
// to the invoking attribute so that the inbetween is ignored.
// FIXME: Should cfg_attr be handled differently?
let (attr, _, _, _) = attr_ids
.invoc_attr()
.find_attr_range_with_source(db, loc.krate, &item);
let start = attr.syntax().text_range().start();
TextRange::new(start, item_range.end())
}
_ => item_range,
};
// FIXME: here, the attribute's text range is used to strip away all
// entries from the start of the attribute "list" up the invoking
// attribute. But in
// ```
// mod foo {
// #![inner]
// }
// ```
// we don't wanna strip away stuff in the `mod foo {` range, that is
// here if the id corresponds to an inner attribute we got strip all
// text ranges of the outer ones, and then all of the inner ones up
// to the invoking attribute so that the inbetween is ignored.
let text_range = item.syntax().text_range();
let start = collect_attrs(&item)
.nth(attr_id)
.map(|attr| match attr.1 {
Either::Left(it) => it.syntax().text_range().start(),
Either::Right(it) => it.syntax().text_range().start(),
})
.unwrap_or_else(|| text_range.start());
let text_range = TextRange::new(start, text_range.end());
filter_duplicates(tokens, text_range);
process_expansion_for_token(ctx, &mut stack, call_id)
})
@ -1473,6 +1470,14 @@ impl<'db> SemanticsImpl<'db> {
FileRangeWrapper { file_id: file_id.file_id(self.db), range }
}
pub fn diagnostics_display_range_for_range(
&self,
src: InFile<TextRange>,
) -> FileRangeWrapper<FileId> {
let FileRange { file_id, range } = src.original_node_file_range_rooted(self.db);
FileRangeWrapper { file_id: file_id.file_id(self.db), range }
}
fn token_ancestors_with_macros(
&self,
token: SyntaxToken,

View file

@ -5,7 +5,7 @@
//! node for a *child*, and get its hir.
use either::Either;
use hir_expand::{HirFileId, attrs::collect_attrs};
use hir_expand::HirFileId;
use span::AstIdNode;
use syntax::{AstPtr, ast};
@ -94,6 +94,7 @@ impl ChildBySource for ModuleId {
impl ChildBySource for ItemScope {
fn child_by_source_to(&self, db: &dyn DefDatabase, res: &mut DynMap, file_id: HirFileId) {
let krate = file_id.krate(db);
self.declarations().for_each(|item| add_module_def(db, res, file_id, item));
self.impls().for_each(|imp| insert_item_loc(db, res, file_id, imp, keys::IMPL));
self.extern_blocks().for_each(|extern_block| {
@ -123,12 +124,10 @@ impl ChildBySource for ItemScope {
|(ast_id, calls)| {
let adt = ast_id.to_node(db);
calls.for_each(|(attr_id, call_id, calls)| {
if let Some((_, Either::Left(attr))) =
collect_attrs(&adt).nth(attr_id.ast_index())
{
res[keys::DERIVE_MACRO_CALL]
.insert(AstPtr::new(&attr), (attr_id, call_id, calls.into()));
}
// FIXME: Fix cfg_attr handling.
let (attr, _, _, _) = attr_id.find_attr_range_with_source(db, krate, &adt);
res[keys::DERIVE_MACRO_CALL]
.insert(AstPtr::new(&attr), (attr_id, call_id, calls.into()));
});
},
);

View file

@ -416,12 +416,12 @@ impl<'a> SymbolCollector<'a> {
let mut do_not_complete = Complete::Yes;
if let Some(attrs) = def.attrs(self.db) {
do_not_complete = Complete::extract(matches!(def, ModuleDef::Trait(_)), &attrs);
do_not_complete = Complete::extract(matches!(def, ModuleDef::Trait(_)), attrs.attrs);
if let Some(trait_do_not_complete) = trait_do_not_complete {
do_not_complete = Complete::for_trait_item(trait_do_not_complete, do_not_complete);
}
for alias in attrs.doc_aliases() {
for alias in attrs.doc_aliases(self.db) {
self.symbols.insert(FileSymbol {
name: alias.clone(),
def,
@ -465,9 +465,9 @@ impl<'a> SymbolCollector<'a> {
let mut do_not_complete = Complete::Yes;
if let Some(attrs) = def.attrs(self.db) {
do_not_complete = Complete::extract(matches!(def, ModuleDef::Trait(_)), &attrs);
do_not_complete = Complete::extract(matches!(def, ModuleDef::Trait(_)), attrs.attrs);
for alias in attrs.doc_aliases() {
for alias in attrs.doc_aliases(self.db) {
self.symbols.insert(FileSymbol {
name: alias.clone(),
def,

View file

@ -1,7 +1,7 @@
use std::iter::{self, Peekable};
use either::Either;
use hir::{Adt, AsAssocItem, Crate, FindPathConfig, HasAttrs, ModuleDef, Semantics, sym};
use hir::{Adt, AsAssocItem, Crate, FindPathConfig, HasAttrs, ModuleDef, Semantics};
use ide_db::RootDatabase;
use ide_db::assists::ExprFillDefaultMode;
use ide_db::syntax_helpers::suggest_name;
@ -401,7 +401,7 @@ impl ExtendedVariant {
fn should_be_hidden(self, db: &RootDatabase, krate: Crate) -> bool {
match self {
ExtendedVariant::Variant { variant: var, .. } => {
var.attrs(db).has_doc_hidden() && var.module(db).krate() != krate
var.attrs(db).is_doc_hidden() && var.module(db).krate() != krate
}
_ => false,
}
@ -424,7 +424,7 @@ impl ExtendedEnum {
fn is_non_exhaustive(&self, db: &RootDatabase, krate: Crate) -> bool {
match self {
ExtendedEnum::Enum { enum_: e, .. } => {
e.attrs(db).by_key(sym::non_exhaustive).exists() && e.module(db).krate() != krate
e.attrs(db).is_non_exhaustive() && e.module(db).krate() != krate
}
_ => false,
}

View file

@ -1,4 +1,4 @@
use hir::{HasVisibility, sym};
use hir::HasVisibility;
use ide_db::{
FxHashMap, FxHashSet,
assists::AssistId,
@ -93,7 +93,7 @@ fn collect_data(ident_pat: ast::IdentPat, ctx: &AssistContext<'_>) -> Option<Str
let kind = struct_type.kind(ctx.db());
let struct_def_path = module.find_path(ctx.db(), struct_def, cfg)?;
let is_non_exhaustive = struct_def.attrs(ctx.db())?.by_key(sym::non_exhaustive).exists();
let is_non_exhaustive = struct_def.attrs(ctx.db())?.is_non_exhaustive();
let is_foreign_crate = struct_def.module(ctx.db()).is_some_and(|m| m.krate() != module.krate());
let fields = struct_type.fields(ctx.db());

View file

@ -1,7 +1,6 @@
use std::iter;
use ast::edit::IndentLevel;
use hir::{HasAttrs, sym};
use ide_db::base_db::AnchoredPathBuf;
use itertools::Itertools;
use stdx::format_to;
@ -53,14 +52,7 @@ pub(crate) fn move_module_to_file(acc: &mut Assists, ctx: &AssistContext<'_>) ->
let mut buf = String::from("./");
let db = ctx.db();
match parent_module.name(db) {
Some(name)
if !parent_module.is_mod_rs(db)
&& parent_module
.attrs(db)
.by_key(sym::path)
.string_value_unescape()
.is_none() =>
{
Some(name) if !parent_module.is_mod_rs(db) && !parent_module.has_path(db) => {
format_to!(buf, "{}/", name.as_str())
}
_ => (),

View file

@ -68,7 +68,7 @@ pub mod utils;
use hir::Semantics;
use ide_db::{EditionedFileId, RootDatabase};
use syntax::{Edition, TextRange};
use syntax::TextRange;
pub(crate) use crate::assist_context::{AssistContext, Assists};
@ -90,7 +90,7 @@ pub fn assists(
let sema = Semantics::new(db);
let file_id = sema
.attach_first_edition(range.file_id)
.unwrap_or_else(|| EditionedFileId::new(db, range.file_id, Edition::CURRENT));
.unwrap_or_else(|| EditionedFileId::current_edition_guess_origin(db, range.file_id));
let ctx = AssistContext::new(sema, config, hir::FileRange { file_id, range: range.range });
let mut acc = Assists::new(&ctx, resolve);
handlers::all().iter().for_each(|handler| {

View file

@ -321,11 +321,13 @@ fn check_with_config(
let _tracing = setup_tracing();
let (mut db, file_with_caret_id, range_or_offset) = RootDatabase::with_range_or_offset(before);
db.enable_proc_attr_macros();
let sema = Semantics::new(&db);
let file_with_caret_id =
sema.attach_first_edition(file_with_caret_id.file_id(&db)).unwrap_or(file_with_caret_id);
let text_without_caret = db.file_text(file_with_caret_id.file_id(&db)).text(&db).to_string();
let frange = hir::FileRange { file_id: file_with_caret_id, range: range_or_offset.into() };
let sema = Semantics::new(&db);
let ctx = AssistContext::new(sema, &config, frange);
let resolve = match expected {
ExpectedResult::Unresolved => AssistResolveStrategy::None,

View file

@ -101,16 +101,7 @@ pub fn test_related_attribute_syn(fn_def: &ast::Fn) -> Option<ast::Attr> {
}
pub fn has_test_related_attribute(attrs: &hir::AttrsWithOwner) -> bool {
attrs.iter().any(|attr| {
let path = attr.path();
(|| {
Some(
path.segments().first()?.as_str().starts_with("test")
|| path.segments().last()?.as_str().ends_with("test"),
)
})()
.unwrap_or_default()
})
attrs.is_test()
}
#[derive(Clone, Copy, PartialEq)]
@ -136,7 +127,7 @@ pub fn filter_assoc_items(
.copied()
.filter(|assoc_item| {
if ignore_items == IgnoreAssocItems::DocHiddenAttrPresent
&& assoc_item.attrs(sema.db).has_doc_hidden()
&& assoc_item.attrs(sema.db).is_doc_hidden()
{
if let hir::AssocItem::Function(f) = assoc_item
&& !f.has_body(sema.db)

View file

@ -56,7 +56,7 @@ pub(super) fn complete_lint(
};
let mut item =
CompletionItem::new(SymbolKind::Attribute, ctx.source_range(), label, ctx.edition);
item.documentation(Documentation::new(description.to_owned()));
item.documentation(Documentation::new_owned(description.to_owned()));
item.add_to(acc, ctx.db)
}
}

View file

@ -266,7 +266,7 @@ fn import_on_the_fly(
let original_item = &import.original_item;
!ctx.is_item_hidden(&import.item_to_import)
&& !ctx.is_item_hidden(original_item)
&& ctx.check_stability(original_item.attrs(ctx.db).as_deref())
&& ctx.check_stability(original_item.attrs(ctx.db).as_ref())
})
.filter(|import| filter_excluded_flyimport(ctx, import))
.sorted_by(|a, b| {
@ -313,7 +313,7 @@ fn import_on_the_fly_pat_(
let original_item = &import.original_item;
!ctx.is_item_hidden(&import.item_to_import)
&& !ctx.is_item_hidden(original_item)
&& ctx.check_stability(original_item.attrs(ctx.db).as_deref())
&& ctx.check_stability(original_item.attrs(ctx.db).as_ref())
})
.sorted_by(|a, b| {
let key = |import_path| {

View file

@ -446,7 +446,7 @@ fn add_custom_postfix_completions(
let body = snippet.postfix_snippet(receiver_text);
let mut builder =
postfix_snippet(trigger, snippet.description.as_deref().unwrap_or_default(), &body);
builder.documentation(Documentation::new(format!("```rust\n{body}\n```")));
builder.documentation(Documentation::new_owned(format!("```rust\n{body}\n```")));
for import in imports.into_iter() {
builder.add_import(import);
}

View file

@ -139,7 +139,7 @@ fn add_custom_completions(
};
let body = snip.snippet();
let mut builder = snippet(ctx, cap, trigger, &body);
builder.documentation(Documentation::new(format!("```rust\n{body}\n```")));
builder.documentation(Documentation::new_owned(format!("```rust\n{body}\n```")));
for import in imports.into_iter() {
builder.add_import(import);
}

View file

@ -559,7 +559,7 @@ impl CompletionContext<'_> {
I: hir::HasAttrs + Copy,
{
let attrs = item.attrs(self.db);
attrs.doc_aliases().map(|it| it.as_str().into()).collect()
attrs.doc_aliases(self.db).iter().map(|it| it.as_str().into()).collect()
}
/// Check if an item is `#[doc(hidden)]`.
@ -573,7 +573,7 @@ impl CompletionContext<'_> {
}
/// Checks whether this item should be listed in regards to stability. Returns `true` if we should.
pub(crate) fn check_stability(&self, attrs: Option<&hir::Attrs>) -> bool {
pub(crate) fn check_stability(&self, attrs: Option<&hir::AttrsWithOwner>) -> bool {
let Some(attrs) = attrs else {
return true;
};
@ -591,15 +591,15 @@ impl CompletionContext<'_> {
/// Whether the given trait is an operator trait or not.
pub(crate) fn is_ops_trait(&self, trait_: hir::Trait) -> bool {
match trait_.attrs(self.db).lang() {
Some(lang) => OP_TRAIT_LANG_NAMES.contains(&lang.as_str()),
match trait_.attrs(self.db).lang(self.db) {
Some(lang) => OP_TRAIT_LANG.contains(&lang),
None => false,
}
}
/// Whether the given trait has `#[doc(notable_trait)]`
pub(crate) fn is_doc_notable_trait(&self, trait_: hir::Trait) -> bool {
trait_.attrs(self.db).has_doc_notable_trait()
trait_.attrs(self.db).is_doc_notable_trait()
}
/// Returns the traits in scope, with the [`Drop`] trait removed.
@ -656,7 +656,7 @@ impl CompletionContext<'_> {
fn is_visible_impl(
&self,
vis: &hir::Visibility,
attrs: &hir::Attrs,
attrs: &hir::AttrsWithOwner,
defining_crate: hir::Crate,
) -> Visible {
if !self.check_stability(Some(attrs)) {
@ -678,14 +678,18 @@ impl CompletionContext<'_> {
if self.is_doc_hidden(attrs, defining_crate) { Visible::No } else { Visible::Yes }
}
pub(crate) fn is_doc_hidden(&self, attrs: &hir::Attrs, defining_crate: hir::Crate) -> bool {
pub(crate) fn is_doc_hidden(
&self,
attrs: &hir::AttrsWithOwner,
defining_crate: hir::Crate,
) -> bool {
// `doc(hidden)` items are only completed within the defining crate.
self.krate != defining_crate && attrs.has_doc_hidden()
self.krate != defining_crate && attrs.is_doc_hidden()
}
pub(crate) fn doc_aliases_in_scope(&self, scope_def: ScopeDef) -> Vec<SmolStr> {
if let Some(attrs) = scope_def.attrs(self.db) {
attrs.doc_aliases().map(|it| it.as_str().into()).collect()
attrs.doc_aliases(self.db).iter().map(|it| it.as_str().into()).collect()
} else {
vec![]
}
@ -889,35 +893,35 @@ impl<'db> CompletionContext<'db> {
}
}
const OP_TRAIT_LANG_NAMES: &[&str] = &[
"add_assign",
"add",
"bitand_assign",
"bitand",
"bitor_assign",
"bitor",
"bitxor_assign",
"bitxor",
"deref_mut",
"deref",
"div_assign",
"div",
"eq",
"fn_mut",
"fn_once",
"fn",
"index_mut",
"index",
"mul_assign",
"mul",
"neg",
"not",
"partial_ord",
"rem_assign",
"rem",
"shl_assign",
"shl",
"shr_assign",
"shr",
"sub",
const OP_TRAIT_LANG: &[hir::LangItem] = &[
hir::LangItem::AddAssign,
hir::LangItem::Add,
hir::LangItem::BitAndAssign,
hir::LangItem::BitAnd,
hir::LangItem::BitOrAssign,
hir::LangItem::BitOr,
hir::LangItem::BitXorAssign,
hir::LangItem::BitXor,
hir::LangItem::DerefMut,
hir::LangItem::Deref,
hir::LangItem::DivAssign,
hir::LangItem::Div,
hir::LangItem::PartialEq,
hir::LangItem::FnMut,
hir::LangItem::FnOnce,
hir::LangItem::Fn,
hir::LangItem::IndexMut,
hir::LangItem::Index,
hir::LangItem::MulAssign,
hir::LangItem::Mul,
hir::LangItem::Neg,
hir::LangItem::Not,
hir::LangItem::PartialOrd,
hir::LangItem::RemAssign,
hir::LangItem::Rem,
hir::LangItem::ShlAssign,
hir::LangItem::Shl,
hir::LangItem::ShrAssign,
hir::LangItem::Shr,
hir::LangItem::Sub,
];

View file

@ -57,7 +57,8 @@ pub struct CompletionItem {
/// Additional info to show in the UI pop up.
pub detail: Option<String>,
pub documentation: Option<Documentation>,
// FIXME: Make this with `'db` lifetime.
pub documentation: Option<Documentation<'static>>,
/// Whether this item is marked as deprecated
pub deprecated: bool,
@ -488,7 +489,8 @@ pub(crate) struct Builder {
insert_text: Option<String>,
is_snippet: bool,
detail: Option<String>,
documentation: Option<Documentation>,
// FIXME: Make this with `'db` lifetime.
documentation: Option<Documentation<'static>>,
lookup: Option<SmolStr>,
kind: CompletionItemKind,
text_edit: Option<TextEdit>,
@ -644,11 +646,11 @@ impl Builder {
self
}
#[allow(unused)]
pub(crate) fn documentation(&mut self, docs: Documentation) -> &mut Builder {
pub(crate) fn documentation(&mut self, docs: Documentation<'_>) -> &mut Builder {
self.set_documentation(Some(docs))
}
pub(crate) fn set_documentation(&mut self, docs: Option<Documentation>) -> &mut Builder {
self.documentation = docs;
pub(crate) fn set_documentation(&mut self, docs: Option<Documentation<'_>>) -> &mut Builder {
self.documentation = docs.map(Documentation::into_owned);
self
}
pub(crate) fn set_deprecated(&mut self, deprecated: bool) -> &mut Builder {

View file

@ -10,7 +10,7 @@ pub(crate) mod type_alias;
pub(crate) mod union_literal;
pub(crate) mod variant;
use hir::{AsAssocItem, HasAttrs, HirDisplay, ModuleDef, ScopeDef, Type, sym};
use hir::{AsAssocItem, HasAttrs, HirDisplay, ModuleDef, ScopeDef, Type};
use ide_db::text_edit::TextEdit;
use ide_db::{
RootDatabase, SnippetCap, SymbolKind,
@ -91,8 +91,7 @@ impl<'a> RenderContext<'a> {
}
fn is_deprecated(&self, def: impl HasAttrs) -> bool {
let attrs = def.attrs(self.db());
attrs.by_key(sym::deprecated).exists()
def.attrs(self.db()).is_deprecated()
}
fn is_deprecated_assoc_item(&self, as_assoc_item: impl AsAssocItem) -> bool {
@ -115,7 +114,7 @@ impl<'a> RenderContext<'a> {
}
// FIXME: remove this
fn docs(&self, def: impl HasDocs) -> Option<Documentation> {
fn docs(&self, def: impl HasDocs) -> Option<Documentation<'a>> {
def.docs(self.db())
}
}
@ -320,7 +319,9 @@ pub(crate) fn render_expr(
);
let edit = TextEdit::replace(source_range, snippet);
item.snippet_edit(ctx.config.snippet_cap?, edit);
item.documentation(Documentation::new(String::from("Autogenerated expression by term search")));
item.documentation(Documentation::new_owned(String::from(
"Autogenerated expression by term search",
)));
item.set_relevance(crate::CompletionRelevance {
type_match: compute_type_match(ctx, &expr.ty(ctx.db)),
..Default::default()
@ -553,7 +554,7 @@ fn res_to_kind(resolution: ScopeDef) -> CompletionItemKind {
}
}
fn scope_def_docs(db: &RootDatabase, resolution: ScopeDef) -> Option<Documentation> {
fn scope_def_docs(db: &RootDatabase, resolution: ScopeDef) -> Option<Documentation<'_>> {
use hir::ModuleDef::*;
match resolution {
ScopeDef::ModuleDef(Module(it)) => it.docs(db),

View file

@ -180,7 +180,7 @@ impl Variant {
}
}
fn docs(self, db: &dyn HirDatabase) -> Option<Documentation> {
fn docs(self, db: &dyn HirDatabase) -> Option<Documentation<'_>> {
match self {
Variant::Struct(it) => it.docs(db),
Variant::EnumVariant(it) => it.docs(db),

View file

@ -108,7 +108,7 @@ fn build_completion(
label: SmolStr,
lookup: SmolStr,
pat: String,
def: impl HasDocs + Copy,
def: impl HasDocs,
adt_ty: hir::Type<'_>,
// Missing in context of match statement completions
is_variant_missing: bool,

View file

@ -1,7 +1,7 @@
//! Code common to structs, unions, and enum variants.
use crate::context::CompletionContext;
use hir::{HasAttrs, HasCrate, HasVisibility, HirDisplay, StructKind, sym};
use hir::{HasAttrs, HasCrate, HasVisibility, HirDisplay, StructKind};
use ide_db::SnippetCap;
use itertools::Itertools;
use syntax::SmolStr;
@ -105,8 +105,8 @@ pub(crate) fn visible_fields(
.copied()
.collect::<Vec<_>>();
let has_invisible_field = n_fields - fields.len() > 0;
let is_foreign_non_exhaustive = item.attrs(ctx.db).by_key(sym::non_exhaustive).exists()
&& item.krate(ctx.db) != module.krate();
let is_foreign_non_exhaustive =
item.attrs(ctx.db).is_non_exhaustive() && item.krate(ctx.db) != module.krate();
let fields_omitted = has_invisible_field || is_foreign_non_exhaustive;
Some((fields, fields_omitted))
}

View file

@ -160,12 +160,12 @@ pub(crate) fn position(
#[rust_analyzer::rust_fixture] ra_fixture: &str,
) -> (RootDatabase, FilePosition) {
let mut database = RootDatabase::default();
let change_fixture = ChangeFixture::parse(&database, ra_fixture);
let change_fixture = ChangeFixture::parse(ra_fixture);
database.enable_proc_attr_macros();
database.apply_change(change_fixture.change);
let (file_id, range_or_offset) = change_fixture.file_position.expect("expected a marker ($0)");
let offset = range_or_offset.expect_offset();
let position = FilePosition { file_id: file_id.file_id(&database), offset };
let position = FilePosition { file_id: file_id.file_id(), offset };
(database, position)
}

View file

@ -781,8 +781,8 @@ fn main() {
"#,
expect![[r#"
me random_method() (use dep::test_mod::TestTrait) fn(&self) DEPRECATED
ct SPECIAL_CONST (use dep::test_mod::TestTrait) u8 DEPRECATED
fn weird_function() (use dep::test_mod::TestTrait) fn() DEPRECATED
ct SPECIAL_CONST (use dep::test_mod::TestTrait) u8 DEPRECATED
"#]],
);
}

View file

@ -5,8 +5,10 @@
// FIXME: this badly needs rename/rewrite (matklad, 2020-02-06).
use std::borrow::Cow;
use crate::RootDatabase;
use crate::documentation::{DocsRangeMap, Documentation, HasDocs};
use crate::documentation::{Documentation, HasDocs};
use crate::famous_defs::FamousDefs;
use arrayvec::ArrayVec;
use either::Either;
@ -21,7 +23,7 @@ use hir::{
use span::Edition;
use stdx::{format_to, impl_from};
use syntax::{
SyntaxKind, SyntaxNode, SyntaxToken, TextSize,
SyntaxKind, SyntaxNode, SyntaxToken,
ast::{self, AstNode},
match_ast,
};
@ -199,21 +201,25 @@ impl Definition {
Some(name)
}
pub fn docs(
pub fn docs<'db>(
&self,
db: &RootDatabase,
db: &'db RootDatabase,
famous_defs: Option<&FamousDefs<'_, '_>>,
display_target: DisplayTarget,
) -> Option<Documentation> {
self.docs_with_rangemap(db, famous_defs, display_target).map(|(docs, _)| docs)
) -> Option<Documentation<'db>> {
self.docs_with_rangemap(db, famous_defs, display_target).map(|docs| match docs {
Either::Left(Cow::Borrowed(docs)) => Documentation::new_borrowed(docs.docs()),
Either::Left(Cow::Owned(docs)) => Documentation::new_owned(docs.into_docs()),
Either::Right(docs) => docs,
})
}
pub fn docs_with_rangemap(
pub fn docs_with_rangemap<'db>(
&self,
db: &RootDatabase,
db: &'db RootDatabase,
famous_defs: Option<&FamousDefs<'_, '_>>,
display_target: DisplayTarget,
) -> Option<(Documentation, Option<DocsRangeMap>)> {
) -> Option<Either<Cow<'db, hir::Docs>, Documentation<'db>>> {
let docs = match self {
Definition::Macro(it) => it.docs_with_rangemap(db),
Definition::Field(it) => it.docs_with_rangemap(db),
@ -229,15 +235,13 @@ impl Definition {
it.docs_with_rangemap(db).or_else(|| {
// docs are missing, try to fall back to the docs of the aliased item.
let adt = it.ty(db).as_adt()?;
let (docs, range_map) = adt.docs_with_rangemap(db)?;
let mut docs = adt.docs_with_rangemap(db)?.into_owned();
let header_docs = format!(
"*This is the documentation for* `{}`\n\n",
adt.display(db, display_target)
);
let offset = TextSize::new(header_docs.len() as u32);
let range_map = range_map.shift_docstring_line_range(offset);
let docs = header_docs + docs.as_str();
Some((Documentation::new(docs), range_map))
docs.prepend_str(&header_docs);
Some(Cow::Owned(docs))
})
}
Definition::BuiltinType(it) => {
@ -246,7 +250,7 @@ impl Definition {
let primitive_mod =
format!("prim_{}", it.name().display(fd.0.db, display_target.edition));
let doc_owner = find_std_module(fd, &primitive_mod, display_target.edition)?;
doc_owner.docs_with_rangemap(fd.0.db)
doc_owner.docs_with_rangemap(db)
})
}
Definition::BuiltinLifetime(StaticLifetime) => None,
@ -282,7 +286,7 @@ impl Definition {
);
}
return Some((Documentation::new(docs.replace('*', "\\*")), None));
return Some(Either::Right(Documentation::new_owned(docs.replace('*', "\\*"))));
}
Definition::ToolModule(_) => None,
Definition::DeriveHelper(_) => None,
@ -299,7 +303,7 @@ impl Definition {
let item = trait_.items(db).into_iter().find(|it| it.name(db) == name)?;
item.docs_with_rangemap(db)
})
.map(|(docs, range_map)| (docs, Some(range_map)))
.map(Either::Left)
}
pub fn label(&self, db: &RootDatabase, display_target: DisplayTarget) -> String {

View file

@ -1,337 +1,100 @@
//! Documentation attribute related utilities.
use either::Either;
use hir::{
AttrId, AttrSourceMap, AttrsWithOwner, HasAttrs, InFile,
db::{DefDatabase, HirDatabase},
resolve_doc_path_on, sym,
};
use itertools::Itertools;
use span::{TextRange, TextSize};
use syntax::{
AstToken,
ast::{self, IsString},
};
use std::borrow::Cow;
use hir::{HasAttrs, db::HirDatabase, resolve_doc_path_on};
/// Holds documentation
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub struct Documentation(String);
pub struct Documentation<'db>(Cow<'db, str>);
impl Documentation {
pub fn new(s: String) -> Self {
Documentation(s)
impl<'db> Documentation<'db> {
#[inline]
pub fn new_owned(s: String) -> Self {
Documentation(Cow::Owned(s))
}
#[inline]
pub fn new_borrowed(s: &'db str) -> Self {
Documentation(Cow::Borrowed(s))
}
#[inline]
pub fn into_owned(self) -> Documentation<'static> {
Documentation::new_owned(self.0.into_owned())
}
#[inline]
pub fn as_str(&self) -> &str {
&self.0
}
}
impl From<Documentation> for String {
fn from(Documentation(string): Documentation) -> Self {
string
pub trait HasDocs: HasAttrs + Copy {
fn docs(self, db: &dyn HirDatabase) -> Option<Documentation<'_>> {
let docs = match self.docs_with_rangemap(db)? {
Cow::Borrowed(docs) => Documentation::new_borrowed(docs.docs()),
Cow::Owned(docs) => Documentation::new_owned(docs.into_docs()),
};
Some(docs)
}
}
pub trait HasDocs: HasAttrs {
fn docs(self, db: &dyn HirDatabase) -> Option<Documentation>;
fn docs_with_rangemap(self, db: &dyn HirDatabase) -> Option<(Documentation, DocsRangeMap)>;
fn resolve_doc_path(
self,
db: &dyn HirDatabase,
link: &str,
ns: Option<hir::Namespace>,
is_inner_doc: bool,
) -> Option<hir::DocLinkDef>;
}
/// A struct to map text ranges from [`Documentation`] back to TextRanges in the syntax tree.
#[derive(Debug)]
pub struct DocsRangeMap {
source_map: AttrSourceMap,
// (docstring-line-range, attr_index, attr-string-range)
// a mapping from the text range of a line of the [`Documentation`] to the attribute index and
// the original (untrimmed) syntax doc line
mapping: Vec<(TextRange, AttrId, TextRange)>,
}
impl DocsRangeMap {
/// Maps a [`TextRange`] relative to the documentation string back to its AST range
pub fn map(&self, range: TextRange) -> Option<(InFile<TextRange>, AttrId)> {
let found = self.mapping.binary_search_by(|(probe, ..)| probe.ordering(range)).ok()?;
let (line_docs_range, idx, original_line_src_range) = self.mapping[found];
if !line_docs_range.contains_range(range) {
return None;
}
let relative_range = range - line_docs_range.start();
let InFile { file_id, value: source } = self.source_map.source_of_id(idx);
match source {
Either::Left(attr) => {
let string = get_doc_string_in_attr(attr)?;
let text_range = string.open_quote_text_range()?;
let range = TextRange::at(
text_range.end() + original_line_src_range.start() + relative_range.start(),
string.syntax().text_range().len().min(range.len()),
);
Some((InFile { file_id, value: range }, idx))
}
Either::Right(comment) => {
let text_range = comment.syntax().text_range();
let range = TextRange::at(
text_range.start()
+ TextSize::try_from(comment.prefix().len()).ok()?
+ original_line_src_range.start()
+ relative_range.start(),
text_range.len().min(range.len()),
);
Some((InFile { file_id, value: range }, idx))
}
}
}
pub fn shift_docstring_line_range(self, offset: TextSize) -> DocsRangeMap {
let mapping = self
.mapping
.into_iter()
.map(|(buf_offset, id, base_offset)| {
let buf_offset = buf_offset.checked_add(offset).unwrap();
(buf_offset, id, base_offset)
})
.collect_vec();
DocsRangeMap { source_map: self.source_map, mapping }
}
}
pub fn docs_with_rangemap(
db: &dyn DefDatabase,
attrs: &AttrsWithOwner,
) -> Option<(Documentation, DocsRangeMap)> {
let docs = attrs
.by_key(sym::doc)
.attrs()
.filter_map(|attr| attr.string_value_unescape().map(|s| (s, attr.id)));
let indent = doc_indent(attrs);
let mut buf = String::new();
let mut mapping = Vec::new();
for (doc, idx) in docs {
if !doc.is_empty() {
let mut base_offset = 0;
for raw_line in doc.split('\n') {
let line = raw_line.trim_end();
let line_len = line.len();
let (offset, line) = match line.char_indices().nth(indent) {
Some((offset, _)) => (offset, &line[offset..]),
None => (0, line),
};
let buf_offset = buf.len();
buf.push_str(line);
mapping.push((
TextRange::new(buf_offset.try_into().ok()?, buf.len().try_into().ok()?),
idx,
TextRange::at(
(base_offset + offset).try_into().ok()?,
line_len.try_into().ok()?,
),
));
buf.push('\n');
base_offset += raw_line.len() + 1;
}
} else {
buf.push('\n');
}
}
buf.pop();
if buf.is_empty() {
None
} else {
Some((Documentation(buf), DocsRangeMap { mapping, source_map: attrs.source_map(db) }))
}
}
pub fn docs_from_attrs(attrs: &hir::Attrs) -> Option<String> {
let docs = attrs.by_key(sym::doc).attrs().filter_map(|attr| attr.string_value_unescape());
let indent = doc_indent(attrs);
let mut buf = String::new();
for doc in docs {
// str::lines doesn't yield anything for the empty string
if !doc.is_empty() {
// We don't trim trailing whitespace from doc comments as multiple trailing spaces
// indicates a hard line break in Markdown.
let lines = doc.lines().map(|line| {
line.char_indices().nth(indent).map_or(line, |(offset, _)| &line[offset..])
});
buf.extend(Itertools::intersperse(lines, "\n"));
}
buf.push('\n');
}
buf.pop();
if buf.is_empty() { None } else { Some(buf) }
}
macro_rules! impl_has_docs {
($($def:ident,)*) => {$(
impl HasDocs for hir::$def {
fn docs(self, db: &dyn HirDatabase) -> Option<Documentation> {
docs_from_attrs(&self.attrs(db)).map(Documentation)
}
fn docs_with_rangemap(
self,
db: &dyn HirDatabase,
) -> Option<(Documentation, DocsRangeMap)> {
docs_with_rangemap(db, &self.attrs(db))
}
fn resolve_doc_path(
self,
db: &dyn HirDatabase,
link: &str,
ns: Option<hir::Namespace>,
is_inner_doc: bool,
) -> Option<hir::DocLinkDef> {
resolve_doc_path_on(db, self, link, ns, is_inner_doc)
}
}
)*};
}
impl_has_docs![
Variant, Field, Static, Const, Trait, TypeAlias, Macro, Function, Adt, Module, Impl, Crate,
];
macro_rules! impl_has_docs_enum {
($($variant:ident),* for $enum:ident) => {$(
impl HasDocs for hir::$variant {
fn docs(self, db: &dyn HirDatabase) -> Option<Documentation> {
hir::$enum::$variant(self).docs(db)
}
fn docs_with_rangemap(
self,
db: &dyn HirDatabase,
) -> Option<(Documentation, DocsRangeMap)> {
hir::$enum::$variant(self).docs_with_rangemap(db)
}
fn resolve_doc_path(
self,
db: &dyn HirDatabase,
link: &str,
ns: Option<hir::Namespace>,
is_inner_doc: bool,
) -> Option<hir::DocLinkDef> {
hir::$enum::$variant(self).resolve_doc_path(db, link, ns, is_inner_doc)
}
}
)*};
}
impl_has_docs_enum![Struct, Union, Enum for Adt];
impl HasDocs for hir::AssocItem {
fn docs(self, db: &dyn HirDatabase) -> Option<Documentation> {
match self {
hir::AssocItem::Function(it) => it.docs(db),
hir::AssocItem::Const(it) => it.docs(db),
hir::AssocItem::TypeAlias(it) => it.docs(db),
}
}
fn docs_with_rangemap(self, db: &dyn HirDatabase) -> Option<(Documentation, DocsRangeMap)> {
match self {
hir::AssocItem::Function(it) => it.docs_with_rangemap(db),
hir::AssocItem::Const(it) => it.docs_with_rangemap(db),
hir::AssocItem::TypeAlias(it) => it.docs_with_rangemap(db),
}
}
fn resolve_doc_path(
self,
db: &dyn HirDatabase,
link: &str,
ns: Option<hir::Namespace>,
is_inner_doc: bool,
) -> Option<hir::DocLinkDef> {
match self {
hir::AssocItem::Function(it) => it.resolve_doc_path(db, link, ns, is_inner_doc),
hir::AssocItem::Const(it) => it.resolve_doc_path(db, link, ns, is_inner_doc),
hir::AssocItem::TypeAlias(it) => it.resolve_doc_path(db, link, ns, is_inner_doc),
}
}
}
impl HasDocs for hir::ExternCrateDecl {
fn docs(self, db: &dyn HirDatabase) -> Option<Documentation> {
let crate_docs = docs_from_attrs(&self.resolved_crate(db)?.root_module().attrs(db));
let decl_docs = docs_from_attrs(&self.attrs(db));
match (decl_docs, crate_docs) {
(None, None) => None,
(Some(decl_docs), None) => Some(decl_docs),
(None, Some(crate_docs)) => Some(crate_docs),
(Some(mut decl_docs), Some(crate_docs)) => {
decl_docs.push('\n');
decl_docs.push('\n');
decl_docs += &crate_docs;
Some(decl_docs)
}
}
.map(Documentation::new)
}
fn docs_with_rangemap(self, db: &dyn HirDatabase) -> Option<(Documentation, DocsRangeMap)> {
let crate_docs = docs_with_rangemap(db, &self.resolved_crate(db)?.root_module().attrs(db));
let decl_docs = docs_with_rangemap(db, &self.attrs(db));
match (decl_docs, crate_docs) {
(None, None) => None,
(Some(decl_docs), None) => Some(decl_docs),
(None, Some(crate_docs)) => Some(crate_docs),
(
Some((Documentation(mut decl_docs), mut decl_range_map)),
Some((Documentation(crate_docs), crate_range_map)),
) => {
decl_docs.push('\n');
decl_docs.push('\n');
let offset = TextSize::new(decl_docs.len() as u32);
decl_docs += &crate_docs;
let crate_range_map = crate_range_map.shift_docstring_line_range(offset);
decl_range_map.mapping.extend(crate_range_map.mapping);
Some((Documentation(decl_docs), decl_range_map))
}
}
fn docs_with_rangemap(self, db: &dyn HirDatabase) -> Option<Cow<'_, hir::Docs>> {
self.hir_docs(db).map(Cow::Borrowed)
}
fn resolve_doc_path(
self,
db: &dyn HirDatabase,
link: &str,
ns: Option<hir::Namespace>,
is_inner_doc: bool,
is_inner_doc: hir::IsInnerDoc,
) -> Option<hir::DocLinkDef> {
resolve_doc_path_on(db, self, link, ns, is_inner_doc)
}
}
fn get_doc_string_in_attr(it: &ast::Attr) -> Option<ast::String> {
match it.expr() {
// #[doc = lit]
Some(ast::Expr::Literal(lit)) => match lit.kind() {
ast::LiteralKind::String(it) => Some(it),
_ => None,
},
// #[cfg_attr(..., doc = "", ...)]
None => {
// FIXME: See highlight injection for what to do here
None
}
_ => None,
}
macro_rules! impl_has_docs {
($($def:ident,)*) => {$(
impl HasDocs for hir::$def {}
)*};
}
fn doc_indent(attrs: &hir::Attrs) -> usize {
let mut min = !0;
for val in attrs.by_key(sym::doc).attrs().filter_map(|attr| attr.string_value_unescape()) {
if let Some(m) =
val.lines().filter_map(|line| line.chars().position(|c| !c.is_whitespace())).min()
{
min = min.min(m);
impl_has_docs![
Variant, Field, Static, Const, Trait, TypeAlias, Macro, Function, Adt, Module, Impl, Crate,
AssocItem, Struct, Union, Enum,
];
impl HasDocs for hir::ExternCrateDecl {
fn docs(self, db: &dyn HirDatabase) -> Option<Documentation<'_>> {
let crate_docs = self.resolved_crate(db)?.hir_docs(db);
let decl_docs = self.hir_docs(db);
match (decl_docs, crate_docs) {
(None, None) => None,
(Some(docs), None) | (None, Some(docs)) => {
Some(Documentation::new_borrowed(docs.docs()))
}
(Some(decl_docs), Some(crate_docs)) => {
let mut docs = String::with_capacity(
decl_docs.docs().len() + "\n\n".len() + crate_docs.docs().len(),
);
docs.push_str(decl_docs.docs());
docs.push_str("\n\n");
docs.push_str(crate_docs.docs());
Some(Documentation::new_owned(docs))
}
}
}
fn docs_with_rangemap(self, db: &dyn HirDatabase) -> Option<Cow<'_, hir::Docs>> {
let crate_docs = self.resolved_crate(db)?.hir_docs(db);
let decl_docs = self.hir_docs(db);
match (decl_docs, crate_docs) {
(None, None) => None,
(Some(docs), None) | (None, Some(docs)) => Some(Cow::Borrowed(docs)),
(Some(decl_docs), Some(crate_docs)) => {
let mut docs = decl_docs.clone();
docs.append_str("\n\n");
docs.append(crate_docs);
Some(Cow::Owned(docs))
}
}
}
min
}

View file

@ -25,18 +25,14 @@ impl RootDatabase {
// We don't want a mistake in the fixture to crash r-a, so we wrap this in `catch_unwind()`.
std::panic::catch_unwind(|| {
let mut db = RootDatabase::default();
let fixture = test_fixture::ChangeFixture::parse_with_proc_macros(
&db,
text,
minicore.0,
Vec::new(),
);
let fixture =
test_fixture::ChangeFixture::parse_with_proc_macros(text, minicore.0, Vec::new());
db.apply_change(fixture.change);
let files = fixture
.files
.into_iter()
.zip(fixture.file_lines)
.map(|(file_id, range)| (file_id.file_id(&db), range))
.map(|(file_id, range)| (file_id.file_id(), range))
.collect();
(db, files, fixture.sysroot_files)
})
@ -526,7 +522,7 @@ impl_empty_upmap_from_ra_fixture!(
String,
Symbol,
SmolStr,
Documentation,
Documentation<'_>,
SymbolKind,
CfgExpr,
ReferenceCategory,

View file

@ -33,7 +33,7 @@ pub fn is_rust_fence(s: &str) -> bool {
const RUSTDOC_FENCES: [&str; 2] = ["```", "~~~"];
pub fn format_docs(src: &Documentation) -> String {
pub fn format_docs(src: &Documentation<'_>) -> String {
format_docs_(src.as_str())
}

View file

@ -12,7 +12,7 @@ use either::Either;
use hir::{
Adt, AsAssocItem, DefWithBody, EditionedFileId, FileRange, FileRangeWrapper, HasAttrs,
HasContainer, HasSource, InFile, InFileWrapper, InRealFile, InlineAsmOperand, ItemContainer,
ModuleSource, PathResolution, Semantics, Visibility, sym,
ModuleSource, PathResolution, Semantics, Visibility,
};
use memchr::memmem::Finder;
use parser::SyntaxKind;
@ -169,7 +169,7 @@ impl SearchScope {
entries.extend(
source_root
.iter()
.map(|id| (EditionedFileId::new(db, id, crate_data.edition), None)),
.map(|id| (EditionedFileId::new(db, id, crate_data.edition, krate), None)),
);
}
SearchScope { entries }
@ -183,11 +183,9 @@ impl SearchScope {
let source_root = db.file_source_root(root_file).source_root_id(db);
let source_root = db.source_root(source_root).source_root(db);
entries.extend(
source_root
.iter()
.map(|id| (EditionedFileId::new(db, id, rev_dep.edition(db)), None)),
);
entries.extend(source_root.iter().map(|id| {
(EditionedFileId::new(db, id, rev_dep.edition(db), rev_dep.into()), None)
}));
}
SearchScope { entries }
}
@ -201,7 +199,7 @@ impl SearchScope {
SearchScope {
entries: source_root
.iter()
.map(|id| (EditionedFileId::new(db, id, of.edition(db)), None))
.map(|id| (EditionedFileId::new(db, id, of.edition(db), of.into()), None))
.collect(),
}
}
@ -368,7 +366,7 @@ impl Definition {
if let Definition::Macro(macro_def) = self {
return match macro_def.kind(db) {
hir::MacroKind::Declarative => {
if macro_def.attrs(db).by_key(sym::macro_export).exists() {
if macro_def.attrs(db).is_macro_export() {
SearchScope::reverse_dependencies(db, module.krate())
} else {
SearchScope::krate(db, module.krate())

View file

@ -3,7 +3,7 @@
Module {
id: ModuleId {
krate: Crate(
Id(3000),
Id(2c00),
),
block: None,
local_id: Idx::<ModuleData>(0),
@ -16,7 +16,7 @@
Struct(
Struct {
id: StructId(
3401,
3801,
),
},
),
@ -24,7 +24,7 @@
loc: DeclarationLocation {
hir_file_id: FileId(
EditionedFileId(
Id(2000),
Id(3000),
),
),
ptr: SyntaxNodePtr {
@ -50,7 +50,7 @@
Struct(
Struct {
id: StructId(
3400,
3800,
),
},
),
@ -58,7 +58,7 @@
loc: DeclarationLocation {
hir_file_id: FileId(
EditionedFileId(
Id(2000),
Id(3000),
),
),
ptr: SyntaxNodePtr {
@ -84,7 +84,7 @@
Struct(
Struct {
id: StructId(
3400,
3800,
),
},
),
@ -92,7 +92,7 @@
loc: DeclarationLocation {
hir_file_id: FileId(
EditionedFileId(
Id(2000),
Id(3000),
),
),
ptr: SyntaxNodePtr {
@ -118,7 +118,7 @@
Struct(
Struct {
id: StructId(
3400,
3800,
),
},
),
@ -126,7 +126,7 @@
loc: DeclarationLocation {
hir_file_id: FileId(
EditionedFileId(
Id(2000),
Id(3000),
),
),
ptr: SyntaxNodePtr {
@ -152,7 +152,7 @@
Struct(
Struct {
id: StructId(
3400,
3800,
),
},
),
@ -160,7 +160,7 @@
loc: DeclarationLocation {
hir_file_id: FileId(
EditionedFileId(
Id(2000),
Id(3000),
),
),
ptr: SyntaxNodePtr {
@ -186,7 +186,7 @@
Struct(
Struct {
id: StructId(
3401,
3801,
),
},
),
@ -194,7 +194,7 @@
loc: DeclarationLocation {
hir_file_id: FileId(
EditionedFileId(
Id(2000),
Id(3000),
),
),
ptr: SyntaxNodePtr {
@ -220,7 +220,7 @@
Struct(
Struct {
id: StructId(
3400,
3800,
),
},
),
@ -228,7 +228,7 @@
loc: DeclarationLocation {
hir_file_id: FileId(
EditionedFileId(
Id(2000),
Id(3000),
),
),
ptr: SyntaxNodePtr {

View file

@ -3,7 +3,7 @@
Module {
id: ModuleId {
krate: Crate(
Id(3000),
Id(2c00),
),
block: None,
local_id: Idx::<ModuleData>(0),
@ -22,7 +22,7 @@
loc: DeclarationLocation {
hir_file_id: FileId(
EditionedFileId(
Id(2000),
Id(3000),
),
),
ptr: SyntaxNodePtr {
@ -49,14 +49,14 @@
def: TypeAlias(
TypeAlias {
id: TypeAliasId(
6800,
6c00,
),
},
),
loc: DeclarationLocation {
hir_file_id: FileId(
EditionedFileId(
Id(2000),
Id(3000),
),
),
ptr: SyntaxNodePtr {
@ -88,7 +88,7 @@
loc: DeclarationLocation {
hir_file_id: FileId(
EditionedFileId(
Id(2000),
Id(3000),
),
),
ptr: SyntaxNodePtr {
@ -115,14 +115,14 @@
def: Const(
Const {
id: ConstId(
6000,
6400,
),
},
),
loc: DeclarationLocation {
hir_file_id: FileId(
EditionedFileId(
Id(2000),
Id(3000),
),
),
ptr: SyntaxNodePtr {
@ -147,14 +147,14 @@
def: Const(
Const {
id: ConstId(
6002,
6402,
),
},
),
loc: DeclarationLocation {
hir_file_id: FileId(
EditionedFileId(
Id(2000),
Id(3000),
),
),
ptr: SyntaxNodePtr {
@ -180,7 +180,7 @@
Enum(
Enum {
id: EnumId(
4c00,
5000,
),
},
),
@ -188,7 +188,7 @@
loc: DeclarationLocation {
hir_file_id: FileId(
EditionedFileId(
Id(2000),
Id(3000),
),
),
ptr: SyntaxNodePtr {
@ -214,7 +214,7 @@
Macro {
id: Macro2Id(
Macro2Id(
4800,
4c00,
),
),
},
@ -222,7 +222,7 @@
loc: DeclarationLocation {
hir_file_id: FileId(
EditionedFileId(
Id(2000),
Id(3000),
),
),
ptr: SyntaxNodePtr {
@ -248,7 +248,7 @@
Macro {
id: Macro2Id(
Macro2Id(
4800,
4c00,
),
),
},
@ -256,7 +256,7 @@
loc: DeclarationLocation {
hir_file_id: FileId(
EditionedFileId(
Id(2000),
Id(3000),
),
),
ptr: SyntaxNodePtr {
@ -281,14 +281,14 @@
def: Static(
Static {
id: StaticId(
6400,
6800,
),
},
),
loc: DeclarationLocation {
hir_file_id: FileId(
EditionedFileId(
Id(2000),
Id(3000),
),
),
ptr: SyntaxNodePtr {
@ -314,7 +314,7 @@
Struct(
Struct {
id: StructId(
4401,
4801,
),
},
),
@ -322,7 +322,7 @@
loc: DeclarationLocation {
hir_file_id: FileId(
EditionedFileId(
Id(2000),
Id(3000),
),
),
ptr: SyntaxNodePtr {
@ -348,7 +348,7 @@
Struct(
Struct {
id: StructId(
4400,
4800,
),
},
),
@ -356,7 +356,7 @@
loc: DeclarationLocation {
hir_file_id: MacroFile(
MacroCallId(
Id(3c00),
Id(4000),
),
),
ptr: SyntaxNodePtr {
@ -382,7 +382,7 @@
Struct(
Struct {
id: StructId(
4405,
4805,
),
},
),
@ -390,7 +390,7 @@
loc: DeclarationLocation {
hir_file_id: FileId(
EditionedFileId(
Id(2000),
Id(3000),
),
),
ptr: SyntaxNodePtr {
@ -418,7 +418,7 @@
Struct(
Struct {
id: StructId(
4406,
4806,
),
},
),
@ -426,7 +426,7 @@
loc: DeclarationLocation {
hir_file_id: FileId(
EditionedFileId(
Id(2000),
Id(3000),
),
),
ptr: SyntaxNodePtr {
@ -454,7 +454,7 @@
Struct(
Struct {
id: StructId(
4407,
4807,
),
},
),
@ -462,7 +462,7 @@
loc: DeclarationLocation {
hir_file_id: FileId(
EditionedFileId(
Id(2000),
Id(3000),
),
),
ptr: SyntaxNodePtr {
@ -488,7 +488,7 @@
Struct(
Struct {
id: StructId(
4402,
4802,
),
},
),
@ -496,7 +496,7 @@
loc: DeclarationLocation {
hir_file_id: FileId(
EditionedFileId(
Id(2000),
Id(3000),
),
),
ptr: SyntaxNodePtr {
@ -521,14 +521,14 @@
def: Trait(
Trait {
id: TraitId(
5800,
5c00,
),
},
),
loc: DeclarationLocation {
hir_file_id: FileId(
EditionedFileId(
Id(2000),
Id(3000),
),
),
ptr: SyntaxNodePtr {
@ -554,7 +554,7 @@
Macro {
id: Macro2Id(
Macro2Id(
4800,
4c00,
),
),
},
@ -562,7 +562,7 @@
loc: DeclarationLocation {
hir_file_id: FileId(
EditionedFileId(
Id(2000),
Id(3000),
),
),
ptr: SyntaxNodePtr {
@ -588,7 +588,7 @@
Union(
Union {
id: UnionId(
5000,
5400,
),
},
),
@ -596,7 +596,7 @@
loc: DeclarationLocation {
hir_file_id: FileId(
EditionedFileId(
Id(2000),
Id(3000),
),
),
ptr: SyntaxNodePtr {
@ -622,7 +622,7 @@
Module {
id: ModuleId {
krate: Crate(
Id(3000),
Id(2c00),
),
block: None,
local_id: Idx::<ModuleData>(1),
@ -632,7 +632,7 @@
loc: DeclarationLocation {
hir_file_id: FileId(
EditionedFileId(
Id(2000),
Id(3000),
),
),
ptr: SyntaxNodePtr {
@ -658,7 +658,7 @@
Module {
id: ModuleId {
krate: Crate(
Id(3000),
Id(2c00),
),
block: None,
local_id: Idx::<ModuleData>(2),
@ -668,7 +668,7 @@
loc: DeclarationLocation {
hir_file_id: FileId(
EditionedFileId(
Id(2000),
Id(3000),
),
),
ptr: SyntaxNodePtr {
@ -694,7 +694,7 @@
Macro {
id: MacroRulesId(
MacroRulesId(
3801,
3c01,
),
),
},
@ -702,7 +702,7 @@
loc: DeclarationLocation {
hir_file_id: FileId(
EditionedFileId(
Id(2000),
Id(3000),
),
),
ptr: SyntaxNodePtr {
@ -727,14 +727,14 @@
def: Function(
Function {
id: FunctionId(
5c02,
6002,
),
},
),
loc: DeclarationLocation {
hir_file_id: FileId(
EditionedFileId(
Id(2000),
Id(3000),
),
),
ptr: SyntaxNodePtr {
@ -761,14 +761,14 @@
def: Function(
Function {
id: FunctionId(
5c01,
6001,
),
},
),
loc: DeclarationLocation {
hir_file_id: FileId(
EditionedFileId(
Id(2000),
Id(3000),
),
),
ptr: SyntaxNodePtr {
@ -796,7 +796,7 @@
Macro {
id: MacroRulesId(
MacroRulesId(
3800,
3c00,
),
),
},
@ -804,7 +804,7 @@
loc: DeclarationLocation {
hir_file_id: FileId(
EditionedFileId(
Id(2000),
Id(3000),
),
),
ptr: SyntaxNodePtr {
@ -829,14 +829,14 @@
def: Function(
Function {
id: FunctionId(
5c00,
6000,
),
},
),
loc: DeclarationLocation {
hir_file_id: FileId(
EditionedFileId(
Id(2000),
Id(3000),
),
),
ptr: SyntaxNodePtr {
@ -862,7 +862,7 @@
Macro {
id: MacroRulesId(
MacroRulesId(
3801,
3c01,
),
),
},
@ -870,7 +870,7 @@
loc: DeclarationLocation {
hir_file_id: FileId(
EditionedFileId(
Id(2000),
Id(3000),
),
),
ptr: SyntaxNodePtr {
@ -895,14 +895,14 @@
def: Function(
Function {
id: FunctionId(
5c03,
6003,
),
},
),
loc: DeclarationLocation {
hir_file_id: FileId(
EditionedFileId(
Id(2000),
Id(3000),
),
),
ptr: SyntaxNodePtr {
@ -930,7 +930,7 @@
Module {
id: ModuleId {
krate: Crate(
Id(3000),
Id(2c00),
),
block: None,
local_id: Idx::<ModuleData>(1),
@ -943,7 +943,7 @@
Struct(
Struct {
id: StructId(
4403,
4803,
),
},
),
@ -951,7 +951,7 @@
loc: DeclarationLocation {
hir_file_id: FileId(
EditionedFileId(
Id(2000),
Id(3000),
),
),
ptr: SyntaxNodePtr {
@ -977,7 +977,7 @@
Module {
id: ModuleId {
krate: Crate(
Id(3000),
Id(2c00),
),
block: None,
local_id: Idx::<ModuleData>(2),
@ -989,14 +989,14 @@
def: Trait(
Trait {
id: TraitId(
5800,
5c00,
),
},
),
loc: DeclarationLocation {
hir_file_id: FileId(
EditionedFileId(
Id(2001),
Id(3001),
),
),
ptr: SyntaxNodePtr {
@ -1022,7 +1022,7 @@
Macro {
id: Macro2Id(
Macro2Id(
4800,
4c00,
),
),
},
@ -1030,7 +1030,7 @@
loc: DeclarationLocation {
hir_file_id: FileId(
EditionedFileId(
Id(2001),
Id(3001),
),
),
ptr: SyntaxNodePtr {
@ -1056,7 +1056,7 @@
Struct(
Struct {
id: StructId(
4404,
4804,
),
},
),
@ -1064,7 +1064,7 @@
loc: DeclarationLocation {
hir_file_id: FileId(
EditionedFileId(
Id(2001),
Id(3001),
),
),
ptr: SyntaxNodePtr {
@ -1090,7 +1090,7 @@
Macro {
id: Macro2Id(
Macro2Id(
4800,
4c00,
),
),
},
@ -1098,7 +1098,7 @@
loc: DeclarationLocation {
hir_file_id: FileId(
EditionedFileId(
Id(2001),
Id(3001),
),
),
ptr: SyntaxNodePtr {
@ -1124,7 +1124,7 @@
Struct(
Struct {
id: StructId(
4404,
4804,
),
},
),
@ -1132,7 +1132,7 @@
loc: DeclarationLocation {
hir_file_id: FileId(
EditionedFileId(
Id(2001),
Id(3001),
),
),
ptr: SyntaxNodePtr {

View file

@ -13,7 +13,7 @@
loc: DeclarationLocation {
hir_file_id: FileId(
EditionedFileId(
Id(2001),
Id(3001),
),
),
ptr: SyntaxNodePtr {

View file

@ -13,7 +13,7 @@
loc: DeclarationLocation {
hir_file_id: FileId(
EditionedFileId(
Id(2001),
Id(3001),
),
),
ptr: SyntaxNodePtr {
@ -47,7 +47,7 @@
loc: DeclarationLocation {
hir_file_id: FileId(
EditionedFileId(
Id(2000),
Id(3000),
),
),
ptr: SyntaxNodePtr {

View file

@ -114,8 +114,7 @@ fn assoc_item_of_trait(
#[cfg(test)]
mod tests {
use expect_test::{Expect, expect};
use hir::FilePosition;
use hir::Semantics;
use hir::{EditionedFileId, FilePosition, Semantics};
use span::Edition;
use syntax::ast::{self, AstNode};
use test_fixture::ChangeFixture;
@ -127,10 +126,11 @@ mod tests {
#[rust_analyzer::rust_fixture] ra_fixture: &str,
) -> (RootDatabase, FilePosition) {
let mut database = RootDatabase::default();
let change_fixture = ChangeFixture::parse(&database, ra_fixture);
let change_fixture = ChangeFixture::parse(ra_fixture);
database.apply_change(change_fixture.change);
let (file_id, range_or_offset) =
change_fixture.file_position.expect("expected a marker ($0)");
let file_id = EditionedFileId::from_span_guess_origin(&database, file_id);
let offset = range_or_offset.expect_offset();
(database, FilePosition { file_id, offset })
}

View file

@ -95,7 +95,7 @@ fn f() {
//^^^^^^^^^^^^^^^^^^^^^^^^ weak: code is inactive due to #[cfg] directives: no is disabled
#[cfg(no)] #[cfg(no2)] mod m;
//^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ weak: code is inactive due to #[cfg] directives: no and no2 are disabled
//^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ weak: code is inactive due to #[cfg] directives: no is disabled
#[cfg(all(not(a), b))] enum E {}
//^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ weak: code is inactive due to #[cfg] directives: b is disabled
@ -130,7 +130,6 @@ trait Bar {
/// Tests that `cfg` attributes behind `cfg_attr` is handled properly.
#[test]
fn inactive_via_cfg_attr() {
cov_mark::check!(cfg_attr_active);
check(
r#"
#[cfg_attr(not(never), cfg(no))] fn f() {}

View file

@ -8,7 +8,7 @@ pub(crate) fn invalid_derive_target(
ctx: &DiagnosticsContext<'_>,
d: &hir::InvalidDeriveTarget,
) -> Diagnostic {
let display_range = ctx.sema.diagnostics_display_range(d.node);
let display_range = ctx.sema.diagnostics_display_range_for_range(d.range);
Diagnostic::new(
DiagnosticCode::RustcHardError("E0774"),
@ -29,7 +29,7 @@ mod tests {
//- minicore:derive
mod __ {
#[derive()]
//^^^^^^^^^^^ error: `derive` may only be applied to `struct`s, `enum`s and `union`s
// ^^^^^^ error: `derive` may only be applied to `struct`s, `enum`s and `union`s
fn main() {}
}
"#,

Some files were not shown because too many files have changed in this diff Show more