Merge from rustc

The Miri Conjob Bot 2023-12-14 05:02:50 +00:00
commit 7d4f92a309
664 changed files with 14806 additions and 8430 deletions

@ -1 +1 @@
Subproject commit 9787229614b27854cf73d57ffae430d7c1e6caa4
Subproject commit 1aa9df1a5be205cce621f0bc0ea6062a5e22a98c

View file

@ -62,7 +62,7 @@ impl LateLintPass<'_> for AbsolutePaths {
} = self;
if !path.span.from_expansion()
&& let Some(node) = cx.tcx.hir().find(hir_id)
&& let Some(node) = cx.tcx.opt_hir_node(hir_id)
&& !matches!(node, Node::Item(item) if matches!(item.kind, ItemKind::Use(_, _)))
&& let [first, rest @ ..] = path.segments
// Handle `::std`

View file
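This hunk is the first instance of a HIR lookup rename on the rustc side that repeats through most of the clippy files below. As a comment-only orientation sketch (these are compiler-internal `TyCtxt` methods, not runnable outside a rustc build; the mapping is taken directly from the paired lines in these hunks):

```rust
// Old HIR-map lookups                    -> new TyCtxt lookups
// cx.tcx.hir().find(hir_id)             -> cx.tcx.opt_hir_node(hir_id)           // Option<Node<'_>>
// cx.tcx.hir().get(hir_id)              -> cx.tcx.hir_node(hir_id)               // panics if absent
// cx.tcx.hir().find_by_def_id(def_id)   -> cx.tcx.opt_hir_node_by_def_id(def_id) // keyed by LocalDefId
// cx.tcx.hir().get_by_def_id(def_id)    -> cx.tcx.hir_node_by_def_id(def_id)
```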

@ -69,7 +69,7 @@ pub(super) fn check<'tcx>(cx: &LateContext<'tcx>, expr: &Expr<'tcx>, msrv: &Msrv
fn is_child_of_cast(cx: &LateContext<'_>, expr: &Expr<'_>) -> bool {
let map = cx.tcx.hir();
if let Some(parent_id) = map.opt_parent_id(expr.hir_id)
&& let Some(parent) = map.find(parent_id)
&& let Some(parent) = cx.tcx.opt_hir_node(parent_id)
{
let expr = match parent {
Node::Block(block) => {

View file

@ -1090,7 +1090,7 @@ fn report<'tcx>(
if parent_id == data.first_expr.hir_id {
return;
}
(cx.tcx.hir().get(parent_id).expect_expr().span, true)
(cx.tcx.hir_node(parent_id).expect_expr().span, true)
} else {
(expr.span, false)
};

View file

@ -195,7 +195,7 @@ impl<'tcx> LateLintPass<'tcx> for DerivableImpls {
&& let Some(def_id) = trait_ref.trait_def_id()
&& cx.tcx.is_diagnostic_item(sym::Default, def_id)
&& let impl_item_hir = child.id.hir_id()
&& let Some(Node::ImplItem(impl_item)) = cx.tcx.hir().find(impl_item_hir)
&& let Some(Node::ImplItem(impl_item)) = cx.tcx.opt_hir_node(impl_item_hir)
&& let ImplItemKind::Fn(_, b) = &impl_item.kind
&& let Body { value: func_expr, .. } = cx.tcx.hir().body(*b)
&& let &Adt(adt_def, args) = cx.tcx.type_of(item.owner_id).instantiate_identity().kind()

View file

@ -42,7 +42,7 @@ impl LateLintPass<'_> for EmptyDrop {
}) = item.kind
&& trait_ref.trait_def_id() == cx.tcx.lang_items().drop_trait()
&& let impl_item_hir = child.id.hir_id()
&& let Some(Node::ImplItem(impl_item)) = cx.tcx.hir().find(impl_item_hir)
&& let Some(Node::ImplItem(impl_item)) = cx.tcx.opt_hir_node(impl_item_hir)
&& let ImplItemKind::Fn(_, b) = &impl_item.kind
&& let Body { value: func_expr, .. } = cx.tcx.hir().body(*b)
&& let func_expr = peel_blocks(func_expr)

View file

@ -5,7 +5,7 @@ use rustc_infer::infer::TyCtxtInferExt;
use rustc_lint::{LateContext, LateLintPass};
use rustc_middle::mir::FakeReadCause;
use rustc_middle::ty::layout::LayoutOf;
use rustc_middle::ty::{self, TraitRef, Ty};
use rustc_middle::ty::{self, TraitRef, Ty, TyCtxt};
use rustc_session::impl_lint_pass;
use rustc_span::def_id::LocalDefId;
use rustc_span::symbol::kw;
@ -76,7 +76,7 @@ impl<'tcx> LateLintPass<'tcx> for BoxedLocal {
.hir()
.get_parent_item(cx.tcx.local_def_id_to_hir_id(fn_def_id))
.def_id;
let parent_node = cx.tcx.hir().find_by_def_id(parent_id);
let parent_node = cx.tcx.opt_hir_node_by_def_id(parent_id);
let mut trait_self_ty = None;
if let Some(Node::Item(item)) = parent_node {
@ -122,8 +122,8 @@ impl<'tcx> LateLintPass<'tcx> for BoxedLocal {
}
// TODO: Replace with Map::is_argument(..) when it's fixed
fn is_argument(map: rustc_middle::hir::map::Map<'_>, id: HirId) -> bool {
match map.find(id) {
fn is_argument(tcx: TyCtxt<'_>, id: HirId) -> bool {
match tcx.opt_hir_node(id) {
Some(Node::Pat(Pat {
kind: PatKind::Binding(..),
..
@ -131,7 +131,7 @@ fn is_argument(map: rustc_middle::hir::map::Map<'_>, id: HirId) -> bool {
_ => return false,
}
matches!(map.find_parent(id), Some(Node::Param(_)))
matches!(tcx.hir().find_parent(id), Some(Node::Param(_)))
}
impl<'a, 'tcx> Delegate<'tcx> for EscapeDelegate<'a, 'tcx> {
@ -154,7 +154,7 @@ impl<'a, 'tcx> Delegate<'tcx> for EscapeDelegate<'a, 'tcx> {
fn mutate(&mut self, cmt: &PlaceWithHirId<'tcx>, _: HirId) {
if cmt.place.projections.is_empty() {
let map = &self.cx.tcx.hir();
if is_argument(*map, cmt.hir_id) {
if is_argument(self.cx.tcx, cmt.hir_id) {
// Skip closure arguments
let parent_id = map.parent_id(cmt.hir_id);
if let Some(Node::Expr(..)) = map.find_parent(parent_id) {

View file

@ -46,7 +46,7 @@ impl<'tcx> LateLintPass<'tcx> for Exit {
&& let Some(def_id) = cx.qpath_res(path, path_expr.hir_id).opt_def_id()
&& cx.tcx.is_diagnostic_item(sym::process_exit, def_id)
&& let parent = cx.tcx.hir().get_parent_item(e.hir_id).def_id
&& let Some(Node::Item(Item{kind: ItemKind::Fn(..), ..})) = cx.tcx.hir().find_by_def_id(parent)
&& let Some(Node::Item(Item{kind: ItemKind::Fn(..), ..})) = cx.tcx.opt_hir_node_by_def_id(parent)
// If the next item up is a function we check if it is an entry point
// and only then emit a linter warning
&& !is_entrypoint_fn(cx, parent.to_def_id())

View file

@ -111,7 +111,7 @@ fn look_in_block<'tcx, 'hir>(cx: &LateContext<'tcx>, kind: &'tcx ExprKind<'hir>)
// Find id of the local that expr_end_of_block resolves to
&& let ExprKind::Path(QPath::Resolved(None, expr_path)) = expr_end_of_block.kind
&& let Res::Local(expr_res) = expr_path.res
&& let Some(Node::Pat(res_pat)) = cx.tcx.hir().find(expr_res)
&& let Some(Node::Pat(res_pat)) = cx.tcx.opt_hir_node(expr_res)
// Find id of the local we found in the block
&& let PatKind::Binding(BindingAnnotation::NONE, local_hir_id, _ident, None) = local.pat.kind

View file

@ -92,7 +92,7 @@ fn check_result_large_err<'tcx>(cx: &LateContext<'tcx>, err_ty: Ty<'tcx>, hir_ty
.expect("already checked this is adt")
.did()
.as_local()
&& let Some(hir::Node::Item(item)) = cx.tcx.hir().find_by_def_id(local_def_id)
&& let Some(hir::Node::Item(item)) = cx.tcx.opt_hir_node_by_def_id(local_def_id)
&& let hir::ItemKind::Enum(ref def, _) = item.kind
{
let variants_size = AdtVariantInfo::new(cx, *adt, subst);

View file

@ -248,7 +248,7 @@ impl<'a, 'tcx> Visitor<'tcx> for SliceIndexLintingVisitor<'a, 'tcx> {
// Checking for slice indexing
&& let parent_id = map.parent_id(expr.hir_id)
&& let Some(hir::Node::Expr(parent_expr)) = map.find(parent_id)
&& let Some(hir::Node::Expr(parent_expr)) = cx.tcx.opt_hir_node(parent_id)
&& let hir::ExprKind::Index(_, index_expr, _) = parent_expr.kind
&& let Some(Constant::Int(index_value)) = constant(cx, cx.typeck_results(), index_expr)
&& let Ok(index_value) = index_value.try_into()
@ -256,7 +256,7 @@ impl<'a, 'tcx> Visitor<'tcx> for SliceIndexLintingVisitor<'a, 'tcx> {
// Make sure that this slice index is read only
&& let maybe_addrof_id = map.parent_id(parent_id)
&& let Some(hir::Node::Expr(maybe_addrof_expr)) = map.find(maybe_addrof_id)
&& let Some(hir::Node::Expr(maybe_addrof_expr)) = cx.tcx.opt_hir_node(maybe_addrof_id)
&& let hir::ExprKind::AddrOf(_kind, hir::Mutability::Not, _inner_expr) = maybe_addrof_expr.kind
{
use_info.index_use.push((index_value, map.span(parent_expr.hir_id)));

View file

@ -122,7 +122,7 @@ fn get_impl_span(cx: &LateContext<'_>, id: LocalDefId) -> Option<Span> {
kind: ItemKind::Impl(impl_item),
span,
..
}) = cx.tcx.hir().get(id)
}) = cx.tcx.hir_node(id)
{
(!span.from_expansion()
&& impl_item.generics.params.is_empty()

View file

@ -147,7 +147,7 @@ impl<'tcx> LateLintPass<'tcx> for LenZero {
&& let Some(output) =
parse_len_output(cx, cx.tcx.fn_sig(item.owner_id).instantiate_identity().skip_binder())
{
let (name, kind) = match cx.tcx.hir().find(ty_hir_id) {
let (name, kind) = match cx.tcx.opt_hir_node(ty_hir_id) {
Some(Node::ForeignItem(x)) => (x.ident.name, "extern type"),
Some(Node::Item(x)) => match x.kind {
ItemKind::Struct(..) => (x.ident.name, "struct"),

View file

@ -195,7 +195,7 @@ fn check_fn_inner<'tcx>(
.iter()
// In principle, the result of the call to `Node::ident` could be `unwrap`ped, as `DefId` should refer to a
// `Node::GenericParam`.
.filter_map(|&def_id| cx.tcx.hir().get_by_def_id(def_id).ident())
.filter_map(|&def_id| cx.tcx.hir_node_by_def_id(def_id).ident())
.map(|ident| ident.to_string())
.collect::<Vec<_>>()
.join(", ");

View file

@ -40,7 +40,7 @@ fn mut_warn_with_span(cx: &LateContext<'_>, span: Option<Span>) {
fn check_for_mutability(cx: &LateContext<'_>, bound: &Expr<'_>) -> Option<HirId> {
if let Some(hir_id) = path_to_local(bound)
&& let Node::Pat(pat) = cx.tcx.hir().get(hir_id)
&& let Node::Pat(pat) = cx.tcx.hir_node(hir_id)
&& let PatKind::Binding(BindingAnnotation::MUT, ..) = pat.kind
{
return Some(hir_id);

View file

@ -58,12 +58,12 @@ pub(super) fn check<'tcx>(
match cx.qpath_res(qpath, pushed_item.hir_id) {
// immutable bindings that are initialized with literal or constant
Res::Local(hir_id) => {
let node = cx.tcx.hir().get(hir_id);
let node = cx.tcx.hir_node(hir_id);
if let Node::Pat(pat) = node
&& let PatKind::Binding(bind_ann, ..) = pat.kind
&& !matches!(bind_ann, BindingAnnotation(_, Mutability::Mut))
&& let parent_node = cx.tcx.hir().parent_id(hir_id)
&& let Some(Node::Local(parent_let_expr)) = cx.tcx.hir().find(parent_node)
&& let Some(Node::Local(parent_let_expr)) = cx.tcx.opt_hir_node(parent_node)
&& let Some(init) = parent_let_expr.init
{
match init.kind {

View file

@ -58,7 +58,7 @@ impl<'tcx> LateLintPass<'tcx> for ManualAsyncFn {
&& block.stmts.is_empty()
&& let Some(closure_body) = desugared_async_block(cx, block)
&& let Node::Item(Item {vis_span, ..}) | Node::ImplItem(ImplItem {vis_span, ..}) =
cx.tcx.hir().get_by_def_id(def_id)
cx.tcx.hir_node_by_def_id(def_id)
{
let header_span = span.with_hi(ret_ty.span.hi());

View file

@ -76,7 +76,7 @@ impl<'tcx> LateLintPass<'tcx> for ManualRemEuclid {
// Also ensures the const is nonzero since zero can't be a divisor
&& const1 == const2 && const2 == const3
&& let Some(hir_id) = path_to_local(expr3)
&& let Some(Node::Pat(_)) = cx.tcx.hir().find(hir_id)
&& let Some(Node::Pat(_)) = cx.tcx.opt_hir_node(hir_id)
{
// Apply only to params or locals with annotated types
match cx.tcx.hir().find_parent(hir_id) {

View file

@ -44,7 +44,7 @@ pub(super) fn check<'tcx>(
// add note if not multi-line
span_lint_and_then(cx, FILTER_NEXT, expr.span, msg, |diag| {
let (applicability, pat) = if let Some(id) = path_to_local(recv)
&& let Some(hir::Node::Pat(pat)) = cx.tcx.hir().find(id)
&& let Some(hir::Node::Pat(pat)) = cx.tcx.opt_hir_node(id)
&& let hir::PatKind::Binding(BindingAnnotation(_, Mutability::Not), _, ident, _) = pat.kind
{
(Applicability::Unspecified, Some((pat.span, ident)))

View file

@ -20,7 +20,7 @@ pub(super) fn check(cx: &LateContext<'_>, expr: &hir::Expr<'_>, recv: &hir::Expr
"called `skip(..).next()` on an iterator",
|diag| {
if let Some(id) = path_to_local(recv)
&& let Node::Pat(pat) = cx.tcx.hir().get(id)
&& let Node::Pat(pat) = cx.tcx.hir_node(id)
&& let PatKind::Binding(ann, _, _, _) = pat.kind
&& ann != BindingAnnotation::MUT
{

View file

@ -135,7 +135,7 @@ impl<'a, 'tcx> Visitor<'tcx> for UnwrapVisitor<'a, 'tcx> {
fn visit_path(&mut self, path: &Path<'tcx>, _: HirId) {
if let Res::Local(local_id) = path.res
&& let Some(Node::Pat(pat)) = self.cx.tcx.hir().find(local_id)
&& let Some(Node::Pat(pat)) = self.cx.tcx.opt_hir_node(local_id)
&& let PatKind::Binding(_, local_id, ..) = pat.kind
{
self.identifiers.insert(local_id);
@ -166,7 +166,7 @@ impl<'a, 'tcx> Visitor<'tcx> for ReferenceVisitor<'a, 'tcx> {
&& let ExprKind::Path(ref path) = expr.kind
&& let QPath::Resolved(_, path) = path
&& let Res::Local(local_id) = path.res
&& let Some(Node::Pat(pat)) = self.cx.tcx.hir().find(local_id)
&& let Some(Node::Pat(pat)) = self.cx.tcx.opt_hir_node(local_id)
&& let PatKind::Binding(_, local_id, ..) = pat.kind
&& self.identifiers.contains(&local_id)
{

View file

@ -91,7 +91,7 @@ impl Visitor<'_> for IdentVisitor<'_, '_> {
let node = if hir_id.local_id == ItemLocalId::from_u32(0) {
// In this case, we can just use `find`, `Owner`'s `node` field is private anyway so we can't
// reimplement it even if we wanted to
cx.tcx.hir().find(hir_id)
cx.tcx.opt_hir_node(hir_id)
} else {
let Some(owner) = cx.tcx.hir_owner_nodes(hir_id.owner).as_owner() else {
return;

View file

@ -137,7 +137,7 @@ impl<'tcx> LateLintPass<'tcx> for MissingConstForFn {
{
let parent = cx.tcx.hir().get_parent_item(hir_id).def_id;
if parent != CRATE_DEF_ID {
if let hir::Node::Item(item) = cx.tcx.hir().get_by_def_id(parent) {
if let hir::Node::Item(item) = cx.tcx.hir_node_by_def_id(parent) {
if let hir::ItemKind::Trait(..) = &item.kind {
return;
}

View file

@ -220,7 +220,7 @@ impl<'tcx> LateLintPass<'tcx> for MissingFieldsInDebug {
&& let self_ty = cx.tcx.type_of(self_path_did).skip_binder().peel_refs()
&& let Some(self_adt) = self_ty.ty_adt_def()
&& let Some(self_def_id) = self_adt.did().as_local()
&& let Some(Node::Item(self_item)) = cx.tcx.hir().find_by_def_id(self_def_id)
&& let Some(Node::Item(self_item)) = cx.tcx.opt_hir_node_by_def_id(self_def_id)
// NB: can't call cx.typeck_results() as we are not in a body
&& let typeck_results = cx.tcx.typeck_body(*body_id)
&& should_lint(cx, typeck_results, block)

View file

@ -213,7 +213,7 @@ fn check_for_unsequenced_reads(vis: &mut ReadVisitor<'_, '_>) {
if parent_id == cur_id {
break;
}
let Some(parent_node) = map.find(parent_id) else { break };
let Some(parent_node) = vis.cx.tcx.opt_hir_node(parent_id) else { break };
let stop_early = match parent_node {
Node::Expr(expr) => check_expr(vis, expr),

View file

@ -113,8 +113,9 @@ fn check_closures<'tcx>(
}
ctx.prev_bind = None;
ctx.prev_move_to_closure.clear();
if let Some(body) = hir
.find_by_def_id(closure)
if let Some(body) = cx
.tcx
.opt_hir_node_by_def_id(closure)
.and_then(associated_body)
.map(|(_, body_id)| hir.body(body_id))
{
@ -412,7 +413,7 @@ impl<'tcx> euv::Delegate<'tcx> for MutablyUsedVariablesCtxt<'tcx> {
],
),
..
}) = self.tcx.hir().get(cmt.hir_id)
}) = self.tcx.hir_node(cmt.hir_id)
{
self.async_closures.insert(*def_id);
}
@ -521,7 +522,7 @@ impl<'tcx> Visitor<'tcx> for FnNeedsMutVisitor<'_, 'tcx> {
let Self { cx, used_fn_def_ids } = self;
// #11182; do not lint if mutability is required elsewhere
if let Node::Expr(expr) = cx.tcx.hir().get(hir_id)
if let Node::Expr(expr) = cx.tcx.hir_node(hir_id)
&& let Some(parent) = get_parent_node(cx.tcx, expr.hir_id)
&& let ty::FnDef(def_id, _) = cx
.tcx

View file

@ -454,7 +454,7 @@ impl<'tcx> LateLintPass<'tcx> for NonCopyConst {
if parent_id == cur_expr.hir_id {
break;
}
if let Some(Node::Expr(parent_expr)) = cx.tcx.hir().find(parent_id) {
if let Some(Node::Expr(parent_expr)) = cx.tcx.opt_hir_node(parent_id) {
match &parent_expr.kind {
ExprKind::AddrOf(..) => {
// `&e` => `e` must be referenced.

View file

@ -94,7 +94,6 @@ impl<'tcx> LateLintPass<'tcx> for NonSendFieldInSendTy {
{
let mut non_send_fields = Vec::new();
let hir_map = cx.tcx.hir();
for variant in adt_def.variants() {
for field in &variant.fields {
if let Some(field_hir_id) = field
@ -104,7 +103,7 @@ impl<'tcx> LateLintPass<'tcx> for NonSendFieldInSendTy {
&& !is_lint_allowed(cx, NON_SEND_FIELDS_IN_SEND_TY, field_hir_id)
&& let field_ty = field.ty(cx.tcx, impl_trait_args)
&& !ty_allowed_in_send(cx, field_ty, send_trait)
&& let Node::Field(field_def) = hir_map.get(field_hir_id)
&& let Node::Field(field_def) = cx.tcx.hir_node(field_hir_id)
{
non_send_fields.push(NonSendField {
def: field_def,

View file

@ -59,7 +59,7 @@ impl<'tcx> LateLintPass<'tcx> for RedundantLocals {
&& last_segment.ident == ident
// resolve the path to its defining binding pattern
&& let Res::Local(binding_id) = cx.qpath_res(&qpath, expr.hir_id)
&& let Node::Pat(binding_pat) = cx.tcx.hir().get(binding_id)
&& let Node::Pat(binding_pat) = cx.tcx.hir_node(binding_id)
// the previous binding has the same mutability
&& find_binding(binding_pat, ident).is_some_and(|bind| bind.1 == mutability)
// the local does not change the effect of assignments to the binding. see #11290

View file

@ -77,7 +77,7 @@ impl<'tcx> LateLintPass<'tcx> for SameNameMethod {
Some(trait_ref) => {
let mut methods_in_trait: BTreeSet<Symbol> = if let Some(Node::TraitRef(TraitRef {
path, ..
})) = cx.tcx.hir().find(trait_ref.hir_ref_id)
})) = cx.tcx.opt_hir_node(trait_ref.hir_ref_id)
&& let Res::Def(DefKind::Trait, did) = path.res
{
// FIXME: if

View file

@ -73,7 +73,7 @@ impl<'tcx> LateLintPass<'tcx> for SelfNamedConstructors {
if let Some(self_def) = self_ty.ty_adt_def()
&& let Some(self_local_did) = self_def.did().as_local()
&& let self_id = cx.tcx.local_def_id_to_hir_id(self_local_did)
&& let Some(Node::Item(x)) = cx.tcx.hir().find(self_id)
&& let Some(Node::Item(x)) = cx.tcx.opt_hir_node(self_id)
&& let type_name = x.ident.name.as_str().to_lowercase()
&& (impl_item.ident.name.as_str() == type_name
|| impl_item.ident.name.as_str().replace('_', "") == type_name)

View file

@ -64,7 +64,7 @@ impl<'tcx> LateLintPass<'tcx> for SuspiciousImpl {
// Check for more than one binary operation in the implemented function
// Linting when multiple operations are involved can result in false positives
&& let parent_fn = cx.tcx.hir().get_parent_item(expr.hir_id).def_id
&& let hir::Node::ImplItem(impl_item) = cx.tcx.hir().get_by_def_id(parent_fn)
&& let hir::Node::ImplItem(impl_item) = cx.tcx.hir_node_by_def_id(parent_fn)
&& let hir::ImplItemKind::Fn(_, body_id) = impl_item.kind
&& let body = cx.tcx.hir().body(body_id)
&& let parent_fn = cx.tcx.hir().get_parent_item(expr.hir_id).def_id

View file

@ -321,7 +321,7 @@ impl<'tcx> LateLintPass<'tcx> for Types {
_: Span,
def_id: LocalDefId,
) {
let is_in_trait_impl = if let Some(hir::Node::Item(item)) = cx.tcx.hir().find_by_def_id(
let is_in_trait_impl = if let Some(hir::Node::Item(item)) = cx.tcx.opt_hir_node_by_def_id(
cx.tcx
.hir()
.get_parent_item(cx.tcx.local_def_id_to_hir_id(def_id))
@ -368,8 +368,7 @@ impl<'tcx> LateLintPass<'tcx> for Types {
ImplItemKind::Const(ty, _) => {
let is_in_trait_impl = if let Some(hir::Node::Item(item)) = cx
.tcx
.hir()
.find_by_def_id(cx.tcx.hir().get_parent_item(item.hir_id()).def_id)
.opt_hir_node_by_def_id(cx.tcx.hir().get_parent_item(item.hir_id()).def_id)
{
matches!(item.kind, ItemKind::Impl(hir::Impl { of_trait: Some(_), .. }))
} else {

View file

@ -82,7 +82,7 @@ impl LateLintPass<'_> for UnnecessaryStruct {
fn is_mutable(cx: &LateContext<'_>, expr: &Expr<'_>) -> bool {
if let Some(hir_id) = path_to_local(expr)
&& let Node::Pat(pat) = cx.tcx.hir().get(hir_id)
&& let Node::Pat(pat) = cx.tcx.hir_node(hir_id)
{
matches!(pat.kind, PatKind::Binding(BindingAnnotation::MUT, ..))
} else {

View file

@ -281,7 +281,7 @@ impl<'tcx> LateLintPass<'tcx> for UselessConversion {
}
if let Some(id) = path_to_local(recv)
&& let Node::Pat(pat) = cx.tcx.hir().get(id)
&& let Node::Pat(pat) = cx.tcx.hir_node(id)
&& let PatKind::Binding(ann, ..) = pat.kind
&& ann != BindingAnnotation::MUT
{

View file

@ -218,7 +218,7 @@ fn path_to_matched_type(cx: &LateContext<'_>, expr: &hir::Expr<'_>) -> Option<Ve
ExprKind::Path(ref qpath) => match cx.qpath_res(qpath, expr.hir_id) {
Res::Local(hir_id) => {
let parent_id = cx.tcx.hir().parent_id(hir_id);
if let Some(Node::Local(Local { init: Some(init), .. })) = cx.tcx.hir().find(parent_id) {
if let Some(Node::Local(Local { init: Some(init), .. })) = cx.tcx.hir_node(parent_id) {
path_to_matched_type(cx, init)
} else {
None

View file

@ -74,7 +74,7 @@ impl LateLintPass<'_> for ZeroSizedMapValues {
fn in_trait_impl(cx: &LateContext<'_>, hir_id: HirId) -> bool {
let parent_id = cx.tcx.hir().get_parent_item(hir_id);
let second_parent_id = cx.tcx.hir().get_parent_item(parent_id.into()).def_id;
if let Some(Node::Item(item)) = cx.tcx.hir().find_by_def_id(second_parent_id) {
if let Some(Node::Item(item)) = cx.tcx.opt_hir_node_by_def_id(second_parent_id) {
if let ItemKind::Impl(hir::Impl { of_trait: Some(_), .. }) = item.kind {
return true;
}

View file

@ -269,7 +269,7 @@ fn fn_kind_pat(tcx: TyCtxt<'_>, kind: &FnKind<'_>, body: &Body<'_>, hir_id: HirI
FnKind::Method(.., sig) => (fn_header_search_pat(sig.header), Pat::Str("")),
FnKind::Closure => return (Pat::Str(""), expr_search_pat(tcx, body.value).1),
};
let start_pat = match tcx.hir().get(hir_id) {
let start_pat = match tcx.hir_node(hir_id) {
Node::Item(Item { vis_span, .. }) | Node::ImplItem(ImplItem { vis_span, .. }) => {
if vis_span.is_empty() {
start_pat

View file

@ -531,7 +531,7 @@ impl<'a, 'tcx> ConstEvalLateContext<'a, 'tcx> {
kind: ExprKind::Lit(_),
span,
..
}) = self.lcx.tcx.hir().get(body_id.hir_id)
}) = self.lcx.tcx.hir_node(body_id.hir_id)
&& is_direct_expn_of(*span, "cfg").is_some()
{
return None;

View file

@ -176,10 +176,10 @@ pub fn expr_or_init<'a, 'b, 'tcx: 'b>(cx: &LateContext<'tcx>, mut expr: &'a Expr
/// canonical binding `HirId`.
pub fn find_binding_init<'tcx>(cx: &LateContext<'tcx>, hir_id: HirId) -> Option<&'tcx Expr<'tcx>> {
let hir = cx.tcx.hir();
if let Some(Node::Pat(pat)) = hir.find(hir_id)
if let Some(Node::Pat(pat)) = cx.tcx.opt_hir_node(hir_id)
&& matches!(pat.kind, PatKind::Binding(BindingAnnotation::NONE, ..))
&& let parent = hir.parent_id(hir_id)
&& let Some(Node::Local(local)) = hir.find(parent)
&& let Some(Node::Local(local)) = cx.tcx.opt_hir_node(parent)
{
return local.init;
}
@ -563,7 +563,7 @@ fn local_item_children_by_name(tcx: TyCtxt<'_>, local_id: LocalDefId, name: Symb
let hir = tcx.hir();
let root_mod;
let item_kind = match hir.find_by_def_id(local_id) {
let item_kind = match tcx.opt_hir_node_by_def_id(local_id) {
Some(Node::Crate(r#mod)) => {
root_mod = ItemKind::Mod(r#mod);
&root_mod
@ -712,7 +712,7 @@ pub fn trait_ref_of_method<'tcx>(cx: &LateContext<'tcx>, def_id: LocalDefId) ->
let hir_id = cx.tcx.local_def_id_to_hir_id(def_id);
let parent_impl = cx.tcx.hir().get_parent_item(hir_id);
if parent_impl != hir::CRATE_OWNER_ID
&& let hir::Node::Item(item) = cx.tcx.hir().get_by_def_id(parent_impl.def_id)
&& let hir::Node::Item(item) = cx.tcx.hir_node_by_def_id(parent_impl.def_id)
&& let hir::ItemKind::Impl(impl_) = &item.kind
{
return impl_.of_trait.as_ref();
@ -1242,7 +1242,7 @@ pub fn is_in_panic_handler(cx: &LateContext<'_>, e: &Expr<'_>) -> bool {
/// Gets the name of the item the expression is in, if available.
pub fn get_item_name(cx: &LateContext<'_>, expr: &Expr<'_>) -> Option<Symbol> {
let parent_id = cx.tcx.hir().get_parent_item(expr.hir_id).def_id;
match cx.tcx.hir().find_by_def_id(parent_id) {
match cx.tcx.opt_hir_node_by_def_id(parent_id) {
Some(
Node::Item(Item { ident, .. })
| Node::TraitItem(TraitItem { ident, .. })
@ -1319,7 +1319,7 @@ pub fn get_enclosing_block<'tcx>(cx: &LateContext<'tcx>, hir_id: HirId) -> Optio
let map = &cx.tcx.hir();
let enclosing_node = map
.get_enclosing_scope(hir_id)
.and_then(|enclosing_id| map.find(enclosing_id));
.and_then(|enclosing_id| cx.tcx.opt_hir_node(enclosing_id));
enclosing_node.and_then(|node| match node {
Node::Block(block) => Some(block),
Node::Item(&Item {
@ -2691,7 +2691,7 @@ impl<'tcx> ExprUseNode<'tcx> {
if let Some(Node::Expr(Expr {
kind: ExprKind::Closure(c),
..
})) = cx.tcx.hir().find(hir_id)
})) = cx.tcx.opt_hir_node(hir_id)
{
match c.fn_decl.output {
FnRetTy::DefaultReturn(_) => None,
@ -2757,7 +2757,7 @@ pub fn expr_use_ctxt<'tcx>(cx: &LateContext<'tcx>, e: &'tcx Expr<'tcx>) -> Optio
walk_to_expr_usage(cx, e, &mut |parent, child_id| {
// LocalTableInContext returns the wrong lifetime, so go use `expr_adjustments` instead.
if adjustments.is_empty()
&& let Node::Expr(e) = cx.tcx.hir().get(child_id)
&& let Node::Expr(e) = cx.tcx.hir_node(child_id)
{
adjustments = cx.typeck_results().expr_adjustments(e);
}

View file

@ -6,6 +6,16 @@ use crate::licenses::LicensesInterner;
use anyhow::Error;
use std::path::PathBuf;
// Some directories have too many slight license differences that'd result in a
// huge report, and could be considered a standalone project anyway. Those
// directories are "condensed" into a single licensing block for ease of
// reading, merging the licensing information.
//
// For every `(dir, file)`, every file in `dir` is considered to have the
// license info of `file`.
const CONDENSED_DIRECTORIES: &[(&str, &str)] =
&[("./src/llvm-project/", "./src/llvm-project/README.md")];
fn main() -> Result<(), Error> {
let reuse_exe: PathBuf = std::env::var_os("REUSE_EXE").expect("Missing REUSE_EXE").into();
let dest: PathBuf = std::env::var_os("DEST").expect("Missing DEST").into();

View file
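A standalone sketch (not part of this commit) of the `(dir, file)` rule the comment above describes: every file under a condensed directory inherits its licensing information from one representative file. `CONDENSED_DIRECTORIES` mirrors the constant in the diff; the helper name `condensed_lookup` is illustrative only.

```rust
use std::path::Path;

const CONDENSED_DIRECTORIES: &[(&str, &str)] =
    &[("./src/llvm-project/", "./src/llvm-project/README.md")];

/// Returns `Some((dir, is_representative))` when `path` falls inside a
/// condensed directory; `is_representative` is true only for the one file
/// whose license stands in for the whole directory.
fn condensed_lookup(path: &Path) -> Option<(&'static str, bool)> {
    CONDENSED_DIRECTORIES.iter().find_map(|&(dir, file)| {
        path.starts_with(dir).then(|| (dir, path == Path::new(file)))
    })
}

fn main() {
    // The representative file carries the directory's license info.
    assert_eq!(
        condensed_lookup(Path::new("./src/llvm-project/README.md")),
        Some(("./src/llvm-project/", true))
    );
    // Every other file in the directory is skipped by the collector.
    assert_eq!(
        condensed_lookup(Path::new("./src/llvm-project/llvm/LICENSE.TXT")),
        Some(("./src/llvm-project/", false))
    );
    // Paths outside condensed directories are handled normally.
    assert_eq!(condensed_lookup(Path::new("./compiler/rustc/main.rs")), None);
}
```

The tool itself applies the same rule inside its tree-building loop, collecting the representative file's license per directory into a `BTreeSet` (see the later hunks).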

@ -4,7 +4,7 @@
//! passes over the tree to remove redundant information.
use crate::licenses::{License, LicenseId, LicensesInterner};
use std::collections::BTreeMap;
use std::collections::{BTreeMap, BTreeSet};
use std::path::{Path, PathBuf};
#[derive(serde::Serialize)]
@ -12,6 +12,7 @@ use std::path::{Path, PathBuf};
pub(crate) enum Node<L> {
Root { children: Vec<Node<L>> },
Directory { name: PathBuf, children: Vec<Node<L>>, license: Option<L> },
CondensedDirectory { name: PathBuf, licenses: Vec<L> },
File { name: PathBuf, license: L },
Group { files: Vec<PathBuf>, directories: Vec<PathBuf>, license: L },
Empty,
@ -57,9 +58,9 @@ impl Node<LicenseId> {
Node::Directory { name, mut children, license: None } => {
directories.entry(name).or_insert_with(Vec::new).append(&mut children);
}
file @ Node::File { .. } => {
files.push(file);
}
file @ Node::File { .. } => files.push(file),
// Propagate condensed directories as-is.
condensed @ Node::CondensedDirectory { .. } => files.push(condensed),
Node::Empty => {}
Node::Root { .. } => {
panic!("can't have a root inside another element");
@ -86,6 +87,7 @@ impl Node<LicenseId> {
}
Node::Empty => {}
Node::File { .. } => {}
Node::CondensedDirectory { .. } => {}
Node::Group { .. } => {
panic!("Group should not be present at this stage");
}
@ -132,6 +134,7 @@ impl Node<LicenseId> {
}
}
Node::File { .. } => {}
Node::CondensedDirectory { .. } => {}
Node::Group { .. } => panic!("group should not be present at this stage"),
Node::Empty => {}
}
@ -174,6 +177,9 @@ impl Node<LicenseId> {
Node::Directory { name: child_child_name, .. } => {
*child_child_name = child_name.join(&child_child_name);
}
Node::CondensedDirectory { name: child_child_name, .. } => {
*child_child_name = child_name.join(&child_child_name);
}
Node::File { name: child_child_name, .. } => {
*child_child_name = child_name.join(&child_child_name);
}
@ -188,6 +194,7 @@ impl Node<LicenseId> {
}
Node::Empty => {}
Node::File { .. } => {}
Node::CondensedDirectory { .. } => {}
Node::Group { .. } => panic!("Group should not be present at this stage"),
}
}
@ -255,6 +262,7 @@ impl Node<LicenseId> {
}
}
Node::File { .. } => {}
Node::CondensedDirectory { .. } => {}
Node::Group { .. } => panic!("FileGroup should not be present at this stage"),
Node::Empty => {}
}
@ -270,6 +278,7 @@ impl Node<LicenseId> {
}
children.retain(|child| !matches!(child, Node::Empty));
}
Node::CondensedDirectory { .. } => {}
Node::Group { .. } => {}
Node::File { .. } => {}
Node::Empty => {}
@ -293,7 +302,24 @@ pub(crate) fn build(mut input: Vec<(PathBuf, LicenseId)>) -> Node<LicenseId> {
// Ensure reproducibility of all future steps.
input.sort();
for (path, license) in input {
let mut condensed_directories = BTreeMap::new();
'outer: for (path, license) in input {
// Files in condensed directories are handled separately.
for (condensed_directory, allowed_file) in super::CONDENSED_DIRECTORIES {
if path.starts_with(condensed_directory) {
if path.as_path() == Path::new(allowed_file) {
// The licence on our allowed file is used to represent the entire directory
condensed_directories
.entry(*condensed_directory)
.or_insert_with(BTreeSet::new)
.insert(license);
} else {
// don't add the file
}
continue 'outer;
}
}
let mut node = Node::File { name: path.file_name().unwrap().into(), license };
for component in path.parent().unwrap_or_else(|| Path::new(".")).components().rev() {
node = Node::Directory {
@ -306,6 +332,22 @@ pub(crate) fn build(mut input: Vec<(PathBuf, LicenseId)>) -> Node<LicenseId> {
children.push(node);
}
for (path, licenses) in condensed_directories {
let path = Path::new(path);
let mut node = Node::CondensedDirectory {
name: path.file_name().unwrap().into(),
licenses: licenses.iter().copied().collect(),
};
for component in path.parent().unwrap_or_else(|| Path::new(".")).components().rev() {
node = Node::Directory {
name: component.as_os_str().into(),
children: vec![node],
license: None,
};
}
children.push(node);
}
Node::Root { children }
}
@ -334,6 +376,10 @@ pub(crate) fn expand_interned_licenses(
Node::Group { files, directories, license } => {
Node::Group { files, directories, license: interner.resolve(license) }
}
Node::CondensedDirectory { name, licenses } => Node::CondensedDirectory {
name,
licenses: licenses.into_iter().map(|license| interner.resolve(license)).collect(),
},
Node::Empty => Node::Empty,
}
}

View file

@ -17,9 +17,11 @@ pub(crate) fn collect(
let mut result = Vec::new();
for file in document.file_information {
let concluded_license = file.concluded_license.expect("File should have licence info");
let copyright_text = file.copyright_text.expect("File should have copyright text");
let license = interner.intern(License {
spdx: file.concluded_license.to_string(),
copyright: file.copyright_text.split('\n').map(|s| s.into()).collect(),
spdx: concluded_license.to_string(),
copyright: copyright_text.split('\n').map(|s| s.into()).collect(),
});
result.push((file.file_name.into(), license));
@ -30,7 +32,7 @@ pub(crate) fn collect(
fn obtain_spdx_document(reuse_exe: &Path) -> Result<String, Error> {
let output = Command::new(reuse_exe)
.args(&["spdx", "--add-license-concluded", "--creator-person=bors"])
.args(&["--include-submodules", "spdx", "--add-license-concluded", "--creator-person=bors"])
.stdout(Stdio::piped())
.spawn()?
.wait_with_output()?;

View file

@ -1,4 +1,5 @@
use anyhow::Error;
use std::collections::BTreeSet;
use std::io::Write;
use std::path::PathBuf;
@ -26,7 +27,7 @@ fn render_recursive(node: &Node, buffer: &mut Vec<u8>, depth: usize) -> Result<(
}
}
Node::Directory { name, children, license } => {
render_license(&prefix, std::iter::once(name), license, buffer)?;
render_license(&prefix, std::iter::once(name), license.iter(), buffer)?;
if !children.is_empty() {
writeln!(buffer, "{prefix}")?;
writeln!(buffer, "{prefix}*Exceptions:*")?;
@ -36,11 +37,19 @@ fn render_recursive(node: &Node, buffer: &mut Vec<u8>, depth: usize) -> Result<(
}
}
}
Node::CondensedDirectory { name, licenses } => {
render_license(&prefix, std::iter::once(name), licenses.iter(), buffer)?;
}
Node::Group { files, directories, license } => {
render_license(&prefix, directories.iter().chain(files.iter()), license, buffer)?;
render_license(
&prefix,
directories.iter().chain(files.iter()),
std::iter::once(license),
buffer,
)?;
}
Node::File { name, license } => {
render_license(&prefix, std::iter::once(name), license, buffer)?;
render_license(&prefix, std::iter::once(name), std::iter::once(license), buffer)?;
}
}
@ -50,15 +59,26 @@ fn render_recursive(node: &Node, buffer: &mut Vec<u8>, depth: usize) -> Result<(
fn render_license<'a>(
prefix: &str,
names: impl Iterator<Item = &'a String>,
license: &License,
licenses: impl Iterator<Item = &'a License>,
buffer: &mut Vec<u8>,
) -> Result<(), Error> {
let mut spdxs = BTreeSet::new();
let mut copyrights = BTreeSet::new();
for license in licenses {
spdxs.insert(&license.spdx);
for copyright in &license.copyright {
copyrights.insert(copyright);
}
}
for name in names {
writeln!(buffer, "{prefix}**`{name}`** ")?;
}
writeln!(buffer, "{prefix}License: `{}` ", license.spdx)?;
for (i, copyright) in license.copyright.iter().enumerate() {
let suffix = if i == license.copyright.len() - 1 { "" } else { " " };
for spdx in spdxs.iter() {
writeln!(buffer, "{prefix}License: `{spdx}` ")?;
}
for (i, copyright) in copyrights.iter().enumerate() {
let suffix = if i == copyrights.len() - 1 { "" } else { " " };
writeln!(buffer, "{prefix}Copyright: {copyright}{suffix}")?;
}
@ -74,7 +94,8 @@ struct Metadata {
#[serde(rename_all = "kebab-case", tag = "type")]
pub(crate) enum Node {
Root { children: Vec<Node> },
Directory { name: String, children: Vec<Node>, license: License },
Directory { name: String, children: Vec<Node>, license: Option<License> },
CondensedDirectory { name: String, licenses: Vec<License> },
File { name: String, license: License },
Group { files: Vec<String>, directories: Vec<String>, license: License },
}

View file
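The rewrite above changes `render_license` to accept any number of licenses and to deduplicate their SPDX expressions and copyright lines through `BTreeSet`s. A simplified, self-contained sketch of that aggregation (the `License` struct here is a stand-in for the real interned type, and the `prefix`/Markdown line-break details are omitted):

```rust
use std::collections::BTreeSet;
use std::io::Write;

// Simplified stand-in for the report's License type.
struct License {
    spdx: String,
    copyright: Vec<String>,
}

// Many licenses flow in, but each distinct SPDX expression and copyright
// line is printed only once, in sorted (BTreeSet) order.
fn render_licenses<'a>(
    licenses: impl Iterator<Item = &'a License>,
    buffer: &mut Vec<u8>,
) -> std::io::Result<()> {
    let mut spdxs = BTreeSet::new();
    let mut copyrights = BTreeSet::new();
    for license in licenses {
        spdxs.insert(&license.spdx);
        copyrights.extend(&license.copyright);
    }
    for spdx in &spdxs {
        writeln!(buffer, "License: `{spdx}`")?;
    }
    for copyright in &copyrights {
        writeln!(buffer, "Copyright: {copyright}")?;
    }
    Ok(())
}

fn main() -> std::io::Result<()> {
    let a = License {
        spdx: "Apache-2.0 OR MIT".into(),
        copyright: vec!["The Rust Project Developers".into()],
    };
    let b = License {
        spdx: "Apache-2.0 OR MIT".into(), // duplicate SPDX collapses to one line
        copyright: vec!["LLVM contributors".into()],
    };
    let mut buf = Vec::new();
    render_licenses([&a, &b].into_iter(), &mut buf)?;
    print!("{}", String::from_utf8(buf).unwrap());
    Ok(())
}
```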

@ -144,7 +144,7 @@ impl rustc_driver::Callbacks for MiriBeRustCompilerCalls {
// Otherwise it may cause unexpected behaviours and ICEs
// (https://github.com/rust-lang/rust/issues/86261).
let is_reachable_non_generic = matches!(
tcx.hir().get(tcx.local_def_id_to_hir_id(local_def_id)),
tcx.hir_node_by_def_id(local_def_id),
Node::Item(&hir::Item {
kind: hir::ItemKind::Static(..) | hir::ItemKind::Fn(..),
..

View file

@ -0,0 +1,29 @@
#![feature(extern_types)]
extern "C" {
type Opaque;
}
struct Newtype(Opaque);
struct S {
i: i32,
j: i32,
a: Newtype,
}
fn main() {
let buf = [0i32; 4];
let x: &Newtype = unsafe { &*(&buf as *const _ as *const Newtype) };
// Projecting to the newtype works, because it is always at offset 0.
let _field = &x.0;
let x: &S = unsafe { &*(&buf as *const _ as *const S) };
// Accessing sized fields is perfectly fine, even at non-zero offsets.
let _field = &x.i;
let _field = &x.j;
// This needs to compute the field offset, but we don't know the type's alignment,
// so this panics.
let _field = &x.a; //~ERROR: does not have a known offset
}

View file

@ -0,0 +1,14 @@
error: unsupported operation: `extern type` does not have a known offset
--> $DIR/extern-type-field-offset.rs:LL:CC
|
LL | let _field = &x.a;
| ^^^^ `extern type` does not have a known offset
|
= help: this is likely not a bug in the program; it indicates that the program performed an operation that the interpreter does not support
= note: BACKTRACE:
= note: inside `main` at $DIR/extern-type-field-offset.rs:LL:CC
note: some details are omitted, run with `MIRIFLAGS=-Zmiri-backtrace=full` for a verbose backtrace
error: aborting due to 1 previous error

View file

@ -276,7 +276,7 @@ dependencies = [
"autocfg",
"cfg-if",
"crossbeam-utils",
"memoffset 0.9.0",
"memoffset",
"scopeguard",
]
@ -301,12 +301,12 @@ dependencies = [
[[package]]
name = "dashmap"
version = "5.4.0"
version = "5.5.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "907076dfda823b0b36d2a1bb5f90c96660a5bbcd7729e10727f07858f22c4edc"
checksum = "978747c1d849a7d2ee5e8adc0159961c48fb7e5db2f06af6723b80123bb53856"
dependencies = [
"cfg-if",
"hashbrown 0.12.3",
"hashbrown",
"lock_api",
"once_cell",
"parking_lot_core",
@ -448,15 +448,9 @@ checksum = "b6c80984affa11d98d1b88b66ac8853f143217b399d3c74116778ff8fdb4ed2e"
[[package]]
name = "hashbrown"
version = "0.12.3"
version = "0.14.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8a9ee70c43aaf417c914396645a0fa852624801b24ebb7ae78fe8272889ac888"
[[package]]
name = "hashbrown"
version = "0.14.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f93e7192158dbcda357bdec5fb5788eebf8bbac027f3f33e719d29135ae84156"
checksum = "290f1a1d9242c78d09ce40a5e87e7554ee637af1351968159f4952f028f75604"
[[package]]
name = "heck"
@ -509,7 +503,7 @@ dependencies = [
"either",
"expect-test",
"fst",
"hashbrown 0.12.3",
"hashbrown",
"hir-expand",
"indexmap",
"intern",
@ -539,7 +533,7 @@ dependencies = [
"cov-mark",
"either",
"expect-test",
"hashbrown 0.12.3",
"hashbrown",
"intern",
"itertools",
"la-arena 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)",
@ -608,6 +602,7 @@ dependencies = [
name = "ide"
version = "0.0.0"
dependencies = [
"arrayvec",
"cfg",
"cov-mark",
"crossbeam-channel",
@ -764,7 +759,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d530e1a18b1cb4c484e6e34556a0d948706958449fca0cab753d649f2bce3d1f"
dependencies = [
"equivalent",
"hashbrown 0.14.2",
"hashbrown",
]
[[package]]
@ -792,7 +787,7 @@ name = "intern"
version = "0.0.0"
dependencies = [
"dashmap",
"hashbrown 0.12.3",
"hashbrown",
"rustc-hash",
"triomphe",
]
@ -938,23 +933,23 @@ checksum = "b06a4cde4c0f271a446782e3eff8de789548ce57dbc8eca9292c27f4a42004b4"
[[package]]
name = "lsp-server"
version = "0.7.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b52dccdf3302eefab8c8a1273047f0a3c3dca4b527c8458d00c09484c8371928"
dependencies = [
"crossbeam-channel",
"ctrlc",
"log",
"lsp-types",
"serde",
"serde_json",
]
[[package]]
name = "lsp-server"
version = "0.7.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b52dccdf3302eefab8c8a1273047f0a3c3dca4b527c8458d00c09484c8371928"
version = "0.7.5"
dependencies = [
"crossbeam-channel",
"ctrlc",
"log",
"lsp-types",
"serde",
"serde_json",
]
@ -1002,15 +997,6 @@ dependencies = [
"libc",
]
[[package]]
name = "memoffset"
version = "0.8.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d61c719bcfbcf5d62b3a09efa6088de8c54bc0bfcd3ea7ae39fcc186108b8de1"
dependencies = [
"autocfg",
]
[[package]]
name = "memoffset"
version = "0.9.0"
@ -1061,11 +1047,11 @@ dependencies = [
[[package]]
name = "miow"
version = "0.5.0"
version = "0.6.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "52ffbca2f655e33c08be35d87278e5b18b89550a37dbd598c20db92f6a471123"
checksum = "359f76430b20a79f9e20e115b3428614e654f04fab314482fc0fda0ebd3c6044"
dependencies = [
"windows-sys 0.42.0",
"windows-sys 0.48.0",
]
[[package]]
@ -1177,15 +1163,15 @@ dependencies = [
[[package]]
name = "parking_lot_core"
version = "0.9.6"
version = "0.9.9"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ba1ef8814b5c993410bb3adfad7a5ed269563e4a2f90c41f5d85be7fb47133bf"
checksum = "4c42a9226546d68acdd9c0a280d17ce19bfe27a46bf68784e4066115788d008e"
dependencies = [
"cfg-if",
"libc",
"redox_syscall 0.2.16",
"redox_syscall 0.4.1",
"smallvec",
"windows-sys 0.42.0",
"windows-targets",
]
[[package]]
@ -1255,6 +1241,9 @@ checksum = "e0a7ae3ac2f1173085d398531c705756c94a4c56843785df85a60c1a0afac116"
name = "proc-macro-api"
version = "0.0.0"
dependencies = [
"base-db",
"indexmap",
"la-arena 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)",
"memmap2",
"object 0.32.0",
"paths",
@ -1263,6 +1252,7 @@ dependencies = [
"serde_json",
"snap",
"stdx",
"text-size",
"tracing",
"triomphe",
"tt",
@ -1402,9 +1392,9 @@ dependencies = [
[[package]]
name = "ra-ap-rustc_abi"
version = "0.20.0"
version = "0.21.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b5f38444d48da534b3bb612713fce9b0aeeffb2e0dfa242764f55482acc5b52d"
checksum = "7816f980fab89e878ff2e916e2077d484e3aa1c619a3cc982c8a417c3dfe45fa"
dependencies = [
"bitflags 1.3.2",
"ra-ap-rustc_index",
@ -1413,9 +1403,9 @@ dependencies = [
[[package]]
name = "ra-ap-rustc_index"
version = "0.20.0"
version = "0.21.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "69fb5da07e1a39222d9c311203123c3b6a86420fa06dc695aa1661b0aecf8d16"
checksum = "8352918d61aa4afab9f2ed7314cf638976b20949b3d61d2f468c975b0d251f24"
dependencies = [
"arrayvec",
"ra-ap-rustc_index_macros",
@ -1424,9 +1414,9 @@ dependencies = [
[[package]]
name = "ra-ap-rustc_index_macros"
version = "0.20.0"
version = "0.21.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3d69f9f6af58124f2da0cb8b0c3d8494e0d883a5fe0c6732258bde81ac5a87cc"
checksum = "66a9424018828155a3e3596515598f90e68427d8f35eff6df7f0856c73fc58a8"
dependencies = [
"proc-macro2",
"quote",
@ -1436,9 +1426,9 @@ dependencies = [
[[package]]
name = "ra-ap-rustc_lexer"
version = "0.20.0"
version = "0.21.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9d5e8650195795c4023d8321846466994a975bc457cb8a91c0b3b17a5fc8ba40"
checksum = "dc741c7a78103efab416b562e35bd73c8d4967478575010c86c6062f8d3cbf29"
dependencies = [
"unicode-properties",
"unicode-xid",
@ -1446,9 +1436,9 @@ dependencies = [
[[package]]
name = "ra-ap-rustc_parse_format"
version = "0.20.0"
version = "0.21.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0a6b325ee1ec90e4dbd4394913adf4ef32e4fcf2b311ec9563a0fa50cd549af6"
checksum = "d557201d71792487bd2bab637ab5be9aa6fff59b88e25e12de180b0f9d2df60f"
dependencies = [
"ra-ap-rustc_index",
"ra-ap-rustc_lexer",
@ -1474,15 +1464,6 @@ dependencies = [
"crossbeam-utils",
]
[[package]]
name = "redox_syscall"
version = "0.2.16"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "fb5a58c1855b4b6819d59012155603f0b22ad30cad752600aadfcb695265519a"
dependencies = [
"bitflags 1.3.2",
]
[[package]]
name = "redox_syscall"
version = "0.3.5"
@ -1493,14 +1474,23 @@ dependencies = [
]
[[package]]
name = "rowan"
version = "0.15.11"
name = "redox_syscall"
version = "0.4.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "64449cfef9483a475ed56ae30e2da5ee96448789fb2aa240a04beb6a055078bf"
checksum = "4722d768eff46b75989dd134e5c353f0d6296e5aaa3132e776cbdb56be7731aa"
dependencies = [
"bitflags 1.3.2",
]
[[package]]
name = "rowan"
version = "0.15.15"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "32a58fa8a7ccff2aec4f39cc45bf5f985cec7125ab271cf681c279fd00192b49"
dependencies = [
"countme",
"hashbrown 0.12.3",
"memoffset 0.8.0",
"hashbrown",
"memoffset",
"rustc-hash",
"text-size",
]
@ -1524,16 +1514,14 @@ dependencies = [
"ide-ssr",
"itertools",
"load-cargo",
"lsp-server 0.7.4 (registry+https://github.com/rust-lang/crates.io-index)",
"lsp-server 0.7.4",
"lsp-types",
"mbe",
"mimalloc",
"mio",
"nohash-hasher",
"num_cpus",
"oorandom",
"parking_lot",
"parking_lot_core",
"parser",
"proc-macro-api",
"profile",
@ -1564,11 +1552,10 @@ dependencies = [
[[package]]
name = "rust-analyzer-salsa"
version = "0.17.0-pre.3"
version = "0.17.0-pre.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "4ca92b657d614d076800aa7bf5d5ba33564e71fa7f16cd79eacdfe301a50ab1c"
checksum = "16c42b8737c320578b441a82daf7cdf8d897468de64e8a774fa54b53a50b6cc0"
dependencies = [
"crossbeam-utils",
"indexmap",
"lock_api",
"log",
@ -1581,9 +1568,9 @@ dependencies = [
[[package]]
name = "rust-analyzer-salsa-macros"
version = "0.17.0-pre.3"
version = "0.17.0-pre.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b190359266d293f2ee13eaa502a766dc8b77b63fbaa5d460d24fd0210675ceef"
checksum = "db72b0883f3592ade2be15a10583c75e0b269ec26e1190800fda2e2ce5ae6634"
dependencies = [
"heck",
"proc-macro2",
@ -1751,6 +1738,7 @@ dependencies = [
"always-assert",
"backtrace",
"crossbeam-channel",
"itertools",
"jod-thread",
"libc",
"miow",
@ -2010,6 +1998,7 @@ version = "0.0.0"
dependencies = [
"smol_str",
"stdx",
"text-size",
]
[[package]]

View file

@ -42,7 +42,7 @@ debug = 0
# ungrammar = { path = "../ungrammar" }
# salsa = { path = "../salsa" }
# rust-analyzer-salsa = { path = "../salsa" }
[workspace.dependencies]
# local crates
@ -95,14 +95,25 @@ bitflags = "2.4.1"
cargo_metadata = "0.18.1"
dissimilar = "1.0.7"
either = "1.9.0"
hashbrown = { version = "0.14", features = [
"inline-more",
], default-features = false }
indexmap = "2.1.0"
itertools = "0.12.0"
libc = "0.2.150"
nohash-hasher = "0.2.0"
rayon = "1.8.0"
rust-analyzer-salsa = "0.17.0-pre.4"
rustc-hash = "1.1.0"
serde = { version = "1.0.192", features = ["derive"] }
serde_json = "1.0.108"
smallvec = { version = "1.10.0", features = [
"const_new",
"union",
"const_generics",
] }
smol_str = "0.2.0"
text-size = "1.1.1"
tracing = "0.1.40"
tracing-tree = "0.3.0"
tracing-subscriber = { version = "0.3.18", default-features = false, features = [
@ -110,15 +121,8 @@ tracing-subscriber = { version = "0.3.18", default-features = false, features =
"fmt",
"tracing-log",
] }
smol_str = "0.2.0"
nohash-hasher = "0.2.0"
text-size = "1.1.1"
rayon = "1.8.0"
serde = { version = "1.0.192", features = ["derive"] }
serde_json = "1.0.108"
triomphe = { version = "0.1.10", default-features = false, features = ["std"] }
# can't upgrade due to dashmap depending on 0.12.3 currently
hashbrown = { version = "0.12.3", features = [
"inline-more",
], default-features = false }
xshell = "0.2.5"
# We need to freeze the version of the crate, as the raw-api feature is considered unstable
dashmap = { version = "=5.5.3", features = ["raw-api"] }

View file

@ -12,12 +12,10 @@ rust-version.workspace = true
doctest = false
[dependencies]
rust-analyzer-salsa = "0.17.0-pre.3"
rustc-hash = "1.1.0"
triomphe.workspace = true
la-arena.workspace = true
rust-analyzer-salsa.workspace = true
rustc-hash.workspace = true
triomphe.workspace = true
# local deps
cfg.workspace = true

View file

@ -8,11 +8,12 @@ use test_utils::{
ESCAPED_CURSOR_MARKER,
};
use triomphe::Arc;
use tt::token_id::{Leaf, Subtree, TokenTree};
use tt::{Leaf, Subtree, TokenTree};
use vfs::{file_set::FileSet, VfsPath};
use crate::{
input::{CrateName, CrateOrigin, LangCrateOrigin},
span::SpanData,
Change, CrateDisplayName, CrateGraph, CrateId, Dependency, DependencyKind, Edition, Env,
FileId, FilePosition, FileRange, ProcMacro, ProcMacroExpander, ProcMacroExpansionError,
ProcMacros, ReleaseChannel, SourceDatabaseExt, SourceRoot, SourceRootId,
@ -134,7 +135,7 @@ impl ChangeFixture {
let mut file_set = FileSet::default();
let mut current_source_root_kind = SourceRootKind::Local;
let mut file_id = FileId(0);
let mut file_id = FileId::from_raw(0);
let mut roots = Vec::new();
let mut file_position = None;
@ -209,7 +210,7 @@ impl ChangeFixture {
let path = VfsPath::new_virtual_path(meta.path);
file_set.insert(file_id, path);
files.push(file_id);
file_id.0 += 1;
file_id = FileId::from_raw(file_id.index() + 1);
}
if crates.is_empty() {
@ -254,7 +255,7 @@ impl ChangeFixture {
if let Some(mini_core) = mini_core {
let core_file = file_id;
file_id.0 += 1;
file_id = FileId::from_raw(file_id.index() + 1);
let mut fs = FileSet::default();
fs.insert(core_file, VfsPath::new_virtual_path("/sysroot/core/lib.rs".to_string()));
@ -295,7 +296,6 @@ impl ChangeFixture {
let mut proc_macros = ProcMacros::default();
if !proc_macro_names.is_empty() {
let proc_lib_file = file_id;
file_id.0 += 1;
proc_macro_defs.extend(default_test_proc_macros());
let (proc_macro, source) = filter_test_proc_macros(&proc_macro_names, proc_macro_defs);
@ -539,10 +539,13 @@ struct IdentityProcMacroExpander;
impl ProcMacroExpander for IdentityProcMacroExpander {
fn expand(
&self,
subtree: &Subtree,
_: Option<&Subtree>,
subtree: &Subtree<SpanData>,
_: Option<&Subtree<SpanData>>,
_: &Env,
) -> Result<Subtree, ProcMacroExpansionError> {
_: SpanData,
_: SpanData,
_: SpanData,
) -> Result<Subtree<SpanData>, ProcMacroExpansionError> {
Ok(subtree.clone())
}
}
@ -553,10 +556,13 @@ struct AttributeInputReplaceProcMacroExpander;
impl ProcMacroExpander for AttributeInputReplaceProcMacroExpander {
fn expand(
&self,
_: &Subtree,
attrs: Option<&Subtree>,
_: &Subtree<SpanData>,
attrs: Option<&Subtree<SpanData>>,
_: &Env,
) -> Result<Subtree, ProcMacroExpansionError> {
_: SpanData,
_: SpanData,
_: SpanData,
) -> Result<Subtree<SpanData>, ProcMacroExpansionError> {
attrs
.cloned()
.ok_or_else(|| ProcMacroExpansionError::Panic("Expected attribute input".into()))
@ -568,11 +574,14 @@ struct MirrorProcMacroExpander;
impl ProcMacroExpander for MirrorProcMacroExpander {
fn expand(
&self,
input: &Subtree,
_: Option<&Subtree>,
input: &Subtree<SpanData>,
_: Option<&Subtree<SpanData>>,
_: &Env,
) -> Result<Subtree, ProcMacroExpansionError> {
fn traverse(input: &Subtree) -> Subtree {
_: SpanData,
_: SpanData,
_: SpanData,
) -> Result<Subtree<SpanData>, ProcMacroExpansionError> {
fn traverse(input: &Subtree<SpanData>) -> Subtree<SpanData> {
let mut token_trees = vec![];
for tt in input.token_trees.iter().rev() {
let tt = match tt {
@ -595,13 +604,16 @@ struct ShortenProcMacroExpander;
impl ProcMacroExpander for ShortenProcMacroExpander {
fn expand(
&self,
input: &Subtree,
_: Option<&Subtree>,
input: &Subtree<SpanData>,
_: Option<&Subtree<SpanData>>,
_: &Env,
) -> Result<Subtree, ProcMacroExpansionError> {
_: SpanData,
_: SpanData,
_: SpanData,
) -> Result<Subtree<SpanData>, ProcMacroExpansionError> {
return Ok(traverse(input));
fn traverse(input: &Subtree) -> Subtree {
fn traverse(input: &Subtree<SpanData>) -> Subtree<SpanData> {
let token_trees = input
.token_trees
.iter()
@ -613,7 +625,7 @@ impl ProcMacroExpander for ShortenProcMacroExpander {
Subtree { delimiter: input.delimiter, token_trees }
}
fn modify_leaf(leaf: &Leaf) -> Leaf {
fn modify_leaf(leaf: &Leaf<SpanData>) -> Leaf<SpanData> {
let mut leaf = leaf.clone();
match &mut leaf {
Leaf::Literal(it) => {

View file

@ -13,9 +13,10 @@ use la_arena::{Arena, Idx};
use rustc_hash::{FxHashMap, FxHashSet};
use syntax::SmolStr;
use triomphe::Arc;
use tt::token_id::Subtree;
use vfs::{file_set::FileSet, AbsPathBuf, AnchoredPath, FileId, VfsPath};
use crate::span::SpanData;
// Map from crate id to the name of the crate and path of the proc-macro. If the value is `None`,
// then the crate for the proc-macro hasn't been built yet, as the build data is missing.
pub type ProcMacroPaths = FxHashMap<CrateId, Result<(Option<String>, AbsPathBuf), String>>;
@ -242,6 +243,9 @@ impl CrateDisplayName {
}
}
// FIXME: These should not be defined in here? Why does base db know about proc-macros
// ProcMacroKind is used in [`fixture`], but that module probably shouldn't be in this crate either.
#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
pub struct ProcMacroId(pub u32);
@ -255,10 +259,13 @@ pub enum ProcMacroKind {
pub trait ProcMacroExpander: fmt::Debug + Send + Sync + RefUnwindSafe {
fn expand(
&self,
subtree: &Subtree,
attrs: Option<&Subtree>,
subtree: &tt::Subtree<SpanData>,
attrs: Option<&tt::Subtree<SpanData>>,
env: &Env,
) -> Result<Subtree, ProcMacroExpansionError>;
def_site: SpanData,
call_site: SpanData,
mixed_site: SpanData,
) -> Result<tt::Subtree<SpanData>, ProcMacroExpansionError>;
}
#[derive(Debug)]
@ -323,7 +330,9 @@ pub struct CrateData {
pub dependencies: Vec<Dependency>,
pub origin: CrateOrigin,
pub is_proc_macro: bool,
// FIXME: These things should not be per crate! These are more per workspace crate graph level things
// FIXME: These things should not be per crate! These are more per workspace crate graph level
// things. This info does need to be somewhat present though, to prevent deduplication from
// happening across different workspaces with different layouts.
pub target_layout: TargetLayoutLoadResult,
pub channel: Option<ReleaseChannel>,
}
@ -871,7 +880,7 @@ mod tests {
fn detect_cyclic_dependency_indirect() {
let mut graph = CrateGraph::default();
let crate1 = graph.add_crate_root(
FileId(1u32),
FileId::from_raw(1u32),
Edition2018,
None,
None,
@ -884,7 +893,7 @@ mod tests {
None,
);
let crate2 = graph.add_crate_root(
FileId(2u32),
FileId::from_raw(2u32),
Edition2018,
None,
None,
@ -897,7 +906,7 @@ mod tests {
None,
);
let crate3 = graph.add_crate_root(
FileId(3u32),
FileId::from_raw(3u32),
Edition2018,
None,
None,
@ -933,7 +942,7 @@ mod tests {
fn detect_cyclic_dependency_direct() {
let mut graph = CrateGraph::default();
let crate1 = graph.add_crate_root(
FileId(1u32),
FileId::from_raw(1u32),
Edition2018,
None,
None,
@ -946,7 +955,7 @@ mod tests {
None,
);
let crate2 = graph.add_crate_root(
FileId(2u32),
FileId::from_raw(2u32),
Edition2018,
None,
None,
@ -976,7 +985,7 @@ mod tests {
fn it_works() {
let mut graph = CrateGraph::default();
let crate1 = graph.add_crate_root(
FileId(1u32),
FileId::from_raw(1u32),
Edition2018,
None,
None,
@ -989,7 +998,7 @@ mod tests {
None,
);
let crate2 = graph.add_crate_root(
FileId(2u32),
FileId::from_raw(2u32),
Edition2018,
None,
None,
@ -1002,7 +1011,7 @@ mod tests {
None,
);
let crate3 = graph.add_crate_root(
FileId(3u32),
FileId::from_raw(3u32),
Edition2018,
None,
None,
@ -1032,7 +1041,7 @@ mod tests {
fn dashes_are_normalized() {
let mut graph = CrateGraph::default();
let crate1 = graph.add_crate_root(
FileId(1u32),
FileId::from_raw(1u32),
Edition2018,
None,
None,
@ -1045,7 +1054,7 @@ mod tests {
None,
);
let crate2 = graph.add_crate_root(
FileId(2u32),
FileId::from_raw(2u32),
Edition2018,
None,
None,

View file
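The `FileId(1u32)` → `FileId::from_raw(1u32)` churn in these test hunks follows from `vfs::FileId` hiding its `u32` payload behind accessors. A minimal sketch of the assumed newtype shape (simplified; the real type lives in rust-analyzer's vfs crate):

```rust
// The u32 payload becomes private; `from_raw`/`index` are the only doorway.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, PartialOrd, Ord)]
pub struct FileId(u32);

impl FileId {
    pub const fn from_raw(raw: u32) -> Self {
        FileId(raw)
    }
    pub const fn index(self) -> u32 {
        self.0
    }
}

fn main() {
    // The fixture code's old `file_id.0 += 1` becomes:
    let mut file_id = FileId::from_raw(0);
    file_id = FileId::from_raw(file_id.index() + 1);
    assert_eq!(file_id.index(), 1);
}
```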

@ -1,10 +1,11 @@
//! base_db defines basic database traits. The concrete DB is defined by ide.
#![warn(rust_2018_idioms, unused_lifetimes, semicolon_in_expressions_from_macros)]
#![warn(rust_2018_idioms, unused_lifetimes)]
mod input;
mod change;
pub mod fixture;
pub mod span;
use std::panic;
@ -12,14 +13,13 @@ use rustc_hash::FxHashSet;
use syntax::{ast, Parse, SourceFile, TextRange, TextSize};
use triomphe::Arc;
pub use crate::input::DependencyKind;
pub use crate::{
change::Change,
input::{
CrateData, CrateDisplayName, CrateGraph, CrateId, CrateName, CrateOrigin, Dependency,
Edition, Env, LangCrateOrigin, ProcMacro, ProcMacroExpander, ProcMacroExpansionError,
ProcMacroId, ProcMacroKind, ProcMacroLoadResult, ProcMacroPaths, ProcMacros,
ReleaseChannel, SourceRoot, SourceRootId, TargetLayoutLoadResult,
DependencyKind, Edition, Env, LangCrateOrigin, ProcMacro, ProcMacroExpander,
ProcMacroExpansionError, ProcMacroId, ProcMacroKind, ProcMacroLoadResult, ProcMacroPaths,
ProcMacros, ReleaseChannel, SourceRoot, SourceRootId, TargetLayoutLoadResult,
},
};
pub use salsa::{self, Cancelled};
@ -68,8 +68,7 @@ pub trait FileLoader {
/// model. Everything else in rust-analyzer is derived from these queries.
#[salsa::query_group(SourceDatabaseStorage)]
pub trait SourceDatabase: FileLoader + std::fmt::Debug {
// Parses the file into the syntax tree.
#[salsa::invoke(parse_query)]
/// Parses the file into the syntax tree.
fn parse(&self, file_id: FileId) -> Parse<ast::SourceFile>;
/// The crate graph.
@ -81,7 +80,7 @@ pub trait SourceDatabase: FileLoader + std::fmt::Debug {
fn proc_macros(&self) -> Arc<ProcMacros>;
}
fn parse_query(db: &dyn SourceDatabase, file_id: FileId) -> Parse<ast::SourceFile> {
fn parse(db: &dyn SourceDatabase, file_id: FileId) -> Parse<ast::SourceFile> {
let _p = profile::span("parse_query").detail(|| format!("{file_id:?}"));
let text = db.file_text(file_id);
SourceFile::parse(&text)

View file

@ -0,0 +1,203 @@
//! File and span related types.
// FIXME: This should probably be moved into its own crate.
use std::fmt;
use salsa::InternId;
use tt::SyntaxContext;
use vfs::FileId;
pub type ErasedFileAstId = la_arena::Idx<syntax::SyntaxNodePtr>;
// The first index is always the root node's AstId
pub const ROOT_ERASED_FILE_AST_ID: ErasedFileAstId =
la_arena::Idx::from_raw(la_arena::RawIdx::from_u32(0));
pub type SpanData = tt::SpanData<SpanAnchor, SyntaxContextId>;
#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
pub struct SyntaxContextId(InternId);
impl fmt::Debug for SyntaxContextId {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
if *self == Self::SELF_REF {
f.debug_tuple("SyntaxContextId")
.field(&{
#[derive(Debug)]
#[allow(non_camel_case_types)]
struct SELF_REF;
SELF_REF
})
.finish()
} else {
f.debug_tuple("SyntaxContextId").field(&self.0).finish()
}
}
}
crate::impl_intern_key!(SyntaxContextId);
impl fmt::Display for SyntaxContextId {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
write!(f, "{}", self.0.as_u32())
}
}
impl SyntaxContext for SyntaxContextId {
const DUMMY: Self = Self::ROOT;
}
// inherent trait impls please tyvm
impl SyntaxContextId {
pub const ROOT: Self = SyntaxContextId(unsafe { InternId::new_unchecked(0) });
// veykril(HACK): FIXME salsa doesn't allow us to fetch the id of the current input to be allocated, so
// we need a special value that behaves as the current context.
pub const SELF_REF: Self =
SyntaxContextId(unsafe { InternId::new_unchecked(InternId::MAX - 1) });
pub fn is_root(self) -> bool {
self == Self::ROOT
}
}
#[derive(Copy, Clone, PartialEq, Eq, Hash)]
pub struct SpanAnchor {
pub file_id: FileId,
pub ast_id: ErasedFileAstId,
}
impl fmt::Debug for SpanAnchor {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
f.debug_tuple("SpanAnchor").field(&self.file_id).field(&self.ast_id.into_raw()).finish()
}
}
impl tt::SpanAnchor for SpanAnchor {
const DUMMY: Self = SpanAnchor { file_id: FileId::BOGUS, ast_id: ROOT_ERASED_FILE_AST_ID };
}
/// Input to the analyzer is a set of files, where each file is identified by
/// `FileId` and contains source code. However, another source of source code in
/// Rust are macros: each macro can be thought of as producing a "temporary
/// file". To assign an id to such a file, we use the id of the macro call that
/// produced the file. So, a `HirFileId` is either a `FileId` (source code
/// written by user), or a `MacroCallId` (source code produced by macro).
///
/// What is a `MacroCallId`? Simplifying, it's a `HirFileId` of a file
/// containing the call plus the offset of the macro call in the file. Note that
/// this is a recursive definition! However, the size of `HirFileId` is
/// finite (because everything bottoms out at the real `FileId`) and small
/// (`MacroCallId` uses the location interning. You can check details here:
/// <https://en.wikipedia.org/wiki/String_interning>).
///
/// The two variants are encoded in a single u32, differentiated by the MSB.
/// If the MSB is 0, the value represents a `FileId`, otherwise the remaining 31 bits represent a
/// `MacroCallId`.
#[derive(Clone, Copy, PartialEq, Eq, Hash, PartialOrd, Ord)]
pub struct HirFileId(u32);
impl From<HirFileId> for u32 {
fn from(value: HirFileId) -> Self {
value.0
}
}
impl From<MacroCallId> for HirFileId {
fn from(value: MacroCallId) -> Self {
value.as_file()
}
}
impl fmt::Debug for HirFileId {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
self.repr().fmt(f)
}
}
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub struct MacroFileId {
pub macro_call_id: MacroCallId,
}
/// `MacroCallId` identifies a particular macro invocation, like
/// `println!("Hello, {}", world)`.
#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
pub struct MacroCallId(salsa::InternId);
crate::impl_intern_key!(MacroCallId);
impl MacroCallId {
pub fn as_file(self) -> HirFileId {
MacroFileId { macro_call_id: self }.into()
}
pub fn as_macro_file(self) -> MacroFileId {
MacroFileId { macro_call_id: self }
}
}
#[derive(Clone, Copy, PartialEq, Eq, Hash)]
pub enum HirFileIdRepr {
FileId(FileId),
MacroFile(MacroFileId),
}
impl fmt::Debug for HirFileIdRepr {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
match self {
Self::FileId(arg0) => f.debug_tuple("FileId").field(&arg0.index()).finish(),
Self::MacroFile(arg0) => {
f.debug_tuple("MacroFile").field(&arg0.macro_call_id.0).finish()
}
}
}
}
impl From<FileId> for HirFileId {
fn from(id: FileId) -> Self {
assert!(id.index() < Self::MAX_FILE_ID);
HirFileId(id.index())
}
}
impl From<MacroFileId> for HirFileId {
fn from(MacroFileId { macro_call_id: MacroCallId(id) }: MacroFileId) -> Self {
let id = id.as_u32();
assert!(id < Self::MAX_FILE_ID);
HirFileId(id | Self::MACRO_FILE_TAG_MASK)
}
}
impl HirFileId {
const MAX_FILE_ID: u32 = u32::MAX ^ Self::MACRO_FILE_TAG_MASK;
const MACRO_FILE_TAG_MASK: u32 = 1 << 31;
#[inline]
pub fn is_macro(self) -> bool {
self.0 & Self::MACRO_FILE_TAG_MASK != 0
}
#[inline]
pub fn macro_file(self) -> Option<MacroFileId> {
match self.0 & Self::MACRO_FILE_TAG_MASK {
0 => None,
_ => Some(MacroFileId {
macro_call_id: MacroCallId(InternId::from(self.0 ^ Self::MACRO_FILE_TAG_MASK)),
}),
}
}
#[inline]
pub fn file_id(self) -> Option<FileId> {
match self.0 & Self::MACRO_FILE_TAG_MASK {
0 => Some(FileId::from_raw(self.0)),
_ => None,
}
}
#[inline]
pub fn repr(self) -> HirFileIdRepr {
match self.0 & Self::MACRO_FILE_TAG_MASK {
0 => HirFileIdRepr::FileId(FileId::from_raw(self.0)),
_ => HirFileIdRepr::MacroFile(MacroFileId {
macro_call_id: MacroCallId(InternId::from(self.0 ^ Self::MACRO_FILE_TAG_MASK)),
}),
}
}
}
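
The HirFileId methods above all hinge on a single bit test. A standalone sketch of the encode/decode round-trip, assuming nothing beyond the MSB-tagging described in the doc comment:

const MACRO_FILE_TAG_MASK: u32 = 1 << 31;

// Tag a 31-bit macro call id by setting the MSB.
fn encode_macro(raw: u32) -> u32 {
    assert!(raw < MACRO_FILE_TAG_MASK, "must fit in the low 31 bits");
    raw | MACRO_FILE_TAG_MASK
}

// Recover (is_macro, raw) from the packed representation.
fn decode(repr: u32) -> (bool, u32) {
    (repr & MACRO_FILE_TAG_MASK != 0, repr & !MACRO_FILE_TAG_MASK)
}

fn main() {
    assert_eq!(decode(encode_macro(42)), (true, 42));
    assert_eq!(decode(7), (false, 7)); // untagged values are plain file ids
}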

View file

@ -1,6 +1,6 @@
//! cfg defines conditional compiling options, `cfg` attribute parser and evaluator
#![warn(rust_2018_idioms, unused_lifetimes, semicolon_in_expressions_from_macros)]
#![warn(rust_2018_idioms, unused_lifetimes)]
mod cfg_expr;
mod dnf;

View file

@ -1,37 +1,31 @@
use arbitrary::{Arbitrary, Unstructured};
use expect_test::{expect, Expect};
use mbe::syntax_node_to_token_tree;
use mbe::{syntax_node_to_token_tree, DummyTestSpanMap};
use syntax::{ast, AstNode};
use crate::{CfgAtom, CfgExpr, CfgOptions, DnfExpr};
fn assert_parse_result(input: &str, expected: CfgExpr) {
let (tt, _) = {
let source_file = ast::SourceFile::parse(input).ok().unwrap();
let tt = source_file.syntax().descendants().find_map(ast::TokenTree::cast).unwrap();
syntax_node_to_token_tree(tt.syntax())
};
let source_file = ast::SourceFile::parse(input).ok().unwrap();
let tt = source_file.syntax().descendants().find_map(ast::TokenTree::cast).unwrap();
let tt = syntax_node_to_token_tree(tt.syntax(), DummyTestSpanMap);
let cfg = CfgExpr::parse(&tt);
assert_eq!(cfg, expected);
}
fn check_dnf(input: &str, expect: Expect) {
let (tt, _) = {
let source_file = ast::SourceFile::parse(input).ok().unwrap();
let tt = source_file.syntax().descendants().find_map(ast::TokenTree::cast).unwrap();
syntax_node_to_token_tree(tt.syntax())
};
let source_file = ast::SourceFile::parse(input).ok().unwrap();
let tt = source_file.syntax().descendants().find_map(ast::TokenTree::cast).unwrap();
let tt = syntax_node_to_token_tree(tt.syntax(), DummyTestSpanMap);
let cfg = CfgExpr::parse(&tt);
let actual = format!("#![cfg({})]", DnfExpr::new(cfg));
expect.assert_eq(&actual);
}
fn check_why_inactive(input: &str, opts: &CfgOptions, expect: Expect) {
let (tt, _) = {
let source_file = ast::SourceFile::parse(input).ok().unwrap();
let tt = source_file.syntax().descendants().find_map(ast::TokenTree::cast).unwrap();
syntax_node_to_token_tree(tt.syntax())
};
let source_file = ast::SourceFile::parse(input).ok().unwrap();
let tt = source_file.syntax().descendants().find_map(ast::TokenTree::cast).unwrap();
let tt = syntax_node_to_token_tree(tt.syntax(), DummyTestSpanMap);
let cfg = CfgExpr::parse(&tt);
let dnf = DnfExpr::new(cfg);
let why_inactive = dnf.why_inactive(opts).unwrap().to_string();
@ -40,11 +34,9 @@ fn check_why_inactive(input: &str, opts: &CfgOptions, expect: Expect) {
#[track_caller]
fn check_enable_hints(input: &str, opts: &CfgOptions, expected_hints: &[&str]) {
let (tt, _) = {
let source_file = ast::SourceFile::parse(input).ok().unwrap();
let tt = source_file.syntax().descendants().find_map(ast::TokenTree::cast).unwrap();
syntax_node_to_token_tree(tt.syntax())
};
let source_file = ast::SourceFile::parse(input).ok().unwrap();
let tt = source_file.syntax().descendants().find_map(ast::TokenTree::cast).unwrap();
let tt = syntax_node_to_token_tree(tt.syntax(), DummyTestSpanMap);
let cfg = CfgExpr::parse(&tt);
let dnf = DnfExpr::new(cfg);
let hints = dnf.compute_enable_hints(opts).map(|diff| diff.to_string()).collect::<Vec<_>>();

View file

@ -2,7 +2,7 @@
//! another compatible command (e.g. clippy) in a background thread and provide
//! LSP diagnostics based on the output of the command.
#![warn(rust_2018_idioms, unused_lifetimes, semicolon_in_expressions_from_macros)]
#![warn(rust_2018_idioms, unused_lifetimes)]
use std::{
ffi::OsString,

View file

@ -15,8 +15,7 @@ doctest = false
arrayvec = "0.7.2"
bitflags.workspace = true
cov-mark = "2.0.0-pre.1"
# We need to freeze the version of the crate, as the raw-api feature is considered unstable
dashmap = { version = "=5.4.0", features = ["raw-api"] }
dashmap.workspace = true
drop_bomb = "0.1.5"
either.workspace = true
fst = { version = "0.4.7", default-features = false }

View file

@ -421,6 +421,7 @@ impl AttrsWithOwner {
RawAttrs::from_attrs_owner(
db.upcast(),
src.with_value(&src.value[it.local_id()]),
db.span_map(src.file_id).as_ref(),
)
}
GenericParamId::TypeParamId(it) => {
@ -428,11 +429,16 @@ impl AttrsWithOwner {
RawAttrs::from_attrs_owner(
db.upcast(),
src.with_value(&src.value[it.local_id()]),
db.span_map(src.file_id).as_ref(),
)
}
GenericParamId::LifetimeParamId(it) => {
let src = it.parent.child_source(db);
RawAttrs::from_attrs_owner(db.upcast(), src.with_value(&src.value[it.local_id]))
RawAttrs::from_attrs_owner(
db.upcast(),
src.with_value(&src.value[it.local_id]),
db.span_map(src.file_id).as_ref(),
)
}
},
AttrDefId::ExternBlockId(it) => attrs_from_item_tree_loc(db, it),

View file

@ -1,17 +1,20 @@
//! This module contains tests for doc-expression parsing.
//! Currently, it tests `#[doc(hidden)]` and `#[doc(alias)]`.
use base_db::FileId;
use hir_expand::span::{RealSpanMap, SpanMapRef};
use mbe::syntax_node_to_token_tree;
use syntax::{ast, AstNode};
use crate::attr::{DocAtom, DocExpr};
fn assert_parse_result(input: &str, expected: DocExpr) {
let (tt, _) = {
let source_file = ast::SourceFile::parse(input).ok().unwrap();
let tt = source_file.syntax().descendants().find_map(ast::TokenTree::cast).unwrap();
syntax_node_to_token_tree(tt.syntax())
};
let source_file = ast::SourceFile::parse(input).ok().unwrap();
let tt = source_file.syntax().descendants().find_map(ast::TokenTree::cast).unwrap();
let tt = syntax_node_to_token_tree(
tt.syntax(),
SpanMapRef::RealSpanMap(&RealSpanMap::absolute(FileId::from_raw(0))),
);
let cfg = DocExpr::parse(&tt);
assert_eq!(cfg, expected);
}

View file

@ -95,6 +95,8 @@ pub struct BodySourceMap {
field_map_back: FxHashMap<ExprId, FieldSource>,
pat_field_map_back: FxHashMap<PatId, PatFieldSource>,
format_args_template_map: FxHashMap<ExprId, Vec<(syntax::TextRange, Name)>>,
expansions: FxHashMap<InFile<AstPtr<ast::MacroCall>>, HirFileId>,
/// Diagnostics accumulated during body lowering. These contain `AstPtr`s and so are stored in
@ -387,6 +389,14 @@ impl BodySourceMap {
self.expr_map.get(&src).copied()
}
pub fn implicit_format_args(
&self,
node: InFile<&ast::FormatArgsExpr>,
) -> Option<&[(syntax::TextRange, Name)]> {
let src = node.map(AstPtr::new).map(AstPtr::upcast::<ast::Expr>);
self.format_args_template_map.get(self.expr_map.get(&src)?).map(std::ops::Deref::deref)
}
/// Get a reference to the body source map's diagnostics.
pub fn diagnostics(&self) -> &[BodyDiagnostic] {
&self.diagnostics
@ -403,8 +413,10 @@ impl BodySourceMap {
field_map_back,
pat_field_map_back,
expansions,
format_args_template_map,
diagnostics,
} = self;
format_args_template_map.shrink_to_fit();
expr_map.shrink_to_fit();
expr_map_back.shrink_to_fit();
pat_map.shrink_to_fit();

View file

@ -1025,7 +1025,7 @@ impl ExprCollector<'_> {
let id = collector(self, Some(expansion.tree()));
self.ast_id_map = prev_ast_id_map;
self.expander.exit(self.db, mark);
self.expander.exit(mark);
id
}
None => collector(self, None),
@ -1597,13 +1597,25 @@ impl ExprCollector<'_> {
});
let template = f.template();
let fmt_snippet = template.as_ref().map(ToString::to_string);
let mut mappings = vec![];
let fmt = match template.and_then(|it| self.expand_macros_to_string(it)) {
Some((s, is_direct_literal)) => {
format_args::parse(&s, fmt_snippet, args, is_direct_literal, |name| {
self.alloc_expr_desugared(Expr::Path(Path::from(name)))
})
}
None => FormatArgs { template: Default::default(), arguments: args.finish() },
Some((s, is_direct_literal)) => format_args::parse(
&s,
fmt_snippet,
args,
is_direct_literal,
|name| self.alloc_expr_desugared(Expr::Path(Path::from(name))),
|name, span| {
if let Some(span) = span {
mappings.push((span, name.clone()))
}
},
),
None => FormatArgs {
template: Default::default(),
arguments: args.finish(),
orphans: Default::default(),
},
};
// Create a list of all _unique_ (argument, format trait) combinations.
@ -1742,18 +1754,26 @@ impl ExprCollector<'_> {
});
let unsafe_arg_new = self.alloc_expr_desugared(Expr::Unsafe {
id: None,
statements: Box::default(),
// We collect the unused expressions here so that we still infer them instead of
// dropping them out of the expression tree
statements: fmt
.orphans
.into_iter()
.map(|expr| Statement::Expr { expr, has_semi: true })
.collect(),
tail: Some(unsafe_arg_new),
});
self.alloc_expr(
let idx = self.alloc_expr(
Expr::Call {
callee: new_v1_formatted,
args: Box::new([lit_pieces, args, format_options, unsafe_arg_new]),
is_assignee_expr: false,
},
syntax_ptr,
)
);
self.source_map.format_args_template_map.insert(idx, mappings);
idx
}
/// Generate a hir expression for a format_args placeholder specification.

View file

@ -143,7 +143,6 @@ mod m {
#[test]
fn desugar_builtin_format_args() {
// Regression test for a path resolution bug introduced with inner item handling.
let (db, body, def) = lower(
r#"
//- minicore: fmt
@ -161,7 +160,7 @@ fn main() {
let count = 10;
builtin#lang(Arguments::new_v1_formatted)(
&[
"\"hello ", " ", " friends, we ", " ", "", "\"",
"hello ", " ", " friends, we ", " ", "",
],
&[
builtin#lang(Argument::new_display)(
@ -221,3 +220,115 @@ fn main() {
}"#]]
.assert_eq(&body.pretty_print(&db, def))
}
#[test]
fn test_macro_hygiene() {
let (db, body, def) = lower(
r##"
//- minicore: fmt, from
//- /main.rs
mod error;
use crate::error::error;
fn main() {
// _ = forces body expansion instead of block def map expansion
_ = error!("Failed to resolve path `{}`", node.text());
}
//- /error.rs
macro_rules! _error {
($fmt:expr, $($arg:tt)+) => {$crate::error::intermediate!(format_args!($fmt, $($arg)+))}
}
pub(crate) use _error as error;
macro_rules! _intermediate {
($arg:expr) => {$crate::error::SsrError::new($arg)}
}
pub(crate) use _intermediate as intermediate;
pub struct SsrError(pub(crate) core::fmt::Arguments);
impl SsrError {
pub(crate) fn new(message: impl Into<core::fmt::Arguments>) -> SsrError {
SsrError(message.into())
}
}
"##,
);
assert_eq!(db.body_with_source_map(def.into()).1.diagnostics(), &[]);
expect![[r#"
fn main() {
_ = $crate::error::SsrError::new(
builtin#lang(Arguments::new_v1_formatted)(
&[
"Failed to resolve path `", "`",
],
&[
builtin#lang(Argument::new_display)(
&node.text(),
),
],
&[
builtin#lang(Placeholder::new)(
0usize,
' ',
builtin#lang(Alignment::Unknown),
0u32,
builtin#lang(Count::Implied),
builtin#lang(Count::Implied),
),
],
unsafe {
builtin#lang(UnsafeArg::new)()
},
),
);
}"#]]
.assert_eq(&body.pretty_print(&db, def))
}
#[test]
fn regression_10300() {
let (db, body, def) = lower(
r#"
//- minicore: concat, panic
mod private {
pub use core::concat;
}
macro_rules! m {
() => {
panic!(concat!($crate::private::concat!("cc")));
};
}
fn f() {
m!();
}
"#,
);
let (_, source_map) = db.body_with_source_map(def.into());
assert_eq!(source_map.diagnostics(), &[]);
for (_, def_map) in body.blocks(&db) {
assert_eq!(def_map.diagnostics(), &[]);
}
expect![[r#"
fn f() {
$crate::panicking::panic_fmt(
builtin#lang(Arguments::new_v1_formatted)(
&[
"cc",
],
&[],
&[],
unsafe {
builtin#lang(UnsafeArg::new)()
},
),
);
}"#]]
.assert_eq(&body.pretty_print(&db, def))
}

View file

@ -663,7 +663,7 @@ impl<'a> AssocItemCollector<'a> {
self.module_id.local_id,
MacroCallKind::Attr {
ast_id,
attr_args: Arc::new((tt::Subtree::empty(), Default::default())),
attr_args: None,
invoc_attr_index: attr.id,
},
attr.path().clone(),
@ -706,7 +706,7 @@ impl<'a> AssocItemCollector<'a> {
}
AssocItem::MacroCall(call) => {
let file_id = self.expander.current_file_id();
let MacroCall { ast_id, expand_to, ref path } = item_tree[call];
let MacroCall { ast_id, expand_to, call_site, ref path } = item_tree[call];
let module = self.expander.module.local_id;
let resolver = |path| {
@ -725,6 +725,7 @@ impl<'a> AssocItemCollector<'a> {
match macro_call_as_call_id(
self.db.upcast(),
&AstIdWithPath::new(file_id, ast_id, Clone::clone(path)),
call_site,
expand_to,
self.expander.module.krate(),
resolver,
@ -793,7 +794,7 @@ impl<'a> AssocItemCollector<'a> {
self.collect(&item_tree, tree_id, &iter);
self.expander.exit(self.db, mark);
self.expander.exit(mark);
}
}

View file

@ -4,21 +4,21 @@ use base_db::CrateId;
use cfg::CfgOptions;
use drop_bomb::DropBomb;
use hir_expand::{
attrs::RawAttrs, hygiene::Hygiene, mod_path::ModPath, ExpandError, ExpandResult, HirFileId,
InFile, MacroCallId, UnresolvedMacro,
attrs::RawAttrs, mod_path::ModPath, span::SpanMap, ExpandError, ExpandResult, HirFileId,
InFile, MacroCallId,
};
use limit::Limit;
use syntax::{ast, Parse, SyntaxNode};
use crate::{
attr::Attrs, db::DefDatabase, lower::LowerCtx, macro_id_to_def_id, path::Path, AsMacroCall,
MacroId, ModuleId,
MacroId, ModuleId, UnresolvedMacro,
};
#[derive(Debug)]
pub struct Expander {
cfg_options: CfgOptions,
hygiene: Hygiene,
span_map: SpanMap,
krate: CrateId,
pub(crate) current_file_id: HirFileId,
pub(crate) module: ModuleId,
@ -41,7 +41,7 @@ impl Expander {
recursion_depth: 0,
recursion_limit,
cfg_options: db.crate_graph()[module.krate].cfg_options.clone(),
hygiene: Hygiene::new(db.upcast(), current_file_id),
span_map: db.span_map(current_file_id),
krate: module.krate,
}
}
@ -94,8 +94,8 @@ impl Expander {
ExpandResult { value: Some(InFile::new(macro_file.into(), value.0)), err: error.or(err) }
}
pub fn exit(&mut self, db: &dyn DefDatabase, mut mark: Mark) {
self.hygiene = Hygiene::new(db.upcast(), mark.file_id);
pub fn exit(&mut self, mut mark: Mark) {
self.span_map = mark.span_map;
self.current_file_id = mark.file_id;
if self.recursion_depth == u32::MAX {
// Recursion limit has been reached somewhere in the macro expansion tree. Reset the
@ -110,7 +110,7 @@ impl Expander {
}
pub fn ctx<'a>(&self, db: &'a dyn DefDatabase) -> LowerCtx<'a> {
LowerCtx::new(db, &self.hygiene, self.current_file_id)
LowerCtx::new(db, self.span_map.clone(), self.current_file_id)
}
pub(crate) fn to_source<T>(&self, value: T) -> InFile<T> {
@ -118,7 +118,7 @@ impl Expander {
}
pub(crate) fn parse_attrs(&self, db: &dyn DefDatabase, owner: &dyn ast::HasAttrs) -> Attrs {
Attrs::filter(db, self.krate, RawAttrs::new(db.upcast(), owner, &self.hygiene))
Attrs::filter(db, self.krate, RawAttrs::new(db.upcast(), owner, self.span_map.as_ref()))
}
pub(crate) fn cfg_options(&self) -> &CfgOptions {
@ -130,8 +130,8 @@ impl Expander {
}
pub(crate) fn parse_path(&mut self, db: &dyn DefDatabase, path: ast::Path) -> Option<Path> {
let ctx = LowerCtx::new(db, &self.hygiene, self.current_file_id);
Path::from_src(path, &ctx)
let ctx = LowerCtx::new(db, self.span_map.clone(), self.current_file_id);
Path::from_src(&ctx, path)
}
fn within_limit<F, T: ast::AstNode>(
@ -174,10 +174,11 @@ impl Expander {
let parse = value.cast::<T>()?;
self.recursion_depth += 1;
self.hygiene = Hygiene::new(db.upcast(), file_id);
let old_span_map = std::mem::replace(&mut self.span_map, db.span_map(file_id));
let old_file_id = std::mem::replace(&mut self.current_file_id, file_id);
let mark = Mark {
file_id: old_file_id,
span_map: old_span_map,
bomb: DropBomb::new("expansion mark dropped"),
};
Some((mark, parse))
@ -190,5 +191,6 @@ impl Expander {
#[derive(Debug)]
pub struct Mark {
file_id: HirFileId,
span_map: SpanMap,
bomb: DropBomb,
}
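
exit above no longer needs the database because the previous SpanMap now travels inside the Mark and is handed back on exit. The underlying pattern is a plain mem::replace save/restore, sketched here in isolation (the real code additionally uses a DropBomb to catch a forgotten exit):

struct Expander {
    state: u32, // stands in for (span_map, current_file_id)
}

struct Mark {
    old_state: u32,
}

impl Expander {
    fn enter(&mut self, new_state: u32) -> Mark {
        // Swap in the new state, remembering the old one inside the Mark.
        Mark { old_state: std::mem::replace(&mut self.state, new_state) }
    }
    fn exit(&mut self, mark: Mark) {
        self.state = mark.old_state;
    }
}

fn main() {
    let mut e = Expander { state: 0 };
    let mark = e.enter(1);
    assert_eq!(e.state, 1); // inside the expansion
    e.exit(mark);
    assert_eq!(e.state, 0); // restored on exit
}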

View file

@ -586,7 +586,7 @@ fn find_local_import_locations(
#[cfg(test)]
mod tests {
use base_db::fixture::WithFixture;
use hir_expand::hygiene::Hygiene;
use hir_expand::db::ExpandDatabase;
use syntax::ast::AstNode;
use crate::test_db::TestDB;
@ -608,7 +608,8 @@ mod tests {
let parsed_path_file = syntax::SourceFile::parse(&format!("use {path};"));
let ast_path =
parsed_path_file.syntax_node().descendants().find_map(syntax::ast::Path::cast).unwrap();
let mod_path = ModPath::from_src(&db, ast_path, &Hygiene::new_unhygienic()).unwrap();
let mod_path =
ModPath::from_src(&db, ast_path, db.span_map(pos.file_id.into()).as_ref()).unwrap();
let def_map = module.def_map(&db);
let resolved = def_map

View file

@ -21,7 +21,7 @@ use crate::{
db::DefDatabase,
dyn_map::{keys, DynMap},
expander::Expander,
item_tree::{AttrOwner, ItemTree},
item_tree::ItemTree,
lower::LowerCtx,
nameres::{DefMap, MacroSubNs},
src::{HasChildSource, HasSource},
@ -250,7 +250,10 @@ impl GenericParams {
&mut self,
lower_ctx: &LowerCtx<'_>,
node: &dyn HasGenericParams,
add_param_attrs: impl FnMut(AttrOwner, ast::GenericParam),
add_param_attrs: impl FnMut(
Either<Idx<TypeOrConstParamData>, Idx<LifetimeParamData>>,
ast::GenericParam,
),
) {
if let Some(params) = node.generic_param_list() {
self.fill_params(lower_ctx, params, add_param_attrs)
@ -275,7 +278,10 @@ impl GenericParams {
&mut self,
lower_ctx: &LowerCtx<'_>,
params: ast::GenericParamList,
mut add_param_attrs: impl FnMut(AttrOwner, ast::GenericParam),
mut add_param_attrs: impl FnMut(
Either<Idx<TypeOrConstParamData>, Idx<LifetimeParamData>>,
ast::GenericParam,
),
) {
for type_or_const_param in params.type_or_const_params() {
match type_or_const_param {
@ -297,7 +303,7 @@ impl GenericParams {
type_param.type_bound_list(),
Either::Left(type_ref),
);
add_param_attrs(idx.into(), ast::GenericParam::TypeParam(type_param));
add_param_attrs(Either::Left(idx), ast::GenericParam::TypeParam(type_param));
}
ast::TypeOrConstParam::Const(const_param) => {
let name = const_param.name().map_or_else(Name::missing, |it| it.as_name());
@ -310,7 +316,7 @@ impl GenericParams {
default: ConstRef::from_const_param(lower_ctx, &const_param),
};
let idx = self.type_or_consts.alloc(param.into());
add_param_attrs(idx.into(), ast::GenericParam::ConstParam(const_param));
add_param_attrs(Either::Left(idx), ast::GenericParam::ConstParam(const_param));
}
}
}
@ -325,7 +331,7 @@ impl GenericParams {
lifetime_param.type_bound_list(),
Either::Right(lifetime_ref),
);
add_param_attrs(idx.into(), ast::GenericParam::LifetimeParam(lifetime_param));
add_param_attrs(Either::Right(idx), ast::GenericParam::LifetimeParam(lifetime_param));
}
}
@ -433,7 +439,7 @@ impl GenericParams {
let ctx = expander.ctx(db);
let type_ref = TypeRef::from_ast(&ctx, expanded.tree());
self.fill_implicit_impl_trait_args(db, &mut *exp, &type_ref);
exp.1.exit(db, mark);
exp.1.exit(mark);
}
}
});
@ -518,7 +524,7 @@ fn file_id_and_params_of(
(src.file_id, src.value.generic_param_list())
}
// We won't be using this ID anyway
GenericDefId::EnumVariantId(_) | GenericDefId::ConstId(_) => (FileId(!0).into(), None),
GenericDefId::EnumVariantId(_) | GenericDefId::ConstId(_) => (FileId::BOGUS.into(), None),
}
}

View file

@ -3,9 +3,10 @@ use std::mem;
use hir_expand::name::Name;
use rustc_dependencies::parse_format as parse;
use stdx::TupleExt;
use syntax::{
ast::{self, IsString},
AstToken, SmolStr, TextRange,
SmolStr, TextRange, TextSize,
};
use crate::hir::ExprId;
@ -14,6 +15,7 @@ use crate::hir::ExprId;
pub struct FormatArgs {
pub template: Box<[FormatArgsPiece]>,
pub arguments: FormatArguments,
pub orphans: Vec<ExprId>,
}
#[derive(Debug, Clone, PartialEq, Eq)]
@ -170,15 +172,18 @@ pub(crate) fn parse(
mut args: FormatArgumentsCollector,
is_direct_literal: bool,
mut synth: impl FnMut(Name) -> ExprId,
mut record_usage: impl FnMut(Name, Option<TextRange>),
) -> FormatArgs {
let text = s.text();
let text = s.text_without_quotes();
let str_style = match s.quote_offsets() {
Some(offsets) => {
let raw = u32::from(offsets.quotes.0.len()) - 1;
(raw != 0).then_some(raw as usize)
// subtract 1 for the `r` prefix
(raw != 0).then(|| raw as usize - 1)
}
None => None,
};
let mut parser =
parse::Parser::new(text, str_style, fmt_snippet, false, parse::ParseMode::Format);
@ -193,12 +198,17 @@ pub(crate) fn parse(
let is_source_literal = parser.is_source_literal;
if !parser.errors.is_empty() {
// FIXME: Diagnose
return FormatArgs { template: Default::default(), arguments: args.finish() };
return FormatArgs {
template: Default::default(),
arguments: args.finish(),
orphans: vec![],
};
}
let to_span = |inner_span: parse::InnerSpan| {
is_source_literal.then(|| {
TextRange::new(inner_span.start.try_into().unwrap(), inner_span.end.try_into().unwrap())
- TextSize::from(str_style.map(|it| it + 1).unwrap_or(0) as u32 + 1)
})
};
@ -230,9 +240,10 @@ pub(crate) fn parse(
Err(index)
}
}
ArgRef::Name(name, _span) => {
ArgRef::Name(name, span) => {
let name = Name::new_text_dont_use(SmolStr::new(name));
if let Some((index, _)) = args.by_name(&name) {
record_usage(name, span);
// Name found in `args`, so we resolve it to its index.
if index < args.explicit_args().len() {
// Mark it as used, if it was an explicit argument.
@ -246,6 +257,7 @@ pub(crate) fn parse(
// disabled (see RFC #2795)
// FIXME: Diagnose
}
record_usage(name.clone(), span);
Ok(args.add(FormatArgument {
kind: FormatArgumentKind::Captured(name.clone()),
// FIXME: This is problematic, we might want to synthesize a dummy
@ -413,7 +425,11 @@ pub(crate) fn parse(
// FIXME: Diagnose
}
FormatArgs { template: template.into_boxed_slice(), arguments: args.finish() }
FormatArgs {
template: template.into_boxed_slice(),
arguments: args.finish(),
orphans: unused.into_iter().map(TupleExt::head).collect(),
}
}
#[derive(Debug, Clone, PartialEq, Eq)]
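
The offset subtracted in to_span above is the length of the literal's opening delimiter, so that parser-relative spans line up with the unquoted text. A small sketch of that arithmetic (assumed semantics: str_style is the number of `#`s of a raw string, None for a plain one):

fn prefix_len(str_style: Option<usize>) -> u32 {
    // `"…"`      -> `"`    -> 1
    // `r"…"`     -> `r"`   -> 2
    // `r##"…"##` -> `r##"` -> 4
    (str_style.map(|hashes| hashes + 1).unwrap_or(0) + 1) as u32
}

fn main() {
    assert_eq!(prefix_len(None), 1);
    assert_eq!(prefix_len(Some(0)), 2);
    assert_eq!(prefix_len(Some(2)), 4);
}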

View file

@ -112,6 +112,7 @@ pub struct ItemScope {
#[derive(Debug, PartialEq, Eq)]
struct DeriveMacroInvocation {
attr_id: AttrId,
/// The `#[derive]` call
attr_call_id: MacroCallId,
derive_call_ids: SmallVec<[Option<MacroCallId>; 1]>,
}
@ -401,6 +402,14 @@ impl ItemScope {
})
}
pub fn derive_macro_invoc(
&self,
ast_id: AstId<ast::Adt>,
attr_id: AttrId,
) -> Option<MacroCallId> {
Some(self.derive_macros.get(&ast_id)?.iter().find(|it| it.attr_id == attr_id)?.attr_call_id)
}
    // FIXME: This is only used in collection; we should move the relevant parts of it out of ItemScope.
pub(crate) fn unnamed_trait_vis(&self, tr: TraitId) -> Option<Visibility> {
self.unnamed_trait_imports.get(&tr).copied().map(|(a, _)| a)

View file

@ -42,12 +42,11 @@ use std::{
};
use ast::{AstNode, HasName, StructKind};
use base_db::CrateId;
use base_db::{span::SyntaxContextId, CrateId};
use either::Either;
use hir_expand::{
ast_id_map::{AstIdNode, FileAstId},
attrs::RawAttrs,
hygiene::Hygiene,
name::{name, AsName, Name},
ExpandTo, HirFileId, InFile,
};
@ -118,7 +117,7 @@ impl ItemTree {
let mut item_tree = match_ast! {
match syntax {
ast::SourceFile(file) => {
top_attrs = Some(RawAttrs::new(db.upcast(), &file, ctx.hygiene()));
top_attrs = Some(RawAttrs::new(db.upcast(), &file, ctx.span_map()));
ctx.lower_module_items(&file)
},
ast::MacroItems(items) => {
@ -749,6 +748,7 @@ pub struct MacroCall {
pub path: Interned<ModPath>,
pub ast_id: FileAstId<ast::MacroCall>,
pub expand_to: ExpandTo,
pub call_site: SyntaxContextId,
}
#[derive(Debug, Clone, Eq, PartialEq)]
@ -778,9 +778,9 @@ impl Use {
// Note: The AST unwraps are fine, since if they fail we should have never obtained `index`.
let ast = InFile::new(file_id, self.ast_id).to_node(db.upcast());
let ast_use_tree = ast.use_tree().expect("missing `use_tree`");
let hygiene = Hygiene::new(db.upcast(), file_id);
let (_, source_map) =
lower::lower_use_tree(db, &hygiene, ast_use_tree).expect("failed to lower use tree");
let span_map = db.span_map(file_id);
let (_, source_map) = lower::lower_use_tree(db, span_map.as_ref(), ast_use_tree)
.expect("failed to lower use tree");
source_map[index].clone()
}
/// Maps a `UseTree` contained in this import back to its AST node.
@ -793,8 +793,10 @@ impl Use {
// Note: The AST unwraps are fine, since if they fail we should have never obtained `index`.
let ast = InFile::new(file_id, self.ast_id).to_node(db.upcast());
let ast_use_tree = ast.use_tree().expect("missing `use_tree`");
let hygiene = Hygiene::new(db.upcast(), file_id);
lower::lower_use_tree(db, &hygiene, ast_use_tree).expect("failed to lower use tree").1
let span_map = db.span_map(file_id);
lower::lower_use_tree(db, span_map.as_ref(), ast_use_tree)
.expect("failed to lower use tree")
.1
}
}

View file

@ -2,12 +2,13 @@
use std::collections::hash_map::Entry;
use hir_expand::{ast_id_map::AstIdMap, hygiene::Hygiene, HirFileId};
use hir_expand::{ast_id_map::AstIdMap, span::SpanMapRef, HirFileId};
use syntax::ast::{self, HasModuleItem, HasTypeBounds};
use crate::{
generics::{GenericParams, TypeParamData, TypeParamProvenance},
type_ref::{LifetimeRef, TraitBoundModifier, TraitRef},
LocalLifetimeParamId, LocalTypeOrConstParamId,
};
use super::*;
@ -33,8 +34,8 @@ impl<'a> Ctx<'a> {
}
}
pub(super) fn hygiene(&self) -> &Hygiene {
self.body_ctx.hygiene()
pub(super) fn span_map(&self) -> SpanMapRef<'_> {
self.body_ctx.span_map()
}
pub(super) fn lower_module_items(mut self, item_owner: &dyn HasModuleItem) -> ItemTree {
@ -79,7 +80,7 @@ impl<'a> Ctx<'a> {
pub(super) fn lower_block(mut self, block: &ast::BlockExpr) -> ItemTree {
self.tree
.attrs
.insert(AttrOwner::TopLevel, RawAttrs::new(self.db.upcast(), block, self.hygiene()));
.insert(AttrOwner::TopLevel, RawAttrs::new(self.db.upcast(), block, self.span_map()));
self.tree.top_level = block
.statements()
.filter_map(|stmt| match stmt {
@ -109,8 +110,7 @@ impl<'a> Ctx<'a> {
}
fn lower_mod_item(&mut self, item: &ast::Item) -> Option<ModItem> {
let attrs = RawAttrs::new(self.db.upcast(), item, self.hygiene());
let item: ModItem = match item {
let mod_item: ModItem = match item {
ast::Item::Struct(ast) => self.lower_struct(ast)?.into(),
ast::Item::Union(ast) => self.lower_union(ast)?.into(),
ast::Item::Enum(ast) => self.lower_enum(ast)?.into(),
@ -129,10 +129,10 @@ impl<'a> Ctx<'a> {
ast::Item::MacroDef(ast) => self.lower_macro_def(ast)?.into(),
ast::Item::ExternBlock(ast) => self.lower_extern_block(ast).into(),
};
let attrs = RawAttrs::new(self.db.upcast(), item, self.span_map());
self.add_attrs(mod_item.into(), attrs);
self.add_attrs(item.into(), attrs);
Some(item)
Some(mod_item)
}
fn add_attrs(&mut self, item: AttrOwner, attrs: RawAttrs) {
@ -146,21 +146,32 @@ impl<'a> Ctx<'a> {
}
}
fn lower_assoc_item(&mut self, item: &ast::AssocItem) -> Option<AssocItem> {
match item {
fn lower_assoc_item(&mut self, item_node: &ast::AssocItem) -> Option<AssocItem> {
let item: AssocItem = match item_node {
ast::AssocItem::Fn(ast) => self.lower_function(ast).map(Into::into),
ast::AssocItem::TypeAlias(ast) => self.lower_type_alias(ast).map(Into::into),
ast::AssocItem::Const(ast) => Some(self.lower_const(ast).into()),
ast::AssocItem::MacroCall(ast) => self.lower_macro_call(ast).map(Into::into),
}
}?;
let attrs = RawAttrs::new(self.db.upcast(), item_node, self.span_map());
self.add_attrs(
match item {
AssocItem::Function(it) => AttrOwner::ModItem(ModItem::Function(it)),
AssocItem::TypeAlias(it) => AttrOwner::ModItem(ModItem::TypeAlias(it)),
AssocItem::Const(it) => AttrOwner::ModItem(ModItem::Const(it)),
AssocItem::MacroCall(it) => AttrOwner::ModItem(ModItem::MacroCall(it)),
},
attrs,
);
Some(item)
}
fn lower_struct(&mut self, strukt: &ast::Struct) -> Option<FileItemTreeId<Struct>> {
let visibility = self.lower_visibility(strukt);
let name = strukt.name()?.as_name();
let ast_id = self.source_ast_id_map.ast_id(strukt);
let generic_params = self.lower_generic_params(HasImplicitSelf::No, strukt);
let fields = self.lower_fields(&strukt.kind());
let ast_id = self.source_ast_id_map.ast_id(strukt);
let res = Struct { name, visibility, generic_params, fields, ast_id };
Some(id(self.data().structs.alloc(res)))
}
@ -184,7 +195,10 @@ impl<'a> Ctx<'a> {
for field in fields.fields() {
if let Some(data) = self.lower_record_field(&field) {
let idx = self.data().fields.alloc(data);
self.add_attrs(idx.into(), RawAttrs::new(self.db.upcast(), &field, self.hygiene()));
self.add_attrs(
idx.into(),
RawAttrs::new(self.db.upcast(), &field, self.span_map()),
);
}
}
let end = self.next_field_idx();
@ -205,7 +219,7 @@ impl<'a> Ctx<'a> {
for (i, field) in fields.fields().enumerate() {
let data = self.lower_tuple_field(i, &field);
let idx = self.data().fields.alloc(data);
self.add_attrs(idx.into(), RawAttrs::new(self.db.upcast(), &field, self.hygiene()));
self.add_attrs(idx.into(), RawAttrs::new(self.db.upcast(), &field, self.span_map()));
}
let end = self.next_field_idx();
IdxRange::new(start..end)
@ -222,12 +236,12 @@ impl<'a> Ctx<'a> {
fn lower_union(&mut self, union: &ast::Union) -> Option<FileItemTreeId<Union>> {
let visibility = self.lower_visibility(union);
let name = union.name()?.as_name();
let ast_id = self.source_ast_id_map.ast_id(union);
let generic_params = self.lower_generic_params(HasImplicitSelf::No, union);
let fields = match union.record_field_list() {
Some(record_field_list) => self.lower_fields(&StructKind::Record(record_field_list)),
None => Fields::Record(IdxRange::new(self.next_field_idx()..self.next_field_idx())),
};
let ast_id = self.source_ast_id_map.ast_id(union);
let res = Union { name, visibility, generic_params, fields, ast_id };
Some(id(self.data().unions.alloc(res)))
}
@ -235,12 +249,12 @@ impl<'a> Ctx<'a> {
fn lower_enum(&mut self, enum_: &ast::Enum) -> Option<FileItemTreeId<Enum>> {
let visibility = self.lower_visibility(enum_);
let name = enum_.name()?.as_name();
let ast_id = self.source_ast_id_map.ast_id(enum_);
let generic_params = self.lower_generic_params(HasImplicitSelf::No, enum_);
let variants = match &enum_.variant_list() {
Some(variant_list) => self.lower_variants(variant_list),
None => IdxRange::new(self.next_variant_idx()..self.next_variant_idx()),
};
let ast_id = self.source_ast_id_map.ast_id(enum_);
let res = Enum { name, visibility, generic_params, variants, ast_id };
Some(id(self.data().enums.alloc(res)))
}
@ -252,7 +266,7 @@ impl<'a> Ctx<'a> {
let idx = self.data().variants.alloc(data);
self.add_attrs(
idx.into(),
RawAttrs::new(self.db.upcast(), &variant, self.hygiene()),
RawAttrs::new(self.db.upcast(), &variant, self.span_map()),
);
}
}
@ -303,28 +317,29 @@ impl<'a> Ctx<'a> {
});
self.add_attrs(
idx.into(),
RawAttrs::new(self.db.upcast(), &self_param, self.hygiene()),
RawAttrs::new(self.db.upcast(), &self_param, self.span_map()),
);
has_self_param = true;
}
for param in param_list.params() {
let ast_id = self.source_ast_id_map.ast_id(&param);
let idx = match param.dotdotdot_token() {
Some(_) => {
let ast_id = self.source_ast_id_map.ast_id(&param);
self.data()
.params
.alloc(Param { type_ref: None, ast_id: ParamAstId::Param(ast_id) })
}
Some(_) => self
.data()
.params
.alloc(Param { type_ref: None, ast_id: ParamAstId::Param(ast_id) }),
None => {
let type_ref = TypeRef::from_ast_opt(&self.body_ctx, param.ty());
let ty = Interned::new(type_ref);
let ast_id = self.source_ast_id_map.ast_id(&param);
self.data()
.params
.alloc(Param { type_ref: Some(ty), ast_id: ParamAstId::Param(ast_id) })
}
};
self.add_attrs(idx.into(), RawAttrs::new(self.db.upcast(), &param, self.hygiene()));
self.add_attrs(
idx.into(),
RawAttrs::new(self.db.upcast(), &param, self.span_map()),
);
}
}
let end_param = self.next_param_idx();
@ -394,8 +409,8 @@ impl<'a> Ctx<'a> {
let type_ref = type_alias.ty().map(|it| self.lower_type_ref(&it));
let visibility = self.lower_visibility(type_alias);
let bounds = self.lower_type_bounds(type_alias);
let generic_params = self.lower_generic_params(HasImplicitSelf::No, type_alias);
let ast_id = self.source_ast_id_map.ast_id(type_alias);
let generic_params = self.lower_generic_params(HasImplicitSelf::No, type_alias);
let res = TypeAlias { name, visibility, bounds, generic_params, type_ref, ast_id };
Some(id(self.data().type_aliases.alloc(res)))
}
@ -443,23 +458,17 @@ impl<'a> Ctx<'a> {
fn lower_trait(&mut self, trait_def: &ast::Trait) -> Option<FileItemTreeId<Trait>> {
let name = trait_def.name()?.as_name();
let visibility = self.lower_visibility(trait_def);
let ast_id = self.source_ast_id_map.ast_id(trait_def);
let generic_params =
self.lower_generic_params(HasImplicitSelf::Yes(trait_def.type_bound_list()), trait_def);
let is_auto = trait_def.auto_token().is_some();
let is_unsafe = trait_def.unsafe_token().is_some();
let ast_id = self.source_ast_id_map.ast_id(trait_def);
let items = trait_def
.assoc_item_list()
.into_iter()
.flat_map(|list| list.assoc_items())
.filter_map(|item| {
let attrs = RawAttrs::new(self.db.upcast(), &item, self.hygiene());
self.lower_assoc_item(&item).map(|item| {
self.add_attrs(ModItem::from(item).into(), attrs);
item
})
})
.filter_map(|item_node| self.lower_assoc_item(&item_node))
.collect();
let def = Trait { name, visibility, generic_params, is_auto, is_unsafe, items, ast_id };
@ -472,17 +481,18 @@ impl<'a> Ctx<'a> {
) -> Option<FileItemTreeId<TraitAlias>> {
let name = trait_alias_def.name()?.as_name();
let visibility = self.lower_visibility(trait_alias_def);
let ast_id = self.source_ast_id_map.ast_id(trait_alias_def);
let generic_params = self.lower_generic_params(
HasImplicitSelf::Yes(trait_alias_def.type_bound_list()),
trait_alias_def,
);
let ast_id = self.source_ast_id_map.ast_id(trait_alias_def);
let alias = TraitAlias { name, visibility, generic_params, ast_id };
Some(id(self.data().trait_aliases.alloc(alias)))
}
fn lower_impl(&mut self, impl_def: &ast::Impl) -> Option<FileItemTreeId<Impl>> {
let ast_id = self.source_ast_id_map.ast_id(impl_def);
        // Note that trait impls don't get an implicit `Self` parameter, unlike traits, because here
        // `Self` is a type alias rather than a type parameter, so it is handled by the resolver.
let generic_params = self.lower_generic_params(HasImplicitSelf::No, impl_def);
@ -499,14 +509,8 @@ impl<'a> Ctx<'a> {
.assoc_item_list()
.into_iter()
.flat_map(|it| it.assoc_items())
.filter_map(|item| {
let assoc = self.lower_assoc_item(&item)?;
let attrs = RawAttrs::new(self.db.upcast(), &item, self.hygiene());
self.add_attrs(ModItem::from(assoc).into(), attrs);
Some(assoc)
})
.filter_map(|item| self.lower_assoc_item(&item))
.collect();
let ast_id = self.source_ast_id_map.ast_id(impl_def);
let res =
Impl { generic_params, target_trait, self_ty, is_negative, is_unsafe, items, ast_id };
Some(id(self.data().impls.alloc(res)))
@ -515,7 +519,7 @@ impl<'a> Ctx<'a> {
fn lower_use(&mut self, use_item: &ast::Use) -> Option<FileItemTreeId<Use>> {
let visibility = self.lower_visibility(use_item);
let ast_id = self.source_ast_id_map.ast_id(use_item);
let (use_tree, _) = lower_use_tree(self.db, self.hygiene(), use_item.use_tree()?)?;
let (use_tree, _) = lower_use_tree(self.db, self.span_map(), use_item.use_tree()?)?;
let res = Use { visibility, ast_id, use_tree };
Some(id(self.data().uses.alloc(res)))
@ -537,10 +541,16 @@ impl<'a> Ctx<'a> {
}
fn lower_macro_call(&mut self, m: &ast::MacroCall) -> Option<FileItemTreeId<MacroCall>> {
let path = Interned::new(ModPath::from_src(self.db.upcast(), m.path()?, self.hygiene())?);
let span_map = self.span_map();
let path = Interned::new(ModPath::from_src(self.db.upcast(), m.path()?, span_map)?);
let ast_id = self.source_ast_id_map.ast_id(m);
let expand_to = hir_expand::ExpandTo::from_call_site(m);
let res = MacroCall { path, ast_id, expand_to };
let res = MacroCall {
path,
ast_id,
expand_to,
call_site: span_map.span_for_range(m.syntax().text_range()).ctx,
};
Some(id(self.data().macro_calls.alloc(res)))
}
@ -572,15 +582,15 @@ impl<'a> Ctx<'a> {
// (in other words, the knowledge that they're in an extern block must not be used).
// This is because an extern block can contain macros whose ItemTree's top-level items
// should be considered to be in an extern block too.
let attrs = RawAttrs::new(self.db.upcast(), &item, self.hygiene());
let id: ModItem = match item {
ast::ExternItem::Fn(ast) => self.lower_function(&ast)?.into(),
ast::ExternItem::Static(ast) => self.lower_static(&ast)?.into(),
ast::ExternItem::TypeAlias(ty) => self.lower_type_alias(&ty)?.into(),
ast::ExternItem::MacroCall(call) => self.lower_macro_call(&call)?.into(),
let mod_item: ModItem = match &item {
ast::ExternItem::Fn(ast) => self.lower_function(ast)?.into(),
ast::ExternItem::Static(ast) => self.lower_static(ast)?.into(),
ast::ExternItem::TypeAlias(ty) => self.lower_type_alias(ty)?.into(),
ast::ExternItem::MacroCall(call) => self.lower_macro_call(call)?.into(),
};
self.add_attrs(id.into(), attrs);
Some(id)
let attrs = RawAttrs::new(self.db.upcast(), &item, self.span_map());
self.add_attrs(mod_item.into(), attrs);
Some(mod_item)
})
.collect()
});
@ -612,12 +622,16 @@ impl<'a> Ctx<'a> {
generics.fill_bounds(&self.body_ctx, bounds, Either::Left(self_param));
}
let add_param_attrs = |item, param| {
let attrs = RawAttrs::new(self.db.upcast(), &param, self.body_ctx.hygiene());
let add_param_attrs = |item: Either<LocalTypeOrConstParamId, LocalLifetimeParamId>,
param| {
let attrs = RawAttrs::new(self.db.upcast(), &param, self.body_ctx.span_map());
// This is identical to the body of `Ctx::add_attrs()` but we can't call that here
// because it requires `&mut self` and the call to `generics.fill()` below also
// references `self`.
match self.tree.attrs.entry(item) {
match self.tree.attrs.entry(match item {
Either::Right(id) => id.into(),
Either::Left(id) => id.into(),
}) {
Entry::Occupied(mut entry) => {
*entry.get_mut() = entry.get().merge(attrs);
}
@ -643,7 +657,8 @@ impl<'a> Ctx<'a> {
}
fn lower_visibility(&mut self, item: &dyn ast::HasVisibility) -> RawVisibilityId {
let vis = RawVisibility::from_ast_with_hygiene(self.db, item.visibility(), self.hygiene());
let vis =
RawVisibility::from_ast_with_span_map(self.db, item.visibility(), self.span_map());
self.data().vis.alloc(vis)
}
@ -721,7 +736,7 @@ fn lower_abi(abi: ast::Abi) -> Interned<str> {
struct UseTreeLowering<'a> {
db: &'a dyn DefDatabase,
hygiene: &'a Hygiene,
span_map: SpanMapRef<'a>,
mapping: Arena<ast::UseTree>,
}
@ -734,7 +749,7 @@ impl UseTreeLowering<'_> {
// E.g. `use something::{inner}` (prefix is `None`, path is `something`)
// or `use something::{path::{inner::{innerer}}}` (prefix is `something::path`, path is `inner`)
Some(path) => {
match ModPath::from_src(self.db.upcast(), path, self.hygiene) {
match ModPath::from_src(self.db.upcast(), path, self.span_map) {
Some(it) => Some(it),
None => return None, // FIXME: report errors somewhere
}
@ -753,7 +768,7 @@ impl UseTreeLowering<'_> {
} else {
let is_glob = tree.star_token().is_some();
let path = match tree.path() {
Some(path) => Some(ModPath::from_src(self.db.upcast(), path, self.hygiene)?),
Some(path) => Some(ModPath::from_src(self.db.upcast(), path, self.span_map)?),
None => None,
};
let alias = tree.rename().map(|a| {
@ -789,10 +804,10 @@ impl UseTreeLowering<'_> {
pub(crate) fn lower_use_tree(
db: &dyn DefDatabase,
hygiene: &Hygiene,
span_map: SpanMapRef<'_>,
tree: ast::UseTree,
) -> Option<(UseTree, Arena<ast::UseTree>)> {
let mut lowering = UseTreeLowering { db, hygiene, mapping: Arena::new() };
let mut lowering = UseTreeLowering { db, span_map, mapping: Arena::new() };
let tree = lowering.lower_use_tree(tree)?;
Some((tree, lowering.mapping))
}

View file

@ -457,7 +457,7 @@ impl Printer<'_> {
}
}
ModItem::MacroCall(it) => {
let MacroCall { path, ast_id: _, expand_to: _ } = &self.tree[it];
let MacroCall { path, ast_id: _, expand_to: _, call_site: _ } = &self.tree[it];
wln!(self, "{}!(...);", path.display(self.db.upcast()));
}
ModItem::MacroRules(it) => {

View file

@ -370,3 +370,15 @@ struct S<#[cfg(never)] T>;
"#]],
)
}
#[test]
fn pub_self() {
check(
r#"
pub(self) struct S;
"#,
expect![[r#"
pub(self) struct S;
"#]],
)
}

View file

@ -7,7 +7,7 @@
//! Note that `hir_def` is a work in progress, so not all of the above is
//! actually true.
#![warn(rust_2018_idioms, unused_lifetimes, semicolon_in_expressions_from_macros)]
#![warn(rust_2018_idioms, unused_lifetimes)]
#![cfg_attr(feature = "in-rust-tree", feature(rustc_private))]
#[allow(unused)]
@ -63,7 +63,7 @@ use std::{
panic::{RefUnwindSafe, UnwindSafe},
};
use base_db::{impl_intern_key, salsa, CrateId, ProcMacroKind};
use base_db::{impl_intern_key, salsa, span::SyntaxContextId, CrateId, ProcMacroKind};
use hir_expand::{
ast_id_map::{AstIdNode, FileAstId},
attrs::{Attr, AttrId, AttrInput},
@ -72,19 +72,18 @@ use hir_expand::{
builtin_fn_macro::{BuiltinFnLikeExpander, EagerExpander},
db::ExpandDatabase,
eager::expand_eager_macro_input,
hygiene::Hygiene,
name::Name,
proc_macro::ProcMacroExpander,
AstId, ExpandError, ExpandResult, ExpandTo, HirFileId, InFile, MacroCallId, MacroCallKind,
MacroDefId, MacroDefKind, UnresolvedMacro,
MacroDefId, MacroDefKind,
};
use item_tree::ExternBlock;
use la_arena::Idx;
use nameres::DefMap;
use stdx::impl_from;
use syntax::ast;
use syntax::{ast, AstNode};
use ::tt::token_id as tt;
pub use hir_expand::tt;
use crate::{
builtin_type::BuiltinType,
@ -1166,16 +1165,20 @@ impl AsMacroCall for InFile<&ast::MacroCall> {
) -> Result<ExpandResult<Option<MacroCallId>>, UnresolvedMacro> {
let expands_to = hir_expand::ExpandTo::from_call_site(self.value);
let ast_id = AstId::new(self.file_id, db.ast_id_map(self.file_id).ast_id(self.value));
let h = Hygiene::new(db, self.file_id);
let path = self.value.path().and_then(|path| path::ModPath::from_src(db, path, &h));
let span_map = db.span_map(self.file_id);
let path =
self.value.path().and_then(|path| path::ModPath::from_src(db, path, span_map.as_ref()));
let Some(path) = path else {
return Ok(ExpandResult::only_err(ExpandError::other("malformed macro invocation")));
};
let call_site = span_map.span_for_range(self.value.syntax().text_range()).ctx;
macro_call_as_call_id_with_eager(
db,
&AstIdWithPath::new(ast_id.file_id, ast_id.value, path),
call_site,
expands_to,
krate,
resolver,
@ -1200,17 +1203,19 @@ impl<T: AstIdNode> AstIdWithPath<T> {
fn macro_call_as_call_id(
db: &dyn ExpandDatabase,
call: &AstIdWithPath<ast::MacroCall>,
call_site: SyntaxContextId,
expand_to: ExpandTo,
krate: CrateId,
resolver: impl Fn(path::ModPath) -> Option<MacroDefId> + Copy,
) -> Result<Option<MacroCallId>, UnresolvedMacro> {
macro_call_as_call_id_with_eager(db, call, expand_to, krate, resolver, resolver)
macro_call_as_call_id_with_eager(db, call, call_site, expand_to, krate, resolver, resolver)
.map(|res| res.value)
}
fn macro_call_as_call_id_with_eager(
db: &dyn ExpandDatabase,
call: &AstIdWithPath<ast::MacroCall>,
call_site: SyntaxContextId,
expand_to: ExpandTo,
krate: CrateId,
resolver: impl FnOnce(path::ModPath) -> Option<MacroDefId>,
@ -1222,7 +1227,7 @@ fn macro_call_as_call_id_with_eager(
let res = match def.kind {
MacroDefKind::BuiltInEager(..) => {
let macro_call = InFile::new(call.ast_id.file_id, call.ast_id.to_node(db));
expand_eager_macro_input(db, krate, macro_call, def, &|path| {
expand_eager_macro_input(db, krate, macro_call, def, call_site, &|path| {
eager_resolver(path).filter(MacroDefId::is_fn_like)
})
}
@ -1231,6 +1236,7 @@ fn macro_call_as_call_id_with_eager(
db,
krate,
MacroCallKind::FnLike { ast_id: call.ast_id, expand_to },
call_site,
)),
err: None,
},
@ -1315,6 +1321,7 @@ fn derive_macro_as_call_id(
item_attr: &AstIdWithPath<ast::Adt>,
derive_attr_index: AttrId,
derive_pos: u32,
call_site: SyntaxContextId,
krate: CrateId,
resolver: impl Fn(path::ModPath) -> Option<(MacroId, MacroDefId)>,
) -> Result<(MacroId, MacroDefId, MacroCallId), UnresolvedMacro> {
@ -1329,6 +1336,7 @@ fn derive_macro_as_call_id(
derive_index: derive_pos,
derive_attr_index,
},
call_site,
);
Ok((macro_id, def_id, call_id))
}
@ -1341,15 +1349,13 @@ fn attr_macro_as_call_id(
def: MacroDefId,
) -> MacroCallId {
let arg = match macro_attr.input.as_deref() {
Some(AttrInput::TokenTree(tt)) => (
{
let mut tt = tt.0.clone();
tt.delimiter = tt::Delimiter::UNSPECIFIED;
tt
},
tt.1.clone(),
),
_ => (tt::Subtree::empty(), Default::default()),
Some(AttrInput::TokenTree(tt)) => {
let mut tt = tt.as_ref().clone();
tt.delimiter = tt::Delimiter::DUMMY_INVISIBLE;
Some(tt)
}
_ => None,
};
def.as_lazy_macro(
@ -1357,11 +1363,18 @@ fn attr_macro_as_call_id(
krate,
MacroCallKind::Attr {
ast_id: item_attr.ast_id,
attr_args: Arc::new(arg),
attr_args: arg.map(Arc::new),
invoc_attr_index: macro_attr.id,
},
macro_attr.ctxt,
)
}
#[derive(Debug)]
pub struct UnresolvedMacro {
pub path: hir_expand::mod_path::ModPath,
}
intern::impl_internable!(
crate::type_ref::TypeRef,
crate::type_ref::TraitRef,

View file

@ -3,7 +3,7 @@ use std::cell::OnceCell;
use hir_expand::{
ast_id_map::{AstIdMap, AstIdNode},
hygiene::Hygiene,
span::{SpanMap, SpanMapRef},
AstId, HirFileId, InFile,
};
use syntax::ast;
@ -13,33 +13,34 @@ use crate::{db::DefDatabase, path::Path};
pub struct LowerCtx<'a> {
pub db: &'a dyn DefDatabase,
hygiene: Hygiene,
span_map: SpanMap,
    // FIXME: This optimization is probably pointless; the ast id map should pretty much always exist anyway.
ast_id_map: Option<(HirFileId, OnceCell<Arc<AstIdMap>>)>,
}
impl<'a> LowerCtx<'a> {
pub fn new(db: &'a dyn DefDatabase, hygiene: &Hygiene, file_id: HirFileId) -> Self {
LowerCtx { db, hygiene: hygiene.clone(), ast_id_map: Some((file_id, OnceCell::new())) }
pub fn new(db: &'a dyn DefDatabase, span_map: SpanMap, file_id: HirFileId) -> Self {
LowerCtx { db, span_map, ast_id_map: Some((file_id, OnceCell::new())) }
}
pub fn with_file_id(db: &'a dyn DefDatabase, file_id: HirFileId) -> Self {
LowerCtx {
db,
hygiene: Hygiene::new(db.upcast(), file_id),
span_map: db.span_map(file_id),
ast_id_map: Some((file_id, OnceCell::new())),
}
}
pub fn with_hygiene(db: &'a dyn DefDatabase, hygiene: &Hygiene) -> Self {
LowerCtx { db, hygiene: hygiene.clone(), ast_id_map: None }
pub fn with_span_map(db: &'a dyn DefDatabase, span_map: SpanMap) -> Self {
LowerCtx { db, span_map, ast_id_map: None }
}
pub(crate) fn hygiene(&self) -> &Hygiene {
&self.hygiene
pub(crate) fn span_map(&self) -> SpanMapRef<'_> {
self.span_map.as_ref()
}
pub(crate) fn lower_path(&self, ast: ast::Path) -> Option<Path> {
Path::from_src(ast, self)
Path::from_src(self, ast)
}
pub(crate) fn ast_id<N: AstIdNode>(&self, item: &N) -> Option<AstId<N>> {

View file

@ -468,12 +468,12 @@ macro_rules! concat_bytes {}
fn main() { concat_bytes!(b'A', b"BC", [68, b'E', 70]); }
"##,
expect![[r##"
expect![[r#"
#[rustc_builtin_macro]
macro_rules! concat_bytes {}
fn main() { [b'A', 66, 67, 68, b'E', 70]; }
"##]],
"#]],
);
}

View file

@ -15,7 +15,6 @@ use crate::macro_expansion_tests::check;
fn token_mapping_smoke_test() {
check(
r#"
// +tokenids
macro_rules! f {
( struct $ident:ident ) => {
struct $ident {
@ -24,26 +23,22 @@ macro_rules! f {
};
}
// +tokenids
// +spans+syntaxctxt
f!(struct MyTraitMap2);
"#,
expect![[r##"
// call ids will be shifted by Shift(30)
// +tokenids
macro_rules! f {#0
(#1 struct#2 $#3ident#4:#5ident#6 )#1 =#7>#8 {#9
struct#10 $#11ident#12 {#13
map#14:#15 :#16:#17std#18:#19:#20collections#21:#22:#23HashSet#24<#25(#26)#26>#27,#28
}#13
}#9;#29
}#0
expect![[r#"
macro_rules! f {
( struct $ident:ident ) => {
struct $ident {
map: ::std::collections::HashSet<()>,
}
};
}
// // +tokenids
// f!(struct#1 MyTraitMap2#2);
struct#10 MyTraitMap2#32 {#13
map#14:#15 ::std#18::collections#21::HashSet#24<#25(#26)#26>#27,#28
}#13
"##]],
struct#FileId(0):1@58..64\2# MyTraitMap2#FileId(0):2@31..42\0# {#FileId(0):1@72..73\2#
map#FileId(0):1@86..89\2#:#FileId(0):1@89..90\2# #FileId(0):1@89..90\2#::#FileId(0):1@91..92\2#std#FileId(0):1@93..96\2#::#FileId(0):1@96..97\2#collections#FileId(0):1@98..109\2#::#FileId(0):1@109..110\2#HashSet#FileId(0):1@111..118\2#<#FileId(0):1@118..119\2#(#FileId(0):1@119..120\2#)#FileId(0):1@120..121\2#>#FileId(0):1@121..122\2#,#FileId(0):1@122..123\2#
}#FileId(0):1@132..133\2#
"#]],
);
}
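
The annotated expectations above use a compact span notation, token#FileId(f):a@start..end\ctx#: the anchor file, the erased AST id within it, the byte range relative to that anchor, and the syntax context id (this reading is inferred from the output, not documented here). A sketch of a renderer producing the same shape:

struct Span {
    file: u32,   // anchor FileId
    ast_id: u32, // erased AST id within the anchor file
    start: u32,  // byte range relative to the anchor
    end: u32,
    ctx: u32,    // syntax context id
}

fn render(token: &str, s: &Span) -> String {
    format!("{token}#FileId({}):{}@{}..{}\\{}#", s.file, s.ast_id, s.start, s.end, s.ctx)
}

fn main() {
    let s = Span { file: 0, ast_id: 1, start: 58, end: 64, ctx: 2 };
    assert_eq!(render("struct", &s), r"struct#FileId(0):1@58..64\2#");
}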
@ -53,49 +48,42 @@ fn token_mapping_floats() {
// (and related issues)
check(
r#"
// +tokenids
// +spans+syntaxctxt
macro_rules! f {
($($tt:tt)*) => {
$($tt)*
};
}
// +tokenids
// +spans+syntaxctxt
f! {
fn main() {
1;
1.0;
((1,),).0.0;
let x = 1;
}
}
"#,
expect![[r##"
// call ids will be shifted by Shift(18)
// +tokenids
macro_rules! f {#0
(#1$#2(#3$#4tt#5:#6tt#7)#3*#8)#1 =#9>#10 {#11
$#12(#13$#14tt#15)#13*#16
}#11;#17
}#0
expect![[r#"
// +spans+syntaxctxt
macro_rules! f {
($($tt:tt)*) => {
$($tt)*
};
}
// // +tokenids
// f! {
// fn#1 main#2() {
// 1#5;#6
// 1.0#7;#8
// let#9 x#10 =#11 1#12;#13
// }
// }
fn#19 main#20(#21)#21 {#22
1#23;#24
1.0#25;#26
let#27 x#28 =#29 1#30;#31
}#22
fn#FileId(0):2@30..32\0# main#FileId(0):2@33..37\0#(#FileId(0):2@37..38\0#)#FileId(0):2@38..39\0# {#FileId(0):2@40..41\0#
1#FileId(0):2@50..51\0#;#FileId(0):2@51..52\0#
1.0#FileId(0):2@61..64\0#;#FileId(0):2@64..65\0#
(#FileId(0):2@74..75\0#(#FileId(0):2@75..76\0#1#FileId(0):2@76..77\0#,#FileId(0):2@77..78\0# )#FileId(0):2@78..79\0#,#FileId(0):2@79..80\0# )#FileId(0):2@80..81\0#.#FileId(0):2@81..82\0#0#FileId(0):2@82..85\0#.#FileId(0):2@82..85\0#0#FileId(0):2@82..85\0#;#FileId(0):2@85..86\0#
let#FileId(0):2@95..98\0# x#FileId(0):2@99..100\0# =#FileId(0):2@101..102\0# 1#FileId(0):2@103..104\0#;#FileId(0):2@104..105\0#
}#FileId(0):2@110..111\0#
"##]],
"#]],
);
}
@ -105,59 +93,115 @@ fn eager_expands_with_unresolved_within() {
r#"
#[rustc_builtin_macro]
#[macro_export]
macro_rules! format_args {}
macro_rules! concat {}
macro_rules! identity {
($tt:tt) => {
$tt
}
}
fn main(foo: ()) {
format_args!("{} {} {}", format_args!("{}", 0), foo, identity!(10), "bar")
concat!("hello", identity!("world"), unresolved!(), identity!("!"));
}
"#,
expect![[r##"
#[rustc_builtin_macro]
#[macro_export]
macro_rules! format_args {}
macro_rules! concat {}
macro_rules! identity {
($tt:tt) => {
$tt
}
}
fn main(foo: ()) {
builtin #format_args ("{} {} {}", format_args!("{}", 0), foo, identity!(10), "bar")
/* error: unresolved macro unresolved */"helloworld!";
}
"##]],
);
}
#[test]
fn token_mapping_eager() {
fn concat_spans() {
check(
r#"
#[rustc_builtin_macro]
#[macro_export]
macro_rules! format_args {}
macro_rules! concat {}
macro_rules! identity {
($expr:expr) => { $expr };
($tt:tt) => {
$tt
}
}
fn main(foo: ()) {
format_args/*+tokenids*/!("{} {} {}", format_args!("{}", 0), foo, identity!(10), "bar")
#[rustc_builtin_macro]
#[macro_export]
macro_rules! concat {}
macro_rules! identity {
($tt:tt) => {
$tt
}
}
fn main(foo: ()) {
concat/*+spans+syntaxctxt*/!("hello", concat!("w", identity!("o")), identity!("rld"), unresolved!(), identity!("!"));
}
}
"#,
expect![[r##"
#[rustc_builtin_macro]
#[macro_export]
macro_rules! format_args {}
macro_rules! concat {}
macro_rules! identity {
($expr:expr) => { $expr };
($tt:tt) => {
$tt
}
}
fn main(foo: ()) {
// format_args/*+tokenids*/!("{} {} {}"#1,#2 format_args#3!#4("{}"#6,#7 0#8),#9 foo#10,#11 identity#12!#13(10#15),#16 "bar"#17)
builtin#4294967295 ##4294967295format_args#4294967295 (#0"{} {} {}"#1,#2 format_args#3!#4(#5"{}"#6,#7 0#8)#5,#9 foo#10,#11 identity#12!#13(#1410#15)#14,#16 "bar"#17)#0
#[rustc_builtin_macro]
#[macro_export]
macro_rules! concat {}
macro_rules! identity {
($tt:tt) => {
$tt
}
}
fn main(foo: ()) {
/* error: unresolved macro unresolved */"helloworld!"#FileId(0):3@207..323\6#;
}
}
"##]],
);
}
#[test]
fn token_mapping_across_files() {
check(
r#"
//- /lib.rs
#[macro_use]
mod foo;
mk_struct/*+spans+syntaxctxt*/!(Foo with u32);
//- /foo.rs
macro_rules! mk_struct {
($foo:ident with $ty:ty) => { struct $foo($ty); }
}
"#,
expect![[r#"
#[macro_use]
mod foo;
struct#FileId(1):1@59..65\2# Foo#FileId(0):2@32..35\0#(#FileId(1):1@70..71\2#u32#FileId(0):2@41..44\0#)#FileId(1):1@74..75\2#;#FileId(1):1@75..76\2#
"#]],
);
}
#[test]
fn float_field_access_macro_input() {
check(

View file

@ -1004,3 +1004,29 @@ fn main() {
"##]],
);
}
#[test]
fn eager_concat_bytes_panic() {
check(
r#"
#[rustc_builtin_macro]
#[macro_export]
macro_rules! concat_bytes {}
fn main() {
let x = concat_bytes!(2);
}
"#,
expect![[r#"
#[rustc_builtin_macro]
#[macro_export]
macro_rules! concat_bytes {}
fn main() {
let x = /* error: unexpected token in input */[];
}
"#]],
);
}

View file

@ -16,21 +16,16 @@ mod proc_macros;
use std::{iter, ops::Range, sync};
use ::mbe::TokenMap;
use base_db::{fixture::WithFixture, ProcMacro, SourceDatabase};
use base_db::{fixture::WithFixture, span::SpanData, ProcMacro, SourceDatabase};
use expect_test::Expect;
use hir_expand::{
db::{DeclarativeMacroExpander, ExpandDatabase},
AstId, InFile, MacroFile,
};
use hir_expand::{db::ExpandDatabase, span::SpanMapRef, InFile, MacroFileId, MacroFileIdExt};
use stdx::format_to;
use syntax::{
ast::{self, edit::IndentLevel},
AstNode, SyntaxElement,
SyntaxKind::{self, COMMENT, EOF, IDENT, LIFETIME_IDENT},
SyntaxNode, TextRange, T,
AstNode,
SyntaxKind::{COMMENT, EOF, IDENT, LIFETIME_IDENT},
SyntaxNode, T,
};
use tt::token_id::{Subtree, TokenId};
use crate::{
db::DefDatabase,
@ -39,6 +34,7 @@ use crate::{
resolver::HasResolver,
src::HasSource,
test_db::TestDB,
tt::Subtree,
AdtId, AsMacroCall, Lookup, ModuleDefId,
};
@ -88,43 +84,6 @@ pub fn identity_when_valid(_attr: TokenStream, item: TokenStream) -> TokenStream
let mut text_edits = Vec::new();
let mut expansions = Vec::new();
for macro_ in source_file.syntax().descendants().filter_map(ast::Macro::cast) {
let mut show_token_ids = false;
for comment in macro_.syntax().children_with_tokens().filter(|it| it.kind() == COMMENT) {
show_token_ids |= comment.to_string().contains("+tokenids");
}
if !show_token_ids {
continue;
}
let call_offset = macro_.syntax().text_range().start().into();
let file_ast_id = db.ast_id_map(source.file_id).ast_id(&macro_);
let ast_id = AstId::new(source.file_id, file_ast_id.upcast());
let DeclarativeMacroExpander { mac, def_site_token_map } =
&*db.decl_macro_expander(krate, ast_id);
assert_eq!(mac.err(), None);
let tt = match &macro_ {
ast::Macro::MacroRules(mac) => mac.token_tree().unwrap(),
ast::Macro::MacroDef(_) => unimplemented!(""),
};
let tt_start = tt.syntax().text_range().start();
tt.syntax().descendants_with_tokens().filter_map(SyntaxElement::into_token).for_each(
|token| {
let range = token.text_range().checked_sub(tt_start).unwrap();
if let Some(id) = def_site_token_map.token_by_range(range) {
let offset = (range.end() + tt_start).into();
text_edits.push((offset..offset, format!("#{}", id.0)));
}
},
);
text_edits.push((
call_offset..call_offset,
format!("// call ids will be shifted by {:?}\n", mac.shift()),
));
}
for macro_call in source_file.syntax().descendants().filter_map(ast::MacroCall::cast) {
let macro_call = InFile::new(source.file_id, &macro_call);
let res = macro_call
@ -135,20 +94,22 @@ pub fn identity_when_valid(_attr: TokenStream, item: TokenStream) -> TokenStream
})
.unwrap();
let macro_call_id = res.value.unwrap();
let macro_file = MacroFile { macro_call_id };
let macro_file = MacroFileId { macro_call_id };
let mut expansion_result = db.parse_macro_expansion(macro_file);
expansion_result.err = expansion_result.err.or(res.err);
expansions.push((macro_call.value.clone(), expansion_result, db.macro_arg(macro_call_id)));
expansions.push((macro_call.value.clone(), expansion_result));
}
for (call, exp, arg) in expansions.into_iter().rev() {
for (call, exp) in expansions.into_iter().rev() {
let mut tree = false;
let mut expect_errors = false;
let mut show_token_ids = false;
let mut show_spans = false;
let mut show_ctxt = false;
for comment in call.syntax().children_with_tokens().filter(|it| it.kind() == COMMENT) {
tree |= comment.to_string().contains("+tree");
expect_errors |= comment.to_string().contains("+errors");
show_token_ids |= comment.to_string().contains("+tokenids");
show_spans |= comment.to_string().contains("+spans");
show_ctxt |= comment.to_string().contains("+syntaxctxt");
}
let mut expn_text = String::new();
@ -164,13 +125,16 @@ pub fn identity_when_valid(_attr: TokenStream, item: TokenStream) -> TokenStream
} else {
assert!(
parse.errors().is_empty(),
"parse errors in expansion: \n{:#?}",
parse.errors()
"parse errors in expansion: \n{:#?}\n```\n{}\n```",
parse.errors(),
parse.syntax_node(),
);
}
let pp = pretty_print_macro_expansion(
parse.syntax_node(),
show_token_ids.then_some(&*token_map),
SpanMapRef::ExpansionSpanMap(&token_map),
show_spans,
show_ctxt,
);
let indent = IndentLevel::from_node(call.syntax());
let pp = reindent(indent, pp);
@ -185,27 +149,7 @@ pub fn identity_when_valid(_attr: TokenStream, item: TokenStream) -> TokenStream
}
let range = call.syntax().text_range();
let range: Range<usize> = range.into();
if show_token_ids {
if let Some((tree, map, _)) = arg.value.as_deref() {
let tt_range = call.token_tree().unwrap().syntax().text_range();
let mut ranges = Vec::new();
extract_id_ranges(&mut ranges, map, tree);
for (range, id) in ranges {
let idx = (tt_range.start() + range.end()).into();
text_edits.push((idx..idx, format!("#{}", id.0)));
}
}
text_edits.push((range.start..range.start, "// ".into()));
call.to_string().match_indices('\n').for_each(|(offset, _)| {
let offset = offset + 1 + range.start;
text_edits.push((offset..offset, "// ".into()));
});
text_edits.push((range.end..range.end, "\n".into()));
text_edits.push((range.end..range.end, expn_text));
} else {
text_edits.push((range, expn_text));
}
text_edits.push((range, expn_text));
}
text_edits.sort_by_key(|(range, _)| range.start);
@ -226,19 +170,43 @@ pub fn identity_when_valid(_attr: TokenStream, item: TokenStream) -> TokenStream
}
_ => None,
};
if let Some(src) = src {
if src.file_id.is_attr_macro(&db) || src.file_id.is_custom_derive(&db) {
let pp = pretty_print_macro_expansion(src.value, None);
format_to!(expanded_text, "\n{}", pp)
if let Some(file_id) = src.file_id.macro_file() {
if file_id.is_attr_macro(&db) || file_id.is_custom_derive(&db) {
let call = file_id.call_node(&db);
let mut show_spans = false;
let mut show_ctxt = false;
for comment in
call.value.children_with_tokens().filter(|it| it.kind() == COMMENT)
{
show_spans |= comment.to_string().contains("+spans");
show_ctxt |= comment.to_string().contains("+syntaxctxt");
}
let pp = pretty_print_macro_expansion(
src.value,
db.span_map(src.file_id).as_ref(),
show_spans,
show_ctxt,
);
format_to!(expanded_text, "\n{}", pp)
}
}
}
}
for impl_id in def_map[local_id].scope.impls() {
let src = impl_id.lookup(&db).source(&db);
if src.file_id.is_builtin_derive(&db) {
let pp = pretty_print_macro_expansion(src.value.syntax().clone(), None);
format_to!(expanded_text, "\n{}", pp)
if let Some(macro_file) = src.file_id.macro_file() {
if macro_file.is_builtin_derive(&db) {
let pp = pretty_print_macro_expansion(
src.value.syntax().clone(),
db.span_map(macro_file.into()).as_ref(),
false,
false,
);
format_to!(expanded_text, "\n{}", pp)
}
}
}
@ -246,20 +214,6 @@ pub fn identity_when_valid(_attr: TokenStream, item: TokenStream) -> TokenStream
expect.assert_eq(&expanded_text);
}
fn extract_id_ranges(ranges: &mut Vec<(TextRange, TokenId)>, map: &TokenMap, tree: &Subtree) {
tree.token_trees.iter().for_each(|tree| match tree {
tt::TokenTree::Leaf(leaf) => {
let id = match leaf {
tt::Leaf::Literal(it) => it.span,
tt::Leaf::Punct(it) => it.span,
tt::Leaf::Ident(it) => it.span,
};
ranges.extend(map.ranges_by_token(id, SyntaxKind::ERROR).map(|range| (range, id)));
}
tt::TokenTree::Subtree(tree) => extract_id_ranges(ranges, map, tree),
});
}
fn reindent(indent: IndentLevel, pp: String) -> String {
if !pp.contains('\n') {
return pp;
@ -276,7 +230,12 @@ fn reindent(indent: IndentLevel, pp: String) -> String {
res
}
fn pretty_print_macro_expansion(expn: SyntaxNode, map: Option<&TokenMap>) -> String {
fn pretty_print_macro_expansion(
expn: SyntaxNode,
map: SpanMapRef<'_>,
show_spans: bool,
show_ctxt: bool,
) -> String {
let mut res = String::new();
let mut prev_kind = EOF;
let mut indent_level = 0;
@ -322,10 +281,22 @@ fn pretty_print_macro_expansion(expn: SyntaxNode, map: Option<&TokenMap>) -> Str
}
prev_kind = curr_kind;
format_to!(res, "{}", token);
if let Some(map) = map {
if let Some(id) = map.token_by_range(token.text_range()) {
format_to!(res, "#{}", id.0);
if show_spans || show_ctxt {
let span = map.span_for_range(token.text_range());
format_to!(res, "#");
if show_spans {
format_to!(
res,
"{:?}:{:?}@{:?}",
span.anchor.file_id,
span.anchor.ast_id.into_raw(),
span.range,
);
}
if show_ctxt {
format_to!(res, "\\{}", span.ctx);
}
format_to!(res, "#");
}
}
res
@ -342,6 +313,9 @@ impl base_db::ProcMacroExpander for IdentityWhenValidProcMacroExpander {
subtree: &Subtree,
_: Option<&Subtree>,
_: &base_db::Env,
_: SpanData,
_: SpanData,
_: SpanData,
) -> Result<Subtree, base_db::ProcMacroExpansionError> {
let (parse, _) =
::mbe::token_tree_to_syntax_node(subtree, ::mbe::TopEntryPoint::MacroItems);

View file

@ -93,6 +93,41 @@ fn foo() {
);
}
#[test]
fn macro_rules_in_attr() {
// Regression test for https://github.com/rust-lang/rust-analyzer/issues/12211
check(
r#"
//- proc_macros: identity
macro_rules! id {
($($t:tt)*) => {
$($t)*
};
}
id! {
#[proc_macros::identity]
impl Foo for WrapBj {
async fn foo(&self) {
self.id().await;
}
}
}
"#,
expect![[r#"
macro_rules! id {
($($t:tt)*) => {
$($t)*
};
}
#[proc_macros::identity] impl Foo for WrapBj {
async fn foo(&self ) {
self .id().await ;
}
}
"#]],
);
}
#[test]
fn float_parsing_panic() {
// Regression test for https://github.com/rust-lang/rust-analyzer/issues/12211
@ -127,3 +162,27 @@ macro_rules! id {
"#]],
);
}
#[test]
fn float_attribute_mapping() {
check(
r#"
//- proc_macros: identity
//+spans+syntaxctxt
#[proc_macros::identity]
fn foo(&self) {
self.0. 1;
}
"#,
expect![[r#"
//+spans+syntaxctxt
#[proc_macros::identity]
fn foo(&self) {
self.0. 1;
}
fn#FileId(0):1@45..47\0# foo#FileId(0):1@48..51\0#(#FileId(0):1@51..52\0#&#FileId(0):1@52..53\0#self#FileId(0):1@53..57\0# )#FileId(0):1@57..58\0# {#FileId(0):1@59..60\0#
self#FileId(0):1@65..69\0# .#FileId(0):1@69..70\0#0#FileId(0):1@70..71\0#.#FileId(0):1@71..72\0#1#FileId(0):1@73..74\0#;#FileId(0):1@74..75\0#
}#FileId(0):1@76..77\0#"#]],
);
}
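
For readers decoding the expectation above: with `+spans+syntaxctxt` enabled, every token is followed by `#<anchor file>:<anchor ast id>@<text range>\<syntax context>#`, as produced by the `pretty_print_macro_expansion` change earlier in this diff. A minimal, self-contained sketch of a decoder for one such annotation (the `SpanAnnotation` type and `parse_annotation` helper are invented for illustration, not part of the commit):

#[derive(Debug, PartialEq, Eq)]
struct SpanAnnotation {
    file_id: u32,      // anchor file, e.g. the `0` of `FileId(0)`
    ast_id: u32,       // erased ast id of the anchor node
    range: (u32, u32), // text range relative to the anchor
    ctx: u32,          // syntax context id; `0` is the root context
}

// Decodes e.g. "FileId(0):1@45..47\0" (the part between the `#` markers).
fn parse_annotation(s: &str) -> Option<SpanAnnotation> {
    let s = s.strip_prefix("FileId(")?;
    let (file_id, rest) = s.split_once(')')?;
    let rest = rest.strip_prefix(':')?;
    let (ast_id, rest) = rest.split_once('@')?;
    let (range, ctx) = rest.split_once('\\')?;
    let (start, end) = range.split_once("..")?;
    Some(SpanAnnotation {
        file_id: file_id.parse().ok()?,
        ast_id: ast_id.parse().ok()?,
        range: (start.parse().ok()?, end.parse().ok()?),
        ctx: ctx.parse().ok()?,
    })
}

fn main() {
    let ann = parse_annotation("FileId(0):1@45..47\\0").unwrap();
    assert_eq!((ann.file_id, ann.ast_id, ann.range, ann.ctx), (0, 1, (45, 47), 0));
}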

View file

@ -5,7 +5,7 @@
use std::{cmp::Ordering, iter, mem};
use base_db::{CrateId, Dependency, Edition, FileId};
use base_db::{span::SyntaxContextId, CrateId, Dependency, Edition, FileId};
use cfg::{CfgExpr, CfgOptions};
use either::Either;
use hir_expand::{
@ -14,7 +14,6 @@ use hir_expand::{
builtin_attr_macro::find_builtin_attr,
builtin_derive_macro::find_builtin_derive,
builtin_fn_macro::find_builtin_macro,
hygiene::Hygiene,
name::{name, AsName, Name},
proc_macro::ProcMacroExpander,
ExpandResult, ExpandTo, HirFileId, InFile, MacroCallId, MacroCallKind, MacroCallLoc,
@ -85,8 +84,17 @@ pub(super) fn collect_defs(db: &dyn DefDatabase, def_map: DefMap, tree_id: TreeI
.enumerate()
.map(|(idx, it)| {
// FIXME: a hacky way to create a Name from string.
let name =
tt::Ident { text: it.name.clone(), span: tt::TokenId::unspecified() };
let name = tt::Ident {
text: it.name.clone(),
span: tt::SpanData {
range: syntax::TextRange::empty(syntax::TextSize::new(0)),
anchor: base_db::span::SpanAnchor {
file_id: FileId::BOGUS,
ast_id: base_db::span::ROOT_ERASED_FILE_AST_ID,
},
ctx: SyntaxContextId::ROOT,
},
};
(name.as_name(), ProcMacroExpander::new(base_db::ProcMacroId(idx as u32)))
})
.collect())
@ -112,7 +120,6 @@ pub(super) fn collect_defs(db: &dyn DefDatabase, def_map: DefMap, tree_id: TreeI
from_glob_import: Default::default(),
skip_attrs: Default::default(),
is_proc_macro,
hygienes: FxHashMap::default(),
};
if tree_id.is_block() {
collector.seed_with_inner(tree_id);
@ -212,9 +219,23 @@ struct MacroDirective {
#[derive(Clone, Debug, Eq, PartialEq)]
enum MacroDirectiveKind {
FnLike { ast_id: AstIdWithPath<ast::MacroCall>, expand_to: ExpandTo },
Derive { ast_id: AstIdWithPath<ast::Adt>, derive_attr: AttrId, derive_pos: usize },
Attr { ast_id: AstIdWithPath<ast::Item>, attr: Attr, mod_item: ModItem, tree: TreeId },
FnLike {
ast_id: AstIdWithPath<ast::MacroCall>,
expand_to: ExpandTo,
call_site: SyntaxContextId,
},
Derive {
ast_id: AstIdWithPath<ast::Adt>,
derive_attr: AttrId,
derive_pos: usize,
call_site: SyntaxContextId,
},
Attr {
ast_id: AstIdWithPath<ast::Item>,
attr: Attr,
mod_item: ModItem,
/* is this needed? */ tree: TreeId,
},
}
/// Walks the module tree recursively
@ -242,12 +263,6 @@ struct DefCollector<'a> {
/// This also stores the attributes to skip when we resolve derive helpers and non-macro
/// non-builtin attributes in general.
skip_attrs: FxHashMap<InFile<ModItem>, AttrId>,
/// `Hygiene` cache, because `Hygiene` construction is expensive.
///
/// Almost all paths should have been lowered to `ModPath` during `ItemTree` construction.
/// However, `DefCollector` still needs to lower paths in attributes, in particular those in
/// derive meta item list.
hygienes: FxHashMap<HirFileId, Hygiene>,
}
impl DefCollector<'_> {
@ -315,12 +330,11 @@ impl DefCollector<'_> {
}
if *attr_name == hir_expand::name![feature] {
let hygiene = &Hygiene::new_unhygienic();
let features = attr
.parse_path_comma_token_tree(self.db.upcast(), hygiene)
.parse_path_comma_token_tree(self.db.upcast())
.into_iter()
.flatten()
.filter_map(|feat| match feat.segments() {
.filter_map(|(feat, _)| match feat.segments() {
[name] => Some(name.to_smol_str()),
_ => None,
});
@ -471,7 +485,7 @@ impl DefCollector<'_> {
directive.module_id,
MacroCallKind::Attr {
ast_id: ast_id.ast_id,
attr_args: Arc::new((tt::Subtree::empty(), Default::default())),
attr_args: None,
invoc_attr_index: attr.id,
},
attr.path().clone(),
@ -1119,10 +1133,11 @@ impl DefCollector<'_> {
let resolver_def_id = |path| resolver(path).map(|(_, it)| it);
match &directive.kind {
MacroDirectiveKind::FnLike { ast_id, expand_to } => {
MacroDirectiveKind::FnLike { ast_id, expand_to, call_site } => {
let call_id = macro_call_as_call_id(
self.db.upcast(),
ast_id,
*call_site,
*expand_to,
self.def_map.krate,
resolver_def_id,
@ -1134,12 +1149,13 @@ impl DefCollector<'_> {
return false;
}
}
MacroDirectiveKind::Derive { ast_id, derive_attr, derive_pos } => {
MacroDirectiveKind::Derive { ast_id, derive_attr, derive_pos, call_site } => {
let id = derive_macro_as_call_id(
self.db,
ast_id,
*derive_attr,
*derive_pos as u32,
*call_site,
self.def_map.krate,
resolver,
);
@ -1212,7 +1228,7 @@ impl DefCollector<'_> {
};
if matches!(
def,
MacroDefId { kind:MacroDefKind::BuiltInAttr(expander, _),.. }
MacroDefId { kind: MacroDefKind::BuiltInAttr(expander, _),.. }
if expander.is_derive()
) {
// Resolved to `#[derive]`
@ -1234,22 +1250,10 @@ impl DefCollector<'_> {
};
let ast_id = ast_id.with_value(ast_adt_id);
let extend_unhygenic;
let hygiene = if file_id.is_macro() {
self.hygienes
.entry(file_id)
.or_insert_with(|| Hygiene::new(self.db.upcast(), file_id))
} else {
// Avoid heap allocation (`Hygiene` embraces `Arc`) and hash map entry
// when we're in an ordinary (non-macro) file.
extend_unhygenic = Hygiene::new_unhygienic();
&extend_unhygenic
};
match attr.parse_path_comma_token_tree(self.db.upcast(), hygiene) {
match attr.parse_path_comma_token_tree(self.db.upcast()) {
Some(derive_macros) => {
let mut len = 0;
for (idx, path) in derive_macros.enumerate() {
for (idx, (path, call_site)) in derive_macros.enumerate() {
let ast_id = AstIdWithPath::new(file_id, ast_id.value, path);
self.unresolved_macros.push(MacroDirective {
module_id: directive.module_id,
@ -1258,6 +1262,7 @@ impl DefCollector<'_> {
ast_id,
derive_attr: attr.id,
derive_pos: idx,
call_site,
},
container: directive.container,
});
@ -1414,11 +1419,12 @@ impl DefCollector<'_> {
for directive in &self.unresolved_macros {
match &directive.kind {
MacroDirectiveKind::FnLike { ast_id, expand_to } => {
MacroDirectiveKind::FnLike { ast_id, expand_to, call_site } => {
// FIXME: we shouldn't need to re-resolve the macro here just to get the unresolved error!
let macro_call_as_call_id = macro_call_as_call_id(
self.db.upcast(),
ast_id,
*call_site,
*expand_to,
self.def_map.krate,
|path| {
@ -1444,7 +1450,7 @@ impl DefCollector<'_> {
));
}
}
MacroDirectiveKind::Derive { ast_id, derive_attr, derive_pos } => {
MacroDirectiveKind::Derive { ast_id, derive_attr, derive_pos, call_site: _ } => {
self.def_map.diagnostics.push(DefDiagnostic::unresolved_macro_call(
directive.module_id,
MacroCallKind::Derive {
@ -1823,9 +1829,8 @@ impl ModCollector<'_, '_> {
cov_mark::hit!(macro_rules_from_other_crates_are_visible_with_macro_use);
let mut single_imports = Vec::new();
let hygiene = Hygiene::new_unhygienic();
for attr in macro_use_attrs {
let Some(paths) = attr.parse_path_comma_token_tree(db.upcast(), &hygiene) else {
let Some(paths) = attr.parse_path_comma_token_tree(db.upcast()) else {
// `#[macro_use]` (without any paths) found, forget collected names and just import
// all visible macros.
self.def_collector.import_macros_from_extern_crate(
@ -1835,7 +1840,7 @@ impl ModCollector<'_, '_> {
);
return;
};
for path in paths {
for (path, _) in paths {
if let Some(name) = path.as_ident() {
single_imports.push(name.clone());
}
@ -2083,8 +2088,18 @@ impl ModCollector<'_, '_> {
let name = match attrs.by_key("rustc_builtin_macro").string_value() {
Some(it) => {
// FIXME: a hacky way to create a Name from string.
name =
tt::Ident { text: it.clone(), span: tt::TokenId::unspecified() }.as_name();
name = tt::Ident {
text: it.clone(),
span: tt::SpanData {
range: syntax::TextRange::empty(syntax::TextSize::new(0)),
anchor: base_db::span::SpanAnchor {
file_id: FileId::BOGUS,
ast_id: base_db::span::ROOT_ERASED_FILE_AST_ID,
},
ctx: SyntaxContextId::ROOT,
},
}
.as_name();
&name
}
None => {
@ -2210,8 +2225,12 @@ impl ModCollector<'_, '_> {
}
}
fn collect_macro_call(&mut self, mac: &MacroCall, container: ItemContainerId) {
let ast_id = AstIdWithPath::new(self.file_id(), mac.ast_id, ModPath::clone(&mac.path));
fn collect_macro_call(
&mut self,
&MacroCall { ref path, ast_id, expand_to, call_site }: &MacroCall,
container: ItemContainerId,
) {
let ast_id = AstIdWithPath::new(self.file_id(), ast_id, ModPath::clone(&path));
let db = self.def_collector.db;
// FIXME: Immediately expanding in "Case 1" is insufficient since "Case 2" may also define
@ -2222,7 +2241,8 @@ impl ModCollector<'_, '_> {
if let Ok(res) = macro_call_as_call_id_with_eager(
db.upcast(),
&ast_id,
mac.expand_to,
call_site,
expand_to,
self.def_collector.def_map.krate,
|path| {
path.as_ident().and_then(|name| {
@ -2276,7 +2296,7 @@ impl ModCollector<'_, '_> {
self.def_collector.unresolved_macros.push(MacroDirective {
module_id: self.module_id,
depth: self.macro_depth + 1,
kind: MacroDirectiveKind::FnLike { ast_id, expand_to: mac.expand_to },
kind: MacroDirectiveKind::FnLike { ast_id, expand_to, call_site },
container,
});
}
@ -2363,7 +2383,6 @@ mod tests {
from_glob_import: Default::default(),
skip_attrs: Default::default(),
is_proc_macro: false,
hygienes: FxHashMap::default(),
};
collector.seed_with_top_level();
collector.collect();

View file

@ -1,7 +1,7 @@
//! This module resolves `mod foo;` declarations to files.
use arrayvec::ArrayVec;
use base_db::{AnchoredPath, FileId};
use hir_expand::name::Name;
use hir_expand::{name::Name, HirFileIdExt, MacroFileIdExt};
use limit::Limit;
use syntax::SmolStr;
@ -66,14 +66,14 @@ impl ModDir {
attr_path: Option<&SmolStr>,
) -> Result<(FileId, bool, ModDir), Box<[String]>> {
let name = name.unescaped();
let orig_file_id = file_id.original_file(db.upcast());
let orig_file_id = file_id.original_file_respecting_includes(db.upcast());
let mut candidate_files = ArrayVec::<_, 2>::new();
match attr_path {
Some(attr_path) => {
candidate_files.push(self.dir_path.join_attr(attr_path, self.root_non_dir_owner))
}
None if file_id.is_include_macro(db.upcast()) => {
None if file_id.macro_file().map_or(false, |it| it.is_include_macro(db.upcast())) => {
candidate_files.push(format!("{}.rs", name.display(db.upcast())));
candidate_files.push(format!("{}/mod.rs", name.display(db.upcast())));
}

View file

@ -96,8 +96,8 @@ impl DefMap {
let types = result.take_types()?;
match types {
ModuleDefId::ModuleId(m) => Visibility::Module(m),
// error: visibility needs to refer to module
_ => {
// error: visibility needs to refer to module
return None;
}
}

View file

@ -8,9 +8,7 @@ use base_db::{fixture::WithFixture, SourceDatabase};
use expect_test::{expect, Expect};
use triomphe::Arc;
use crate::{db::DefDatabase, test_db::TestDB};
use super::DefMap;
use crate::{db::DefDatabase, nameres::DefMap, test_db::TestDB};
fn compute_crate_def_map(ra_fixture: &str) -> Arc<DefMap> {
let db = TestDB::with_files(ra_fixture);

View file

@ -1,13 +1,19 @@
use base_db::SourceDatabaseExt;
use base_db::{SourceDatabase, SourceDatabaseExt};
use triomphe::Arc;
use crate::{db::DefDatabase, AdtId, ModuleDefId};
use super::*;
use crate::{
db::DefDatabase,
nameres::tests::{TestDB, WithFixture},
AdtId, ModuleDefId,
};
fn check_def_map_is_not_recomputed(ra_fixture_initial: &str, ra_fixture_change: &str) {
let (mut db, pos) = TestDB::with_position(ra_fixture_initial);
let krate = db.test_crate();
let krate = {
let crate_graph = db.crate_graph();
// Some of these tests use minicore/proc-macros which will be injected as the first crate
crate_graph.iter().last().unwrap()
};
{
let events = db.log_executed(|| {
db.crate_def_map(krate);
@ -28,84 +34,199 @@ fn check_def_map_is_not_recomputed(ra_fixture_initial: &str, ra_fixture_change:
fn typing_inside_a_function_should_not_invalidate_def_map() {
check_def_map_is_not_recomputed(
r"
//- /lib.rs
mod foo;$0
//- /lib.rs
mod foo;$0
use crate::foo::bar::Baz;
use crate::foo::bar::Baz;
enum E { A, B }
use E::*;
enum E { A, B }
use E::*;
fn foo() -> i32 {
1 + 1
}
fn foo() -> i32 {
1 + 1
}
#[cfg(never)]
fn no() {}
//- /foo/mod.rs
pub mod bar;
#[cfg(never)]
fn no() {}
//- /foo/mod.rs
pub mod bar;
//- /foo/bar.rs
pub struct Baz;
",
//- /foo/bar.rs
pub struct Baz;
",
r"
mod foo;
mod foo;
use crate::foo::bar::Baz;
use crate::foo::bar::Baz;
enum E { A, B }
use E::*;
enum E { A, B }
use E::*;
fn foo() -> i32 { 92 }
fn foo() -> i32 { 92 }
#[cfg(never)]
fn no() {}
",
#[cfg(never)]
fn no() {}
",
);
}
#[test]
fn typing_inside_a_macro_should_not_invalidate_def_map() {
let (mut db, pos) = TestDB::with_position(
check_def_map_is_not_recomputed(
r"
//- /lib.rs
macro_rules! m {
($ident:ident) => {
fn f() {
$ident + $ident;
};
}
}
mod foo;
//- /lib.rs
macro_rules! m {
($ident:ident) => {
fn f() {
$ident + $ident;
};
}
}
mod foo;
//- /foo/mod.rs
pub mod bar;
//- /foo/mod.rs
pub mod bar;
//- /foo/bar.rs
$0
m!(X);
",
//- /foo/bar.rs
$0
m!(X);
pub struct S {}
",
r"
m!(Y);
pub struct S {}
",
);
let krate = db.test_crate();
{
let events = db.log_executed(|| {
let crate_def_map = db.crate_def_map(krate);
let (_, module_data) = crate_def_map.modules.iter().last().unwrap();
assert_eq!(module_data.scope.resolutions().count(), 1);
});
assert!(format!("{events:?}").contains("crate_def_map"), "{events:#?}")
}
db.set_file_text(pos.file_id, Arc::from("m!(Y);"));
}
{
let events = db.log_executed(|| {
let crate_def_map = db.crate_def_map(krate);
let (_, module_data) = crate_def_map.modules.iter().last().unwrap();
assert_eq!(module_data.scope.resolutions().count(), 1);
});
assert!(!format!("{events:?}").contains("crate_def_map"), "{events:#?}")
#[test]
fn typing_inside_an_attribute_should_not_invalidate_def_map() {
check_def_map_is_not_recomputed(
r"
//- proc_macros: identity
//- /lib.rs
mod foo;
//- /foo/mod.rs
pub mod bar;
//- /foo/bar.rs
$0
#[proc_macros::identity]
fn f() {}
",
r"
#[proc_macros::identity]
fn f() { foo }
",
);
}
#[test]
fn typing_inside_an_attribute_arg_should_not_invalidate_def_map() {
check_def_map_is_not_recomputed(
r"
//- proc_macros: identity
//- /lib.rs
mod foo;
//- /foo/mod.rs
pub mod bar;
//- /foo/bar.rs
$0
#[proc_macros::identity]
fn f() {}
",
r"
#[proc_macros::identity(foo)]
fn f() {}
",
);
}
#[test]
fn typing_inside_macro_heavy_file_should_not_invalidate_def_map() {
check_def_map_is_not_recomputed(
r"
//- proc_macros: identity, derive_identity
//- /lib.rs
macro_rules! m {
($ident:ident) => {
fn fm() {
$ident + $ident;
};
}
}
mod foo;
//- /foo/mod.rs
pub mod bar;
//- /foo/bar.rs
$0
fn f() {}
m!(X);
macro_rules! m2 {
($ident:ident) => {
fn f2() {
$ident + $ident;
};
}
}
m2!(X);
#[proc_macros::identity]
#[derive(proc_macros::DeriveIdentity)]
pub struct S {}
",
r"
fn f() {0}
m!(X);
macro_rules! m2 {
($ident:ident) => {
fn f2() {
$ident + $ident;
};
}
}
m2!(X);
#[proc_macros::identity]
#[derive(proc_macros::DeriveIdentity)]
pub struct S {}
",
);
}
#[test]
fn typing_inside_a_derive_should_not_invalidate_def_map() {
check_def_map_is_not_recomputed(
r"
//- proc_macros: derive_identity
//- minicore:derive
//- /lib.rs
mod foo;
//- /foo/mod.rs
pub mod bar;
//- /foo/bar.rs
$0
#[derive(proc_macros::DeriveIdentity)]
#[allow()]
struct S;
",
r"
#[derive(proc_macros::DeriveIdentity)]
#[allow(dead_code)]
struct S;
",
);
}
#[test]
fn typing_inside_a_function_should_not_invalidate_item_expansions() {

View file

@ -96,8 +96,8 @@ pub enum GenericArg {
impl Path {
/// Converts an `ast::Path` to `Path`. Works with use trees.
/// It correctly handles `$crate` based path from macro call.
pub fn from_src(path: ast::Path, ctx: &LowerCtx<'_>) -> Option<Path> {
lower::lower_path(path, ctx)
pub fn from_src(ctx: &LowerCtx<'_>, path: ast::Path) -> Option<Path> {
lower::lower_path(ctx, path)
}
/// Converts a known mod path to `Path`.

View file

@ -4,8 +4,10 @@ use std::iter;
use crate::{lower::LowerCtx, type_ref::ConstRef};
use either::Either;
use hir_expand::name::{name, AsName};
use hir_expand::{
mod_path::resolve_crate_root,
name::{name, AsName},
};
use intern::Interned;
use syntax::ast::{self, AstNode, HasTypeBounds};
@ -16,12 +18,12 @@ use crate::{
/// Converts an `ast::Path` to `Path`. Works with use trees.
/// It correctly handles `$crate` based path from macro call.
pub(super) fn lower_path(mut path: ast::Path, ctx: &LowerCtx<'_>) -> Option<Path> {
pub(super) fn lower_path(ctx: &LowerCtx<'_>, mut path: ast::Path) -> Option<Path> {
let mut kind = PathKind::Plain;
let mut type_anchor = None;
let mut segments = Vec::new();
let mut generic_args = Vec::new();
let hygiene = ctx.hygiene();
let span_map = ctx.span_map();
loop {
let segment = path.segment()?;
@ -31,31 +33,31 @@ pub(super) fn lower_path(mut path: ast::Path, ctx: &LowerCtx<'_>) -> Option<Path
match segment.kind()? {
ast::PathSegmentKind::Name(name_ref) => {
// FIXME: this should just return name
match hygiene.name_ref_to_name(ctx.db.upcast(), name_ref) {
Either::Left(name) => {
let args = segment
.generic_arg_list()
.and_then(|it| lower_generic_args(ctx, it))
.or_else(|| {
lower_generic_args_from_fn_path(
ctx,
segment.param_list(),
segment.ret_type(),
)
})
.map(Interned::new);
if let Some(_) = args {
generic_args.resize(segments.len(), None);
generic_args.push(args);
}
segments.push(name);
}
Either::Right(crate_id) => {
kind = PathKind::DollarCrate(crate_id);
break;
}
if name_ref.text() == "$crate" {
break kind = resolve_crate_root(
ctx.db.upcast(),
span_map.span_for_range(name_ref.syntax().text_range()).ctx,
)
.map(PathKind::DollarCrate)
.unwrap_or(PathKind::Crate);
}
let name = name_ref.as_name();
let args = segment
.generic_arg_list()
.and_then(|it| lower_generic_args(ctx, it))
.or_else(|| {
lower_generic_args_from_fn_path(
ctx,
segment.param_list(),
segment.ret_type(),
)
})
.map(Interned::new);
if let Some(_) = args {
generic_args.resize(segments.len(), None);
generic_args.push(args);
}
segments.push(name);
}
ast::PathSegmentKind::SelfTypeKw => {
segments.push(name![Self]);
@ -74,7 +76,7 @@ pub(super) fn lower_path(mut path: ast::Path, ctx: &LowerCtx<'_>) -> Option<Path
// <T as Trait<A>>::Foo desugars to Trait<Self=T, A>::Foo
Some(trait_ref) => {
let Path::Normal { mod_path, generic_args: path_generic_args, .. } =
Path::from_src(trait_ref.path()?, ctx)?
Path::from_src(ctx, trait_ref.path()?)?
else {
return None;
};
@ -151,8 +153,14 @@ pub(super) fn lower_path(mut path: ast::Path, ctx: &LowerCtx<'_>) -> Option<Path
// We follow what it did anyway :)
if segments.len() == 1 && kind == PathKind::Plain {
if let Some(_macro_call) = path.syntax().parent().and_then(ast::MacroCall::cast) {
if let Some(crate_id) = hygiene.local_inner_macros(ctx.db.upcast(), path) {
kind = PathKind::DollarCrate(crate_id);
let syn_ctxt = span_map.span_for_range(path.segment()?.syntax().text_range()).ctx;
if let Some(macro_call_id) = ctx.db.lookup_intern_syntax_context(syn_ctxt).outer_expn {
if ctx.db.lookup_intern_macro_call(macro_call_id).def.local_inner {
kind = match resolve_crate_root(ctx.db.upcast(), syn_ctxt) {
Some(crate_root) => PathKind::DollarCrate(crate_root),
None => PathKind::Crate,
}
}
}
}
}
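
For context on the new `local_inner_macros` branch above: a macro exported with `#[macro_export(local_inner_macros)]` resolves bare macro names inside its own expansion as `$crate::name`, so a single-segment plain path in macro-call position may need to be rewritten to `DollarCrate` (with `Crate` as the fallback when no crate root resolves). A hedged fixture in the style of this codebase's tests, not taken from the commit:

//- /dep.rs crate:dep
#[macro_export(local_inner_macros)]
macro_rules! outer {
    () => { inner!() }; // resolved as if it were `$crate::inner!()`
}
#[macro_export]
macro_rules! inner {
    () => { 42 };
}
//- /lib.rs crate:main deps:dep
fn main() {
    let _forty_two = dep::outer!(); // works although `inner!` is not in scope here
}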

View file

@ -588,6 +588,14 @@ impl Resolver {
_ => None,
})
}
pub fn impl_def(&self) -> Option<ImplId> {
self.scopes().find_map(|scope| match scope {
Scope::ImplDefScope(def) => Some(*def),
_ => None,
})
}
/// `expr_id` is required to be an expression id that comes after the top level expression scope in the given resolver
#[must_use]
pub fn update_to_inner_scope(

View file

@ -34,6 +34,7 @@ pub(crate) struct TestDB {
impl Default for TestDB {
fn default() -> Self {
let mut this = Self { storage: Default::default(), events: Default::default() };
this.setup_syntax_context_root();
this.set_expand_proc_attr_macros_with_durability(true, Durability::HIGH);
this
}

View file

@ -2,7 +2,7 @@
use std::iter;
use hir_expand::{hygiene::Hygiene, InFile};
use hir_expand::{span::SpanMapRef, InFile};
use la_arena::ArenaMap;
use syntax::ast;
use triomphe::Arc;
@ -34,22 +34,22 @@ impl RawVisibility {
db: &dyn DefDatabase,
node: InFile<Option<ast::Visibility>>,
) -> RawVisibility {
Self::from_ast_with_hygiene(db, node.value, &Hygiene::new(db.upcast(), node.file_id))
Self::from_ast_with_span_map(db, node.value, db.span_map(node.file_id).as_ref())
}
pub(crate) fn from_ast_with_hygiene(
pub(crate) fn from_ast_with_span_map(
db: &dyn DefDatabase,
node: Option<ast::Visibility>,
hygiene: &Hygiene,
span_map: SpanMapRef<'_>,
) -> RawVisibility {
Self::from_ast_with_hygiene_and_default(db, node, RawVisibility::private(), hygiene)
Self::from_ast_with_span_map_and_default(db, node, RawVisibility::private(), span_map)
}
pub(crate) fn from_ast_with_hygiene_and_default(
pub(crate) fn from_ast_with_span_map_and_default(
db: &dyn DefDatabase,
node: Option<ast::Visibility>,
default: RawVisibility,
hygiene: &Hygiene,
span_map: SpanMapRef<'_>,
) -> RawVisibility {
let node = match node {
None => return default,
@ -57,7 +57,7 @@ impl RawVisibility {
};
match node.kind() {
ast::VisibilityKind::In(path) => {
let path = ModPath::from_src(db.upcast(), path, hygiene);
let path = ModPath::from_src(db.upcast(), path, span_map);
let path = match path {
None => return RawVisibility::private(),
Some(path) => path,
@ -73,7 +73,7 @@ impl RawVisibility {
RawVisibility::Module(path)
}
ast::VisibilityKind::PubSelf => {
let path = ModPath::from_kind(PathKind::Plain);
let path = ModPath::from_kind(PathKind::Super(0));
RawVisibility::Module(path)
}
ast::VisibilityKind::Pub => RawVisibility::Public,
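
The `PubSelf` change above deserves a worked example: `pub(self)` means "visible in the current module only", and lowering it to `PathKind::Super(0)` (zero levels up, i.e. the enclosing module) instead of an empty `Plain` path makes it resolve like the spelled-out `pub(in self)`. Illustrative only:

mod m {
    pub(self) fn f() {}   // `pub(self)` == private to `m`; now lowered as `super(0)`
    pub fn call() { f() } // fine: same module
}

fn main() {
    m::call();
    // m::f(); // would not compile: `f` is only visible inside `m`
}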

View file

@ -12,11 +12,40 @@ use std::{
marker::PhantomData,
};
use la_arena::{Arena, Idx};
use la_arena::{Arena, Idx, RawIdx};
use profile::Count;
use rustc_hash::FxHasher;
use syntax::{ast, AstNode, AstPtr, SyntaxNode, SyntaxNodePtr};
use crate::db;
pub use base_db::span::ErasedFileAstId;
/// `AstId` points to an AST node in any file.
///
/// It is stable across reparses, and can be used as salsa key/value.
pub type AstId<N> = crate::InFile<FileAstId<N>>;
impl<N: AstIdNode> AstId<N> {
pub fn to_node(&self, db: &dyn db::ExpandDatabase) -> N {
self.to_ptr(db).to_node(&db.parse_or_expand(self.file_id))
}
pub fn to_in_file_node(&self, db: &dyn db::ExpandDatabase) -> crate::InFile<N> {
crate::InFile::new(self.file_id, self.to_ptr(db).to_node(&db.parse_or_expand(self.file_id)))
}
pub fn to_ptr(&self, db: &dyn db::ExpandDatabase) -> AstPtr<N> {
db.ast_id_map(self.file_id).get(self.value)
}
}
pub type ErasedAstId = crate::InFile<ErasedFileAstId>;
impl ErasedAstId {
pub fn to_ptr(&self, db: &dyn db::ExpandDatabase) -> SyntaxNodePtr {
db.ast_id_map(self.file_id).get_erased(self.value)
}
}
/// `AstId` points to an AST node in a specific file.
pub struct FileAstId<N: AstIdNode> {
raw: ErasedFileAstId,
@ -62,8 +91,6 @@ impl<N: AstIdNode> FileAstId<N> {
}
}
pub type ErasedFileAstId = Idx<SyntaxNodePtr>;
pub trait AstIdNode: AstNode {}
macro_rules! register_ast_id_node {
(impl AstIdNode for $($ident:ident),+ ) => {
@ -129,6 +156,11 @@ impl AstIdMap {
pub(crate) fn from_source(node: &SyntaxNode) -> AstIdMap {
assert!(node.parent().is_none());
let mut res = AstIdMap::default();
// make sure to allocate the root node
if !should_alloc_id(node.kind()) {
res.alloc(node);
}
// By walking the tree in breadth-first order we make sure that parents
// get lower ids than children. That is, adding a new child does not
// change parent's id. This means that, say, adding a new function to a
@ -136,9 +168,9 @@ impl AstIdMap {
bdfs(node, |it| {
if should_alloc_id(it.kind()) {
res.alloc(&it);
true
TreeOrder::BreadthFirst
} else {
false
TreeOrder::DepthFirst
}
});
res.map = hashbrown::HashMap::with_capacity_and_hasher(res.arena.len(), ());
@ -155,6 +187,11 @@ impl AstIdMap {
res
}
/// The [`SyntaxNodePtr`] of the root node.
pub fn root(&self) -> SyntaxNodePtr {
self.arena[Idx::from_raw(RawIdx::from_u32(0))].clone()
}
pub fn ast_id<N: AstIdNode>(&self, item: &N) -> FileAstId<N> {
let raw = self.erased_ast_id(item.syntax());
FileAstId { raw, covariant: PhantomData }
@ -164,7 +201,7 @@ impl AstIdMap {
AstPtr::try_from_raw(self.arena[id.raw].clone()).unwrap()
}
pub(crate) fn get_raw(&self, id: ErasedFileAstId) -> SyntaxNodePtr {
pub fn get_erased(&self, id: ErasedFileAstId) -> SyntaxNodePtr {
self.arena[id].clone()
}
@ -192,14 +229,20 @@ fn hash_ptr(ptr: &SyntaxNodePtr) -> u64 {
hasher.finish()
}
#[derive(Copy, Clone, PartialEq, Eq)]
enum TreeOrder {
BreadthFirst,
DepthFirst,
}
/// Walks the subtree in bdfs order, calling `f` for each node. What is bdfs
/// order? It is a mix of breadth-first and depth first orders. Nodes for which
/// `f` returns true are visited breadth-first, all the other nodes are explored
/// depth-first.
/// `f` returns [`TreeOrder::BreadthFirst`] are visited breadth-first, all the other nodes are explored
/// [`TreeOrder::DepthFirst`].
///
/// In other words, the size of the bfs queue is bound by the number of "true"
/// nodes.
fn bdfs(node: &SyntaxNode, mut f: impl FnMut(SyntaxNode) -> bool) {
fn bdfs(node: &SyntaxNode, mut f: impl FnMut(SyntaxNode) -> TreeOrder) {
let mut curr_layer = vec![node.clone()];
let mut next_layer = vec![];
while !curr_layer.is_empty() {
@ -208,7 +251,7 @@ fn bdfs(node: &SyntaxNode, mut f: impl FnMut(SyntaxNode) -> bool) {
while let Some(event) = preorder.next() {
match event {
syntax::WalkEvent::Enter(node) => {
if f(node.clone()) {
if f(node.clone()) == TreeOrder::BreadthFirst {
next_layer.extend(node.children());
preorder.skip_subtree();
}
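
To pin down the new `TreeOrder` contract, here is a self-contained model of the traversal (the `Tree` type and labels are invented; the real walk is over `SyntaxNode`s). Nodes answering `BreadthFirst` hand their children to the next layer, everything else is explored depth-first within the current layer, so all same-level "items" receive ids before any node inside their bodies:

#[derive(Clone, Copy, PartialEq, Eq)]
enum TreeOrder {
    BreadthFirst,
    DepthFirst,
}

struct Tree {
    label: &'static str,
    children: Vec<Tree>,
}

fn bdfs<'t>(root: &'t Tree, f: &mut dyn FnMut(&Tree) -> TreeOrder) {
    let mut curr_layer: Vec<&'t Tree> = vec![root];
    let mut next_layer: Vec<&'t Tree> = Vec::new();
    while !curr_layer.is_empty() {
        for node in curr_layer.drain(..) {
            // Preorder walk of this subtree; `BreadthFirst` nodes push their
            // children onto the next layer instead of descending immediately.
            let mut stack = vec![node];
            while let Some(n) = stack.pop() {
                if f(n) == TreeOrder::BreadthFirst {
                    next_layer.extend(n.children.iter());
                } else {
                    stack.extend(n.children.iter().rev());
                }
            }
        }
        std::mem::swap(&mut curr_layer, &mut next_layer);
    }
}

fn main() {
    let tree = Tree {
        label: "root",
        children: vec![
            Tree { label: "item(a)", children: vec![Tree { label: "a.body", children: vec![] }] },
            Tree { label: "item(b)", children: vec![Tree { label: "b.body", children: vec![] }] },
        ],
    };
    let mut order = Vec::new();
    bdfs(&tree, &mut |n: &Tree| {
        order.push(n.label);
        if n.label.starts_with("item") { TreeOrder::BreadthFirst } else { TreeOrder::DepthFirst }
    });
    // Parents get lower ids than children: both items precede both bodies.
    assert_eq!(order, ["root", "item(a)", "item(b)", "a.body", "b.body"]);
}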

View file

@ -1,19 +1,19 @@
//! A higher level attributes based on TokenTree, with also some shortcuts.
use std::{fmt, ops};
use base_db::CrateId;
use base_db::{span::SyntaxContextId, CrateId};
use cfg::CfgExpr;
use either::Either;
use intern::Interned;
use mbe::{syntax_node_to_token_tree, DelimiterKind, Punct};
use smallvec::{smallvec, SmallVec};
use syntax::{ast, match_ast, AstNode, SmolStr, SyntaxNode};
use syntax::{ast, match_ast, AstNode, AstToken, SmolStr, SyntaxNode};
use triomphe::Arc;
use crate::{
db::ExpandDatabase,
hygiene::Hygiene,
mod_path::ModPath,
span::SpanMapRef,
tt::{self, Subtree},
InFile,
};
@ -39,28 +39,33 @@ impl ops::Deref for RawAttrs {
impl RawAttrs {
pub const EMPTY: Self = Self { entries: None };
pub fn new(db: &dyn ExpandDatabase, owner: &dyn ast::HasAttrs, hygiene: &Hygiene) -> Self {
let entries = collect_attrs(owner)
.filter_map(|(id, attr)| match attr {
Either::Left(attr) => {
attr.meta().and_then(|meta| Attr::from_src(db, meta, hygiene, id))
}
Either::Right(comment) => comment.doc_comment().map(|doc| Attr {
id,
input: Some(Interned::new(AttrInput::Literal(SmolStr::new(doc)))),
path: Interned::new(ModPath::from(crate::name!(doc))),
}),
})
.collect::<Vec<_>>();
// FIXME: use `Arc::from_iter` when it becomes available
let entries: Arc<[Attr]> = Arc::from(entries);
pub fn new(
db: &dyn ExpandDatabase,
owner: &dyn ast::HasAttrs,
span_map: SpanMapRef<'_>,
) -> Self {
let entries = collect_attrs(owner).filter_map(|(id, attr)| match attr {
Either::Left(attr) => {
attr.meta().and_then(|meta| Attr::from_src(db, meta, span_map, id))
}
Either::Right(comment) => comment.doc_comment().map(|doc| Attr {
id,
input: Some(Interned::new(AttrInput::Literal(SmolStr::new(doc)))),
path: Interned::new(ModPath::from(crate::name!(doc))),
ctxt: span_map.span_for_range(comment.syntax().text_range()).ctx,
}),
});
let entries: Arc<[Attr]> = Arc::from_iter(entries);
Self { entries: if entries.is_empty() { None } else { Some(entries) } }
}
pub fn from_attrs_owner(db: &dyn ExpandDatabase, owner: InFile<&dyn ast::HasAttrs>) -> Self {
let hygiene = Hygiene::new(db, owner.file_id);
Self::new(db, owner.value, &hygiene)
pub fn from_attrs_owner(
db: &dyn ExpandDatabase,
owner: InFile<&dyn ast::HasAttrs>,
span_map: SpanMapRef<'_>,
) -> Self {
Self::new(db, owner.value, span_map)
}
pub fn merge(&self, other: Self) -> Self {
@ -71,19 +76,13 @@ impl RawAttrs {
(Some(a), Some(b)) => {
let last_ast_index = a.last().map_or(0, |it| it.id.ast_index() + 1) as u32;
Self {
entries: Some(Arc::from(
a.iter()
.cloned()
.chain(b.iter().map(|it| {
let mut it = it.clone();
it.id.id = it.id.ast_index() as u32 + last_ast_index
| (it.id.cfg_attr_index().unwrap_or(0) as u32)
<< AttrId::AST_INDEX_BITS;
it
}))
// FIXME: use `Arc::from_iter` when it becomes available
.collect::<Vec<_>>(),
)),
entries: Some(Arc::from_iter(a.iter().cloned().chain(b.iter().map(|it| {
let mut it = it.clone();
it.id.id = it.id.ast_index() as u32 + last_ast_index
| (it.id.cfg_attr_index().unwrap_or(0) as u32)
<< AttrId::AST_INDEX_BITS;
it
})))),
}
}
}
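
The id arithmetic in `merge` leans on operator precedence: `<<` binds tighter than `|`, so the packed value is `(ast_index + last_ast_index) | (cfg_attr_index << AST_INDEX_BITS)`. A toy round-trip of that packing; the width used for `AST_INDEX_BITS` here is an assumption for illustration, the real constant lives on `AttrId`:

const AST_INDEX_BITS: u32 = 16; // assumed width, for illustration only

fn pack(ast_index: u32, cfg_attr_index: u32) -> u32 {
    // same shape as the expression in `merge` above
    ast_index | (cfg_attr_index << AST_INDEX_BITS)
}

fn main() {
    let id = pack(3, 2);
    assert_eq!(id & ((1u32 << AST_INDEX_BITS) - 1), 3); // ast index in the low bits
    assert_eq!(id >> AST_INDEX_BITS, 2); // cfg_attr slot in the high bits
}
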
@ -100,51 +99,43 @@ impl RawAttrs {
}
let crate_graph = db.crate_graph();
let new_attrs = Arc::from(
self.iter()
.flat_map(|attr| -> SmallVec<[_; 1]> {
let is_cfg_attr =
attr.path.as_ident().map_or(false, |name| *name == crate::name![cfg_attr]);
if !is_cfg_attr {
return smallvec![attr.clone()];
}
let new_attrs = Arc::from_iter(self.iter().flat_map(|attr| -> SmallVec<[_; 1]> {
let is_cfg_attr =
attr.path.as_ident().map_or(false, |name| *name == crate::name![cfg_attr]);
if !is_cfg_attr {
return smallvec![attr.clone()];
}
let subtree = match attr.token_tree_value() {
Some(it) => it,
_ => return smallvec![attr.clone()],
let subtree = match attr.token_tree_value() {
Some(it) => it,
_ => return smallvec![attr.clone()],
};
let (cfg, parts) = match parse_cfg_attr_input(subtree) {
Some(it) => it,
None => return smallvec![attr.clone()],
};
let index = attr.id;
let attrs =
parts.enumerate().take(1 << AttrId::CFG_ATTR_BITS).filter_map(|(idx, attr)| {
let tree = Subtree {
delimiter: tt::Delimiter::dummy_invisible(),
token_trees: attr.to_vec(),
};
Attr::from_tt(db, &tree, index.with_cfg_attr(idx))
});
let (cfg, parts) = match parse_cfg_attr_input(subtree) {
Some(it) => it,
None => return smallvec![attr.clone()],
};
let index = attr.id;
let attrs = parts.enumerate().take(1 << AttrId::CFG_ATTR_BITS).filter_map(
|(idx, attr)| {
let tree = Subtree {
delimiter: tt::Delimiter::unspecified(),
token_trees: attr.to_vec(),
};
// FIXME hygiene
let hygiene = Hygiene::new_unhygienic();
Attr::from_tt(db, &tree, &hygiene, index.with_cfg_attr(idx))
},
);
let cfg_options = &crate_graph[krate].cfg_options;
let cfg = Subtree { delimiter: subtree.delimiter, token_trees: cfg.to_vec() };
let cfg = CfgExpr::parse(&cfg);
if cfg_options.check(&cfg) == Some(false) {
smallvec![]
} else {
cov_mark::hit!(cfg_attr_active);
let cfg_options = &crate_graph[krate].cfg_options;
let cfg = Subtree { delimiter: subtree.delimiter, token_trees: cfg.to_vec() };
let cfg = CfgExpr::parse(&cfg);
if cfg_options.check(&cfg) == Some(false) {
smallvec![]
} else {
cov_mark::hit!(cfg_attr_active);
attrs.collect()
}
})
// FIXME: use `Arc::from_iter` when it becomes available
.collect::<Vec<_>>(),
);
attrs.collect()
}
}));
RawAttrs { entries: Some(new_attrs) }
}
@ -185,21 +176,23 @@ pub struct Attr {
pub id: AttrId,
pub path: Interned<ModPath>,
pub input: Option<Interned<AttrInput>>,
pub ctxt: SyntaxContextId,
}
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub enum AttrInput {
/// `#[attr = "string"]`
// FIXME: This is losing span
Literal(SmolStr),
/// `#[attr(subtree)]`
TokenTree(Box<(tt::Subtree, mbe::TokenMap)>),
TokenTree(Box<tt::Subtree>),
}
impl fmt::Display for AttrInput {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
match self {
AttrInput::Literal(lit) => write!(f, " = \"{}\"", lit.escape_debug()),
AttrInput::TokenTree(tt) => tt.0.fmt(f),
AttrInput::TokenTree(tt) => tt.fmt(f),
}
}
}
@ -208,10 +201,10 @@ impl Attr {
fn from_src(
db: &dyn ExpandDatabase,
ast: ast::Meta,
hygiene: &Hygiene,
span_map: SpanMapRef<'_>,
id: AttrId,
) -> Option<Attr> {
let path = Interned::new(ModPath::from_src(db, ast.path()?, hygiene)?);
let path = Interned::new(ModPath::from_src(db, ast.path()?, span_map)?);
let input = if let Some(ast::Expr::Literal(lit)) = ast.expr() {
let value = match lit.kind() {
ast::LiteralKind::String(string) => string.value()?.into(),
@ -219,24 +212,20 @@ impl Attr {
};
Some(Interned::new(AttrInput::Literal(value)))
} else if let Some(tt) = ast.token_tree() {
let (tree, map) = syntax_node_to_token_tree(tt.syntax());
Some(Interned::new(AttrInput::TokenTree(Box::new((tree, map)))))
let tree = syntax_node_to_token_tree(tt.syntax(), span_map);
Some(Interned::new(AttrInput::TokenTree(Box::new(tree))))
} else {
None
};
Some(Attr { id, path, input })
Some(Attr { id, path, input, ctxt: span_map.span_for_range(ast.syntax().text_range()).ctx })
}
fn from_tt(
db: &dyn ExpandDatabase,
tt: &tt::Subtree,
hygiene: &Hygiene,
id: AttrId,
) -> Option<Attr> {
let (parse, _) = mbe::token_tree_to_syntax_node(tt, mbe::TopEntryPoint::MetaItem);
fn from_tt(db: &dyn ExpandDatabase, tt: &tt::Subtree, id: AttrId) -> Option<Attr> {
// FIXME: Unnecessary roundtrip tt -> ast -> tt
let (parse, map) = mbe::token_tree_to_syntax_node(tt, mbe::TopEntryPoint::MetaItem);
let ast = ast::Meta::cast(parse.syntax_node())?;
Self::from_src(db, ast, hygiene, id)
Self::from_src(db, ast, SpanMapRef::ExpansionSpanMap(&map), id)
}
pub fn path(&self) -> &ModPath {
@ -256,7 +245,7 @@ impl Attr {
/// #[path(ident)]
pub fn single_ident_value(&self) -> Option<&tt::Ident> {
match self.input.as_deref()? {
AttrInput::TokenTree(tt) => match &*tt.0.token_trees {
AttrInput::TokenTree(tt) => match &*tt.token_trees {
[tt::TokenTree::Leaf(tt::Leaf::Ident(ident))] => Some(ident),
_ => None,
},
@ -267,7 +256,7 @@ impl Attr {
/// #[path TokenTree]
pub fn token_tree_value(&self) -> Option<&Subtree> {
match self.input.as_deref()? {
AttrInput::TokenTree(tt) => Some(&tt.0),
AttrInput::TokenTree(tt) => Some(tt),
_ => None,
}
}
@ -276,8 +265,7 @@ impl Attr {
pub fn parse_path_comma_token_tree<'a>(
&'a self,
db: &'a dyn ExpandDatabase,
hygiene: &'a Hygiene,
) -> Option<impl Iterator<Item = ModPath> + 'a> {
) -> Option<impl Iterator<Item = (ModPath, SyntaxContextId)> + 'a> {
let args = self.token_tree_value()?;
if args.delimiter.kind != DelimiterKind::Parenthesis {
@ -290,12 +278,13 @@ impl Attr {
if tts.is_empty() {
return None;
}
// FIXME: This is necessarily a hack. It'd be nice if we could avoid allocation here.
// FIXME: This is necessarily a hack. It'd be nice if we could avoid allocation
// here or maybe just parse a mod path from a token tree directly
let subtree = tt::Subtree {
delimiter: tt::Delimiter::unspecified(),
token_trees: tts.into_iter().cloned().collect(),
delimiter: tt::Delimiter::dummy_invisible(),
token_trees: tts.to_vec(),
};
let (parse, _) =
let (parse, span_map) =
mbe::token_tree_to_syntax_node(&subtree, mbe::TopEntryPoint::MetaItem);
let meta = ast::Meta::cast(parse.syntax_node())?;
// Only simple paths are allowed.
@ -304,7 +293,11 @@ impl Attr {
return None;
}
let path = meta.path()?;
ModPath::from_src(db, path, hygiene)
let call_site = span_map.span_at(path.syntax().text_range().start()).ctx;
Some((
ModPath::from_src(db, path, SpanMapRef::ExpansionSpanMap(&span_map))?,
call_site,
))
});
Some(paths)
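
The return-type change matters for callers: each derive path now travels with the `SyntaxContextId` of its own call site, so hygiene survives `cfg_attr` nesting. A sketch of the call shape under this diff's signatures (`collect_derives` is an invented helper and this is not compiled here; the other names are this file's imports):

fn collect_derives<'a>(
    db: &'a dyn ExpandDatabase,
    attr: &'a Attr,
) -> Vec<(ModPath, SyntaxContextId)> {
    // mirrors the `.into_iter().flatten()` pattern used by the updated
    // callers elsewhere in this diff
    attr.parse_path_comma_token_tree(db).into_iter().flatten().collect()
}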

View file

@ -1,16 +1,22 @@
//! Builtin attributes.
use base_db::{
span::{SyntaxContextId, ROOT_ERASED_FILE_AST_ID},
FileId,
};
use syntax::{TextRange, TextSize};
use crate::{db::ExpandDatabase, name, tt, ExpandResult, MacroCallId, MacroCallKind};
macro_rules! register_builtin {
( $(($name:ident, $variant:ident) => $expand:ident),* ) => {
($expand_fn:ident: $(($name:ident, $variant:ident) => $expand:ident),* ) => {
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub enum BuiltinAttrExpander {
$($variant),*
}
impl BuiltinAttrExpander {
pub fn expand(
pub fn $expand_fn(
&self,
db: &dyn ExpandDatabase,
id: MacroCallId,
@ -45,7 +51,7 @@ impl BuiltinAttrExpander {
}
}
register_builtin! {
register_builtin! { expand:
(bench, Bench) => dummy_attr_expand,
(cfg_accessible, CfgAccessible) => dummy_attr_expand,
(cfg_eval, CfgEval) => dummy_attr_expand,
@ -77,9 +83,8 @@ fn dummy_attr_expand(
///
/// As such, we expand `#[derive(Foo, bar::Bar)]` into
/// ```
/// #[Foo]
/// #[bar::Bar]
/// ();
/// #![Foo]
/// #![bar::Bar]
/// ```
/// which allows fallback path resolution in hir::Semantics to properly identify our derives.
/// Since we do not expand the attribute in nameres though, we keep the original item.
@ -98,21 +103,31 @@ fn derive_attr_expand(
) -> ExpandResult<tt::Subtree> {
let loc = db.lookup_intern_macro_call(id);
let derives = match &loc.kind {
MacroCallKind::Attr { attr_args, .. } if loc.def.is_attribute_derive() => &attr_args.0,
_ => return ExpandResult::ok(tt::Subtree::empty()),
MacroCallKind::Attr { attr_args: Some(attr_args), .. } if loc.def.is_attribute_derive() => {
attr_args
}
_ => return ExpandResult::ok(tt::Subtree::empty(tt::DelimSpan::DUMMY)),
};
pseudo_derive_attr_expansion(tt, derives)
pseudo_derive_attr_expansion(tt, derives, loc.call_site)
}
pub fn pseudo_derive_attr_expansion(
tt: &tt::Subtree,
args: &tt::Subtree,
call_site: SyntaxContextId,
) -> ExpandResult<tt::Subtree> {
let mk_leaf = |char| {
tt::TokenTree::Leaf(tt::Leaf::Punct(tt::Punct {
char,
spacing: tt::Spacing::Alone,
span: tt::TokenId::unspecified(),
span: tt::SpanData {
range: TextRange::empty(TextSize::new(0)),
anchor: base_db::span::SpanAnchor {
file_id: FileId::BOGUS,
ast_id: ROOT_ERASED_FILE_AST_ID,
},
ctx: call_site,
},
}))
};
@ -122,12 +137,10 @@ pub fn pseudo_derive_attr_expansion(
.split(|tt| matches!(tt, tt::TokenTree::Leaf(tt::Leaf::Punct(tt::Punct { char: ',', .. }))))
{
token_trees.push(mk_leaf('#'));
token_trees.push(mk_leaf('!'));
token_trees.push(mk_leaf('['));
token_trees.extend(tt.iter().cloned());
token_trees.push(mk_leaf(']'));
}
token_trees.push(mk_leaf('('));
token_trees.push(mk_leaf(')'));
token_trees.push(mk_leaf(';'));
ExpandResult::ok(tt::Subtree { delimiter: tt.delimiter, token_trees })
}
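
Concretely, with this change `#[derive(Foo, bar::Bar)]` no longer pseudo-expands to the statement `#[Foo] #[bar::Bar] ();` but to a list of inner attributes whose `#`, `!` and bracket tokens all carry the call-site `SyntaxContextId`. A worked example, shown as surface syntax in comments since the item itself is kept separately by name resolution:

// Input to the pseudo-derive expander:
//
//     #[derive(Foo, bar::Bar)]
//     struct S;
//
// Output token stream, one inner attribute per comma-separated derive
// path; note the `();` tail of the old expansion is gone:
//
//     #![Foo]
//     #![bar::Bar]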

View file

@ -1,16 +1,16 @@
//! Builtin derives.
use ::tt::Ident;
use base_db::{CrateOrigin, LangCrateOrigin};
use base_db::{span::SpanData, CrateOrigin, LangCrateOrigin};
use itertools::izip;
use mbe::TokenMap;
use rustc_hash::FxHashSet;
use stdx::never;
use tracing::debug;
use crate::{
hygiene::span_with_def_site_ctxt,
name::{AsName, Name},
tt::{self, TokenId},
span::SpanMapRef,
tt,
};
use syntax::ast::{self, AstNode, FieldList, HasAttrs, HasGenericParams, HasName, HasTypeBounds};
@ -29,12 +29,15 @@ macro_rules! register_builtin {
db: &dyn ExpandDatabase,
id: MacroCallId,
tt: &ast::Adt,
token_map: &TokenMap,
token_map: SpanMapRef<'_>,
) -> ExpandResult<tt::Subtree> {
let expander = match *self {
$( BuiltinDeriveExpander::$trait => $expand, )*
};
expander(db, id, tt, token_map)
let span = db.lookup_intern_macro_call(id).span(db);
let span = span_with_def_site_ctxt(db, span, id);
expander(db, id, span, tt, token_map)
}
fn find_by_name(name: &name::Name) -> Option<Self> {
@ -70,19 +73,19 @@ enum VariantShape {
Unit,
}
fn tuple_field_iterator(n: usize) -> impl Iterator<Item = tt::Ident> {
(0..n).map(|it| Ident::new(format!("f{it}"), tt::TokenId::unspecified()))
fn tuple_field_iterator(span: SpanData, n: usize) -> impl Iterator<Item = tt::Ident> {
(0..n).map(move |it| tt::Ident::new(format!("f{it}"), span))
}
impl VariantShape {
fn as_pattern(&self, path: tt::Subtree) -> tt::Subtree {
self.as_pattern_map(path, |it| quote!(#it))
fn as_pattern(&self, path: tt::Subtree, span: SpanData) -> tt::Subtree {
self.as_pattern_map(path, span, |it| quote!(span => #it))
}
fn field_names(&self) -> Vec<tt::Ident> {
fn field_names(&self, span: SpanData) -> Vec<tt::Ident> {
match self {
VariantShape::Struct(s) => s.clone(),
VariantShape::Tuple(n) => tuple_field_iterator(*n).collect(),
VariantShape::Tuple(n) => tuple_field_iterator(span, *n).collect(),
VariantShape::Unit => vec![],
}
}
@ -90,26 +93,27 @@ impl VariantShape {
fn as_pattern_map(
&self,
path: tt::Subtree,
span: SpanData,
field_map: impl Fn(&tt::Ident) -> tt::Subtree,
) -> tt::Subtree {
match self {
VariantShape::Struct(fields) => {
let fields = fields.iter().map(|it| {
let mapped = field_map(it);
quote! { #it : #mapped , }
quote! {span => #it : #mapped , }
});
quote! {
quote! {span =>
#path { ##fields }
}
}
&VariantShape::Tuple(n) => {
let fields = tuple_field_iterator(n).map(|it| {
let fields = tuple_field_iterator(span, n).map(|it| {
let mapped = field_map(&it);
quote! {
quote! {span =>
#mapped ,
}
});
quote! {
quote! {span =>
#path ( ##fields )
}
}
@ -117,7 +121,7 @@ impl VariantShape {
}
}
fn from(tm: &TokenMap, value: Option<FieldList>) -> Result<Self, ExpandError> {
fn from(tm: SpanMapRef<'_>, value: Option<FieldList>) -> Result<Self, ExpandError> {
let r = match value {
None => VariantShape::Unit,
Some(FieldList::RecordFieldList(it)) => VariantShape::Struct(
@ -139,17 +143,17 @@ enum AdtShape {
}
impl AdtShape {
fn as_pattern(&self, name: &tt::Ident) -> Vec<tt::Subtree> {
self.as_pattern_map(name, |it| quote!(#it))
fn as_pattern(&self, span: SpanData, name: &tt::Ident) -> Vec<tt::Subtree> {
self.as_pattern_map(name, |it| quote!(span =>#it), span)
}
fn field_names(&self) -> Vec<Vec<tt::Ident>> {
fn field_names(&self, span: SpanData) -> Vec<Vec<tt::Ident>> {
match self {
AdtShape::Struct(s) => {
vec![s.field_names()]
vec![s.field_names(span)]
}
AdtShape::Enum { variants, .. } => {
variants.iter().map(|(_, fields)| fields.field_names()).collect()
variants.iter().map(|(_, fields)| fields.field_names(span)).collect()
}
AdtShape::Union => {
never!("using fields of union in derive is always wrong");
@ -162,18 +166,21 @@ impl AdtShape {
&self,
name: &tt::Ident,
field_map: impl Fn(&tt::Ident) -> tt::Subtree,
span: SpanData,
) -> Vec<tt::Subtree> {
match self {
AdtShape::Struct(s) => {
vec![s.as_pattern_map(quote! { #name }, field_map)]
vec![s.as_pattern_map(quote! {span => #name }, span, field_map)]
}
AdtShape::Enum { variants, .. } => variants
.iter()
.map(|(v, fields)| fields.as_pattern_map(quote! { #name :: #v }, &field_map))
.map(|(v, fields)| {
fields.as_pattern_map(quote! {span => #name :: #v }, span, &field_map)
})
.collect(),
AdtShape::Union => {
never!("pattern matching on union is always wrong");
vec![quote! { un }]
vec![quote! {span => un }]
}
}
}
@ -189,8 +196,12 @@ struct BasicAdtInfo {
associated_types: Vec<tt::Subtree>,
}
fn parse_adt(tm: &TokenMap, adt: &ast::Adt) -> Result<BasicAdtInfo, ExpandError> {
let (name, generic_param_list, shape) = match &adt {
fn parse_adt(
tm: SpanMapRef<'_>,
adt: &ast::Adt,
call_site: SpanData,
) -> Result<BasicAdtInfo, ExpandError> {
let (name, generic_param_list, shape) = match adt {
ast::Adt::Struct(it) => (
it.name(),
it.generic_param_list(),
@ -234,22 +245,26 @@ fn parse_adt(tm: &TokenMap, adt: &ast::Adt) -> Result<BasicAdtInfo, ExpandError>
match this {
Some(it) => {
param_type_set.insert(it.as_name());
mbe::syntax_node_to_token_tree(it.syntax()).0
mbe::syntax_node_to_token_tree(it.syntax(), tm)
}
None => {
tt::Subtree::empty(::tt::DelimSpan { open: call_site, close: call_site })
}
None => tt::Subtree::empty(),
}
};
let bounds = match &param {
ast::TypeOrConstParam::Type(it) => {
it.type_bound_list().map(|it| mbe::syntax_node_to_token_tree(it.syntax()).0)
it.type_bound_list().map(|it| mbe::syntax_node_to_token_tree(it.syntax(), tm))
}
ast::TypeOrConstParam::Const(_) => None,
};
let ty = if let ast::TypeOrConstParam::Const(param) = param {
let ty = param
.ty()
.map(|ty| mbe::syntax_node_to_token_tree(ty.syntax()).0)
.unwrap_or_else(tt::Subtree::empty);
.map(|ty| mbe::syntax_node_to_token_tree(ty.syntax(), tm))
.unwrap_or_else(|| {
tt::Subtree::empty(::tt::DelimSpan { open: call_site, close: call_site })
});
Some(ty)
} else {
None
@ -282,20 +297,22 @@ fn parse_adt(tm: &TokenMap, adt: &ast::Adt) -> Result<BasicAdtInfo, ExpandError>
let name = p.path()?.qualifier()?.as_single_name_ref()?.as_name();
param_type_set.contains(&name).then_some(p)
})
.map(|it| mbe::syntax_node_to_token_tree(it.syntax()).0)
.map(|it| mbe::syntax_node_to_token_tree(it.syntax(), tm))
.collect();
let name_token = name_to_token(&tm, name)?;
let name_token = name_to_token(tm, name)?;
Ok(BasicAdtInfo { name: name_token, shape, param_types, associated_types })
}
fn name_to_token(token_map: &TokenMap, name: Option<ast::Name>) -> Result<tt::Ident, ExpandError> {
fn name_to_token(
token_map: SpanMapRef<'_>,
name: Option<ast::Name>,
) -> Result<tt::Ident, ExpandError> {
let name = name.ok_or_else(|| {
debug!("parsed item has no name");
ExpandError::other("missing name")
})?;
let name_token_id =
token_map.token_by_range(name.syntax().text_range()).unwrap_or_else(TokenId::unspecified);
let name_token = tt::Ident { span: name_token_id, text: name.text().into() };
let span = token_map.span_for_range(name.syntax().text_range());
let name_token = tt::Ident { span, text: name.text().into() };
Ok(name_token)
}
@ -331,14 +348,21 @@ fn name_to_token(token_map: &TokenMap, name: Option<ast::Name>) -> Result<tt::Id
/// where B1, ..., BN are the bounds given by `bounds_paths`. Z is a phantom type, and
/// therefore does not get bound by the derived trait.
fn expand_simple_derive(
// FIXME: use
invoc_span: SpanData,
tt: &ast::Adt,
tm: &TokenMap,
tm: SpanMapRef<'_>,
trait_path: tt::Subtree,
make_trait_body: impl FnOnce(&BasicAdtInfo) -> tt::Subtree,
) -> ExpandResult<tt::Subtree> {
let info = match parse_adt(tm, tt) {
let info = match parse_adt(tm, tt, invoc_span) {
Ok(info) => info,
Err(e) => return ExpandResult::new(tt::Subtree::empty(), e),
Err(e) => {
return ExpandResult::new(
tt::Subtree::empty(tt::DelimSpan { open: invoc_span, close: invoc_span }),
e,
)
}
};
let trait_body = make_trait_body(&info);
let mut where_block = vec![];
@ -349,13 +373,13 @@ fn expand_simple_derive(
let ident_ = ident.clone();
if let Some(b) = bound {
let ident = ident.clone();
where_block.push(quote! { #ident : #b , });
where_block.push(quote! {invoc_span => #ident : #b , });
}
if let Some(ty) = param_ty {
(quote! { const #ident : #ty , }, quote! { #ident_ , })
(quote! {invoc_span => const #ident : #ty , }, quote! {invoc_span => #ident_ , })
} else {
let bound = trait_path.clone();
(quote! { #ident : #bound , }, quote! { #ident_ , })
(quote! {invoc_span => #ident : #bound , }, quote! {invoc_span => #ident_ , })
}
})
.unzip();
@ -363,17 +387,17 @@ fn expand_simple_derive(
where_block.extend(info.associated_types.iter().map(|it| {
let it = it.clone();
let bound = trait_path.clone();
quote! { #it : #bound , }
quote! {invoc_span => #it : #bound , }
}));
let name = info.name;
let expanded = quote! {
let expanded = quote! {invoc_span =>
impl < ##params > #trait_path for #name < ##args > where ##where_block { #trait_body }
};
ExpandResult::ok(expanded)
}
fn find_builtin_crate(db: &dyn ExpandDatabase, id: MacroCallId) -> tt::TokenTree {
fn find_builtin_crate(db: &dyn ExpandDatabase, id: MacroCallId, span: SpanData) -> tt::TokenTree {
// FIXME: make hygiene work for builtin derive macros
// such that $crate can be used here.
let cg = db.crate_graph();
@ -381,9 +405,9 @@ fn find_builtin_crate(db: &dyn ExpandDatabase, id: MacroCallId) -> tt::TokenTree
let tt = if matches!(cg[krate].origin, CrateOrigin::Lang(LangCrateOrigin::Core)) {
cov_mark::hit!(test_copy_expand_in_core);
quote! { crate }
quote! {span => crate }
} else {
quote! { core }
quote! {span => core }
};
tt.token_trees[0].clone()
@ -392,56 +416,50 @@ fn find_builtin_crate(db: &dyn ExpandDatabase, id: MacroCallId) -> tt::TokenTree
fn copy_expand(
db: &dyn ExpandDatabase,
id: MacroCallId,
span: SpanData,
tt: &ast::Adt,
tm: &TokenMap,
tm: SpanMapRef<'_>,
) -> ExpandResult<tt::Subtree> {
let krate = find_builtin_crate(db, id);
expand_simple_derive(tt, tm, quote! { #krate::marker::Copy }, |_| quote! {})
let krate = find_builtin_crate(db, id, span);
expand_simple_derive(span, tt, tm, quote! {span => #krate::marker::Copy }, |_| quote! {span =>})
}
fn clone_expand(
db: &dyn ExpandDatabase,
id: MacroCallId,
span: SpanData,
tt: &ast::Adt,
tm: &TokenMap,
tm: SpanMapRef<'_>,
) -> ExpandResult<tt::Subtree> {
let krate = find_builtin_crate(db, id);
expand_simple_derive(tt, tm, quote! { #krate::clone::Clone }, |adt| {
let krate = find_builtin_crate(db, id, span);
expand_simple_derive(span, tt, tm, quote! {span => #krate::clone::Clone }, |adt| {
if matches!(adt.shape, AdtShape::Union) {
let star = tt::Punct {
char: '*',
spacing: ::tt::Spacing::Alone,
span: tt::TokenId::unspecified(),
};
return quote! {
let star = tt::Punct { char: '*', spacing: ::tt::Spacing::Alone, span };
return quote! {span =>
fn clone(&self) -> Self {
#star self
}
};
}
if matches!(&adt.shape, AdtShape::Enum { variants, .. } if variants.is_empty()) {
let star = tt::Punct {
char: '*',
spacing: ::tt::Spacing::Alone,
span: tt::TokenId::unspecified(),
};
return quote! {
let star = tt::Punct { char: '*', spacing: ::tt::Spacing::Alone, span };
return quote! {span =>
fn clone(&self) -> Self {
match #star self {}
}
};
}
let name = &adt.name;
let patterns = adt.shape.as_pattern(name);
let exprs = adt.shape.as_pattern_map(name, |it| quote! { #it .clone() });
let patterns = adt.shape.as_pattern(span, name);
let exprs = adt.shape.as_pattern_map(name, |it| quote! {span => #it .clone() }, span);
let arms = patterns.into_iter().zip(exprs.into_iter()).map(|(pat, expr)| {
let fat_arrow = fat_arrow();
quote! {
let fat_arrow = fat_arrow(span);
quote! {span =>
#pat #fat_arrow #expr,
}
});
quote! {
quote! {span =>
fn clone(&self) -> Self {
match self {
##arms
@ -451,53 +469,56 @@ fn clone_expand(
})
}
/// This function exists since `quote! { => }` doesn't work.
fn fat_arrow() -> ::tt::Subtree<TokenId> {
let eq =
tt::Punct { char: '=', spacing: ::tt::Spacing::Joint, span: tt::TokenId::unspecified() };
quote! { #eq> }
/// This function exists since `quote! {span => => }` doesn't work.
fn fat_arrow(span: SpanData) -> tt::Subtree {
let eq = tt::Punct { char: '=', spacing: ::tt::Spacing::Joint, span };
quote! {span => #eq> }
}
/// This function exists since `quote! { && }` doesn't work.
fn and_and() -> ::tt::Subtree<TokenId> {
let and =
tt::Punct { char: '&', spacing: ::tt::Spacing::Joint, span: tt::TokenId::unspecified() };
quote! { #and& }
/// This function exists since `quote! {span => && }` doesn't work.
fn and_and(span: SpanData) -> tt::Subtree {
let and = tt::Punct { char: '&', spacing: ::tt::Spacing::Joint, span };
quote! {span => #and& }
}
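
These two helpers plus the pervasive `quote! {span => ... }` rewrite reflect the macro's new grammar: the first argument is the span stamped onto every generated token, with `=>` as the separator, which is why a literal `=>` (like the joint `&&`) has to be smuggled in as a hand-built `Punct`. A toy model of that stamping; `Span` and `Token` are simplified stand-ins, not the real `tt` types:

#[derive(Debug, Clone, Copy, PartialEq, Eq)]
struct Span(u32); // stand-in for SpanData

#[derive(Debug, PartialEq, Eq)]
struct Token {
    text: String,
    span: Span,
}

// What `quote! {span => ...}` does, reduced to its essence: every token
// produced by the body carries the span named before the `=>`.
fn quote_with_span(span: Span, texts: &[&str]) -> Vec<Token> {
    texts.iter().map(|t| Token { text: (*t).to_string(), span }).collect()
}

fn main() {
    let call_site = Span(7);
    let tokens = quote_with_span(call_site, &["crate", "::", "marker", "::", "Copy"]);
    assert!(tokens.iter().all(|t| t.span == call_site));
}
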
fn default_expand(
db: &dyn ExpandDatabase,
id: MacroCallId,
span: SpanData,
tt: &ast::Adt,
tm: &TokenMap,
tm: SpanMapRef<'_>,
) -> ExpandResult<tt::Subtree> {
let krate = &find_builtin_crate(db, id);
expand_simple_derive(tt, tm, quote! { #krate::default::Default }, |adt| {
let krate = &find_builtin_crate(db, id, span);
expand_simple_derive(span, tt, tm, quote! {span => #krate::default::Default }, |adt| {
let body = match &adt.shape {
AdtShape::Struct(fields) => {
let name = &adt.name;
fields
.as_pattern_map(quote!(#name), |_| quote!(#krate::default::Default::default()))
fields.as_pattern_map(
quote!(span =>#name),
span,
|_| quote!(span =>#krate::default::Default::default()),
)
}
AdtShape::Enum { default_variant, variants } => {
if let Some(d) = default_variant {
let (name, fields) = &variants[*d];
let adt_name = &adt.name;
fields.as_pattern_map(
quote!(#adt_name :: #name),
|_| quote!(#krate::default::Default::default()),
quote!(span =>#adt_name :: #name),
span,
|_| quote!(span =>#krate::default::Default::default()),
)
} else {
// FIXME: Return expand error here
quote!()
quote!(span =>)
}
}
AdtShape::Union => {
// FIXME: Return expand error here
quote!()
quote!(span =>)
}
};
quote! {
quote! {span =>
fn default() -> Self {
#body
}
@ -508,44 +529,41 @@ fn default_expand(
fn debug_expand(
db: &dyn ExpandDatabase,
id: MacroCallId,
span: SpanData,
tt: &ast::Adt,
tm: &TokenMap,
tm: SpanMapRef<'_>,
) -> ExpandResult<tt::Subtree> {
let krate = &find_builtin_crate(db, id);
expand_simple_derive(tt, tm, quote! { #krate::fmt::Debug }, |adt| {
let krate = &find_builtin_crate(db, id, span);
expand_simple_derive(span, tt, tm, quote! {span => #krate::fmt::Debug }, |adt| {
let for_variant = |name: String, v: &VariantShape| match v {
VariantShape::Struct(fields) => {
let for_fields = fields.iter().map(|it| {
let x_string = it.to_string();
quote! {
quote! {span =>
.field(#x_string, & #it)
}
});
quote! {
quote! {span =>
f.debug_struct(#name) ##for_fields .finish()
}
}
VariantShape::Tuple(n) => {
let for_fields = tuple_field_iterator(*n).map(|it| {
quote! {
let for_fields = tuple_field_iterator(span, *n).map(|it| {
quote! {span =>
.field( & #it)
}
});
quote! {
quote! {span =>
f.debug_tuple(#name) ##for_fields .finish()
}
}
VariantShape::Unit => quote! {
VariantShape::Unit => quote! {span =>
f.write_str(#name)
},
};
if matches!(&adt.shape, AdtShape::Enum { variants, .. } if variants.is_empty()) {
let star = tt::Punct {
char: '*',
spacing: ::tt::Spacing::Alone,
span: tt::TokenId::unspecified(),
};
return quote! {
let star = tt::Punct { char: '*', spacing: ::tt::Spacing::Alone, span };
return quote! {span =>
fn fmt(&self, f: &mut #krate::fmt::Formatter) -> #krate::fmt::Result {
match #star self {}
}
@ -553,20 +571,20 @@ fn debug_expand(
}
let arms = match &adt.shape {
AdtShape::Struct(fields) => {
let fat_arrow = fat_arrow();
let fat_arrow = fat_arrow(span);
let name = &adt.name;
let pat = fields.as_pattern(quote!(#name));
let pat = fields.as_pattern(quote!(span =>#name), span);
let expr = for_variant(name.to_string(), fields);
vec![quote! { #pat #fat_arrow #expr }]
vec![quote! {span => #pat #fat_arrow #expr }]
}
AdtShape::Enum { variants, .. } => variants
.iter()
.map(|(name, v)| {
let fat_arrow = fat_arrow();
let fat_arrow = fat_arrow(span);
let adt_name = &adt.name;
let pat = v.as_pattern(quote!(#adt_name :: #name));
let pat = v.as_pattern(quote!(span =>#adt_name :: #name), span);
let expr = for_variant(name.to_string(), v);
quote! {
quote! {span =>
#pat #fat_arrow #expr ,
}
})
@ -576,7 +594,7 @@ fn debug_expand(
vec![]
}
};
quote! {
quote! {span =>
fn fmt(&self, f: &mut #krate::fmt::Formatter) -> #krate::fmt::Result {
match self {
##arms
@ -589,47 +607,46 @@ fn debug_expand(
fn hash_expand(
db: &dyn ExpandDatabase,
id: MacroCallId,
span: SpanData,
tt: &ast::Adt,
tm: &TokenMap,
tm: SpanMapRef<'_>,
) -> ExpandResult<tt::Subtree> {
let krate = &find_builtin_crate(db, id);
expand_simple_derive(tt, tm, quote! { #krate::hash::Hash }, |adt| {
let krate = &find_builtin_crate(db, id, span);
expand_simple_derive(span, tt, tm, quote! {span => #krate::hash::Hash }, |adt| {
if matches!(adt.shape, AdtShape::Union) {
// FIXME: Return expand error here
return quote! {};
return quote! {span =>};
}
if matches!(&adt.shape, AdtShape::Enum { variants, .. } if variants.is_empty()) {
let star = tt::Punct {
char: '*',
spacing: ::tt::Spacing::Alone,
span: tt::TokenId::unspecified(),
};
return quote! {
let star = tt::Punct { char: '*', spacing: ::tt::Spacing::Alone, span };
return quote! {span =>
fn hash<H: #krate::hash::Hasher>(&self, ra_expand_state: &mut H) {
match #star self {}
}
};
}
let arms = adt.shape.as_pattern(&adt.name).into_iter().zip(adt.shape.field_names()).map(
|(pat, names)| {
let expr = {
let it = names.iter().map(|it| quote! { #it . hash(ra_expand_state); });
quote! { {
##it
} }
};
let fat_arrow = fat_arrow();
quote! {
#pat #fat_arrow #expr ,
}
},
);
let arms =
adt.shape.as_pattern(span, &adt.name).into_iter().zip(adt.shape.field_names(span)).map(
|(pat, names)| {
let expr = {
let it =
names.iter().map(|it| quote! {span => #it . hash(ra_expand_state); });
quote! {span => {
##it
} }
};
let fat_arrow = fat_arrow(span);
quote! {span =>
#pat #fat_arrow #expr ,
}
},
);
let check_discriminant = if matches!(&adt.shape, AdtShape::Enum { .. }) {
quote! { #krate::mem::discriminant(self).hash(ra_expand_state); }
quote! {span => #krate::mem::discriminant(self).hash(ra_expand_state); }
} else {
quote! {}
quote! {span =>}
};
quote! {
quote! {span =>
fn hash<H: #krate::hash::Hasher>(&self, ra_expand_state: &mut H) {
#check_discriminant
match self {
@ -643,56 +660,58 @@ fn hash_expand(
fn eq_expand(
db: &dyn ExpandDatabase,
id: MacroCallId,
span: SpanData,
tt: &ast::Adt,
tm: &TokenMap,
tm: SpanMapRef<'_>,
) -> ExpandResult<tt::Subtree> {
let krate = find_builtin_crate(db, id);
expand_simple_derive(tt, tm, quote! { #krate::cmp::Eq }, |_| quote! {})
let krate = find_builtin_crate(db, id, span);
expand_simple_derive(span, tt, tm, quote! {span => #krate::cmp::Eq }, |_| quote! {span =>})
}
fn partial_eq_expand(
db: &dyn ExpandDatabase,
id: MacroCallId,
span: SpanData,
tt: &ast::Adt,
tm: &TokenMap,
tm: SpanMapRef<'_>,
) -> ExpandResult<tt::Subtree> {
let krate = find_builtin_crate(db, id);
expand_simple_derive(tt, tm, quote! { #krate::cmp::PartialEq }, |adt| {
let krate = find_builtin_crate(db, id, span);
expand_simple_derive(span, tt, tm, quote! {span => #krate::cmp::PartialEq }, |adt| {
if matches!(adt.shape, AdtShape::Union) {
// FIXME: Return expand error here
return quote! {};
return quote! {span =>};
}
let name = &adt.name;
let (self_patterns, other_patterns) = self_and_other_patterns(adt, name);
let arms = izip!(self_patterns, other_patterns, adt.shape.field_names()).map(
let (self_patterns, other_patterns) = self_and_other_patterns(adt, name, span);
let arms = izip!(self_patterns, other_patterns, adt.shape.field_names(span)).map(
|(pat1, pat2, names)| {
let fat_arrow = fat_arrow();
let fat_arrow = fat_arrow(span);
let body = match &*names {
[] => {
quote!(true)
quote!(span =>true)
}
[first, rest @ ..] => {
let rest = rest.iter().map(|it| {
let t1 = Ident::new(format!("{}_self", it.text), it.span);
let t2 = Ident::new(format!("{}_other", it.text), it.span);
let and_and = and_and();
quote!(#and_and #t1 .eq( #t2 ))
let t1 = tt::Ident::new(format!("{}_self", it.text), it.span);
let t2 = tt::Ident::new(format!("{}_other", it.text), it.span);
let and_and = and_and(span);
quote!(span =>#and_and #t1 .eq( #t2 ))
});
let first = {
let t1 = Ident::new(format!("{}_self", first.text), first.span);
let t2 = Ident::new(format!("{}_other", first.text), first.span);
quote!(#t1 .eq( #t2 ))
let t1 = tt::Ident::new(format!("{}_self", first.text), first.span);
let t2 = tt::Ident::new(format!("{}_other", first.text), first.span);
quote!(span =>#t1 .eq( #t2 ))
};
quote!(#first ##rest)
quote!(span =>#first ##rest)
}
};
quote! { ( #pat1 , #pat2 ) #fat_arrow #body , }
quote! {span => ( #pat1 , #pat2 ) #fat_arrow #body , }
},
);
let fat_arrow = fat_arrow();
quote! {
let fat_arrow = fat_arrow(span);
quote! {span =>
fn eq(&self, other: &Self) -> bool {
match (self, other) {
##arms
@ -706,35 +725,46 @@ fn partial_eq_expand(
fn self_and_other_patterns(
adt: &BasicAdtInfo,
name: &tt::Ident,
span: SpanData,
) -> (Vec<tt::Subtree>, Vec<tt::Subtree>) {
let self_patterns = adt.shape.as_pattern_map(name, |it| {
let t = Ident::new(format!("{}_self", it.text), it.span);
quote!(#t)
});
let other_patterns = adt.shape.as_pattern_map(name, |it| {
let t = Ident::new(format!("{}_other", it.text), it.span);
quote!(#t)
});
let self_patterns = adt.shape.as_pattern_map(
name,
|it| {
let t = tt::Ident::new(format!("{}_self", it.text), it.span);
quote!(span =>#t)
},
span,
);
let other_patterns = adt.shape.as_pattern_map(
name,
|it| {
let t = tt::Ident::new(format!("{}_other", it.text), it.span);
quote!(span =>#t)
},
span,
);
(self_patterns, other_patterns)
}
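The `{field}_self`/`{field}_other` bindings built here are what the `PartialEq` and `Ord` expansions pattern-match on in their generated arms. Hand-expanded for a two-field struct, the derived `PartialEq` comes out roughly like this (an illustrative shape, not the exact emitted tokens):

    struct Point { x: i32, y: i32 }

    impl PartialEq for Point {
        fn eq(&self, other: &Self) -> bool {
            match (self, other) {
                (
                    Point { x: x_self, y: y_self },
                    Point { x: x_other, y: y_other },
                ) => x_self == x_other && y_self == y_other,
            }
        }
    }

    fn main() {
        assert!(Point { x: 1, y: 2 } == Point { x: 1, y: 2 });
    }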
fn ord_expand(
db: &dyn ExpandDatabase,
id: MacroCallId,
span: SpanData,
tt: &ast::Adt,
tm: &TokenMap,
tm: SpanMapRef<'_>,
) -> ExpandResult<tt::Subtree> {
let krate = &find_builtin_crate(db, id);
expand_simple_derive(tt, tm, quote! { #krate::cmp::Ord }, |adt| {
let krate = &find_builtin_crate(db, id, span);
expand_simple_derive(span, tt, tm, quote! {span => #krate::cmp::Ord }, |adt| {
fn compare(
krate: &tt::TokenTree,
left: tt::Subtree,
right: tt::Subtree,
rest: tt::Subtree,
span: SpanData,
) -> tt::Subtree {
let fat_arrow1 = fat_arrow();
let fat_arrow2 = fat_arrow();
quote! {
let fat_arrow1 = fat_arrow(span);
let fat_arrow2 = fat_arrow(span);
quote! {span =>
match #left.cmp(&#right) {
#krate::cmp::Ordering::Equal #fat_arrow1 {
#rest
@ -745,34 +775,34 @@ fn ord_expand(
}
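The `compare` helper nests one `match` per field so that later fields are consulted only when every earlier field compared `Equal`; folding over the fields in reverse (as below) builds that nesting inside-out. Hand-expanded for two fields:

    use std::cmp::Ordering;

    fn cmp_two(a: (i32, i32), b: (i32, i32)) -> Ordering {
        match a.0.cmp(&b.0) {
            Ordering::Equal => match a.1.cmp(&b.1) {
                Ordering::Equal => Ordering::Equal, // the innermost `rest`
                c => c, // short-circuit on the first non-equal field
            },
            c => c,
        }
    }

    fn main() {
        assert_eq!(cmp_two((1, 5), (1, 3)), Ordering::Greater);
    }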
if matches!(adt.shape, AdtShape::Union) {
// FIXME: Return expand error here
return quote!();
return quote!(span =>);
}
let (self_patterns, other_patterns) = self_and_other_patterns(adt, &adt.name);
let arms = izip!(self_patterns, other_patterns, adt.shape.field_names()).map(
let (self_patterns, other_patterns) = self_and_other_patterns(adt, &adt.name, span);
let arms = izip!(self_patterns, other_patterns, adt.shape.field_names(span)).map(
|(pat1, pat2, fields)| {
let mut body = quote!(#krate::cmp::Ordering::Equal);
let mut body = quote!(span =>#krate::cmp::Ordering::Equal);
for f in fields.into_iter().rev() {
let t1 = Ident::new(format!("{}_self", f.text), f.span);
let t2 = Ident::new(format!("{}_other", f.text), f.span);
body = compare(krate, quote!(#t1), quote!(#t2), body);
let t1 = tt::Ident::new(format!("{}_self", f.text), f.span);
let t2 = tt::Ident::new(format!("{}_other", f.text), f.span);
body = compare(krate, quote!(span =>#t1), quote!(span =>#t2), body, span);
}
let fat_arrow = fat_arrow();
quote! { ( #pat1 , #pat2 ) #fat_arrow #body , }
let fat_arrow = fat_arrow(span);
quote! {span => ( #pat1 , #pat2 ) #fat_arrow #body , }
},
);
let fat_arrow = fat_arrow();
let mut body = quote! {
let fat_arrow = fat_arrow(span);
let mut body = quote! {span =>
match (self, other) {
##arms
_unused #fat_arrow #krate::cmp::Ordering::Equal
}
};
if matches!(&adt.shape, AdtShape::Enum { .. }) {
let left = quote!(#krate::intrinsics::discriminant_value(self));
let right = quote!(#krate::intrinsics::discriminant_value(other));
body = compare(krate, left, right, body);
let left = quote!(span =>#krate::intrinsics::discriminant_value(self));
let right = quote!(span =>#krate::intrinsics::discriminant_value(other));
body = compare(krate, left, right, body, span);
}
quote! {
quote! {span =>
fn cmp(&self, other: &Self) -> #krate::cmp::Ordering {
#body
}
@ -783,20 +813,22 @@ fn ord_expand(
fn partial_ord_expand(
db: &dyn ExpandDatabase,
id: MacroCallId,
span: SpanData,
tt: &ast::Adt,
tm: &TokenMap,
tm: SpanMapRef<'_>,
) -> ExpandResult<tt::Subtree> {
let krate = &find_builtin_crate(db, id);
expand_simple_derive(tt, tm, quote! { #krate::cmp::PartialOrd }, |adt| {
let krate = &find_builtin_crate(db, id, span);
expand_simple_derive(span, tt, tm, quote! {span => #krate::cmp::PartialOrd }, |adt| {
fn compare(
krate: &tt::TokenTree,
left: tt::Subtree,
right: tt::Subtree,
rest: tt::Subtree,
span: SpanData,
) -> tt::Subtree {
let fat_arrow1 = fat_arrow();
let fat_arrow2 = fat_arrow();
quote! {
let fat_arrow1 = fat_arrow(span);
let fat_arrow2 = fat_arrow(span);
quote! {span =>
match #left.partial_cmp(&#right) {
#krate::option::Option::Some(#krate::cmp::Ordering::Equal) #fat_arrow1 {
#rest
@ -807,37 +839,39 @@ fn partial_ord_expand(
}
if matches!(adt.shape, AdtShape::Union) {
// FIXME: Return expand error here
return quote!();
return quote!(span =>);
}
let left = quote!(#krate::intrinsics::discriminant_value(self));
let right = quote!(#krate::intrinsics::discriminant_value(other));
let left = quote!(span =>#krate::intrinsics::discriminant_value(self));
let right = quote!(span =>#krate::intrinsics::discriminant_value(other));
let (self_patterns, other_patterns) = self_and_other_patterns(adt, &adt.name);
let arms = izip!(self_patterns, other_patterns, adt.shape.field_names()).map(
let (self_patterns, other_patterns) = self_and_other_patterns(adt, &adt.name, span);
let arms = izip!(self_patterns, other_patterns, adt.shape.field_names(span)).map(
|(pat1, pat2, fields)| {
let mut body = quote!(#krate::option::Option::Some(#krate::cmp::Ordering::Equal));
let mut body =
quote!(span =>#krate::option::Option::Some(#krate::cmp::Ordering::Equal));
for f in fields.into_iter().rev() {
let t1 = Ident::new(format!("{}_self", f.text), f.span);
let t2 = Ident::new(format!("{}_other", f.text), f.span);
body = compare(krate, quote!(#t1), quote!(#t2), body);
let t1 = tt::Ident::new(format!("{}_self", f.text), f.span);
let t2 = tt::Ident::new(format!("{}_other", f.text), f.span);
body = compare(krate, quote!(span =>#t1), quote!(span =>#t2), body, span);
}
let fat_arrow = fat_arrow();
quote! { ( #pat1 , #pat2 ) #fat_arrow #body , }
let fat_arrow = fat_arrow(span);
quote! {span => ( #pat1 , #pat2 ) #fat_arrow #body , }
},
);
let fat_arrow = fat_arrow();
let fat_arrow = fat_arrow(span);
let body = compare(
krate,
left,
right,
quote! {
quote! {span =>
match (self, other) {
##arms
_unused #fat_arrow #krate::option::Option::Some(#krate::cmp::Ordering::Equal)
}
},
span,
);
quote! {
quote! {span =>
fn partial_cmp(&self, other: &Self) -> #krate::option::Option::Option<#krate::cmp::Ordering> {
#body
}


@ -1,17 +1,24 @@
//! Builtin macro
use base_db::{AnchoredPath, Edition, FileId};
use base_db::{
span::{SpanAnchor, SpanData, SyntaxContextId, ROOT_ERASED_FILE_AST_ID},
AnchoredPath, Edition, FileId,
};
use cfg::CfgExpr;
use either::Either;
use mbe::{parse_exprs_with_sep, parse_to_token_tree, TokenMap};
use itertools::Itertools;
use mbe::{parse_exprs_with_sep, parse_to_token_tree};
use syntax::{
ast::{self, AstToken},
SmolStr,
};
use crate::{
db::ExpandDatabase, name, quote, tt, EagerCallInfo, ExpandError, ExpandResult, MacroCallId,
MacroCallLoc,
db::ExpandDatabase,
hygiene::span_with_def_site_ctxt,
name, quote,
tt::{self, DelimSpan},
ExpandError, ExpandResult, HirFileIdExt, MacroCallId, MacroCallLoc,
};
macro_rules! register_builtin {
@ -36,7 +43,10 @@ macro_rules! register_builtin {
let expander = match *self {
$( BuiltinFnLikeExpander::$kind => $expand, )*
};
expander(db, id, tt)
let span = db.lookup_intern_macro_call(id).span(db);
let span = span_with_def_site_ctxt(db, span, id);
expander(db, id, tt, span)
}
}
@ -44,13 +54,16 @@ macro_rules! register_builtin {
pub fn expand(
&self,
db: &dyn ExpandDatabase,
arg_id: MacroCallId,
id: MacroCallId,
tt: &tt::Subtree,
) -> ExpandResult<tt::Subtree> {
let expander = match *self {
$( EagerExpander::$e_kind => $e_expand, )*
};
expander(db, arg_id, tt)
let span = db.lookup_intern_macro_call(id).span(db);
let span = span_with_def_site_ctxt(db, span, id);
expander(db, id, tt, span)
}
}
@ -109,29 +122,44 @@ register_builtin! {
(option_env, OptionEnv) => option_env_expand
}
const DOLLAR_CRATE: tt::Ident =
tt::Ident { text: SmolStr::new_inline("$crate"), span: tt::TokenId::unspecified() };
fn mk_pound(span: SpanData) -> tt::Subtree {
crate::quote::IntoTt::to_subtree(
vec![crate::tt::Leaf::Punct(crate::tt::Punct {
char: '#',
spacing: crate::tt::Spacing::Alone,
span: span,
})
.into()],
span,
)
}
fn module_path_expand(
_db: &dyn ExpandDatabase,
_id: MacroCallId,
_tt: &tt::Subtree,
span: SpanData,
) -> ExpandResult<tt::Subtree> {
// Just return a dummy result.
ExpandResult::ok(quote! { "module::path" })
ExpandResult::ok(quote! {span =>
"module::path"
})
}
fn line_expand(
_db: &dyn ExpandDatabase,
_id: MacroCallId,
_tt: &tt::Subtree,
span: SpanData,
) -> ExpandResult<tt::Subtree> {
// dummy implementation for type-checking purposes
// Note that `line!` and `column!` will never be implemented properly, as they are by definition
// not incremental
ExpandResult::ok(tt::Subtree {
delimiter: tt::Delimiter::unspecified(),
delimiter: tt::Delimiter::dummy_invisible(),
token_trees: vec![tt::TokenTree::Leaf(tt::Leaf::Literal(tt::Literal {
text: "0u32".into(),
span: tt::Span::UNSPECIFIED,
span,
}))],
})
}
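The dummy `0u32` is good enough because any faithful expansion of `line!`/`column!` would depend on the token's absolute source position, so every edit above a call site would invalidate the cached expansion. A runnable reminder of the real macros' behavior:

    fn main() {
        // Inserting or deleting any line above this one changes the output,
        // which is why the expansion can never be cached incrementally.
        println!("expanded at line {}, column {}", line!(), column!());
    }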
@ -140,26 +168,29 @@ fn log_syntax_expand(
_db: &dyn ExpandDatabase,
_id: MacroCallId,
_tt: &tt::Subtree,
span: SpanData,
) -> ExpandResult<tt::Subtree> {
ExpandResult::ok(quote! {})
ExpandResult::ok(quote! {span =>})
}
fn trace_macros_expand(
_db: &dyn ExpandDatabase,
_id: MacroCallId,
_tt: &tt::Subtree,
span: SpanData,
) -> ExpandResult<tt::Subtree> {
ExpandResult::ok(quote! {})
ExpandResult::ok(quote! {span =>})
}
fn stringify_expand(
_db: &dyn ExpandDatabase,
_id: MacroCallId,
tt: &tt::Subtree,
span: SpanData,
) -> ExpandResult<tt::Subtree> {
let pretty = ::tt::pretty(&tt.token_trees);
let expanded = quote! {
let expanded = quote! {span =>
#pretty
};
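This roughly mirrors what the real `stringify!` does: pretty-print the argument tokens and emit the result as a string literal, with single spaces between tokens:

    fn main() {
        assert_eq!(stringify!(1 + 1), "1 + 1"); // the tokens, not the value `2`
        assert_eq!(stringify!(x), "x");
    }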
@ -170,27 +201,29 @@ fn assert_expand(
_db: &dyn ExpandDatabase,
_id: MacroCallId,
tt: &tt::Subtree,
span: SpanData,
) -> ExpandResult<tt::Subtree> {
let args = parse_exprs_with_sep(tt, ',');
let dollar_crate = tt::Ident { text: SmolStr::new_inline("$crate"), span };
let expanded = match &*args {
[cond, panic_args @ ..] => {
let comma = tt::Subtree {
delimiter: tt::Delimiter::unspecified(),
delimiter: tt::Delimiter::dummy_invisible(),
token_trees: vec![tt::TokenTree::Leaf(tt::Leaf::Punct(tt::Punct {
char: ',',
spacing: tt::Spacing::Alone,
span: tt::TokenId::unspecified(),
span,
}))],
};
let cond = cond.clone();
let panic_args = itertools::Itertools::intersperse(panic_args.iter().cloned(), comma);
quote! {{
quote! {span =>{
if !(#cond) {
#DOLLAR_CRATE::panic!(##panic_args);
#dollar_crate::panic!(##panic_args);
}
}}
}
[] => quote! {{}},
[] => quote! {span =>{}},
};
ExpandResult::ok(expanded)
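The generated shape matches the standard desugaring: `assert!(cond, args...)` becomes a block that panics with the formatted arguments when the condition is false. Hand-expanded (a sketch; the real expansion calls through `$crate::panic!`):

    fn main() {
        let x = 2 + 2;
        // assert!(x == 4, "math is broken: {}", x) expands to roughly:
        {
            if !(x == 4) {
                panic!("math is broken: {}", x);
            }
        }
    }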
@ -200,12 +233,13 @@ fn file_expand(
_db: &dyn ExpandDatabase,
_id: MacroCallId,
_tt: &tt::Subtree,
span: SpanData,
) -> ExpandResult<tt::Subtree> {
// FIXME: RA purposefully lacks knowledge of absolute file names
// so just return "".
let file_name = "";
let expanded = quote! {
let expanded = quote! {span =>
#file_name
};
@ -216,16 +250,18 @@ fn format_args_expand(
db: &dyn ExpandDatabase,
id: MacroCallId,
tt: &tt::Subtree,
span: SpanData,
) -> ExpandResult<tt::Subtree> {
format_args_expand_general(db, id, tt, "")
format_args_expand_general(db, id, tt, "", span)
}
fn format_args_nl_expand(
db: &dyn ExpandDatabase,
id: MacroCallId,
tt: &tt::Subtree,
span: SpanData,
) -> ExpandResult<tt::Subtree> {
format_args_expand_general(db, id, tt, "\\n")
format_args_expand_general(db, id, tt, "\\n", span)
}
fn format_args_expand_general(
@ -234,11 +270,12 @@ fn format_args_expand_general(
tt: &tt::Subtree,
// FIXME: Make use of this so that mir interpretation works properly
_end_string: &str,
span: SpanData,
) -> ExpandResult<tt::Subtree> {
let pound = quote! {@PUNCT '#'};
let pound = mk_pound(span);
let mut tt = tt.clone();
tt.delimiter.kind = tt::DelimiterKind::Parenthesis;
return ExpandResult::ok(quote! {
return ExpandResult::ok(quote! {span =>
builtin #pound format_args #tt
});
}
@ -247,25 +284,25 @@ fn asm_expand(
_db: &dyn ExpandDatabase,
_id: MacroCallId,
tt: &tt::Subtree,
span: SpanData,
) -> ExpandResult<tt::Subtree> {
// We expand all assembly snippets to `format_args!` invocations to get format syntax
// highlighting for them.
let mut literals = Vec::new();
for tt in tt.token_trees.chunks(2) {
match tt {
[tt::TokenTree::Leaf(tt::Leaf::Literal(lit))]
| [tt::TokenTree::Leaf(tt::Leaf::Literal(lit)), tt::TokenTree::Leaf(tt::Leaf::Punct(tt::Punct { char: ',', span: _, spacing: _ }))] =>
{
let krate = DOLLAR_CRATE.clone();
literals.push(quote!(#krate::format_args!(#lit);));
let dollar_krate = tt::Ident { text: SmolStr::new_inline("$crate"), span };
literals.push(quote!(span=>#dollar_krate::format_args!(#lit);));
}
_ => break,
}
}
let pound = quote! {@PUNCT '#'};
let expanded = quote! {
let pound = mk_pound(span);
let expanded = quote! {span =>
builtin #pound asm (
{##literals}
)
@ -277,20 +314,22 @@ fn global_asm_expand(
_db: &dyn ExpandDatabase,
_id: MacroCallId,
_tt: &tt::Subtree,
span: SpanData,
) -> ExpandResult<tt::Subtree> {
// Expand to nothing (at item-level)
ExpandResult::ok(quote! {})
ExpandResult::ok(quote! {span =>})
}
fn cfg_expand(
db: &dyn ExpandDatabase,
id: MacroCallId,
tt: &tt::Subtree,
span: SpanData,
) -> ExpandResult<tt::Subtree> {
let loc = db.lookup_intern_macro_call(id);
let expr = CfgExpr::parse(tt);
let enabled = db.crate_graph()[loc.krate].cfg_options.check(&expr) != Some(false);
let expanded = if enabled { quote!(true) } else { quote!(false) };
let expanded = if enabled { quote!(span=>true) } else { quote!(span=>false) };
ExpandResult::ok(expanded)
}
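Because the crate graph already records the active cfg options, `cfg!` can be folded to a plain boolean literal, exactly as rustc does:

    fn main() {
        // rustc, like the expander above, replaces cfg!(..) with `true` or
        // `false` during expansion.
        if cfg!(target_pointer_width = "64") {
            println!("64-bit target");
        } else {
            println!("not a 64-bit target");
        }
    }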
@ -298,13 +337,15 @@ fn panic_expand(
db: &dyn ExpandDatabase,
id: MacroCallId,
tt: &tt::Subtree,
span: SpanData,
) -> ExpandResult<tt::Subtree> {
let loc: MacroCallLoc = db.lookup_intern_macro_call(id);
let dollar_crate = tt::Ident { text: SmolStr::new_inline("$crate"), span };
// Expand to a macro call `$crate::panic::panic_{edition}`
let mut call = if db.crate_graph()[loc.krate].edition >= Edition::Edition2021 {
quote!(#DOLLAR_CRATE::panic::panic_2021!)
quote!(span =>#dollar_crate::panic::panic_2021!)
} else {
quote!(#DOLLAR_CRATE::panic::panic_2015!)
quote!(span =>#dollar_crate::panic::panic_2015!)
};
// Pass the original arguments
@ -316,13 +357,15 @@ fn unreachable_expand(
db: &dyn ExpandDatabase,
id: MacroCallId,
tt: &tt::Subtree,
span: SpanData,
) -> ExpandResult<tt::Subtree> {
let loc: MacroCallLoc = db.lookup_intern_macro_call(id);
// Expand to a macro call `$crate::panic::unreachable_{edition}`
let dollar_crate = tt::Ident { text: SmolStr::new_inline("$crate"), span };
let mut call = if db.crate_graph()[loc.krate].edition >= Edition::Edition2021 {
quote!(#DOLLAR_CRATE::panic::unreachable_2021!)
quote!(span =>#dollar_crate::panic::unreachable_2021!)
} else {
quote!(#DOLLAR_CRATE::panic::unreachable_2015!)
quote!(span =>#dollar_crate::panic::unreachable_2015!)
};
// Pass the original arguments
@ -352,6 +395,7 @@ fn compile_error_expand(
_db: &dyn ExpandDatabase,
_id: MacroCallId,
tt: &tt::Subtree,
span: SpanData,
) -> ExpandResult<tt::Subtree> {
let err = match &*tt.token_trees {
[tt::TokenTree::Leaf(tt::Leaf::Literal(it))] => match unquote_str(it) {
@ -361,13 +405,14 @@ fn compile_error_expand(
_ => ExpandError::other("`compile_error!` argument must be a string"),
};
ExpandResult { value: quote! {}, err: Some(err) }
ExpandResult { value: quote! {span =>}, err: Some(err) }
}
fn concat_expand(
_db: &dyn ExpandDatabase,
_arg_id: MacroCallId,
tt: &tt::Subtree,
span: SpanData,
) -> ExpandResult<tt::Subtree> {
let mut err = None;
let mut text = String::new();
@ -407,13 +452,14 @@ fn concat_expand(
}
}
}
ExpandResult { value: quote!(#text), err }
ExpandResult { value: quote!(span =>#text), err }
}
fn concat_bytes_expand(
_db: &dyn ExpandDatabase,
_arg_id: MacroCallId,
tt: &tt::Subtree,
span: SpanData,
) -> ExpandResult<tt::Subtree> {
let mut bytes = Vec::new();
let mut err = None;
@ -446,8 +492,25 @@ fn concat_bytes_expand(
}
}
}
let ident = tt::Ident { text: bytes.join(", ").into(), span: tt::TokenId::unspecified() };
ExpandResult { value: quote!([#ident]), err }
let value = tt::Subtree {
delimiter: tt::Delimiter { open: span, close: span, kind: tt::DelimiterKind::Bracket },
token_trees: {
Itertools::intersperse_with(
bytes.into_iter().map(|it| {
tt::TokenTree::Leaf(tt::Leaf::Literal(tt::Literal { text: it.into(), span }))
}),
|| {
tt::TokenTree::Leaf(tt::Leaf::Punct(tt::Punct {
char: ',',
spacing: tt::Spacing::Alone,
span,
}))
},
)
.collect()
},
};
ExpandResult { value, err }
}
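Each byte now becomes its own literal token inside a bracketed subtree, joined by comma puncts via `Itertools::intersperse_with`; the fully qualified call form presumably sidesteps the ambiguity with the unstable `Iterator::intersperse`. The same joining pattern on plain strings (assuming the itertools crate as a dependency):

    use itertools::Itertools;

    fn main() {
        let bytes = ["1u8", "2u8", "3u8"];
        // Fully qualified, as above, to avoid the unstable-name ambiguity.
        let joined: String = Itertools::intersperse_with(
            bytes.into_iter().map(String::from),
            || ", ".to_string(),
        )
        .collect();
        assert_eq!(joined, "1u8, 2u8, 3u8");
    }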
fn concat_bytes_expand_subtree(
@ -480,6 +543,7 @@ fn concat_idents_expand(
_db: &dyn ExpandDatabase,
_arg_id: MacroCallId,
tt: &tt::Subtree,
span: SpanData,
) -> ExpandResult<tt::Subtree> {
let mut err = None;
let mut ident = String::new();
@ -494,8 +558,9 @@ fn concat_idents_expand(
}
}
}
let ident = tt::Ident { text: ident.into(), span: tt::TokenId::unspecified() };
ExpandResult { value: quote!(#ident), err }
// FIXME merge spans
let ident = tt::Ident { text: ident.into(), span };
ExpandResult { value: quote!(span =>#ident), err }
}
fn relative_file(
@ -530,45 +595,48 @@ fn parse_string(tt: &tt::Subtree) -> Result<String, ExpandError> {
fn include_expand(
db: &dyn ExpandDatabase,
arg_id: MacroCallId,
_tt: &tt::Subtree,
tt: &tt::Subtree,
span: SpanData,
) -> ExpandResult<tt::Subtree> {
match db.include_expand(arg_id) {
Ok((res, _)) => ExpandResult::ok(res.0.clone()),
Err(e) => ExpandResult::new(tt::Subtree::empty(), e),
let file_id = match include_input_to_file_id(db, arg_id, tt) {
Ok(it) => it,
Err(e) => {
return ExpandResult::new(tt::Subtree::empty(DelimSpan { open: span, close: span }), e)
}
};
match parse_to_token_tree(
SpanAnchor { file_id, ast_id: ROOT_ERASED_FILE_AST_ID },
SyntaxContextId::ROOT,
&db.file_text(file_id),
) {
Some(it) => ExpandResult::ok(it),
None => ExpandResult::new(
tt::Subtree::empty(DelimSpan { open: span, close: span }),
ExpandError::other("failed to parse included file"),
),
}
}
pub(crate) fn include_arg_to_tt(
pub fn include_input_to_file_id(
db: &dyn ExpandDatabase,
arg_id: MacroCallId,
) -> Result<(triomphe::Arc<(::tt::Subtree<::tt::TokenId>, TokenMap)>, FileId), ExpandError> {
let loc = db.lookup_intern_macro_call(arg_id);
let Some(EagerCallInfo { arg, arg_id, .. }) = loc.eager.as_deref() else {
panic!("include_arg_to_tt called on non include macro call: {:?}", &loc.eager);
};
let path = parse_string(&arg.0)?;
let file_id = relative_file(db, *arg_id, &path, false)?;
let (subtree, map) =
parse_to_token_tree(&db.file_text(file_id)).ok_or(mbe::ExpandError::ConversionError)?;
Ok((triomphe::Arc::new((subtree, map)), file_id))
arg: &tt::Subtree,
) -> Result<FileId, ExpandError> {
relative_file(db, arg_id, &parse_string(arg)?, false)
}
fn include_bytes_expand(
_db: &dyn ExpandDatabase,
_arg_id: MacroCallId,
tt: &tt::Subtree,
_tt: &tt::Subtree,
span: SpanData,
) -> ExpandResult<tt::Subtree> {
if let Err(e) = parse_string(tt) {
return ExpandResult::new(tt::Subtree::empty(), e);
}
// FIXME: actually read the file here if the user asked for macro expansion
let res = tt::Subtree {
delimiter: tt::Delimiter::unspecified(),
delimiter: tt::Delimiter::dummy_invisible(),
token_trees: vec![tt::TokenTree::Leaf(tt::Leaf::Literal(tt::Literal {
text: r#"b"""#.into(),
span: tt::TokenId::unspecified(),
span,
}))],
};
ExpandResult::ok(res)
@ -578,10 +646,13 @@ fn include_str_expand(
db: &dyn ExpandDatabase,
arg_id: MacroCallId,
tt: &tt::Subtree,
span: SpanData,
) -> ExpandResult<tt::Subtree> {
let path = match parse_string(tt) {
Ok(it) => it,
Err(e) => return ExpandResult::new(tt::Subtree::empty(), e),
Err(e) => {
return ExpandResult::new(tt::Subtree::empty(DelimSpan { open: span, close: span }), e)
}
};
// FIXME: we're not able to read excluded files (which is most of them because
@ -591,14 +662,14 @@ fn include_str_expand(
let file_id = match relative_file(db, arg_id, &path, true) {
Ok(file_id) => file_id,
Err(_) => {
return ExpandResult::ok(quote!(""));
return ExpandResult::ok(quote!(span =>""));
}
};
let text = db.file_text(file_id);
let text = &*text;
ExpandResult::ok(quote!(#text))
ExpandResult::ok(quote!(span =>#text))
}
fn get_env_inner(db: &dyn ExpandDatabase, arg_id: MacroCallId, key: &str) -> Option<String> {
@ -610,10 +681,13 @@ fn env_expand(
db: &dyn ExpandDatabase,
arg_id: MacroCallId,
tt: &tt::Subtree,
span: SpanData,
) -> ExpandResult<tt::Subtree> {
let key = match parse_string(tt) {
Ok(it) => it,
Err(e) => return ExpandResult::new(tt::Subtree::empty(), e),
Err(e) => {
return ExpandResult::new(tt::Subtree::empty(DelimSpan { open: span, close: span }), e)
}
};
let mut err = None;
@ -630,7 +704,7 @@ fn env_expand(
// `include!("foo.rs")`, which might cause an infinite loop
"UNRESOLVED_ENV_VAR".to_string()
});
let expanded = quote! { #s };
let expanded = quote! {span => #s };
ExpandResult { value: expanded, err }
}
@ -639,15 +713,18 @@ fn option_env_expand(
db: &dyn ExpandDatabase,
arg_id: MacroCallId,
tt: &tt::Subtree,
span: SpanData,
) -> ExpandResult<tt::Subtree> {
let key = match parse_string(tt) {
Ok(it) => it,
Err(e) => return ExpandResult::new(tt::Subtree::empty(), e),
Err(e) => {
return ExpandResult::new(tt::Subtree::empty(DelimSpan { open: span, close: span }), e)
}
};
// FIXME: Use `DOLLAR_CRATE` when that works in eager macros.
let expanded = match get_env_inner(db, arg_id, &key) {
None => quote! { ::core::option::Option::None::<&str> },
Some(s) => quote! { ::core::option::Option::Some(#s) },
None => quote! {span => ::core::option::Option::None::<&str> },
Some(s) => quote! {span => ::core::option::Option::Some(#s) },
};
ExpandResult::ok(expanded)
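Both expanders reproduce the compile-time behavior of the standard macros: `env!` yields a string literal (or an error plus the placeholder above), while `option_env!` yields `Some(..)` or `None::<&str>`. The real macros, assuming a Cargo build so `CARGO_PKG_NAME` is set:

    fn main() {
        let name: &'static str = env!("CARGO_PKG_NAME"); // compile error if unset
        let missing: Option<&'static str> = option_env!("SOME_SURELY_UNSET_VAR");
        println!("{name} {missing:?}"); // prints the package name and `None`
    }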


@ -1,22 +1,31 @@
//! Defines database & queries for macro expansion.
use base_db::{salsa, CrateId, Edition, SourceDatabase};
use base_db::{
salsa::{self, debug::DebugQueryTable},
span::SyntaxContextId,
CrateId, Edition, FileId, SourceDatabase,
};
use either::Either;
use limit::Limit;
use mbe::{syntax_node_to_token_tree, ValueResult};
use rustc_hash::FxHashSet;
use syntax::{
ast::{self, HasAttrs, HasDocComments},
AstNode, GreenNode, Parse, SyntaxError, SyntaxNode, SyntaxToken, T,
AstNode, Parse, SyntaxError, SyntaxNode, SyntaxToken, T,
};
use triomphe::Arc;
use crate::{
ast_id_map::AstIdMap, builtin_attr_macro::pseudo_derive_attr_expansion,
builtin_fn_macro::EagerExpander, fixup, hygiene::HygieneFrame, tt, AstId, BuiltinAttrExpander,
BuiltinDeriveExpander, BuiltinFnLikeExpander, EagerCallInfo, ExpandError, ExpandResult,
ExpandTo, HirFileId, HirFileIdRepr, MacroCallId, MacroCallKind, MacroCallLoc, MacroDefId,
MacroDefKind, MacroFile, ProcMacroExpander,
ast_id_map::AstIdMap,
attrs::RawAttrs,
builtin_attr_macro::pseudo_derive_attr_expansion,
builtin_fn_macro::EagerExpander,
fixup::{self, SyntaxFixupUndoInfo},
hygiene::{apply_mark, SyntaxContextData, Transparency},
span::{RealSpanMap, SpanMap, SpanMapRef},
tt, AstId, BuiltinAttrExpander, BuiltinDeriveExpander, BuiltinFnLikeExpander, EagerCallInfo,
ExpandError, ExpandResult, ExpandTo, ExpansionSpanMap, HirFileId, HirFileIdRepr, MacroCallId,
MacroCallKind, MacroCallLoc, MacroDefId, MacroDefKind, MacroFileId, ProcMacroExpander,
};
/// Total limit on the number of tokens produced by any macro invocation.
@ -30,32 +39,43 @@ static TOKEN_LIMIT: Limit = Limit::new(1_048_576);
#[derive(Debug, Clone, Eq, PartialEq)]
/// Old-style `macro_rules` or the new macros 2.0
pub struct DeclarativeMacroExpander {
pub mac: mbe::DeclarativeMacro,
pub def_site_token_map: mbe::TokenMap,
pub mac: mbe::DeclarativeMacro<base_db::span::SpanData>,
pub transparency: Transparency,
}
impl DeclarativeMacroExpander {
pub fn expand(&self, tt: tt::Subtree) -> ExpandResult<tt::Subtree> {
pub fn expand(
&self,
db: &dyn ExpandDatabase,
tt: tt::Subtree,
call_id: MacroCallId,
) -> ExpandResult<tt::Subtree> {
match self.mac.err() {
Some(e) => ExpandResult::new(
tt::Subtree::empty(),
tt::Subtree::empty(tt::DelimSpan::DUMMY),
ExpandError::other(format!("invalid macro definition: {e}")),
),
None => self.mac.expand(tt).map_err(Into::into),
None => self
.mac
.expand(&tt, |s| s.ctx = apply_mark(db, s.ctx, call_id, self.transparency))
.map_err(Into::into),
}
}
pub fn map_id_down(&self, token_id: tt::TokenId) -> tt::TokenId {
self.mac.map_id_down(token_id)
}
pub fn map_id_up(&self, token_id: tt::TokenId) -> (tt::TokenId, mbe::Origin) {
self.mac.map_id_up(token_id)
pub fn expand_unhygienic(&self, tt: tt::Subtree) -> ExpandResult<tt::Subtree> {
match self.mac.err() {
Some(e) => ExpandResult::new(
tt::Subtree::empty(tt::DelimSpan::DUMMY),
ExpandError::other(format!("invalid macro definition: {e}")),
),
None => self.mac.expand(&tt, |_| ()).map_err(Into::into),
}
}
}
#[derive(Debug, Clone, Eq, PartialEq)]
pub enum TokenExpander {
/// Old-style `macro_rules` or the new macros 2.0
DeclarativeMacro(Arc<DeclarativeMacroExpander>),
/// Stuff like `line!` and `file!`.
BuiltIn(BuiltinFnLikeExpander),
@ -69,31 +89,6 @@ pub enum TokenExpander {
ProcMacro(ProcMacroExpander),
}
// FIXME: Get rid of these methods
impl TokenExpander {
pub(crate) fn map_id_down(&self, id: tt::TokenId) -> tt::TokenId {
match self {
TokenExpander::DeclarativeMacro(expander) => expander.map_id_down(id),
TokenExpander::BuiltIn(..)
| TokenExpander::BuiltInEager(..)
| TokenExpander::BuiltInAttr(..)
| TokenExpander::BuiltInDerive(..)
| TokenExpander::ProcMacro(..) => id,
}
}
pub(crate) fn map_id_up(&self, id: tt::TokenId) -> (tt::TokenId, mbe::Origin) {
match self {
TokenExpander::DeclarativeMacro(expander) => expander.map_id_up(id),
TokenExpander::BuiltIn(..)
| TokenExpander::BuiltInEager(..)
| TokenExpander::BuiltInAttr(..)
| TokenExpander::BuiltInDerive(..)
| TokenExpander::ProcMacro(..) => (id, mbe::Origin::Call),
}
}
}
#[salsa::query_group(ExpandDatabaseStorage)]
pub trait ExpandDatabase: SourceDatabase {
fn ast_id_map(&self, file_id: HirFileId) -> Arc<AstIdMap>;
@ -108,8 +103,12 @@ pub trait ExpandDatabase: SourceDatabase {
// This query is LRU cached
fn parse_macro_expansion(
&self,
macro_file: MacroFile,
) -> ExpandResult<(Parse<SyntaxNode>, Arc<mbe::TokenMap>)>;
macro_file: MacroFileId,
) -> ExpandResult<(Parse<SyntaxNode>, Arc<ExpansionSpanMap>)>;
#[salsa::transparent]
fn span_map(&self, file_id: HirFileId) -> SpanMap;
fn real_span_map(&self, file_id: FileId) -> Arc<RealSpanMap>;
/// Macro ids. That's probably the trickiest bit in rust-analyzer, and the
/// reason why we use salsa at all.
@ -118,23 +117,21 @@ pub trait ExpandDatabase: SourceDatabase {
/// to be incremental.
#[salsa::interned]
fn intern_macro_call(&self, macro_call: MacroCallLoc) -> MacroCallId;
#[salsa::interned]
fn intern_syntax_context(&self, ctx: SyntaxContextData) -> SyntaxContextId;
/// Lowers syntactic macro call to a token tree representation.
#[salsa::transparent]
fn setup_syntax_context_root(&self) -> ();
#[salsa::transparent]
fn dump_syntax_contexts(&self) -> String;
/// Lowers syntactic macro call to a token tree representation. That's a firewall
/// query, only typing in the macro call itself changes the returned
/// subtree.
fn macro_arg(
&self,
id: MacroCallId,
) -> ValueResult<
Option<Arc<(tt::Subtree, mbe::TokenMap, fixup::SyntaxFixupUndoInfo)>>,
Arc<Box<[SyntaxError]>>,
>;
/// Extracts syntax node, corresponding to a macro call. That's a firewall
/// query, only typing in the macro call itself changes the returned
/// subtree.
fn macro_arg_node(
&self,
id: MacroCallId,
) -> ValueResult<Option<GreenNode>, Arc<Box<[SyntaxError]>>>;
) -> ValueResult<Option<(Arc<tt::Subtree>, SyntaxFixupUndoInfo)>, Arc<Box<[SyntaxError]>>>;
/// Fetches the expander for this macro.
#[salsa::transparent]
fn macro_expander(&self, id: MacroDefId) -> TokenExpander;
@ -144,18 +141,6 @@ pub trait ExpandDatabase: SourceDatabase {
def_crate: CrateId,
id: AstId<ast::Macro>,
) -> Arc<DeclarativeMacroExpander>;
/// Expand macro call to a token tree.
// This query is LRU cached
fn macro_expand(&self, macro_call: MacroCallId) -> ExpandResult<Arc<tt::Subtree>>;
#[salsa::invoke(crate::builtin_fn_macro::include_arg_to_tt)]
fn include_expand(
&self,
arg_id: MacroCallId,
) -> Result<
(triomphe::Arc<(::tt::Subtree<::tt::TokenId>, mbe::TokenMap)>, base_db::FileId),
ExpandError,
>;
/// Special case of the previous query for procedural macros. We can't LRU
/// proc macros, since they are not deterministic in general, and
/// non-determinism breaks salsa in a very, very, very bad way.
@ -166,8 +151,20 @@ pub trait ExpandDatabase: SourceDatabase {
&self,
macro_call: MacroCallId,
) -> ExpandResult<Box<[SyntaxError]>>;
}
fn hygiene_frame(&self, file_id: HirFileId) -> Arc<HygieneFrame>;
#[inline]
pub fn span_map(db: &dyn ExpandDatabase, file_id: HirFileId) -> SpanMap {
match file_id.repr() {
HirFileIdRepr::FileId(file_id) => SpanMap::RealSpanMap(db.real_span_map(file_id)),
HirFileIdRepr::MacroFile(m) => {
SpanMap::ExpansionSpanMap(db.parse_macro_expansion(m).value.1)
}
}
}
pub fn real_span_map(db: &dyn ExpandDatabase, file_id: FileId) -> Arc<RealSpanMap> {
Arc::new(RealSpanMap::from_file(db, file_id))
}
/// This expands the given macro call, but with different arguments. This is
@ -181,21 +178,36 @@ pub fn expand_speculative(
token_to_map: SyntaxToken,
) -> Option<(SyntaxNode, SyntaxToken)> {
let loc = db.lookup_intern_macro_call(actual_macro_call);
let token_range = token_to_map.text_range();
let span_map = RealSpanMap::absolute(FileId::BOGUS);
let span_map = SpanMapRef::RealSpanMap(&span_map);
// Build the subtree and token mapping for the speculative args
let censor = censor_for_macro_input(&loc, speculative_args);
let mut fixups = fixup::fixup_syntax(speculative_args);
fixups.replace.extend(censor.into_iter().map(|node| (node.into(), Vec::new())));
let (mut tt, spec_args_tmap, _) = mbe::syntax_node_to_token_tree_with_modifications(
speculative_args,
fixups.token_map,
fixups.next_id,
fixups.replace,
fixups.append,
);
let (mut tt, undo_info) = match loc.kind {
MacroCallKind::FnLike { .. } => {
(mbe::syntax_node_to_token_tree(speculative_args, span_map), SyntaxFixupUndoInfo::NONE)
}
MacroCallKind::Derive { .. } | MacroCallKind::Attr { .. } => {
let censor = censor_for_macro_input(&loc, speculative_args);
let mut fixups = fixup::fixup_syntax(span_map, speculative_args);
fixups.append.retain(|it, _| match it {
syntax::NodeOrToken::Node(it) => !censor.contains(it),
syntax::NodeOrToken::Token(_) => true,
});
fixups.remove.extend(censor);
(
mbe::syntax_node_to_token_tree_modified(
speculative_args,
span_map,
fixups.append,
fixups.remove,
),
fixups.undo_info,
)
}
};
let (attr_arg, token_id) = match loc.kind {
let attr_arg = match loc.kind {
MacroCallKind::Attr { invoc_attr_index, .. } => {
let attr = if loc.def.is_attribute_derive() {
// for pseudo-derive expansion we actually pass the attribute itself only
@ -210,59 +222,45 @@ pub fn expand_speculative(
}?;
match attr.token_tree() {
Some(token_tree) => {
let (mut tree, map) = syntax_node_to_token_tree(attr.token_tree()?.syntax());
tree.delimiter = tt::Delimiter::unspecified();
let mut tree = syntax_node_to_token_tree(token_tree.syntax(), span_map);
tree.delimiter = tt::Delimiter::DUMMY_INVISIBLE;
let shift = mbe::Shift::new(&tt);
shift.shift_all(&mut tree);
let token_id = if token_tree.syntax().text_range().contains_range(token_range) {
let attr_input_start =
token_tree.left_delimiter_token()?.text_range().start();
let range = token_range.checked_sub(attr_input_start)?;
let token_id = shift.shift(map.token_by_range(range)?);
Some(token_id)
} else {
None
};
(Some(tree), token_id)
Some(tree)
}
_ => (None, None),
}
}
_ => (None, None),
};
let token_id = match token_id {
Some(token_id) => token_id,
// token wasn't inside an attribute input so it has to be in the general macro input
None => {
let range = token_range.checked_sub(speculative_args.text_range().start())?;
let token_id = spec_args_tmap.token_by_range(range)?;
match loc.def.kind {
MacroDefKind::Declarative(it) => {
db.decl_macro_expander(loc.krate, it).map_id_down(token_id)
}
_ => token_id,
_ => None,
}
}
_ => None,
};
// Do the actual expansion, we need to directly expand the proc macro due to the attribute args
// Otherwise the expand query will fetch the non speculative attribute args and pass those instead.
let mut speculative_expansion = match loc.def.kind {
MacroDefKind::ProcMacro(expander, ..) => {
tt.delimiter = tt::Delimiter::unspecified();
expander.expand(db, loc.def.krate, loc.krate, &tt, attr_arg.as_ref())
tt.delimiter = tt::Delimiter::DUMMY_INVISIBLE;
let call_site = loc.span(db);
expander.expand(
db,
loc.def.krate,
loc.krate,
&tt,
attr_arg.as_ref(),
call_site,
call_site,
call_site,
)
}
MacroDefKind::BuiltInAttr(BuiltinAttrExpander::Derive, _) => {
pseudo_derive_attr_expansion(&tt, attr_arg.as_ref()?)
pseudo_derive_attr_expansion(&tt, attr_arg.as_ref()?, loc.call_site)
}
MacroDefKind::BuiltInDerive(expander, ..) => {
// this cast is a bit sus, can we avoid losing the typedness here?
let adt = ast::Adt::cast(speculative_args.clone()).unwrap();
expander.expand(db, actual_macro_call, &adt, &spec_args_tmap)
expander.expand(db, actual_macro_call, &adt, span_map)
}
MacroDefKind::Declarative(it) => {
db.decl_macro_expander(loc.krate, it).expand_unhygienic(tt)
}
MacroDefKind::Declarative(it) => db.decl_macro_expander(loc.krate, it).expand(tt),
MacroDefKind::BuiltIn(it, _) => it.expand(db, actual_macro_call, &tt).map_err(Into::into),
MacroDefKind::BuiltInEager(it, _) => {
it.expand(db, actual_macro_call, &tt).map_err(Into::into)
@ -270,13 +268,14 @@ pub fn expand_speculative(
MacroDefKind::BuiltInAttr(it, _) => it.expand(db, actual_macro_call, &tt),
};
let expand_to = macro_expand_to(db, actual_macro_call);
fixup::reverse_fixups(&mut speculative_expansion.value, &spec_args_tmap, &fixups.undo_info);
let expand_to = loc.expand_to();
fixup::reverse_fixups(&mut speculative_expansion.value, &undo_info);
let (node, rev_tmap) = token_tree_to_syntax_node(&speculative_expansion.value, expand_to);
let syntax_node = node.syntax_node();
let token = rev_tmap
.ranges_by_token(token_id, token_to_map.kind())
.ranges_with_span(span_map.span_for_range(token_to_map.text_range()))
.filter_map(|range| syntax_node.covering_element(range).into_token())
.min_by_key(|t| {
// prefer tokens of the same kind and text
@ -293,7 +292,7 @@ fn ast_id_map(db: &dyn ExpandDatabase, file_id: HirFileId) -> Arc<AstIdMap> {
fn parse_or_expand(db: &dyn ExpandDatabase, file_id: HirFileId) -> SyntaxNode {
match file_id.repr() {
HirFileIdRepr::FileId(file_id) => db.parse(file_id).tree().syntax().clone(),
HirFileIdRepr::FileId(file_id) => db.parse(file_id).syntax_node(),
HirFileIdRepr::MacroFile(macro_file) => {
db.parse_macro_expansion(macro_file).value.0.syntax_node()
}
@ -312,17 +311,16 @@ fn parse_or_expand_with_err(
}
}
// FIXME: We should verify that the parsed node is one of the many macro node variants we expect
// instead of having it be untyped
fn parse_macro_expansion(
db: &dyn ExpandDatabase,
macro_file: MacroFile,
) -> ExpandResult<(Parse<SyntaxNode>, Arc<mbe::TokenMap>)> {
macro_file: MacroFileId,
) -> ExpandResult<(Parse<SyntaxNode>, Arc<ExpansionSpanMap>)> {
let _p = profile::span("parse_macro_expansion");
let mbe::ValueResult { value: tt, err } = db.macro_expand(macro_file.macro_call_id);
let expand_to = macro_expand_to(db, macro_file.macro_call_id);
tracing::debug!("expanded = {}", tt.as_debug_string());
tracing::debug!("kind = {:?}", expand_to);
let loc = db.lookup_intern_macro_call(macro_file.macro_call_id);
let expand_to = loc.expand_to();
let mbe::ValueResult { value: tt, err } = macro_expand(db, macro_file.macro_call_id, loc);
let (parse, rev_token_map) = token_tree_to_syntax_node(&tt, expand_to);
@ -333,51 +331,129 @@ fn parse_macro_expansion_error(
db: &dyn ExpandDatabase,
macro_call_id: MacroCallId,
) -> ExpandResult<Box<[SyntaxError]>> {
db.parse_macro_expansion(MacroFile { macro_call_id })
db.parse_macro_expansion(MacroFileId { macro_call_id })
.map(|it| it.0.errors().to_vec().into_boxed_slice())
}
fn parse_with_map(db: &dyn ExpandDatabase, file_id: HirFileId) -> (Parse<SyntaxNode>, SpanMap) {
match file_id.repr() {
HirFileIdRepr::FileId(file_id) => {
(db.parse(file_id).to_syntax(), SpanMap::RealSpanMap(db.real_span_map(file_id)))
}
HirFileIdRepr::MacroFile(macro_file) => {
let (parse, map) = db.parse_macro_expansion(macro_file).value;
(parse, SpanMap::ExpansionSpanMap(map))
}
}
}
fn macro_arg(
db: &dyn ExpandDatabase,
id: MacroCallId,
) -> ValueResult<
Option<Arc<(tt::Subtree, mbe::TokenMap, fixup::SyntaxFixupUndoInfo)>>,
Arc<Box<[SyntaxError]>>,
> {
let loc = db.lookup_intern_macro_call(id);
if let Some(EagerCallInfo { arg, arg_id: _, error: _ }) = loc.eager.as_deref() {
return ValueResult::ok(Some(Arc::new((arg.0.clone(), arg.1.clone(), Default::default()))));
}
let ValueResult { value, err } = db.macro_arg_node(id);
let Some(arg) = value else {
return ValueResult { value: None, err };
// FIXME: consider the following by putting fixup info into eager call info args
// ) -> ValueResult<Option<Arc<(tt::Subtree, SyntaxFixupUndoInfo)>>, Arc<Box<[SyntaxError]>>> {
) -> ValueResult<Option<(Arc<tt::Subtree>, SyntaxFixupUndoInfo)>, Arc<Box<[SyntaxError]>>> {
let mismatched_delimiters = |arg: &SyntaxNode| {
let first = arg.first_child_or_token().map_or(T![.], |it| it.kind());
let last = arg.last_child_or_token().map_or(T![.], |it| it.kind());
let well_formed_tt =
matches!((first, last), (T!['('], T![')']) | (T!['['], T![']']) | (T!['{'], T!['}']));
if !well_formed_tt {
// Don't expand malformed (unbalanced) macro invocations. This is
// less than ideal, but trying to expand unbalanced macro calls
// sometimes produces pathological, deeply nested code which breaks
// all kinds of things.
//
// Some day, we'll have explicit recursion counters for all
// recursive things, at which point this code might be removed.
cov_mark::hit!(issue9358_bad_macro_stack_overflow);
Some(Arc::new(Box::new([SyntaxError::new(
"unbalanced token tree".to_owned(),
arg.text_range(),
)]) as Box<[_]>))
} else {
None
}
};
let loc = db.lookup_intern_macro_call(id);
if let Some(EagerCallInfo { arg, .. }) = matches!(loc.def.kind, MacroDefKind::BuiltInEager(..))
.then(|| loc.eager.as_deref())
.flatten()
{
ValueResult::ok(Some((arg.clone(), SyntaxFixupUndoInfo::NONE)))
} else {
let (parse, map) = parse_with_map(db, loc.kind.file_id());
let root = parse.syntax_node();
let node = SyntaxNode::new_root(arg);
let censor = censor_for_macro_input(&loc, &node);
let mut fixups = fixup::fixup_syntax(&node);
fixups.replace.extend(censor.into_iter().map(|node| (node.into(), Vec::new())));
let (mut tt, tmap, _) = mbe::syntax_node_to_token_tree_with_modifications(
&node,
fixups.token_map,
fixups.next_id,
fixups.replace,
fixups.append,
);
let syntax = match loc.kind {
MacroCallKind::FnLike { ast_id, .. } => {
let node = &ast_id.to_ptr(db).to_node(&root);
let offset = node.syntax().text_range().start();
match node.token_tree() {
Some(tt) => {
let tt = tt.syntax();
if let Some(e) = mismatched_delimiters(tt) {
return ValueResult::only_err(e);
}
tt.clone()
}
None => {
return ValueResult::only_err(Arc::new(Box::new([
SyntaxError::new_at_offset("missing token tree".to_owned(), offset),
])));
}
}
}
MacroCallKind::Derive { ast_id, .. } => {
ast_id.to_ptr(db).to_node(&root).syntax().clone()
}
MacroCallKind::Attr { ast_id, .. } => ast_id.to_ptr(db).to_node(&root).syntax().clone(),
};
let (mut tt, undo_info) = match loc.kind {
MacroCallKind::FnLike { .. } => {
(mbe::syntax_node_to_token_tree(&syntax, map.as_ref()), SyntaxFixupUndoInfo::NONE)
}
MacroCallKind::Derive { .. } | MacroCallKind::Attr { .. } => {
let censor = censor_for_macro_input(&loc, &syntax);
let mut fixups = fixup::fixup_syntax(map.as_ref(), &syntax);
fixups.append.retain(|it, _| match it {
syntax::NodeOrToken::Node(it) => !censor.contains(it),
syntax::NodeOrToken::Token(_) => true,
});
fixups.remove.extend(censor);
(
mbe::syntax_node_to_token_tree_modified(
&syntax,
map,
fixups.append,
fixups.remove,
),
fixups.undo_info,
)
}
};
if loc.def.is_proc_macro() {
// proc macros expect their inputs without parentheses, MBEs expect it with them included
tt.delimiter = tt::Delimiter::unspecified();
}
let val = Some(Arc::new((tt, tmap, fixups.undo_info)));
match err {
Some(err) => ValueResult::new(val, err),
None => ValueResult::ok(val),
if loc.def.is_proc_macro() {
// proc macros expect their inputs without parentheses, MBEs expect it with them included
tt.delimiter = tt::Delimiter::DUMMY_INVISIBLE;
}
if matches!(loc.def.kind, MacroDefKind::BuiltInEager(..)) {
match parse.errors() {
[] => ValueResult::ok(Some((Arc::new(tt), undo_info))),
errors => ValueResult::new(
Some((Arc::new(tt), undo_info)),
// Box::<[_]>::from(res.errors()), not stable yet
Arc::new(errors.to_vec().into_boxed_slice()),
),
}
} else {
ValueResult::ok(Some((Arc::new(tt), undo_info)))
}
}
}
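The `mismatched_delimiters` guard encodes a simple invariant: a fn-like invocation is expanded only when its outermost tokens form a matching delimiter pair. The kind check in isolation:

    // Mirrors the (first, last) check inside `mismatched_delimiters`.
    fn well_formed_tt(first: char, last: char) -> bool {
        matches!((first, last), ('(', ')') | ('[', ']') | ('{', '}'))
    }

    fn main() {
        assert!(well_formed_tt('(', ')'));
        // Unbalanced input is rejected rather than risking the pathological,
        // deeply nested expansions the issue9358 cov mark refers to.
        assert!(!well_formed_tt('{', ')'));
    }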
// FIXME: Censoring info should be calculated by the caller! Namely by name resolution
/// Certain macro calls expect some nodes in the input to be preprocessed away, namely:
/// - derives expect all `#[derive(..)]` invocations up to the currently invoked one to be stripped
/// - attributes expect the invoking attribute to be stripped
@ -417,103 +493,67 @@ fn censor_for_macro_input(loc: &MacroCallLoc, node: &SyntaxNode) -> FxHashSet<Sy
.unwrap_or_default()
}
fn macro_arg_node(
db: &dyn ExpandDatabase,
id: MacroCallId,
) -> ValueResult<Option<GreenNode>, Arc<Box<[SyntaxError]>>> {
let err = || -> Arc<Box<[_]>> {
Arc::new(Box::new([SyntaxError::new_at_offset(
"invalid macro call".to_owned(),
syntax::TextSize::from(0),
)]))
};
let loc = db.lookup_intern_macro_call(id);
let arg = if let MacroDefKind::BuiltInEager(..) = loc.def.kind {
let res = if let Some(EagerCallInfo { arg, .. }) = loc.eager.as_deref() {
Some(mbe::token_tree_to_syntax_node(&arg.0, mbe::TopEntryPoint::MacroEagerInput).0)
} else {
loc.kind
.arg(db)
.and_then(|arg| ast::TokenTree::cast(arg.value))
.map(|tt| tt.reparse_as_comma_separated_expr().to_syntax())
};
match res {
Some(res) if res.errors().is_empty() => res.syntax_node(),
Some(res) => {
return ValueResult::new(
Some(res.syntax_node().green().into()),
// Box::<[_]>::from(res.errors()), not stable yet
Arc::new(res.errors().to_vec().into_boxed_slice()),
);
}
None => return ValueResult::only_err(err()),
}
} else {
match loc.kind.arg(db) {
Some(res) => res.value,
None => return ValueResult::only_err(err()),
}
};
if matches!(loc.kind, MacroCallKind::FnLike { .. }) {
let first = arg.first_child_or_token().map_or(T![.], |it| it.kind());
let last = arg.last_child_or_token().map_or(T![.], |it| it.kind());
let well_formed_tt =
matches!((first, last), (T!['('], T![')']) | (T!['['], T![']']) | (T!['{'], T!['}']));
if !well_formed_tt {
// Don't expand malformed (unbalanced) macro invocations. This is
// less than ideal, but trying to expand unbalanced macro calls
// sometimes produces pathological, deeply nested code which breaks
// all kinds of things.
//
// Some day, we'll have explicit recursion counters for all
// recursive things, at which point this code might be removed.
cov_mark::hit!(issue9358_bad_macro_stack_overflow);
return ValueResult::only_err(Arc::new(Box::new([SyntaxError::new(
"unbalanced token tree".to_owned(),
arg.text_range(),
)])));
}
}
ValueResult::ok(Some(arg.green().into()))
}
fn decl_macro_expander(
db: &dyn ExpandDatabase,
def_crate: CrateId,
id: AstId<ast::Macro>,
) -> Arc<DeclarativeMacroExpander> {
let is_2021 = db.crate_graph()[def_crate].edition >= Edition::Edition2021;
let (mac, def_site_token_map) = match id.to_node(db) {
ast::Macro::MacroRules(macro_rules) => match macro_rules.token_tree() {
Some(arg) => {
let (tt, def_site_token_map) = mbe::syntax_node_to_token_tree(arg.syntax());
let mac = mbe::DeclarativeMacro::parse_macro_rules(&tt, is_2021);
(mac, def_site_token_map)
}
None => (
mbe::DeclarativeMacro::from_err(
mbe::ParseError::Expected("expected a token tree".into()),
is_2021,
),
Default::default(),
),
},
ast::Macro::MacroDef(macro_def) => match macro_def.body() {
Some(arg) => {
let (tt, def_site_token_map) = mbe::syntax_node_to_token_tree(arg.syntax());
let mac = mbe::DeclarativeMacro::parse_macro2(&tt, is_2021);
(mac, def_site_token_map)
}
None => (
mbe::DeclarativeMacro::from_err(
mbe::ParseError::Expected("expected a token tree".into()),
is_2021,
),
Default::default(),
),
},
let (root, map) = parse_with_map(db, id.file_id);
let root = root.syntax_node();
let transparency = |node| {
// ... would be nice to have the item tree here
let attrs = RawAttrs::new(db, node, map.as_ref()).filter(db, def_crate);
match &*attrs
.iter()
.find(|it| {
it.path.as_ident().and_then(|it| it.as_str()) == Some("rustc_macro_transparency")
})?
.token_tree_value()?
.token_trees
{
[tt::TokenTree::Leaf(tt::Leaf::Ident(i)), ..] => match &*i.text {
"transparent" => Some(Transparency::Transparent),
"semitransparent" => Some(Transparency::SemiTransparent),
"opaque" => Some(Transparency::Opaque),
_ => None,
},
_ => None,
}
};
Arc::new(DeclarativeMacroExpander { mac, def_site_token_map })
let (mac, transparency) = match id.to_ptr(db).to_node(&root) {
ast::Macro::MacroRules(macro_rules) => (
match macro_rules.token_tree() {
Some(arg) => {
let tt = mbe::syntax_node_to_token_tree(arg.syntax(), map.as_ref());
let mac = mbe::DeclarativeMacro::parse_macro_rules(&tt, is_2021);
mac
}
None => mbe::DeclarativeMacro::from_err(
mbe::ParseError::Expected("expected a token tree".into()),
is_2021,
),
},
transparency(&macro_rules).unwrap_or(Transparency::SemiTransparent),
),
ast::Macro::MacroDef(macro_def) => (
match macro_def.body() {
Some(arg) => {
let tt = mbe::syntax_node_to_token_tree(arg.syntax(), map.as_ref());
let mac = mbe::DeclarativeMacro::parse_macro2(&tt, is_2021);
mac
}
None => mbe::DeclarativeMacro::from_err(
mbe::ParseError::Expected("expected a token tree".into()),
is_2021,
),
},
transparency(&macro_def).unwrap_or(Transparency::Opaque),
),
};
Arc::new(DeclarativeMacroExpander { mac, transparency })
}
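The `transparency` closure boils down to a string-to-enum lookup over the `rustc_macro_transparency` attribute value, with per-kind defaults: `SemiTransparent` for `macro_rules!` and `Opaque` for macros 2.0. The lookup in isolation:

    #[derive(Debug, PartialEq)]
    enum Transparency { Transparent, SemiTransparent, Opaque }

    fn parse_transparency(value: &str) -> Option<Transparency> {
        match value {
            "transparent" => Some(Transparency::Transparent),
            "semitransparent" => Some(Transparency::SemiTransparent),
            "opaque" => Some(Transparency::Opaque),
            _ => None, // unknown values fall back to the per-kind default
        }
    }

    fn main() {
        assert_eq!(parse_transparency("opaque"), Some(Transparency::Opaque));
        assert_eq!(parse_transparency("hidden"), None);
    }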
fn macro_expander(db: &dyn ExpandDatabase, id: MacroDefId) -> TokenExpander {
@ -529,39 +569,31 @@ fn macro_expander(db: &dyn ExpandDatabase, id: MacroDefId) -> TokenExpander {
}
}
fn macro_expand(db: &dyn ExpandDatabase, id: MacroCallId) -> ExpandResult<Arc<tt::Subtree>> {
fn macro_expand(
db: &dyn ExpandDatabase,
macro_call_id: MacroCallId,
loc: MacroCallLoc,
) -> ExpandResult<Arc<tt::Subtree>> {
let _p = profile::span("macro_expand");
let loc = db.lookup_intern_macro_call(id);
let ExpandResult { value: tt, mut err } = match loc.def.kind {
MacroDefKind::ProcMacro(..) => return db.expand_proc_macro(id),
MacroDefKind::ProcMacro(..) => return db.expand_proc_macro(macro_call_id),
MacroDefKind::BuiltInDerive(expander, ..) => {
let arg = db.macro_arg_node(id).value.unwrap();
let (root, map) = parse_with_map(db, loc.kind.file_id());
let root = root.syntax_node();
let MacroCallKind::Derive { ast_id, .. } = loc.kind else { unreachable!() };
let node = ast_id.to_ptr(db).to_node(&root);
let node = SyntaxNode::new_root(arg);
let censor = censor_for_macro_input(&loc, &node);
let mut fixups = fixup::fixup_syntax(&node);
fixups.replace.extend(censor.into_iter().map(|node| (node.into(), Vec::new())));
let (tmap, _) = mbe::syntax_node_to_token_map_with_modifications(
&node,
fixups.token_map,
fixups.next_id,
fixups.replace,
fixups.append,
);
// this cast is a bit sus, can we avoid losing the typedness here?
let adt = ast::Adt::cast(node).unwrap();
let mut res = expander.expand(db, id, &adt, &tmap);
fixup::reverse_fixups(&mut res.value, &tmap, &fixups.undo_info);
res
// FIXME: Use censoring
let _censor = censor_for_macro_input(&loc, node.syntax());
expander.expand(db, macro_call_id, &node, map.as_ref())
}
_ => {
let ValueResult { value, err } = db.macro_arg(id);
let Some(macro_arg) = value else {
let ValueResult { value, err } = db.macro_arg(macro_call_id);
let Some((macro_arg, undo_info)) = value else {
return ExpandResult {
value: Arc::new(tt::Subtree {
delimiter: tt::Delimiter::UNSPECIFIED,
delimiter: tt::Delimiter::DUMMY_INVISIBLE,
token_trees: Vec::new(),
}),
// FIXME: We should make sure to enforce an invariant that invalid macro
@ -570,12 +602,14 @@ fn macro_expand(db: &dyn ExpandDatabase, id: MacroCallId) -> ExpandResult<Arc<tt
};
};
let (arg, arg_tm, undo_info) = &*macro_arg;
let mut res = match loc.def.kind {
let arg = &*macro_arg;
match loc.def.kind {
MacroDefKind::Declarative(id) => {
db.decl_macro_expander(loc.def.krate, id).expand(arg.clone())
db.decl_macro_expander(loc.def.krate, id).expand(db, arg.clone(), macro_call_id)
}
MacroDefKind::BuiltIn(it, _) => {
it.expand(db, macro_call_id, &arg).map_err(Into::into)
}
MacroDefKind::BuiltIn(it, _) => it.expand(db, id, &arg).map_err(Into::into),
// This might look a bit odd, but we do not expand the inputs to eager macros here.
// Eager macros inputs are expanded, well, eagerly when we collect the macro calls.
// That kind of expansion uses the ast id map of an eager macro's input though, which goes through
@ -583,11 +617,8 @@ fn macro_expand(db: &dyn ExpandDatabase, id: MacroCallId) -> ExpandResult<Arc<tt
// will end up going through here again, whereas we just want to inspect the raw input.
// As such we just return the input subtree here.
MacroDefKind::BuiltInEager(..) if loc.eager.is_none() => {
let mut arg = arg.clone();
fixup::reverse_fixups(&mut arg, arg_tm, undo_info);
return ExpandResult {
value: Arc::new(arg),
value: macro_arg.clone(),
err: err.map(|err| {
let mut buf = String::new();
for err in &**err {
@ -600,12 +631,16 @@ fn macro_expand(db: &dyn ExpandDatabase, id: MacroCallId) -> ExpandResult<Arc<tt
}),
};
}
MacroDefKind::BuiltInEager(it, _) => it.expand(db, id, &arg).map_err(Into::into),
MacroDefKind::BuiltInAttr(it, _) => it.expand(db, id, &arg),
MacroDefKind::BuiltInEager(it, _) => {
it.expand(db, macro_call_id, &arg).map_err(Into::into)
}
MacroDefKind::BuiltInAttr(it, _) => {
let mut res = it.expand(db, macro_call_id, &arg);
fixup::reverse_fixups(&mut res.value, &undo_info);
res
}
_ => unreachable!(),
};
fixup::reverse_fixups(&mut res.value, arg_tm, undo_info);
res
}
}
};
@ -627,10 +662,10 @@ fn macro_expand(db: &dyn ExpandDatabase, id: MacroCallId) -> ExpandResult<Arc<tt
fn expand_proc_macro(db: &dyn ExpandDatabase, id: MacroCallId) -> ExpandResult<Arc<tt::Subtree>> {
let loc = db.lookup_intern_macro_call(id);
let Some(macro_arg) = db.macro_arg(id).value else {
let Some((macro_arg, undo_info)) = db.macro_arg(id).value else {
return ExpandResult {
value: Arc::new(tt::Subtree {
delimiter: tt::Delimiter::UNSPECIFIED,
delimiter: tt::Delimiter::DUMMY_INVISIBLE,
token_trees: Vec::new(),
}),
// FIXME: We should make sure to enforce an invariant that invalid macro
@@ -639,47 +674,44 @@ fn expand_proc_macro(db: &dyn ExpandDatabase, id: MacroCallId) -> ExpandResult<A
};
};
let (arg_tt, arg_tm, undo_info) = &*macro_arg;
let expander = match loc.def.kind {
MacroDefKind::ProcMacro(expander, ..) => expander,
_ => unreachable!(),
};
let attr_arg = match &loc.kind {
MacroCallKind::Attr { attr_args, .. } => {
let mut attr_args = attr_args.0.clone();
mbe::Shift::new(arg_tt).shift_all(&mut attr_args);
Some(attr_args)
}
MacroCallKind::Attr { attr_args: Some(attr_args), .. } => Some(&**attr_args),
_ => None,
};
let ExpandResult { value: mut tt, err } =
expander.expand(db, loc.def.krate, loc.krate, arg_tt, attr_arg.as_ref());
let call_site = loc.span(db);
let ExpandResult { value: mut tt, err } = expander.expand(
db,
loc.def.krate,
loc.krate,
&macro_arg,
attr_arg,
// FIXME
call_site,
call_site,
// FIXME
call_site,
);
// Set a hard limit for the expanded tt
if let Err(value) = check_tt_count(&tt) {
return value;
}
fixup::reverse_fixups(&mut tt, arg_tm, undo_info);
fixup::reverse_fixups(&mut tt, &undo_info);
ExpandResult { value: Arc::new(tt), err }
}
fn hygiene_frame(db: &dyn ExpandDatabase, file_id: HirFileId) -> Arc<HygieneFrame> {
Arc::new(HygieneFrame::new(db, file_id))
}
fn macro_expand_to(db: &dyn ExpandDatabase, id: MacroCallId) -> ExpandTo {
db.lookup_intern_macro_call(id).expand_to()
}
fn token_tree_to_syntax_node(
tt: &tt::Subtree,
expand_to: ExpandTo,
) -> (Parse<SyntaxNode>, mbe::TokenMap) {
) -> (Parse<SyntaxNode>, ExpansionSpanMap) {
let entry_point = match expand_to {
ExpandTo::Statements => mbe::TopEntryPoint::MacroStmts,
ExpandTo::Items => mbe::TopEntryPoint::MacroItems,
@@ -695,7 +727,7 @@ fn check_tt_count(tt: &tt::Subtree) -> Result<(), ExpandResult<Arc<tt::Subtree>>
if TOKEN_LIMIT.check(count).is_err() {
Err(ExpandResult {
value: Arc::new(tt::Subtree {
delimiter: tt::Delimiter::UNSPECIFIED,
delimiter: tt::Delimiter::DUMMY_INVISIBLE,
token_trees: vec![],
}),
err: Some(ExpandError::other(format!(
@@ -708,3 +740,44 @@ fn check_tt_count(tt: &tt::Subtree) -> Result<(), ExpandResult<Arc<tt::Subtree>>
Ok(())
}
}
fn setup_syntax_context_root(db: &dyn ExpandDatabase) {
db.intern_syntax_context(SyntaxContextData::root());
}
fn dump_syntax_contexts(db: &dyn ExpandDatabase) -> String {
let mut s = String::from("Expansions:");
let mut entries = InternMacroCallLookupQuery.in_db(db).entries::<Vec<_>>();
entries.sort_by_key(|e| e.key);
for e in entries {
let id = e.key;
let expn_data = e.value.as_ref().unwrap();
s.push_str(&format!(
"\n{:?}: parent: {:?}, call_site_ctxt: {:?}, def_site_ctxt: {:?}, kind: {:?}",
id,
expn_data.kind.file_id(),
expn_data.call_site,
SyntaxContextId::ROOT, // FIXME expn_data.def_site,
expn_data.kind.descr(),
));
}
s.push_str("\n\nSyntaxContexts:\n");
let mut entries = InternSyntaxContextLookupQuery.in_db(db).entries::<Vec<_>>();
entries.sort_by_key(|e| e.key);
for e in entries {
struct SyntaxContextDebug<'a>(
&'a dyn ExpandDatabase,
SyntaxContextId,
&'a SyntaxContextData,
);
impl<'a> std::fmt::Debug for SyntaxContextDebug<'a> {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
self.2.fancy_debug(self.1, self.0, f)
}
}
stdx::format_to!(s, "{:?}\n", SyntaxContextDebug(db, e.key, &e.value.unwrap()));
}
s
}

@@ -18,18 +18,17 @@
//!
//!
//! See the full discussion: <https://rust-lang.zulipchat.com/#narrow/stream/131828-t-compiler/topic/Eager.20expansion.20of.20built-in.20macros>
use base_db::CrateId;
use rustc_hash::{FxHashMap, FxHashSet};
use syntax::{ted, Parse, SyntaxNode, TextRange, TextSize, WalkEvent};
use base_db::{span::SyntaxContextId, CrateId};
use syntax::{ted, Parse, SyntaxElement, SyntaxNode, TextSize, WalkEvent};
use triomphe::Arc;
use crate::{
ast::{self, AstNode},
db::ExpandDatabase,
hygiene::Hygiene,
mod_path::ModPath,
EagerCallInfo, ExpandError, ExpandResult, ExpandTo, InFile, MacroCallId, MacroCallKind,
MacroCallLoc, MacroDefId, MacroDefKind,
span::SpanMapRef,
EagerCallInfo, ExpandError, ExpandResult, ExpandTo, ExpansionSpanMap, InFile, MacroCallId,
MacroCallKind, MacroCallLoc, MacroDefId, MacroDefKind,
};
pub fn expand_eager_macro_input(
@@ -37,6 +36,7 @@ pub fn expand_eager_macro_input(
krate: CrateId,
macro_call: InFile<ast::MacroCall>,
def: MacroDefId,
call_site: SyntaxContextId,
resolver: &dyn Fn(ModPath) -> Option<MacroDefId>,
) -> ExpandResult<Option<MacroCallId>> {
let ast_map = db.ast_id_map(macro_call.file_id);
@@ -53,75 +53,44 @@ pub fn expand_eager_macro_input(
krate,
eager: None,
kind: MacroCallKind::FnLike { ast_id: call_id, expand_to: ExpandTo::Expr },
call_site,
});
let ExpandResult { value: (arg_exp, arg_exp_map), err: parse_err } =
db.parse_macro_expansion(arg_id.as_macro_file());
// we need this map here as the expansion of the eager input fake file loses whitespace ...
let mut ws_mapping = FxHashMap::default();
if let Some((_, tm, _)) = db.macro_arg(arg_id).value.as_deref() {
ws_mapping.extend(tm.entries().filter_map(|(id, range)| {
Some((arg_exp_map.first_range_by_token(id, syntax::SyntaxKind::TOMBSTONE)?, range))
}));
}
let mut arg_map = ExpansionSpanMap::empty();
let ExpandResult { value: expanded_eager_input, err } = {
eager_macro_recur(
db,
&Hygiene::new(db, macro_call.file_id),
&arg_exp_map,
&mut arg_map,
TextSize::new(0),
InFile::new(arg_id.as_file(), arg_exp.syntax_node()),
krate,
call_site,
resolver,
)
};
let err = parse_err.or(err);
if cfg!(debug_assertions) {
arg_map.finish();
}
let Some((expanded_eager_input, mapping)) = expanded_eager_input else {
let Some((expanded_eager_input, _mapping)) = expanded_eager_input else {
return ExpandResult { value: None, err };
};
let (mut subtree, expanded_eager_input_token_map) =
mbe::syntax_node_to_token_tree(&expanded_eager_input);
let mut subtree = mbe::syntax_node_to_token_tree(&expanded_eager_input, arg_map);
let og_tmap = if let Some(tt) = macro_call.value.token_tree() {
let mut ids_used = FxHashSet::default();
let mut og_tmap = mbe::syntax_node_to_token_map(tt.syntax());
// The token map and ids of the subtree point into the expanded syntax node, but that is inaccessible from the outside
// so we need to remap them to the original input of the eager macro.
subtree.visit_ids(&mut |id| {
// Note: we discard all token ids of braces and the like here, but that's not too bad and only a temporary fix
if let Some(range) = expanded_eager_input_token_map
.first_range_by_token(id, syntax::SyntaxKind::TOMBSTONE)
{
// remap from expanded eager input to eager input expansion
if let Some(og_range) = mapping.get(&range) {
// remap from eager input expansion to original eager input
if let Some(&og_range) = ws_mapping.get(og_range) {
if let Some(og_token) = og_tmap.token_by_range(og_range) {
ids_used.insert(og_token);
return og_token;
}
}
}
}
tt::TokenId::UNSPECIFIED
});
og_tmap.filter(|id| ids_used.contains(&id));
og_tmap
} else {
Default::default()
};
subtree.delimiter = crate::tt::Delimiter::unspecified();
subtree.delimiter = crate::tt::Delimiter::DUMMY_INVISIBLE;
let loc = MacroCallLoc {
def,
krate,
eager: Some(Box::new(EagerCallInfo {
arg: Arc::new((subtree, og_tmap)),
arg_id,
error: err.clone(),
})),
eager: Some(Box::new(EagerCallInfo { arg: Arc::new(subtree), arg_id, error: err.clone() })),
kind: MacroCallKind::FnLike { ast_id: call_id, expand_to },
call_site,
};
ExpandResult { value: Some(db.intern_macro_call(loc)), err }
@@ -132,12 +101,13 @@ fn lazy_expand(
def: &MacroDefId,
macro_call: InFile<ast::MacroCall>,
krate: CrateId,
) -> ExpandResult<(InFile<Parse<SyntaxNode>>, Arc<mbe::TokenMap>)> {
call_site: SyntaxContextId,
) -> ExpandResult<(InFile<Parse<SyntaxNode>>, Arc<ExpansionSpanMap>)> {
let ast_id = db.ast_id_map(macro_call.file_id).ast_id(&macro_call.value);
let expand_to = ExpandTo::from_call_site(&macro_call.value);
let ast_id = macro_call.with_value(ast_id);
let id = def.as_lazy_macro(db, krate, MacroCallKind::FnLike { ast_id, expand_to });
let id = def.as_lazy_macro(db, krate, MacroCallKind::FnLike { ast_id, expand_to }, call_site);
let macro_file = id.as_macro_file();
db.parse_macro_expansion(macro_file)
@@ -146,57 +116,59 @@ fn eager_macro_recur(
fn eager_macro_recur(
db: &dyn ExpandDatabase,
hygiene: &Hygiene,
span_map: &ExpansionSpanMap,
expanded_map: &mut ExpansionSpanMap,
mut offset: TextSize,
curr: InFile<SyntaxNode>,
krate: CrateId,
call_site: SyntaxContextId,
macro_resolver: &dyn Fn(ModPath) -> Option<MacroDefId>,
) -> ExpandResult<Option<(SyntaxNode, FxHashMap<TextRange, TextRange>)>> {
) -> ExpandResult<Option<(SyntaxNode, TextSize)>> {
let original = curr.value.clone_for_update();
let mut mapping = FxHashMap::default();
let mut replacements = Vec::new();
// FIXME: We only report a single error inside of eager expansions
let mut error = None;
let mut offset = 0i32;
let apply_offset = |it: TextSize, offset: i32| {
TextSize::from(u32::try_from(offset + u32::from(it) as i32).unwrap_or_default())
};
let mut children = original.preorder_with_tokens();
// Collect replacement
while let Some(child) = children.next() {
let WalkEvent::Enter(child) = child else { continue };
let call = match child {
syntax::NodeOrToken::Node(node) => match ast::MacroCall::cast(node) {
WalkEvent::Enter(SyntaxElement::Node(child)) => match ast::MacroCall::cast(child) {
Some(it) => {
children.skip_subtree();
it
}
None => continue,
_ => continue,
},
syntax::NodeOrToken::Token(t) => {
mapping.insert(
TextRange::new(
apply_offset(t.text_range().start(), offset),
apply_offset(t.text_range().end(), offset),
),
t.text_range(),
);
WalkEvent::Enter(_) => continue,
WalkEvent::Leave(child) => {
if let SyntaxElement::Token(t) = child {
let start = t.text_range().start();
offset += t.text_range().len();
expanded_map.push(offset, span_map.span_at(start));
}
continue;
}
};
let def = match call.path().and_then(|path| ModPath::from_src(db, path, hygiene)) {
let def = match call
.path()
.and_then(|path| ModPath::from_src(db, path, SpanMapRef::ExpansionSpanMap(span_map)))
{
Some(path) => match macro_resolver(path.clone()) {
Some(def) => def,
None => {
error =
Some(ExpandError::other(format!("unresolved macro {}", path.display(db))));
offset += call.syntax().text_range().len();
continue;
}
},
None => {
error = Some(ExpandError::other("malformed macro invocation"));
offset += call.syntax().text_range().len();
continue;
}
};
@@ -207,29 +179,22 @@ fn eager_macro_recur(
krate,
curr.with_value(call.clone()),
def,
call_site,
macro_resolver,
);
match value {
Some(call_id) => {
let ExpandResult { value, err: err2 } =
let ExpandResult { value: (parse, map), err: err2 } =
db.parse_macro_expansion(call_id.as_macro_file());
if let Some(tt) = call.token_tree() {
let call_tt_start = tt.syntax().text_range().start();
let call_start =
apply_offset(call.syntax().text_range().start(), offset);
if let Some((_, arg_map, _)) = db.macro_arg(call_id).value.as_deref() {
mapping.extend(arg_map.entries().filter_map(|(tid, range)| {
value
.1
.first_range_by_token(tid, syntax::SyntaxKind::TOMBSTONE)
.map(|r| (r + call_start, range + call_tt_start))
}));
}
}
map.iter().for_each(|(o, span)| expanded_map.push(o + offset, span));
let syntax_node = parse.syntax_node();
ExpandResult {
value: Some(value.0.syntax_node().clone_for_update()),
value: Some((
syntax_node.clone_for_update(),
offset + syntax_node.text_range().len(),
)),
err: err.or(err2),
}
}
@@ -242,45 +207,23 @@ fn eager_macro_recur(
| MacroDefKind::BuiltInDerive(..)
| MacroDefKind::ProcMacro(..) => {
let ExpandResult { value: (parse, tm), err } =
lazy_expand(db, &def, curr.with_value(call.clone()), krate);
let decl_mac = if let MacroDefKind::Declarative(ast_id) = def.kind {
Some(db.decl_macro_expander(def.krate, ast_id))
} else {
None
};
lazy_expand(db, &def, curr.with_value(call.clone()), krate, call_site);
// replace macro inside
let hygiene = Hygiene::new(db, parse.file_id);
let ExpandResult { value, err: error } = eager_macro_recur(
db,
&hygiene,
&tm,
expanded_map,
offset,
// FIXME: We discard parse errors here
parse.as_ref().map(|it| it.syntax_node()),
krate,
call_site,
macro_resolver,
);
let err = err.or(error);
if let Some(tt) = call.token_tree() {
let call_tt_start = tt.syntax().text_range().start();
let call_start = apply_offset(call.syntax().text_range().start(), offset);
if let Some((_tt, arg_map, _)) = parse
.file_id
.macro_file()
.and_then(|id| db.macro_arg(id.macro_call_id).value)
.as_deref()
{
mapping.extend(arg_map.entries().filter_map(|(tid, range)| {
tm.first_range_by_token(
decl_mac.as_ref().map(|it| it.map_id_down(tid)).unwrap_or(tid),
syntax::SyntaxKind::TOMBSTONE,
)
.map(|r| (r + call_start, range + call_tt_start))
}));
}
}
// FIXME: Do we need to re-use _m here?
ExpandResult { value: value.map(|(n, _m)| n), err }
ExpandResult { value, err }
}
};
if err.is_some() {
@@ -288,16 +231,18 @@ fn eager_macro_recur(
}
// check if the whole original syntax is replaced
if call.syntax() == &original {
return ExpandResult { value: value.zip(Some(mapping)), err: error };
return ExpandResult { value, err: error };
}
if let Some(insert) = value {
offset += u32::from(insert.text_range().len()) as i32
- u32::from(call.syntax().text_range().len()) as i32;
replacements.push((call, insert));
match value {
Some((insert, new_offset)) => {
replacements.push((call, insert));
offset = new_offset;
}
None => offset += call.syntax().text_range().len(),
}
}
replacements.into_iter().rev().for_each(|(old, new)| ted::replace(old.syntax(), new));
ExpandResult { value: Some((original, mapping)), err: error }
ExpandResult { value: Some((original, offset)), err: error }
}

@@ -0,0 +1,375 @@
//! Things to wrap other things in file ids.
use std::iter;
use base_db::{
span::{HirFileId, HirFileIdRepr, MacroFileId, SyntaxContextId},
FileId, FileRange,
};
use either::Either;
use syntax::{AstNode, SyntaxNode, SyntaxToken, TextRange, TextSize};
use crate::{db, ExpansionInfo, MacroFileIdExt};
/// `InFile<T>` stores a value of `T` inside a particular file/syntax tree.
///
/// Typical usages are:
///
/// * `InFile<SyntaxNode>` -- syntax node in a file
/// * `InFile<ast::FnDef>` -- ast node in a file
/// * `InFile<TextSize>` -- offset in a file
#[derive(Debug, PartialEq, Eq, Clone, Copy, Hash)]
pub struct InFileWrapper<FileKind, T> {
pub file_id: FileKind,
pub value: T,
}
pub type InFile<T> = InFileWrapper<HirFileId, T>;
pub type InMacroFile<T> = InFileWrapper<MacroFileId, T>;
pub type InRealFile<T> = InFileWrapper<FileId, T>;
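// A minimal usage sketch, not part of the diff; `0u32` stands in for a file
// id kind purely for illustration.
fn _in_file_wrapper_demo() {
    let text = InFileWrapper::new(0u32, "fn main() {}");
    // `map` transforms the payload while keeping the file id attached.
    let len = text.map(str::len);
    assert_eq!((len.file_id, len.value), (0, 12));
    // `with_value` pairs a different payload with the same file id.
    assert_eq!(len.with_value("main").value, "main");
}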
impl<FileKind, T> InFileWrapper<FileKind, T> {
pub fn new(file_id: FileKind, value: T) -> Self {
Self { file_id, value }
}
pub fn map<F: FnOnce(T) -> U, U>(self, f: F) -> InFileWrapper<FileKind, U> {
InFileWrapper::new(self.file_id, f(self.value))
}
}
impl<FileKind: Copy, T> InFileWrapper<FileKind, T> {
pub fn with_value<U>(&self, value: U) -> InFileWrapper<FileKind, U> {
InFileWrapper::new(self.file_id, value)
}
pub fn as_ref(&self) -> InFileWrapper<FileKind, &T> {
self.with_value(&self.value)
}
}
impl<FileKind: Copy, T: Clone> InFileWrapper<FileKind, &T> {
pub fn cloned(&self) -> InFileWrapper<FileKind, T> {
self.with_value(self.value.clone())
}
}
impl<T> From<InMacroFile<T>> for InFile<T> {
fn from(InMacroFile { file_id, value }: InMacroFile<T>) -> Self {
InFile { file_id: file_id.into(), value }
}
}
impl<T> From<InRealFile<T>> for InFile<T> {
fn from(InRealFile { file_id, value }: InRealFile<T>) -> Self {
InFile { file_id: file_id.into(), value }
}
}
// region:transpose impls
impl<FileKind, T> InFileWrapper<FileKind, Option<T>> {
pub fn transpose(self) -> Option<InFileWrapper<FileKind, T>> {
Some(InFileWrapper::new(self.file_id, self.value?))
}
}
impl<FileKind, L, R> InFileWrapper<FileKind, Either<L, R>> {
pub fn transpose(self) -> Either<InFileWrapper<FileKind, L>, InFileWrapper<FileKind, R>> {
match self.value {
Either::Left(l) => Either::Left(InFileWrapper::new(self.file_id, l)),
Either::Right(r) => Either::Right(InFileWrapper::new(self.file_id, r)),
}
}
}
// endregion:transpose impls
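// Sketch (not part of the diff): `transpose` moves the wrapped `Option`
// outward, so callers can use `?` while the file id stays attached to a
// present value.
fn _transpose_demo(node: InFile<Option<SyntaxNode>>) -> Option<InFile<SyntaxNode>> {
    node.transpose()
}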
trait FileIdToSyntax: Copy {
fn file_syntax(self, db: &dyn db::ExpandDatabase) -> SyntaxNode;
}
impl FileIdToSyntax for FileId {
fn file_syntax(self, db: &dyn db::ExpandDatabase) -> SyntaxNode {
db.parse(self).syntax_node()
}
}
impl FileIdToSyntax for MacroFileId {
fn file_syntax(self, db: &dyn db::ExpandDatabase) -> SyntaxNode {
db.parse_macro_expansion(self).value.0.syntax_node()
}
}
impl FileIdToSyntax for HirFileId {
fn file_syntax(self, db: &dyn db::ExpandDatabase) -> SyntaxNode {
db.parse_or_expand(self)
}
}
#[allow(private_bounds)]
impl<FileId: FileIdToSyntax, T> InFileWrapper<FileId, T> {
pub fn file_syntax(&self, db: &dyn db::ExpandDatabase) -> SyntaxNode {
FileIdToSyntax::file_syntax(self.file_id, db)
}
}
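// Sketch: via `FileIdToSyntax`, `file_syntax` resolves the root syntax node
// uniformly, whether the wrapped value lives in a real file or in a macro
// expansion (hypothetical helper, not part of the diff).
fn _file_syntax_demo(db: &dyn db::ExpandDatabase, it: InFile<TextRange>) -> SyntaxNode {
    it.file_syntax(db)
}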
impl<FileId: Copy, N: AstNode> InFileWrapper<FileId, N> {
pub fn syntax(&self) -> InFileWrapper<FileId, &SyntaxNode> {
self.with_value(self.value.syntax())
}
}
// region:specific impls
impl InFile<&SyntaxNode> {
/// Skips the attributed item that caused the macro invocation we are climbing up
pub fn ancestors_with_macros_skip_attr_item(
self,
db: &dyn db::ExpandDatabase,
) -> impl Iterator<Item = InFile<SyntaxNode>> + '_ {
let succ = move |node: &InFile<SyntaxNode>| match node.value.parent() {
Some(parent) => Some(node.with_value(parent)),
None => {
let macro_file_id = node.file_id.macro_file()?;
let parent_node = macro_file_id.call_node(db);
if macro_file_id.is_attr_macro(db) {
// macro call was an attributed item, skip it
// FIXME: does this fail if this is a direct expansion of another macro?
parent_node.map(|node| node.parent()).transpose()
} else {
Some(parent_node)
}
}
};
iter::successors(succ(&self.cloned()), succ)
}
/// Falls back to the macro call range if the node cannot be mapped up fully.
///
/// For attributes and derives, this will point back to the attribute only.
/// For the entire item use [`InFile::original_file_range_full`].
pub fn original_file_range(self, db: &dyn db::ExpandDatabase) -> FileRange {
match self.file_id.repr() {
HirFileIdRepr::FileId(file_id) => FileRange { file_id, range: self.value.text_range() },
HirFileIdRepr::MacroFile(mac_file) => {
if let Some((res, ctxt)) =
ExpansionInfo::new(db, mac_file).map_node_range_up(db, self.value.text_range())
{
// FIXME: Figure out an API that makes proper use of ctx, this only exists to
// keep pre-token map rewrite behaviour.
if ctxt.is_root() {
return res;
}
}
// Fall back to whole macro call.
let loc = db.lookup_intern_macro_call(mac_file.macro_call_id);
loc.kind.original_call_range(db)
}
}
}
/// Falls back to the macro call range if the node cannot be mapped up fully.
pub fn original_file_range_full(self, db: &dyn db::ExpandDatabase) -> FileRange {
match self.file_id.repr() {
HirFileIdRepr::FileId(file_id) => FileRange { file_id, range: self.value.text_range() },
HirFileIdRepr::MacroFile(mac_file) => {
if let Some((res, ctxt)) =
ExpansionInfo::new(db, mac_file).map_node_range_up(db, self.value.text_range())
{
// FIXME: Figure out an API that makes proper use of ctx, this only exists to
// keep pre-token map rewrite behaviour.
if ctxt.is_root() {
return res;
}
}
// Fall back to whole macro call.
let loc = db.lookup_intern_macro_call(mac_file.macro_call_id);
loc.kind.original_call_range_with_body(db)
}
}
}
/// Attempts to map the syntax node back up its macro calls.
pub fn original_file_range_opt(
self,
db: &dyn db::ExpandDatabase,
) -> Option<(FileRange, SyntaxContextId)> {
match self.file_id.repr() {
HirFileIdRepr::FileId(file_id) => {
Some((FileRange { file_id, range: self.value.text_range() }, SyntaxContextId::ROOT))
}
HirFileIdRepr::MacroFile(mac_file) => {
ExpansionInfo::new(db, mac_file).map_node_range_up(db, self.value.text_range())
}
}
}
pub fn original_syntax_node(
self,
db: &dyn db::ExpandDatabase,
) -> Option<InRealFile<SyntaxNode>> {
// This kind of upmapping can only be achieved in attribute expanded files,
// as we don't have node inputs otherwise and therefore can't find an `N` node in the input
let file_id = match self.file_id.repr() {
HirFileIdRepr::FileId(file_id) => {
return Some(InRealFile { file_id, value: self.value.clone() })
}
HirFileIdRepr::MacroFile(m) => m,
};
if !file_id.is_attr_macro(db) {
return None;
}
let (FileRange { file_id, range }, ctx) =
ExpansionInfo::new(db, file_id).map_node_range_up(db, self.value.text_range())?;
// FIXME: Figure out an API that makes proper use of ctx, this only exists to
// keep pre-token map rewrite behaviour.
if !ctx.is_root() {
return None;
}
let anc = db.parse(file_id).syntax_node().covering_element(range);
let kind = self.value.kind();
// FIXME: This heuristic is brittle and with the right macro may select completely unrelated nodes?
let value = anc.ancestors().find(|it| it.kind() == kind)?;
Some(InRealFile::new(file_id, value))
}
}
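// Hypothetical combinator, not part of the diff: take the precisely mapped
// range when upmapping succeeds, otherwise fall back to the macro call range.
// Unlike `original_file_range`, this also accepts ranges whose hygiene
// context is not the root context.
fn _upmap_or_call_site(db: &dyn db::ExpandDatabase, node: InFile<&SyntaxNode>) -> FileRange {
    node.original_file_range_opt(db)
        .map(|(range, _ctx)| range)
        .unwrap_or_else(|| node.original_file_range(db))
}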
impl InMacroFile<SyntaxToken> {
pub fn upmap_once(
self,
db: &dyn db::ExpandDatabase,
) -> InFile<smallvec::SmallVec<[TextRange; 1]>> {
self.file_id.expansion_info(db).map_range_up_once(db, self.value.text_range())
}
}
impl InFile<SyntaxToken> {
/// Falls back to the macro call range if the node cannot be mapped up fully.
pub fn original_file_range(self, db: &dyn db::ExpandDatabase) -> FileRange {
match self.file_id.repr() {
HirFileIdRepr::FileId(file_id) => FileRange { file_id, range: self.value.text_range() },
HirFileIdRepr::MacroFile(mac_file) => {
let (range, ctxt) = ExpansionInfo::new(db, mac_file)
.span_for_offset(db, self.value.text_range().start());
// FIXME: Figure out an API that makes proper use of ctx, this only exists to
// keep pre-token map rewrite behaviour.
if ctxt.is_root() {
return range;
}
// Fall back to whole macro call.
let loc = db.lookup_intern_macro_call(mac_file.macro_call_id);
loc.kind.original_call_range(db)
}
}
}
/// Attempts to map the syntax node back up its macro calls.
pub fn original_file_range_opt(self, db: &dyn db::ExpandDatabase) -> Option<FileRange> {
match self.file_id.repr() {
HirFileIdRepr::FileId(file_id) => {
Some(FileRange { file_id, range: self.value.text_range() })
}
HirFileIdRepr::MacroFile(mac_file) => {
let (range, ctxt) = ExpansionInfo::new(db, mac_file)
.span_for_offset(db, self.value.text_range().start());
// FIXME: Figure out an API that makes proper use of ctx, this only exists to
// keep pre-token map rewrite behaviour.
if ctxt.is_root() {
Some(range)
} else {
None
}
}
}
}
}
impl InMacroFile<TextSize> {
pub fn original_file_range(self, db: &dyn db::ExpandDatabase) -> (FileRange, SyntaxContextId) {
ExpansionInfo::new(db, self.file_id).span_for_offset(db, self.value)
}
}
impl InFile<TextRange> {
pub fn original_node_file_range(
self,
db: &dyn db::ExpandDatabase,
) -> (FileRange, SyntaxContextId) {
match self.file_id.repr() {
HirFileIdRepr::FileId(file_id) => {
(FileRange { file_id, range: self.value }, SyntaxContextId::ROOT)
}
HirFileIdRepr::MacroFile(mac_file) => {
match ExpansionInfo::new(db, mac_file).map_node_range_up(db, self.value) {
Some(it) => it,
None => {
let loc = db.lookup_intern_macro_call(mac_file.macro_call_id);
(loc.kind.original_call_range(db), SyntaxContextId::ROOT)
}
}
}
}
}
pub fn original_node_file_range_rooted(self, db: &dyn db::ExpandDatabase) -> FileRange {
match self.file_id.repr() {
HirFileIdRepr::FileId(file_id) => FileRange { file_id, range: self.value },
HirFileIdRepr::MacroFile(mac_file) => {
match ExpansionInfo::new(db, mac_file).map_node_range_up(db, self.value) {
Some((it, SyntaxContextId::ROOT)) => it,
_ => {
let loc = db.lookup_intern_macro_call(mac_file.macro_call_id);
loc.kind.original_call_range(db)
}
}
}
}
}
pub fn original_node_file_range_opt(
self,
db: &dyn db::ExpandDatabase,
) -> Option<(FileRange, SyntaxContextId)> {
match self.file_id.repr() {
HirFileIdRepr::FileId(file_id) => {
Some((FileRange { file_id, range: self.value }, SyntaxContextId::ROOT))
}
HirFileIdRepr::MacroFile(mac_file) => {
ExpansionInfo::new(db, mac_file).map_node_range_up(db, self.value)
}
}
}
}
impl<N: AstNode> InFile<N> {
pub fn original_ast_node(self, db: &dyn db::ExpandDatabase) -> Option<InRealFile<N>> {
// This kind of upmapping can only be achieved in attribute expanded files,
// as we don't have node inputs otherwise and therefore can't find an `N` node in the input
let file_id = match self.file_id.repr() {
HirFileIdRepr::FileId(file_id) => {
return Some(InRealFile { file_id, value: self.value })
}
HirFileIdRepr::MacroFile(m) => m,
};
if !file_id.is_attr_macro(db) {
return None;
}
let (FileRange { file_id, range }, ctx) = ExpansionInfo::new(db, file_id)
.map_node_range_up(db, self.value.syntax().text_range())?;
// FIXME: Figure out an API that makes proper use of ctx, this only exists to
// keep pre-token map rewrite behaviour.
if !ctx.is_root() {
return None;
}
// FIXME: This heuristic is brittle and with the right macro may select completely unrelated nodes?
let anc = db.parse(file_id).syntax_node().covering_element(range);
let value = anc.ancestors().find_map(N::cast)?;
Some(InRealFile::new(file_id, value))
}
}

@@ -1,111 +1,124 @@
//! To make attribute macros work reliably when typing, we need to take care to
//! fix up syntax errors in the code we're passing to them.
use std::mem;
use mbe::{SyntheticToken, SyntheticTokenId, TokenMap};
use rustc_hash::FxHashMap;
use base_db::{
span::{ErasedFileAstId, SpanAnchor, SpanData},
FileId,
};
use la_arena::RawIdx;
use rustc_hash::{FxHashMap, FxHashSet};
use smallvec::SmallVec;
use syntax::{
ast::{self, AstNode, HasLoopBody},
match_ast, SyntaxElement, SyntaxKind, SyntaxNode, TextRange,
match_ast, SyntaxElement, SyntaxKind, SyntaxNode, TextRange, TextSize,
};
use triomphe::Arc;
use tt::Spacing;
use crate::{
span::SpanMapRef,
tt::{Ident, Leaf, Punct, Subtree},
};
use tt::token_id::Subtree;
/// The result of calculating fixes for a syntax node -- a bunch of changes
/// (appending to and replacing nodes), the information that is needed to
/// reverse those changes afterwards, and a token map.
#[derive(Debug, Default)]
pub(crate) struct SyntaxFixups {
pub(crate) append: FxHashMap<SyntaxElement, Vec<SyntheticToken>>,
pub(crate) replace: FxHashMap<SyntaxElement, Vec<SyntheticToken>>,
pub(crate) append: FxHashMap<SyntaxElement, Vec<Leaf>>,
pub(crate) remove: FxHashSet<SyntaxNode>,
pub(crate) undo_info: SyntaxFixupUndoInfo,
pub(crate) token_map: TokenMap,
pub(crate) next_id: u32,
}
/// This is the information needed to reverse the fixups.
#[derive(Debug, Default, PartialEq, Eq)]
#[derive(Clone, Debug, Default, PartialEq, Eq)]
pub struct SyntaxFixupUndoInfo {
original: Box<[Subtree]>,
// FIXME: ThinArc<[Subtree]>
original: Option<Arc<Box<[Subtree]>>>,
}
const EMPTY_ID: SyntheticTokenId = SyntheticTokenId(!0);
impl SyntaxFixupUndoInfo {
pub(crate) const NONE: Self = SyntaxFixupUndoInfo { original: None };
}
pub(crate) fn fixup_syntax(node: &SyntaxNode) -> SyntaxFixups {
// censoring -> just don't convert the node
// replacement -> censor + append
// append -> insert a fake node, here we need to assemble some dummy span that we can figure out how
// to remove later
pub(crate) fn fixup_syntax(span_map: SpanMapRef<'_>, node: &SyntaxNode) -> SyntaxFixups {
let mut append = FxHashMap::<SyntaxElement, _>::default();
let mut replace = FxHashMap::<SyntaxElement, _>::default();
let mut remove = FxHashSet::<SyntaxNode>::default();
let mut preorder = node.preorder();
let mut original = Vec::new();
let mut token_map = TokenMap::default();
let mut next_id = 0;
let dummy_range = TextRange::empty(TextSize::new(0));
// we use a file id of `FileId(!0)` to signal a fake node, and the text range's start offset as
// the index into the replacement vec but only if the end points to !0
let dummy_anchor = SpanAnchor {
file_id: FileId::from_raw(!0),
ast_id: ErasedFileAstId::from_raw(RawIdx::from(!0)),
};
let fake_span = |range| SpanData {
range: dummy_range,
anchor: dummy_anchor,
ctx: span_map.span_for_range(range).ctx,
};
while let Some(event) = preorder.next() {
let node = match event {
syntax::WalkEvent::Enter(node) => node,
syntax::WalkEvent::Leave(_) => continue,
};
let syntax::WalkEvent::Enter(node) = event else { continue };
let node_range = node.text_range();
if can_handle_error(&node) && has_error_to_handle(&node) {
remove.insert(node.clone().into());
// the node contains an error node, we have to completely replace it by something valid
let (original_tree, new_tmap, new_next_id) =
mbe::syntax_node_to_token_tree_with_modifications(
&node,
mem::take(&mut token_map),
next_id,
Default::default(),
Default::default(),
);
token_map = new_tmap;
next_id = new_next_id;
let original_tree = mbe::syntax_node_to_token_tree(&node, span_map);
let idx = original.len() as u32;
original.push(original_tree);
let replacement = SyntheticToken {
kind: SyntaxKind::IDENT,
let replacement = Leaf::Ident(Ident {
text: "__ra_fixup".into(),
range: node.text_range(),
id: SyntheticTokenId(idx),
};
replace.insert(node.clone().into(), vec![replacement]);
span: SpanData {
range: TextRange::new(TextSize::new(idx), TextSize::new(!0)),
anchor: dummy_anchor,
ctx: span_map.span_for_range(node_range).ctx,
},
});
append.insert(node.clone().into(), vec![replacement]);
preorder.skip_subtree();
continue;
}
// In some other situations, we can fix things by just appending some tokens.
let end_range = TextRange::empty(node.text_range().end());
match_ast! {
match node {
ast::FieldExpr(it) => {
if it.name_ref().is_none() {
// incomplete field access: some_expr.|
append.insert(node.clone().into(), vec![
SyntheticToken {
kind: SyntaxKind::IDENT,
Leaf::Ident(Ident {
text: "__ra_fixup".into(),
range: end_range,
id: EMPTY_ID,
},
span: fake_span(node_range),
}),
]);
}
},
ast::ExprStmt(it) => {
if it.semicolon_token().is_none() {
append.insert(node.clone().into(), vec![
SyntheticToken {
kind: SyntaxKind::SEMICOLON,
text: ";".into(),
range: end_range,
id: EMPTY_ID,
},
Leaf::Punct(Punct {
char: ';',
spacing: Spacing::Alone,
span: fake_span(node_range),
}),
]);
}
},
ast::LetStmt(it) => {
if it.semicolon_token().is_none() {
append.insert(node.clone().into(), vec![
SyntheticToken {
kind: SyntaxKind::SEMICOLON,
text: ";".into(),
range: end_range,
id: EMPTY_ID,
},
Leaf::Punct(Punct {
char: ';',
spacing: Spacing::Alone,
span: fake_span(node_range)
}),
]);
}
},
@@ -117,28 +130,25 @@ pub(crate) fn fixup_syntax(node: &SyntaxNode) -> SyntaxFixups {
None => continue,
};
append.insert(if_token.into(), vec![
SyntheticToken {
kind: SyntaxKind::IDENT,
Leaf::Ident(Ident {
text: "__ra_fixup".into(),
range: end_range,
id: EMPTY_ID,
},
span: fake_span(node_range)
}),
]);
}
if it.then_branch().is_none() {
append.insert(node.clone().into(), vec![
SyntheticToken {
kind: SyntaxKind::L_CURLY,
text: "{".into(),
range: end_range,
id: EMPTY_ID,
},
SyntheticToken {
kind: SyntaxKind::R_CURLY,
text: "}".into(),
range: end_range,
id: EMPTY_ID,
},
// FIXME: This should be a subtree, no?
Leaf::Punct(Punct {
char: '{',
spacing: Spacing::Alone,
span: fake_span(node_range)
}),
Leaf::Punct(Punct {
char: '}',
spacing: Spacing::Alone,
span: fake_span(node_range)
}),
]);
}
},
@@ -150,46 +160,42 @@ pub(crate) fn fixup_syntax(node: &SyntaxNode) -> SyntaxFixups {
None => continue,
};
append.insert(while_token.into(), vec![
SyntheticToken {
kind: SyntaxKind::IDENT,
Leaf::Ident(Ident {
text: "__ra_fixup".into(),
range: end_range,
id: EMPTY_ID,
},
span: fake_span(node_range)
}),
]);
}
if it.loop_body().is_none() {
append.insert(node.clone().into(), vec![
SyntheticToken {
kind: SyntaxKind::L_CURLY,
text: "{".into(),
range: end_range,
id: EMPTY_ID,
},
SyntheticToken {
kind: SyntaxKind::R_CURLY,
text: "}".into(),
range: end_range,
id: EMPTY_ID,
},
// FIXME: This should be a subtree, no?
Leaf::Punct(Punct {
char: '{',
spacing: Spacing::Alone,
span: fake_span(node_range)
}),
Leaf::Punct(Punct {
char: '}',
spacing: Spacing::Alone,
span: fake_span(node_range)
}),
]);
}
},
ast::LoopExpr(it) => {
if it.loop_body().is_none() {
append.insert(node.clone().into(), vec![
SyntheticToken {
kind: SyntaxKind::L_CURLY,
text: "{".into(),
range: end_range,
id: EMPTY_ID,
},
SyntheticToken {
kind: SyntaxKind::R_CURLY,
text: "}".into(),
range: end_range,
id: EMPTY_ID,
},
// FIXME: This should be a subtree, no?
Leaf::Punct(Punct {
char: '{',
spacing: Spacing::Alone,
span: fake_span(node_range)
}),
Leaf::Punct(Punct {
char: '}',
spacing: Spacing::Alone,
span: fake_span(node_range)
}),
]);
}
},
@@ -201,29 +207,26 @@ pub(crate) fn fixup_syntax(node: &SyntaxNode) -> SyntaxFixups {
None => continue
};
append.insert(match_token.into(), vec![
SyntheticToken {
kind: SyntaxKind::IDENT,
Leaf::Ident(Ident {
text: "__ra_fixup".into(),
range: end_range,
id: EMPTY_ID
},
span: fake_span(node_range)
}),
]);
}
if it.match_arm_list().is_none() {
// No match arms
append.insert(node.clone().into(), vec![
SyntheticToken {
kind: SyntaxKind::L_CURLY,
text: "{".into(),
range: end_range,
id: EMPTY_ID,
},
SyntheticToken {
kind: SyntaxKind::R_CURLY,
text: "}".into(),
range: end_range,
id: EMPTY_ID,
},
// FIXME: This should be a subtree, no?
Leaf::Punct(Punct {
char: '{',
spacing: Spacing::Alone,
span: fake_span(node_range)
}),
Leaf::Punct(Punct {
char: '}',
spacing: Spacing::Alone,
span: fake_span(node_range)
}),
]);
}
},
@@ -234,10 +237,15 @@ pub(crate) fn fixup_syntax(node: &SyntaxNode) -> SyntaxFixups {
};
let [pat, in_token, iter] = [
(SyntaxKind::UNDERSCORE, "_"),
(SyntaxKind::IN_KW, "in"),
(SyntaxKind::IDENT, "__ra_fixup")
].map(|(kind, text)| SyntheticToken { kind, text: text.into(), range: end_range, id: EMPTY_ID});
"_",
"in",
"__ra_fixup"
].map(|text|
Leaf::Ident(Ident {
text: text.into(),
span: fake_span(node_range)
}),
);
if it.pat().is_none() && it.in_token().is_none() && it.iterable().is_none() {
append.insert(for_token.into(), vec![pat, in_token, iter]);
@@ -248,18 +256,17 @@ pub(crate) fn fixup_syntax(node: &SyntaxNode) -> SyntaxFixups {
if it.loop_body().is_none() {
append.insert(node.clone().into(), vec![
SyntheticToken {
kind: SyntaxKind::L_CURLY,
text: "{".into(),
range: end_range,
id: EMPTY_ID,
},
SyntheticToken {
kind: SyntaxKind::R_CURLY,
text: "}".into(),
range: end_range,
id: EMPTY_ID,
},
// FIXME: This should be a subtree, no?
Leaf::Punct(Punct {
char: '{',
spacing: Spacing::Alone,
span: fake_span(node_range)
}),
Leaf::Punct(Punct {
char: '}',
spacing: Spacing::Alone,
span: fake_span(node_range)
}),
]);
}
},
@@ -267,12 +274,13 @@ pub(crate) fn fixup_syntax(node: &SyntaxNode) -> SyntaxFixups {
}
}
}
let needs_fixups = !append.is_empty() || !original.is_empty();
SyntaxFixups {
append,
replace,
token_map,
next_id,
undo_info: SyntaxFixupUndoInfo { original: original.into_boxed_slice() },
remove,
undo_info: SyntaxFixupUndoInfo {
original: needs_fixups.then(|| Arc::new(original.into_boxed_slice())),
},
}
}
@@ -288,30 +296,32 @@ fn has_error_to_handle(node: &SyntaxNode) -> bool {
has_error(node) || node.children().any(|c| !can_handle_error(&c) && has_error_to_handle(&c))
}
pub(crate) fn reverse_fixups(
tt: &mut Subtree,
token_map: &TokenMap,
undo_info: &SyntaxFixupUndoInfo,
) {
pub(crate) fn reverse_fixups(tt: &mut Subtree, undo_info: &SyntaxFixupUndoInfo) {
let Some(undo_info) = undo_info.original.as_deref() else { return };
let undo_info = &**undo_info;
reverse_fixups_(tt, undo_info);
}
fn reverse_fixups_(tt: &mut Subtree, undo_info: &[Subtree]) {
let tts = std::mem::take(&mut tt.token_trees);
tt.token_trees = tts
.into_iter()
// delete all fake nodes
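// (a leaf is "fake" when its span is anchored to the dummy `FileId(!0)`;
// of those, only replacement markers, whose range end is `!0`, are kept
// here so they can be swapped back for the original subtrees below)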
.filter(|tt| match tt {
tt::TokenTree::Leaf(leaf) => {
token_map.synthetic_token_id(*leaf.span()) != Some(EMPTY_ID)
}
tt::TokenTree::Subtree(st) => {
token_map.synthetic_token_id(st.delimiter.open) != Some(EMPTY_ID)
let span = leaf.span();
span.anchor.file_id != FileId::from_raw(!0) || span.range.end() == TextSize::new(!0)
}
tt::TokenTree::Subtree(_) => true,
})
.flat_map(|tt| match tt {
tt::TokenTree::Subtree(mut tt) => {
reverse_fixups(&mut tt, token_map, undo_info);
reverse_fixups_(&mut tt, undo_info);
SmallVec::from_const([tt.into()])
}
tt::TokenTree::Leaf(leaf) => {
if let Some(id) = token_map.synthetic_token_id(*leaf.span()) {
let original = undo_info.original[id.0 as usize].clone();
if leaf.span().anchor.file_id == FileId::from_raw(!0) {
let original = undo_info[u32::from(leaf.span().range.start()) as usize].clone();
if original.delimiter.kind == tt::DelimiterKind::Invisible {
original.token_trees.into()
} else {
@@ -327,11 +337,15 @@ pub(crate) fn reverse_fixups(
#[cfg(test)]
mod tests {
use base_db::FileId;
use expect_test::{expect, Expect};
use triomphe::Arc;
use crate::tt;
use super::reverse_fixups;
use crate::{
fixup::reverse_fixups,
span::{RealSpanMap, SpanMap},
tt,
};
// The following three functions are only meant to check partial structural equivalence of
// `TokenTree`s, see the last assertion in `check()`.
@@ -361,13 +375,13 @@ mod tests {
#[track_caller]
fn check(ra_fixture: &str, mut expect: Expect) {
let parsed = syntax::SourceFile::parse(ra_fixture);
let fixups = super::fixup_syntax(&parsed.syntax_node());
let (mut tt, tmap, _) = mbe::syntax_node_to_token_tree_with_modifications(
let span_map = SpanMap::RealSpanMap(Arc::new(RealSpanMap::absolute(FileId::from_raw(0))));
let fixups = super::fixup_syntax(span_map.as_ref(), &parsed.syntax_node());
let mut tt = mbe::syntax_node_to_token_tree_modified(
&parsed.syntax_node(),
fixups.token_map,
fixups.next_id,
fixups.replace,
span_map.as_ref(),
fixups.append,
fixups.remove,
);
let actual = format!("{tt}\n");
@@ -383,14 +397,15 @@
parse.syntax_node()
);
reverse_fixups(&mut tt, &tmap, &fixups.undo_info);
reverse_fixups(&mut tt, &fixups.undo_info);
// the fixed-up + reversed version should be equivalent to the original input
// modulo token IDs and `Punct`s' spacing.
let (original_as_tt, _) = mbe::syntax_node_to_token_tree(&parsed.syntax_node());
let original_as_tt =
mbe::syntax_node_to_token_tree(&parsed.syntax_node(), span_map.as_ref());
assert!(
check_subtree_eq(&tt, &original_as_tt),
"different token tree: {tt:?},\n{original_as_tt:?}"
"different token tree:\n{tt:?}\n\n{original_as_tt:?}"
);
}
@@ -403,7 +418,7 @@ fn foo() {
}
"#,
expect![[r#"
fn foo () {for _ in __ra_fixup {}}
fn foo () {for _ in __ra_fixup { }}
"#]],
)
}
@@ -431,7 +446,7 @@ fn foo() {
}
"#,
expect![[r#"
fn foo () {for bar in qux {}}
fn foo () {for bar in qux { }}
"#]],
)
}
@@ -462,7 +477,7 @@ fn foo() {
}
"#,
expect![[r#"
fn foo () {match __ra_fixup {}}
fn foo () {match __ra_fixup { }}
"#]],
)
}
@@ -494,7 +509,7 @@ fn foo() {
}
"#,
expect![[r#"
fn foo () {match __ra_fixup {}}
fn foo () {match __ra_fixup { }}
"#]],
)
}
@@ -609,7 +624,7 @@ fn foo() {
}
"#,
expect![[r#"
fn foo () {if a {}}
fn foo () {if a { }}
"#]],
)
}
@@ -623,7 +638,7 @@ fn foo() {
}
"#,
expect![[r#"
fn foo () {if __ra_fixup {}}
fn foo () {if __ra_fixup { }}
"#]],
)
}
@@ -637,7 +652,7 @@ fn foo() {
}
"#,
expect![[r#"
fn foo () {if __ra_fixup {} {}}
fn foo () {if __ra_fixup {} { }}
"#]],
)
}
@@ -651,7 +666,7 @@ fn foo() {
}
"#,
expect![[r#"
fn foo () {while __ra_fixup {}}
fn foo () {while __ra_fixup { }}
"#]],
)
}
@@ -665,7 +680,7 @@ fn foo() {
}
"#,
expect![[r#"
fn foo () {while foo {}}
fn foo () {while foo { }}
"#]],
)
}
@@ -692,7 +707,7 @@ fn foo() {
}
"#,
expect![[r#"
fn foo () {loop {}}
fn foo () {loop { }}
"#]],
)
}

@@ -2,252 +2,247 @@
//!
//! Specifically, `ast` + `Hygiene` allows you to create a `Name`. Note that, at
//! this moment, this is horribly incomplete and handles only `$crate`.
use base_db::CrateId;
use db::TokenExpander;
use either::Either;
use mbe::Origin;
use syntax::{
ast::{self, HasDocComments},
AstNode, SyntaxKind, SyntaxNode, TextRange, TextSize,
};
use triomphe::Arc;
use std::iter;
use crate::{
db::{self, ExpandDatabase},
fixup,
name::{AsName, Name},
HirFileId, InFile, MacroCallKind, MacroCallLoc, MacroDefKind, MacroFile,
};
use base_db::span::{MacroCallId, SpanData, SyntaxContextId};
#[derive(Clone, Debug)]
pub struct Hygiene {
frames: Option<HygieneFrames>,
use crate::db::ExpandDatabase;
#[derive(Copy, Clone, Hash, PartialEq, Eq)]
pub struct SyntaxContextData {
pub outer_expn: Option<MacroCallId>,
pub outer_transparency: Transparency,
pub parent: SyntaxContextId,
/// This context, but with all transparent and semi-transparent expansions filtered away.
pub opaque: SyntaxContextId,
/// This context, but with all transparent expansions filtered away.
pub opaque_and_semitransparent: SyntaxContextId,
}
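// Sketch (not part of the diff) of what the two fields above cache: the
// normalized forms of this context that `SyntaxContextExt::normalize_to_macros_2_0`
// and `normalize_to_macro_rules` expose further down (modulo `SELF_REF` handling).
fn _normalized_views(db: &dyn ExpandDatabase, ctx: SyntaxContextId) -> (SyntaxContextId, SyntaxContextId) {
    let data = db.lookup_intern_syntax_context(ctx);
    (data.opaque, data.opaque_and_semitransparent)
}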
impl Hygiene {
pub fn new(db: &dyn ExpandDatabase, file_id: HirFileId) -> Hygiene {
Hygiene { frames: Some(HygieneFrames::new(db, file_id)) }
impl std::fmt::Debug for SyntaxContextData {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
f.debug_struct("SyntaxContextData")
.field("outer_expn", &self.outer_expn)
.field("outer_transparency", &self.outer_transparency)
.field("parent", &self.parent)
.field("opaque", &self.opaque)
.field("opaque_and_semitransparent", &self.opaque_and_semitransparent)
.finish()
}
}
impl SyntaxContextData {
pub fn root() -> Self {
SyntaxContextData {
outer_expn: None,
outer_transparency: Transparency::Opaque,
parent: SyntaxContextId::ROOT,
opaque: SyntaxContextId::ROOT,
opaque_and_semitransparent: SyntaxContextId::ROOT,
}
}
pub fn new_unhygienic() -> Hygiene {
Hygiene { frames: None }
}
// FIXME: this should just return name
pub fn name_ref_to_name(
&self,
pub fn fancy_debug(
self,
self_id: SyntaxContextId,
db: &dyn ExpandDatabase,
name_ref: ast::NameRef,
) -> Either<Name, CrateId> {
if let Some(frames) = &self.frames {
if name_ref.text() == "$crate" {
if let Some(krate) = frames.root_crate(db, name_ref.syntax()) {
return Either::Right(krate);
}
f: &mut std::fmt::Formatter<'_>,
) -> std::fmt::Result {
write!(f, "#{self_id} parent: #{}, outer_mark: (", self.parent)?;
match self.outer_expn {
Some(id) => {
write!(f, "{:?}::{{{{expn{:?}}}}}", db.lookup_intern_macro_call(id).krate, id)?
}
None => write!(f, "root")?,
}
Either::Left(name_ref.as_name())
}
pub fn local_inner_macros(&self, db: &dyn ExpandDatabase, path: ast::Path) -> Option<CrateId> {
let mut token = path.syntax().first_token()?.text_range();
let frames = self.frames.as_ref()?;
let mut current = &frames.0;
loop {
let (mapped, origin) = current.expansion.as_ref()?.map_ident_up(db, token)?;
if origin == Origin::Def {
return if current.local_inner {
frames.root_crate(db, path.syntax())
} else {
None
};
}
current = current.call_site.as_ref()?;
token = mapped.value;
}
write!(f, ", {:?})", self.outer_transparency)
}
}
#[derive(Clone, Debug)]
struct HygieneFrames(Arc<HygieneFrame>);
#[derive(Clone, Debug, Eq, PartialEq)]
pub struct HygieneFrame {
expansion: Option<HygieneInfo>,
// Indicate this is a local inner macro
local_inner: bool,
krate: Option<CrateId>,
call_site: Option<Arc<HygieneFrame>>,
def_site: Option<Arc<HygieneFrame>>,
/// A property of a macro expansion that determines how identifiers
/// produced by that expansion are resolved.
#[derive(Copy, Clone, PartialEq, Eq, PartialOrd, Hash, Debug)]
pub enum Transparency {
/// Identifier produced by a transparent expansion is always resolved at call-site.
/// Call-site spans in procedural macros, hygiene opt-out in `macro` should use this.
Transparent,
/// Identifier produced by a semi-transparent expansion may be resolved
/// either at call-site or at definition-site.
/// If it's a local variable, label or `$crate` then it's resolved at def-site.
/// Otherwise it's resolved at call-site.
/// `macro_rules` macros behave like this, built-in macros currently behave like this too,
/// but that's an implementation detail.
SemiTransparent,
/// Identifier produced by an opaque expansion is always resolved at definition-site.
/// Def-site spans in procedural macros, identifiers from `macro` by default use this.
Opaque,
}
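// A resolution-rule sketch of the variants documented above, not part of the
// diff; `is_local_or_dollar_crate` is a hypothetical stand-in for the
// "local variable, label or `$crate`" check.
fn _resolution_site(t: Transparency, is_local_or_dollar_crate: bool) -> &'static str {
    match t {
        Transparency::Transparent => "call-site",
        Transparency::SemiTransparent if is_local_or_dollar_crate => "def-site",
        Transparency::SemiTransparent => "call-site",
        Transparency::Opaque => "def-site",
    }
}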
impl HygieneFrames {
fn new(db: &dyn ExpandDatabase, file_id: HirFileId) -> Self {
// Note that this intentionally avoids the `hygiene_frame` query to avoid blowing up memory
// usage. The query is only helpful for nested `HygieneFrame`s as it avoids redundant work.
HygieneFrames(Arc::new(HygieneFrame::new(db, file_id)))
}
fn root_crate(&self, db: &dyn ExpandDatabase, node: &SyntaxNode) -> Option<CrateId> {
let mut token = node.first_token()?.text_range();
let mut result = self.0.krate;
let mut current = self.0.clone();
while let Some((mapped, origin)) =
current.expansion.as_ref().and_then(|it| it.map_ident_up(db, token))
{
result = current.krate;
let site = match origin {
Origin::Def => &current.def_site,
Origin::Call => &current.call_site,
};
let site = match site {
None => break,
Some(it) => it,
};
current = site.clone();
token = mapped.value;
}
result
}
}
#[derive(Debug, Clone, PartialEq, Eq)]
struct HygieneInfo {
file: MacroFile,
/// The start offset of the `macro_rules!` arguments or attribute input.
attr_input_or_mac_def_start: Option<InFile<TextSize>>,
macro_def: TokenExpander,
macro_arg: Arc<(crate::tt::Subtree, mbe::TokenMap, fixup::SyntaxFixupUndoInfo)>,
macro_arg_shift: mbe::Shift,
exp_map: Arc<mbe::TokenMap>,
}
impl HygieneInfo {
fn map_ident_up(
&self,
db: &dyn ExpandDatabase,
token: TextRange,
) -> Option<(InFile<TextRange>, Origin)> {
let token_id = self.exp_map.token_by_range(token)?;
let (mut token_id, origin) = self.macro_def.map_id_up(token_id);
let loc = db.lookup_intern_macro_call(self.file.macro_call_id);
let (token_map, tt) = match &loc.kind {
MacroCallKind::Attr { attr_args, .. } => match self.macro_arg_shift.unshift(token_id) {
Some(unshifted) => {
token_id = unshifted;
(&attr_args.1, self.attr_input_or_mac_def_start?)
}
None => (&self.macro_arg.1, loc.kind.arg(db)?.map(|it| it.text_range().start())),
},
_ => match origin {
mbe::Origin::Call => {
(&self.macro_arg.1, loc.kind.arg(db)?.map(|it| it.text_range().start()))
}
mbe::Origin::Def => match (&self.macro_def, &self.attr_input_or_mac_def_start) {
(TokenExpander::DeclarativeMacro(expander), Some(tt)) => {
(&expander.def_site_token_map, *tt)
}
_ => panic!("`Origin::Def` used with non-`macro_rules!` macro"),
},
},
};
let range = token_map.first_range_by_token(token_id, SyntaxKind::IDENT)?;
Some((tt.with_value(range + tt.value), origin))
}
}
fn make_hygiene_info(
pub fn span_with_def_site_ctxt(
db: &dyn ExpandDatabase,
macro_file: MacroFile,
loc: &MacroCallLoc,
) -> HygieneInfo {
let def = loc.def.ast_id().left().and_then(|id| {
let def_tt = match id.to_node(db) {
ast::Macro::MacroRules(mac) => mac.token_tree()?,
ast::Macro::MacroDef(mac) => mac.body()?,
};
Some(InFile::new(id.file_id, def_tt))
});
let attr_input_or_mac_def = def.or_else(|| match loc.kind {
MacroCallKind::Attr { ast_id, invoc_attr_index, .. } => {
let tt = ast_id
.to_node(db)
.doc_comments_and_attrs()
.nth(invoc_attr_index.ast_index())
.and_then(Either::left)?
.token_tree()?;
Some(InFile::new(ast_id.file_id, tt))
}
_ => None,
});
span: SpanData,
expn_id: MacroCallId,
) -> SpanData {
span_with_ctxt_from_mark(db, span, expn_id, Transparency::Opaque)
}
let macro_def = db.macro_expander(loc.def);
let (_, exp_map) = db.parse_macro_expansion(macro_file).value;
let macro_arg = db.macro_arg(macro_file.macro_call_id).value.unwrap_or_else(|| {
Arc::new((
tt::Subtree { delimiter: tt::Delimiter::UNSPECIFIED, token_trees: Vec::new() },
Default::default(),
Default::default(),
))
});
pub fn span_with_call_site_ctxt(
db: &dyn ExpandDatabase,
span: SpanData,
expn_id: MacroCallId,
) -> SpanData {
span_with_ctxt_from_mark(db, span, expn_id, Transparency::Transparent)
}
HygieneInfo {
file: macro_file,
attr_input_or_mac_def_start: attr_input_or_mac_def
.map(|it| it.map(|tt| tt.syntax().text_range().start())),
macro_arg_shift: mbe::Shift::new(&macro_arg.0),
macro_arg,
macro_def,
exp_map,
pub fn span_with_mixed_site_ctxt(
db: &dyn ExpandDatabase,
span: SpanData,
expn_id: MacroCallId,
) -> SpanData {
span_with_ctxt_from_mark(db, span, expn_id, Transparency::SemiTransparent)
}
fn span_with_ctxt_from_mark(
db: &dyn ExpandDatabase,
span: SpanData,
expn_id: MacroCallId,
transparency: Transparency,
) -> SpanData {
SpanData { ctx: apply_mark(db, SyntaxContextId::ROOT, expn_id, transparency), ..span }
}
pub(super) fn apply_mark(
db: &dyn ExpandDatabase,
ctxt: SyntaxContextId,
call_id: MacroCallId,
transparency: Transparency,
) -> SyntaxContextId {
if transparency == Transparency::Opaque {
return apply_mark_internal(db, ctxt, Some(call_id), transparency);
}
let call_site_ctxt = db.lookup_intern_macro_call(call_id).call_site;
let mut call_site_ctxt = if transparency == Transparency::SemiTransparent {
call_site_ctxt.normalize_to_macros_2_0(db)
} else {
call_site_ctxt.normalize_to_macro_rules(db)
};
if call_site_ctxt.is_root() {
return apply_mark_internal(db, ctxt, Some(call_id), transparency);
}
// Otherwise, `expn_id` is a macros 1.0 definition and the call site is in a
// macros 2.0 expansion, i.e., a macros 1.0 invocation is in a macros 2.0 definition.
//
// In this case, the tokens from the macros 1.0 definition inherit the hygiene
// at their invocation. That is, we pretend that the macros 1.0 definition
// was defined at its invocation (i.e., inside the macros 2.0 definition)
// so that the macros 2.0 definition remains hygienic.
//
// See the example at `test/ui/hygiene/legacy_interaction.rs`.
for (call_id, transparency) in ctxt.marks(db) {
call_site_ctxt = apply_mark_internal(db, call_site_ctxt, call_id, transparency);
}
apply_mark_internal(db, call_site_ctxt, Some(call_id), transparency)
}
fn apply_mark_internal(
db: &dyn ExpandDatabase,
ctxt: SyntaxContextId,
call_id: Option<MacroCallId>,
transparency: Transparency,
) -> SyntaxContextId {
let syntax_context_data = db.lookup_intern_syntax_context(ctxt);
let mut opaque = syntax_context_data.opaque;
let mut opaque_and_semitransparent = syntax_context_data.opaque_and_semitransparent;
if transparency >= Transparency::Opaque {
let parent = opaque;
let new_opaque = SyntaxContextId::SELF_REF;
// But we can't just grab the to-be-allocated ID either, as that would not deduplicate
// things!
// So we need a new salsa store type here ...
opaque = db.intern_syntax_context(SyntaxContextData {
outer_expn: call_id,
outer_transparency: transparency,
parent,
opaque: new_opaque,
opaque_and_semitransparent: new_opaque,
});
}
if transparency >= Transparency::SemiTransparent {
let parent = opaque_and_semitransparent;
let new_opaque_and_semitransparent = SyntaxContextId::SELF_REF;
opaque_and_semitransparent = db.intern_syntax_context(SyntaxContextData {
outer_expn: call_id,
outer_transparency: transparency,
parent,
opaque,
opaque_and_semitransparent: new_opaque_and_semitransparent,
});
}
let parent = ctxt;
db.intern_syntax_context(SyntaxContextData {
outer_expn: call_id,
outer_transparency: transparency,
parent,
opaque,
opaque_and_semitransparent,
})
}
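// Illustrative sketch, not part of the diff: two opaque marks build the parent
// chain ROOT <- ctx1 <- ctx2, and `marks` recovers them in application order
// (`m1`, `m2` are hypothetical macro call ids).
fn _apply_mark_demo(db: &dyn ExpandDatabase, m1: MacroCallId, m2: MacroCallId) {
    let ctx1 = apply_mark(db, SyntaxContextId::ROOT, m1, Transparency::Opaque);
    let ctx2 = apply_mark(db, ctx1, m2, Transparency::Opaque);
    assert_eq!(ctx2.parent_ctxt(db), ctx1);
    assert_eq!(
        ctx2.marks(db),
        vec![(Some(m1), Transparency::Opaque), (Some(m2), Transparency::Opaque)]
    );
}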
pub trait SyntaxContextExt {
fn normalize_to_macro_rules(self, db: &dyn ExpandDatabase) -> Self;
fn normalize_to_macros_2_0(self, db: &dyn ExpandDatabase) -> Self;
fn parent_ctxt(self, db: &dyn ExpandDatabase) -> Self;
fn remove_mark(&mut self, db: &dyn ExpandDatabase) -> (Option<MacroCallId>, Transparency);
fn outer_mark(self, db: &dyn ExpandDatabase) -> (Option<MacroCallId>, Transparency);
fn marks(self, db: &dyn ExpandDatabase) -> Vec<(Option<MacroCallId>, Transparency)>;
}
#[inline(always)]
fn handle_self_ref(p: SyntaxContextId, n: SyntaxContextId) -> SyntaxContextId {
match n {
SyntaxContextId::SELF_REF => p,
_ => n,
}
}
impl HygieneFrame {
pub(crate) fn new(db: &dyn ExpandDatabase, file_id: HirFileId) -> HygieneFrame {
let (info, krate, local_inner) = match file_id.macro_file() {
None => (None, None, false),
Some(macro_file) => {
let loc = db.lookup_intern_macro_call(macro_file.macro_call_id);
let info = Some((make_hygiene_info(db, macro_file, &loc), loc.kind.file_id()));
match loc.def.kind {
MacroDefKind::Declarative(_) => {
(info, Some(loc.def.krate), loc.def.local_inner)
}
MacroDefKind::BuiltIn(..) => (info, Some(loc.def.krate), false),
MacroDefKind::BuiltInAttr(..) => (info, None, false),
MacroDefKind::BuiltInDerive(..) => (info, None, false),
MacroDefKind::BuiltInEager(..) => (info, None, false),
MacroDefKind::ProcMacro(..) => (info, None, false),
}
}
};
let Some((info, calling_file)) = info else {
return HygieneFrame {
expansion: None,
local_inner,
krate,
call_site: None,
def_site: None,
};
};
let def_site = info.attr_input_or_mac_def_start.map(|it| db.hygiene_frame(it.file_id));
let call_site = Some(db.hygiene_frame(calling_file));
HygieneFrame { expansion: Some(info), local_inner, krate, call_site, def_site }
impl SyntaxContextExt for SyntaxContextId {
fn normalize_to_macro_rules(self, db: &dyn ExpandDatabase) -> Self {
handle_self_ref(self, db.lookup_intern_syntax_context(self).opaque_and_semitransparent)
}
fn normalize_to_macros_2_0(self, db: &dyn ExpandDatabase) -> Self {
handle_self_ref(self, db.lookup_intern_syntax_context(self).opaque)
}
fn parent_ctxt(self, db: &dyn ExpandDatabase) -> Self {
db.lookup_intern_syntax_context(self).parent
}
fn outer_mark(self, db: &dyn ExpandDatabase) -> (Option<MacroCallId>, Transparency) {
let data = db.lookup_intern_syntax_context(self);
(data.outer_expn, data.outer_transparency)
}
fn remove_mark(&mut self, db: &dyn ExpandDatabase) -> (Option<MacroCallId>, Transparency) {
let data = db.lookup_intern_syntax_context(*self);
*self = data.parent;
(data.outer_expn, data.outer_transparency)
}
fn marks(self, db: &dyn ExpandDatabase) -> Vec<(Option<MacroCallId>, Transparency)> {
let mut marks = marks_rev(self, db).collect::<Vec<_>>();
marks.reverse();
marks
}
}
// FIXME: Make this a SyntaxContextExt method once we have RPIT
pub fn marks_rev(
ctxt: SyntaxContextId,
db: &dyn ExpandDatabase,
) -> impl Iterator<Item = (Option<MacroCallId>, Transparency)> + '_ {
iter::successors(Some(ctxt), move |&mark| {
Some(mark.parent_ctxt(db)).filter(|&it| it != SyntaxContextId::ROOT)
})
.map(|ctx| ctx.outer_mark(db))
}
