Make token trees no longer generic over the span

This is possible since the proc macro server no longer uses them (it uses a different span type).
This commit is contained in:
Chayim Refael Friedman 2025-12-28 04:13:49 +02:00
parent 0146f8dc31
commit 6851e398ae
32 changed files with 419 additions and 485 deletions

View file

@ -234,6 +234,7 @@ dependencies = [
"intern",
"oorandom",
"rustc-hash 2.1.1",
"span",
"syntax",
"syntax-bridge",
"tracing",
@ -3086,6 +3087,7 @@ dependencies = [
"arrayvec",
"intern",
"ra-ap-rustc_lexer",
"span",
"stdx",
"text-size 1.1.1 (registry+https://github.com/rust-lang/crates.io-index)",
]

View file

@ -19,6 +19,7 @@ tracing.workspace = true
# locals deps
tt = { workspace = true, optional = true }
syntax = { workspace = true, optional = true }
span = { path = "../span", version = "0.0", optional = true }
intern.workspace = true
[dev-dependencies]
@ -35,6 +36,8 @@ cfg = { path = ".", default-features = false, features = ["tt"] }
[features]
default = []
syntax = ["dep:syntax", "dep:span"]
tt = ["dep:tt"]
in-rust-tree = []
[lints]

View file

@ -96,12 +96,12 @@ impl CfgExpr {
// FIXME: Parsing from `tt` is only used in a handful of places, reconsider
// if we should switch them to AST.
#[cfg(feature = "tt")]
pub fn parse<S: Copy>(tt: &tt::TopSubtree<S>) -> CfgExpr {
pub fn parse(tt: &tt::TopSubtree) -> CfgExpr {
next_cfg_expr(&mut tt.iter()).unwrap_or(CfgExpr::Invalid)
}
#[cfg(feature = "tt")]
pub fn parse_from_iter<S: Copy>(tt: &mut tt::iter::TtIter<'_, S>) -> CfgExpr {
pub fn parse_from_iter(tt: &mut tt::iter::TtIter<'_>) -> CfgExpr {
next_cfg_expr(tt).unwrap_or(CfgExpr::Invalid)
}
@ -149,7 +149,15 @@ fn next_cfg_expr_from_ast(
if let Some(NodeOrToken::Token(literal)) = it.peek()
&& matches!(literal.kind(), SyntaxKind::STRING)
{
let literal = tt::token_to_literal(literal.text(), ()).symbol;
let dummy_span = span::Span {
range: span::TextRange::empty(span::TextSize::new(0)),
anchor: span::SpanAnchor {
file_id: span::EditionedFileId::from_raw(0),
ast_id: span::FIXUP_ERASED_FILE_AST_ID_MARKER,
},
ctx: span::SyntaxContext::root(span::Edition::Edition2015),
};
let literal = tt::token_to_literal(literal.text(), dummy_span).symbol;
it.next();
CfgAtom::KeyValue { key: name, value: literal.clone() }.into()
} else {
@ -179,7 +187,7 @@ fn next_cfg_expr_from_ast(
}
#[cfg(feature = "tt")]
fn next_cfg_expr<S: Copy>(it: &mut tt::iter::TtIter<'_, S>) -> Option<CfgExpr> {
fn next_cfg_expr(it: &mut tt::iter::TtIter<'_>) -> Option<CfgExpr> {
use intern::sym;
use tt::iter::TtElement;

View file

@ -103,7 +103,7 @@ fn lower_extra_crate_attrs<'a>(
struct FakeSpanMap {
file_id: span::EditionedFileId,
}
impl syntax_bridge::SpanMapper<Span> for FakeSpanMap {
impl syntax_bridge::SpanMapper for FakeSpanMap {
fn span_for(&self, range: TextRange) -> Span {
Span {
range,

View file

@ -18,7 +18,6 @@ use hir_expand::{
name::Name,
};
use intern::{Interned, Symbol, sym};
use span::Span;
use syntax::{AstNode, T, ast};
use syntax_bridge::DocCommentDesugarMode;
use tt::token_to_literal;
@ -49,7 +48,7 @@ impl AttrsOrCfg {
span_map: S,
) -> AttrsOrCfg
where
S: syntax_bridge::SpanMapper<Span> + Copy,
S: syntax_bridge::SpanMapper + Copy,
{
let mut attrs = Vec::new();
let result =

View file

@ -5,7 +5,7 @@ use intern::sym;
use itertools::{Itertools, izip};
use parser::SyntaxKind;
use rustc_hash::FxHashSet;
use span::{Edition, Span, SyntaxContext};
use span::{Edition, Span};
use stdx::never;
use syntax_bridge::DocCommentDesugarMode;
use tracing::debug;
@ -238,7 +238,7 @@ fn parse_adt(
fn parse_adt_from_syntax(
adt: &ast::Adt,
tm: &span::SpanMap<SyntaxContext>,
tm: &span::SpanMap,
call_site: Span,
) -> Result<BasicAdtInfo, ExpandError> {
let (name, generic_param_list, where_clause, shape) = match &adt {
@ -390,7 +390,7 @@ fn to_adt_syntax(
db: &dyn ExpandDatabase,
tt: &tt::TopSubtree,
call_site: Span,
) -> Result<(ast::Adt, span::SpanMap<SyntaxContext>), ExpandError> {
) -> Result<(ast::Adt, span::SpanMap), ExpandError> {
let (parsed, tm) = crate::db::token_tree_to_syntax_node(db, tt, crate::ExpandTo::Items);
let macro_items = ast::MacroItems::cast(parsed.syntax_node())
.ok_or_else(|| ExpandError::other(call_site, "invalid item definition"))?;

View file

@ -8,7 +8,7 @@ use tt::IdentIsRaw;
use crate::{name::Name, tt::TopSubtreeBuilder};
pub(crate) fn dollar_crate(span: Span) -> tt::Ident<Span> {
pub(crate) fn dollar_crate(span: Span) -> tt::Ident {
tt::Ident { sym: sym::dollar_crate, span, is_raw: tt::IdentIsRaw::No }
}

View file

@ -66,25 +66,7 @@ pub use crate::{
pub use base_db::EditionedFileId;
pub use mbe::{DeclarativeMacro, MacroCallStyle, MacroCallStyles, ValueResult};
pub mod tt {
pub use span::Span;
pub use tt::{DelimiterKind, IdentIsRaw, LitKind, Spacing, token_to_literal};
pub type Delimiter = ::tt::Delimiter<Span>;
pub type DelimSpan = ::tt::DelimSpan<Span>;
pub type Subtree = ::tt::Subtree<Span>;
pub type Leaf = ::tt::Leaf<Span>;
pub type Literal = ::tt::Literal<Span>;
pub type Punct = ::tt::Punct<Span>;
pub type Ident = ::tt::Ident<Span>;
pub type TokenTree = ::tt::TokenTree<Span>;
pub type TopSubtree = ::tt::TopSubtree<Span>;
pub type TopSubtreeBuilder = ::tt::TopSubtreeBuilder<Span>;
pub type TokenTreesView<'a> = ::tt::TokenTreesView<'a, Span>;
pub type SubtreeView<'a> = ::tt::SubtreeView<'a, Span>;
pub type TtElement<'a> = ::tt::iter::TtElement<'a, Span>;
pub type TtIter<'a> = ::tt::iter::TtIter<'a, Span>;
}
pub use tt;
#[macro_export]
macro_rules! impl_intern_lookup {

View file

@ -258,7 +258,7 @@ impl AsName for ast::NameOrNameRef {
}
}
impl<Span> AsName for tt::Ident<Span> {
impl AsName for tt::Ident {
fn as_name(&self) -> Name {
Name::new_root(self.sym.as_str())
}

View file

@ -1,6 +1,6 @@
//! Span maps for real files and macro expansions.
use span::{Span, SyntaxContext};
use span::Span;
use syntax::{AstNode, TextRange, ast};
use triomphe::Arc;
@ -8,7 +8,7 @@ pub use span::RealSpanMap;
use crate::{HirFileId, MacroCallId, db::ExpandDatabase};
pub type ExpansionSpanMap = span::SpanMap<SyntaxContext>;
pub type ExpansionSpanMap = span::SpanMap;
/// Spanmap for a macro file or a real file
#[derive(Clone, Debug, PartialEq, Eq)]
@ -27,13 +27,13 @@ pub enum SpanMapRef<'a> {
RealSpanMap(&'a RealSpanMap),
}
impl syntax_bridge::SpanMapper<Span> for SpanMap {
impl syntax_bridge::SpanMapper for SpanMap {
fn span_for(&self, range: TextRange) -> Span {
self.span_for_range(range)
}
}
impl syntax_bridge::SpanMapper<Span> for SpanMapRef<'_> {
impl syntax_bridge::SpanMapper for SpanMapRef<'_> {
fn span_for(&self, range: TextRange) -> Span {
self.span_for_range(range)
}

View file

@ -4,7 +4,7 @@ use ide_db::{
FileId, RootDatabase, base_db::Crate, helpers::pick_best_token,
syntax_helpers::prettify_macro_expansion,
};
use span::{SpanMap, SyntaxContext, TextRange, TextSize};
use span::{SpanMap, TextRange, TextSize};
use stdx::format_to;
use syntax::{AstNode, NodeOrToken, SyntaxKind, SyntaxNode, T, ast, ted};
@ -142,7 +142,7 @@ fn expand_macro_recur(
sema: &Semantics<'_, RootDatabase>,
macro_call: &ast::Item,
error: &mut String,
result_span_map: &mut SpanMap<SyntaxContext>,
result_span_map: &mut SpanMap,
offset_in_original_node: TextSize,
) -> Option<SyntaxNode> {
let ExpandResult { value: expanded, err } = match macro_call {
@ -171,7 +171,7 @@ fn expand(
sema: &Semantics<'_, RootDatabase>,
expanded: SyntaxNode,
error: &mut String,
result_span_map: &mut SpanMap<SyntaxContext>,
result_span_map: &mut SpanMap,
mut offset_in_original_node: i32,
) -> SyntaxNode {
let children = expanded.descendants().filter_map(ast::Item::cast);
@ -208,7 +208,7 @@ fn format(
kind: SyntaxKind,
file_id: FileId,
expanded: SyntaxNode,
span_map: &SpanMap<SyntaxContext>,
span_map: &SpanMap,
krate: Crate,
) -> String {
let expansion = prettify_macro_expansion(db, expanded, span_map, krate).to_string();

View file

@ -531,14 +531,14 @@ impl ProcMacroExpander for Expander {
fn expand(
&self,
db: &dyn SourceDatabase,
subtree: &tt::TopSubtree<Span>,
attrs: Option<&tt::TopSubtree<Span>>,
subtree: &tt::TopSubtree,
attrs: Option<&tt::TopSubtree>,
env: &Env,
def_site: Span,
call_site: Span,
mixed_site: Span,
current_dir: String,
) -> Result<tt::TopSubtree<Span>, ProcMacroExpansionError> {
) -> Result<tt::TopSubtree, ProcMacroExpansionError> {
let mut cb = |req| match req {
SubRequest::SourceText { file_id, start, end } => {
let file = FileId::from_raw(file_id);

View file

@ -2,7 +2,6 @@
use intern::Symbol;
use rustc_hash::FxHashMap;
use span::Span;
use stdx::itertools::Itertools;
use syntax::{
AstNode,
@ -70,7 +69,7 @@ fn macro_rules_fixtures() -> FxHashMap<String, DeclarativeMacro> {
.collect()
}
fn macro_rules_fixtures_tt() -> FxHashMap<String, tt::TopSubtree<Span>> {
fn macro_rules_fixtures_tt() -> FxHashMap<String, tt::TopSubtree> {
let fixture = bench_fixture::numerous_macro_rules();
let source_file = ast::SourceFile::parse(&fixture, span::Edition::CURRENT).ok().unwrap();
@ -95,7 +94,7 @@ fn macro_rules_fixtures_tt() -> FxHashMap<String, tt::TopSubtree<Span>> {
fn invocation_fixtures(
db: &dyn salsa::Database,
rules: &FxHashMap<String, DeclarativeMacro>,
) -> Vec<(String, tt::TopSubtree<Span>)> {
) -> Vec<(String, tt::TopSubtree)> {
let mut seed = 123456789;
let mut res = Vec::new();
@ -140,7 +139,7 @@ fn invocation_fixtures(
}
return res;
fn collect_from_op(op: &Op, builder: &mut tt::TopSubtreeBuilder<Span>, seed: &mut usize) {
fn collect_from_op(op: &Op, builder: &mut tt::TopSubtreeBuilder, seed: &mut usize) {
return match op {
Op::Var { kind, .. } => match kind.as_ref() {
Some(MetaVarKind::Ident) => builder.push(make_ident("foo")),
@ -226,17 +225,17 @@ fn invocation_fixtures(
*seed = usize::wrapping_add(usize::wrapping_mul(*seed, a), c);
*seed
}
fn make_ident(ident: &str) -> tt::Leaf<Span> {
fn make_ident(ident: &str) -> tt::Leaf {
tt::Leaf::Ident(tt::Ident {
span: DUMMY,
sym: Symbol::intern(ident),
is_raw: tt::IdentIsRaw::No,
})
}
fn make_punct(char: char) -> tt::Leaf<Span> {
fn make_punct(char: char) -> tt::Leaf {
tt::Leaf::Punct(tt::Punct { span: DUMMY, char, spacing: tt::Spacing::Alone })
}
fn make_literal(lit: &str) -> tt::Leaf<Span> {
fn make_literal(lit: &str) -> tt::Leaf {
tt::Leaf::Literal(tt::Literal {
span: DUMMY,
symbol: Symbol::intern(lit),
@ -244,7 +243,7 @@ fn invocation_fixtures(
suffix: None,
})
}
fn make_subtree(kind: tt::DelimiterKind, builder: &mut tt::TopSubtreeBuilder<Span>) {
fn make_subtree(kind: tt::DelimiterKind, builder: &mut tt::TopSubtreeBuilder) {
builder.open(kind, DUMMY);
builder.close(DUMMY);
}

View file

@ -17,11 +17,11 @@ use crate::{
pub(crate) fn expand_rules(
db: &dyn salsa::Database,
rules: &[crate::Rule],
input: &tt::TopSubtree<Span>,
input: &tt::TopSubtree,
marker: impl Fn(&mut Span) + Copy,
call_style: MacroCallStyle,
call_site: Span,
) -> ExpandResult<(tt::TopSubtree<Span>, MatchedArmIndex)> {
) -> ExpandResult<(tt::TopSubtree, MatchedArmIndex)> {
let mut match_: Option<(matcher::Match<'_>, &crate::Rule, usize)> = None;
for (idx, rule) in rules.iter().enumerate() {
// Skip any rules that aren't relevant to the call style (fn-like/attr/derive).
@ -129,7 +129,7 @@ enum Fragment<'a> {
Empty,
/// token fragments are just copy-pasted into the output
Tokens {
tree: tt::TokenTreesView<'a, Span>,
tree: tt::TokenTreesView<'a>,
origin: TokensOrigin,
},
/// Expr ast fragments are surrounded with `()` on transcription to preserve precedence.
@ -141,7 +141,7 @@ enum Fragment<'a> {
/// tricky to handle in the parser, and rustc doesn't handle those either.
///
/// The span of the outer delimiters is marked on transcription.
Expr(tt::TokenTreesView<'a, Span>),
Expr(tt::TokenTreesView<'a>),
/// There are roughly two types of paths: paths in expression context, where a
/// separator `::` between an identifier and its following generic argument list
/// is mandatory, and paths in type context, where `::` can be omitted.
@ -151,8 +151,8 @@ enum Fragment<'a> {
/// and is trasncribed as an expression-context path, verbatim transcription
/// would cause a syntax error. We need to fix it up just before transcribing;
/// see `transcriber::fix_up_and_push_path_tt()`.
Path(tt::TokenTreesView<'a, Span>),
TokensOwned(tt::TopSubtree<Span>),
Path(tt::TokenTreesView<'a>),
TokensOwned(tt::TopSubtree),
}
impl Fragment<'_> {

View file

@ -63,7 +63,6 @@ use std::{rc::Rc, sync::Arc};
use intern::{Symbol, sym};
use smallvec::{SmallVec, smallvec};
use span::Span;
use tt::{
DelimSpan,
iter::{TtElement, TtIter},
@ -114,7 +113,7 @@ impl Match<'_> {
pub(super) fn match_<'t>(
db: &dyn salsa::Database,
pattern: &'t MetaTemplate,
input: &'t tt::TopSubtree<Span>,
input: &'t tt::TopSubtree,
) -> Match<'t> {
let mut res = match_loop(db, pattern, input);
res.bound_count = count(res.bindings.bindings());
@ -339,7 +338,7 @@ struct MatchState<'t> {
bindings: BindingsIdx,
/// Cached result of meta variable parsing
meta_result: Option<(TtIter<'t, Span>, ExpandResult<Option<Fragment<'t>>>)>,
meta_result: Option<(TtIter<'t>, ExpandResult<Option<Fragment<'t>>>)>,
/// Is error occurred in this state, will `poised` to "parent"
is_error: bool,
@ -366,8 +365,8 @@ struct MatchState<'t> {
#[inline]
fn match_loop_inner<'t>(
db: &dyn salsa::Database,
src: TtIter<'t, Span>,
stack: &[TtIter<'t, Span>],
src: TtIter<'t>,
stack: &[TtIter<'t>],
res: &mut Match<'t>,
bindings_builder: &mut BindingsBuilder<'t>,
cur_items: &mut SmallVec<[MatchState<'t>; 1]>,
@ -375,7 +374,7 @@ fn match_loop_inner<'t>(
next_items: &mut Vec<MatchState<'t>>,
eof_items: &mut SmallVec<[MatchState<'t>; 1]>,
error_items: &mut SmallVec<[MatchState<'t>; 1]>,
delim_span: tt::DelimSpan<Span>,
delim_span: tt::DelimSpan,
) {
macro_rules! try_push {
($items: expr, $it:expr) => {
@ -623,11 +622,11 @@ fn match_loop_inner<'t>(
fn match_loop<'t>(
db: &dyn salsa::Database,
pattern: &'t MetaTemplate,
src: &'t tt::TopSubtree<Span>,
src: &'t tt::TopSubtree,
) -> Match<'t> {
let span = src.top_subtree().delimiter.delim_span();
let mut src = src.iter();
let mut stack: SmallVec<[TtIter<'_, Span>; 1]> = SmallVec::new();
let mut stack: SmallVec<[TtIter<'_>; 1]> = SmallVec::new();
let mut res = Match::default();
let mut error_recover_item = None;
@ -774,8 +773,8 @@ fn match_loop<'t>(
fn match_meta_var<'t>(
db: &dyn salsa::Database,
kind: MetaVarKind,
input: &mut TtIter<'t, Span>,
delim_span: DelimSpan<Span>,
input: &mut TtIter<'t>,
delim_span: DelimSpan,
) -> ExpandResult<Fragment<'t>> {
let fragment = match kind {
MetaVarKind::Path => {
@ -879,10 +878,10 @@ fn collect_vars(collector_fun: &mut impl FnMut(Symbol), pattern: &MetaTemplate)
}
}
impl MetaTemplate {
fn iter_delimited_with(&self, delimiter: tt::Delimiter<Span>) -> OpDelimitedIter<'_> {
fn iter_delimited_with(&self, delimiter: tt::Delimiter) -> OpDelimitedIter<'_> {
OpDelimitedIter { inner: &self.0, idx: 0, delimited: delimiter }
}
fn iter_delimited(&self, span: tt::DelimSpan<Span>) -> OpDelimitedIter<'_> {
fn iter_delimited(&self, span: tt::DelimSpan) -> OpDelimitedIter<'_> {
OpDelimitedIter {
inner: &self.0,
idx: 0,
@ -901,7 +900,7 @@ enum OpDelimited<'a> {
#[derive(Debug, Clone, Copy)]
struct OpDelimitedIter<'a> {
inner: &'a [Op],
delimited: tt::Delimiter<Span>,
delimited: tt::Delimiter,
idx: usize,
}
@ -945,7 +944,7 @@ impl<'a> Iterator for OpDelimitedIter<'a> {
}
}
fn expect_separator<S: Copy>(iter: &mut TtIter<'_, S>, separator: &Separator) -> bool {
fn expect_separator(iter: &mut TtIter<'_>, separator: &Separator) -> bool {
let mut fork = iter.clone();
let ok = match separator {
Separator::Ident(lhs) => match fork.expect_ident_or_underscore() {
@ -979,7 +978,7 @@ fn expect_separator<S: Copy>(iter: &mut TtIter<'_, S>, separator: &Separator) ->
ok
}
fn expect_tt<S: Copy>(iter: &mut TtIter<'_, S>) -> Result<(), ()> {
fn expect_tt(iter: &mut TtIter<'_>) -> Result<(), ()> {
if let Some(TtElement::Leaf(tt::Leaf::Punct(punct))) = iter.peek() {
if punct.char == '\'' {
expect_lifetime(iter)?;
@ -992,7 +991,7 @@ fn expect_tt<S: Copy>(iter: &mut TtIter<'_, S>) -> Result<(), ()> {
Ok(())
}
fn expect_lifetime<'a, S: Copy>(iter: &mut TtIter<'a, S>) -> Result<&'a tt::Ident<S>, ()> {
fn expect_lifetime<'a>(iter: &mut TtIter<'a>) -> Result<&'a tt::Ident, ()> {
let punct = iter.expect_single_punct()?;
if punct.char != '\'' {
return Err(());
@ -1000,7 +999,7 @@ fn expect_lifetime<'a, S: Copy>(iter: &mut TtIter<'a, S>) -> Result<&'a tt::Iden
iter.expect_ident_or_underscore()
}
fn eat_char<S: Copy>(iter: &mut TtIter<'_, S>, c: char) {
fn eat_char(iter: &mut TtIter<'_>, c: char) {
if matches!(iter.peek(), Some(TtElement::Leaf(tt::Leaf::Punct(tt::Punct { char, .. }))) if *char == c)
{
iter.next().expect("already peeked");

View file

@ -125,7 +125,7 @@ pub(super) fn transcribe(
bindings: &Bindings<'_>,
marker: impl Fn(&mut Span) + Copy,
call_site: Span,
) -> ExpandResult<tt::TopSubtree<Span>> {
) -> ExpandResult<tt::TopSubtree> {
let mut ctx = ExpandCtx { bindings, nesting: Vec::new(), call_site };
let mut builder = tt::TopSubtreeBuilder::new(tt::Delimiter::invisible_spanned(ctx.call_site));
expand_subtree(&mut ctx, template, &mut builder, marker).map(|()| builder.build())
@ -152,8 +152,8 @@ struct ExpandCtx<'a> {
fn expand_subtree_with_delimiter(
ctx: &mut ExpandCtx<'_>,
template: &MetaTemplate,
builder: &mut tt::TopSubtreeBuilder<Span>,
delimiter: Option<Delimiter<Span>>,
builder: &mut tt::TopSubtreeBuilder,
delimiter: Option<Delimiter>,
marker: impl Fn(&mut Span) + Copy,
) -> ExpandResult<()> {
let delimiter = delimiter.unwrap_or_else(|| tt::Delimiter::invisible_spanned(ctx.call_site));
@ -166,7 +166,7 @@ fn expand_subtree_with_delimiter(
fn expand_subtree(
ctx: &mut ExpandCtx<'_>,
template: &MetaTemplate,
builder: &mut tt::TopSubtreeBuilder<Span>,
builder: &mut tt::TopSubtreeBuilder,
marker: impl Fn(&mut Span) + Copy,
) -> ExpandResult<()> {
let mut err = None;
@ -382,7 +382,7 @@ fn expand_var(
ctx: &mut ExpandCtx<'_>,
v: &Symbol,
id: Span,
builder: &mut tt::TopSubtreeBuilder<Span>,
builder: &mut tt::TopSubtreeBuilder,
marker: impl Fn(&mut Span) + Copy,
) -> ExpandResult<()> {
// We already handle $crate case in mbe parser
@ -466,7 +466,7 @@ fn expand_repeat(
template: &MetaTemplate,
kind: RepeatKind,
separator: Option<&Separator>,
builder: &mut tt::TopSubtreeBuilder<Span>,
builder: &mut tt::TopSubtreeBuilder,
marker: impl Fn(&mut Span) + Copy,
) -> ExpandResult<()> {
ctx.nesting.push(NestingState { idx: 0, at_end: false, hit: false });
@ -546,8 +546,8 @@ fn expand_repeat(
/// we need this fixup.
fn fix_up_and_push_path_tt(
ctx: &ExpandCtx<'_>,
builder: &mut tt::TopSubtreeBuilder<Span>,
subtree: tt::TokenTreesView<'_, Span>,
builder: &mut tt::TopSubtreeBuilder,
subtree: tt::TokenTreesView<'_>,
) {
let mut prev_was_ident = false;
// Note that we only need to fix up the top-level `TokenTree`s because the

View file

@ -152,7 +152,7 @@ impl DeclarativeMacro {
/// The old, `macro_rules! m {}` flavor.
pub fn parse_macro_rules(
tt: &tt::TopSubtree<Span>,
tt: &tt::TopSubtree,
ctx_edition: impl Copy + Fn(SyntaxContext) -> Edition,
) -> DeclarativeMacro {
// Note: this parsing can be implemented using mbe machinery itself, by
@ -191,8 +191,8 @@ impl DeclarativeMacro {
/// The new, unstable `macro m {}` flavor.
pub fn parse_macro2(
args: Option<&tt::TopSubtree<Span>>,
body: &tt::TopSubtree<Span>,
args: Option<&tt::TopSubtree>,
body: &tt::TopSubtree,
ctx_edition: impl Copy + Fn(SyntaxContext) -> Edition,
) -> DeclarativeMacro {
let mut rules = Vec::new();
@ -276,11 +276,11 @@ impl DeclarativeMacro {
pub fn expand(
&self,
db: &dyn salsa::Database,
tt: &tt::TopSubtree<Span>,
tt: &tt::TopSubtree,
marker: impl Fn(&mut Span) + Copy,
call_style: MacroCallStyle,
call_site: Span,
) -> ExpandResult<(tt::TopSubtree<Span>, MatchedArmIndex)> {
) -> ExpandResult<(tt::TopSubtree, MatchedArmIndex)> {
expander::expand_rules(db, &self.rules, tt, marker, call_style, call_site)
}
}
@ -288,7 +288,7 @@ impl DeclarativeMacro {
impl Rule {
fn parse(
edition: impl Copy + Fn(SyntaxContext) -> Edition,
src: &mut TtIter<'_, Span>,
src: &mut TtIter<'_>,
) -> Result<Self, ParseError> {
// Parse an optional `attr()` or `derive()` prefix before the LHS pattern.
let style = parser::parse_rule_style(src)?;
@ -391,10 +391,10 @@ impl<T: Default, E> From<Result<T, E>> for ValueResult<T, E> {
pub fn expect_fragment<'t>(
db: &dyn salsa::Database,
tt_iter: &mut TtIter<'t, Span>,
tt_iter: &mut TtIter<'t>,
entry_point: ::parser::PrefixEntryPoint,
delim_span: DelimSpan<Span>,
) -> ExpandResult<tt::TokenTreesView<'t, Span>> {
delim_span: DelimSpan,
) -> ExpandResult<tt::TokenTreesView<'t>> {
use ::parser;
let buffer = tt_iter.remaining();
let parser_input = to_parser_input(buffer, &mut |ctx| ctx.edition(db));

View file

@ -13,7 +13,7 @@ use tt::{
use crate::{MacroCallStyle, ParseError};
pub(crate) fn parse_rule_style(src: &mut TtIter<'_, Span>) -> Result<MacroCallStyle, ParseError> {
pub(crate) fn parse_rule_style(src: &mut TtIter<'_>) -> Result<MacroCallStyle, ParseError> {
// Skip an optional `unsafe`. This is only actually allowed for `attr`
// rules, but we'll let rustc worry about that.
if let Some(TtElement::Leaf(tt::Leaf::Ident(ident))) = src.peek()
@ -59,14 +59,14 @@ pub(crate) struct MetaTemplate(pub(crate) Box<[Op]>);
impl MetaTemplate {
pub(crate) fn parse_pattern(
edition: impl Copy + Fn(SyntaxContext) -> Edition,
pattern: TtIter<'_, Span>,
pattern: TtIter<'_>,
) -> Result<Self, ParseError> {
MetaTemplate::parse(edition, pattern, Mode::Pattern)
}
pub(crate) fn parse_template(
edition: impl Copy + Fn(SyntaxContext) -> Edition,
template: TtIter<'_, Span>,
template: TtIter<'_>,
) -> Result<Self, ParseError> {
MetaTemplate::parse(edition, template, Mode::Template)
}
@ -77,7 +77,7 @@ impl MetaTemplate {
fn parse(
edition: impl Copy + Fn(SyntaxContext) -> Edition,
mut src: TtIter<'_, Span>,
mut src: TtIter<'_>,
mode: Mode,
) -> Result<Self, ParseError> {
let mut res = Vec::new();
@ -123,23 +123,23 @@ pub(crate) enum Op {
},
Subtree {
tokens: MetaTemplate,
delimiter: tt::Delimiter<Span>,
delimiter: tt::Delimiter,
},
Literal(tt::Literal<Span>),
Punct(Box<ArrayVec<tt::Punct<Span>, MAX_GLUED_PUNCT_LEN>>),
Ident(tt::Ident<Span>),
Literal(tt::Literal),
Punct(Box<ArrayVec<tt::Punct, MAX_GLUED_PUNCT_LEN>>),
Ident(tt::Ident),
}
#[derive(Clone, Debug, PartialEq, Eq)]
pub(crate) enum ConcatMetaVarExprElem {
/// There is NO preceding dollar sign, which means that this identifier should be interpreted
/// as a literal.
Ident(tt::Ident<Span>),
Ident(tt::Ident),
/// There is a preceding dollar sign, which means that this identifier should be expanded
/// and interpreted as a variable.
Var(tt::Ident<Span>),
Var(tt::Ident),
/// For example, a number or a string.
Literal(tt::Literal<Span>),
Literal(tt::Literal),
}
#[derive(Copy, Clone, Debug, PartialEq, Eq)]
@ -179,10 +179,10 @@ pub(crate) enum MetaVarKind {
#[derive(Clone, Debug, Eq)]
pub(crate) enum Separator {
Literal(tt::Literal<Span>),
Ident(tt::Ident<Span>),
Puncts(ArrayVec<tt::Punct<Span>, MAX_GLUED_PUNCT_LEN>),
Lifetime(tt::Punct<Span>, tt::Ident<Span>),
Literal(tt::Literal),
Ident(tt::Ident),
Puncts(ArrayVec<tt::Punct, MAX_GLUED_PUNCT_LEN>),
Lifetime(tt::Punct, tt::Ident),
}
// Note that when we compare a Separator, we just care about its textual value.
@ -212,8 +212,8 @@ enum Mode {
fn next_op(
edition: impl Copy + Fn(SyntaxContext) -> Edition,
first_peeked: TtElement<'_, Span>,
src: &mut TtIter<'_, Span>,
first_peeked: TtElement<'_>,
src: &mut TtIter<'_>,
mode: Mode,
) -> Result<Op, ParseError> {
let res = match first_peeked {
@ -320,7 +320,7 @@ fn next_op(
fn eat_fragment_kind(
edition: impl Copy + Fn(SyntaxContext) -> Edition,
src: &mut TtIter<'_, Span>,
src: &mut TtIter<'_>,
mode: Mode,
) -> Result<Option<MetaVarKind>, ParseError> {
if let Mode::Pattern = mode {
@ -363,11 +363,11 @@ fn eat_fragment_kind(
Ok(None)
}
fn is_boolean_literal(lit: &tt::Literal<Span>) -> bool {
fn is_boolean_literal(lit: &tt::Literal) -> bool {
matches!(lit.symbol.as_str(), "true" | "false")
}
fn parse_repeat(src: &mut TtIter<'_, Span>) -> Result<(Option<Separator>, RepeatKind), ParseError> {
fn parse_repeat(src: &mut TtIter<'_>) -> Result<(Option<Separator>, RepeatKind), ParseError> {
let mut separator = Separator::Puncts(ArrayVec::new());
for tt in src {
let tt = match tt {
@ -413,7 +413,7 @@ fn parse_repeat(src: &mut TtIter<'_, Span>) -> Result<(Option<Separator>, Repeat
Err(ParseError::InvalidRepeat)
}
fn parse_metavar_expr(src: &mut TtIter<'_, Span>) -> Result<Op, ()> {
fn parse_metavar_expr(src: &mut TtIter<'_>) -> Result<Op, ()> {
let func = src.expect_ident()?;
let (args, mut args_iter) = src.expect_subtree()?;
@ -475,7 +475,7 @@ fn parse_metavar_expr(src: &mut TtIter<'_, Span>) -> Result<Op, ()> {
Ok(op)
}
fn parse_depth(src: &mut TtIter<'_, Span>) -> Result<usize, ()> {
fn parse_depth(src: &mut TtIter<'_>) -> Result<usize, ()> {
if src.is_empty() {
Ok(0)
} else if let tt::Leaf::Literal(tt::Literal { symbol: text, suffix: None, .. }) =
@ -488,7 +488,7 @@ fn parse_depth(src: &mut TtIter<'_, Span>) -> Result<usize, ()> {
}
}
fn try_eat_comma(src: &mut TtIter<'_, Span>) -> bool {
fn try_eat_comma(src: &mut TtIter<'_>) -> bool {
if let Some(TtElement::Leaf(tt::Leaf::Punct(tt::Punct { char: ',', .. }))) = src.peek() {
let _ = src.next();
return true;
@ -496,7 +496,7 @@ fn try_eat_comma(src: &mut TtIter<'_, Span>) -> bool {
false
}
fn try_eat_dollar(src: &mut TtIter<'_, Span>) -> bool {
fn try_eat_dollar(src: &mut TtIter<'_>) -> bool {
if let Some(TtElement::Leaf(tt::Leaf::Punct(tt::Punct { char: '$', .. }))) = src.peek() {
let _ = src.next();
return true;

View file

@ -138,16 +138,15 @@ pub(crate) fn find_proc_macros(
pub(crate) fn expand(
proc_macro: &ProcMacro,
subtree: tt::SubtreeView<'_, Span>,
attr: Option<tt::SubtreeView<'_, Span>>,
subtree: tt::SubtreeView<'_>,
attr: Option<tt::SubtreeView<'_>>,
env: Vec<(String, String)>,
def_site: Span,
call_site: Span,
mixed_site: Span,
current_dir: String,
callback: SubCallback<'_>,
) -> Result<Result<tt::TopSubtree<span::SpanData<span::SyntaxContext>>, String>, crate::ServerError>
{
) -> Result<Result<tt::TopSubtree, String>, crate::ServerError> {
let version = proc_macro.process.version();
let mut span_data_table = SpanDataIndexMap::default();
let def_site = span_data_table.insert_full(def_site).0;

View file

@ -77,15 +77,14 @@ pub(crate) fn find_proc_macros(
pub(crate) fn expand(
proc_macro: &ProcMacro,
subtree: tt::SubtreeView<'_, Span>,
attr: Option<tt::SubtreeView<'_, Span>>,
subtree: tt::SubtreeView<'_>,
attr: Option<tt::SubtreeView<'_>>,
env: Vec<(String, String)>,
def_site: Span,
call_site: Span,
mixed_site: Span,
current_dir: String,
) -> Result<Result<tt::TopSubtree<span::SpanData<span::SyntaxContext>>, String>, crate::ServerError>
{
) -> Result<Result<tt::TopSubtree, String>, crate::ServerError> {
let version = proc_macro.process.version();
let mut span_data_table = SpanDataIndexMap::default();
let def_site = span_data_table.insert_full(def_site).0;

View file

@ -185,7 +185,7 @@ mod tests {
use super::*;
fn fixture_token_tree_top_many_none() -> TopSubtree<Span> {
fn fixture_token_tree_top_many_none() -> TopSubtree {
let anchor = SpanAnchor {
file_id: span::EditionedFileId::new(
span::FileId::from_raw(0xe4e4e),
@ -292,7 +292,7 @@ mod tests {
builder.build()
}
fn fixture_token_tree_top_empty_none() -> TopSubtree<Span> {
fn fixture_token_tree_top_empty_none() -> TopSubtree {
let anchor = SpanAnchor {
file_id: span::EditionedFileId::new(
span::FileId::from_raw(0xe4e4e),
@ -318,7 +318,7 @@ mod tests {
builder.build()
}
fn fixture_token_tree_top_empty_brace() -> TopSubtree<Span> {
fn fixture_token_tree_top_empty_brace() -> TopSubtree {
let anchor = SpanAnchor {
file_id: span::EditionedFileId::new(
span::FileId::from_raw(0xe4e4e),

View file

@ -123,7 +123,7 @@ struct IdentRepr {
impl FlatTree {
pub fn from_subtree(
subtree: tt::SubtreeView<'_, Span>,
subtree: tt::SubtreeView<'_>,
version: u32,
span_data_table: &mut SpanDataIndexMap,
) -> FlatTree {
@ -168,7 +168,7 @@ impl FlatTree {
self,
version: u32,
span_data_table: &SpanDataIndexMap,
) -> tt::TopSubtree<Span> {
) -> tt::TopSubtree {
Reader::<Span> {
subtree: if version >= ENCODE_CLOSE_SPAN_VERSION {
read_vec(self.subtree, SubtreeRepr::read_with_close_span)
@ -486,8 +486,8 @@ struct Writer<'a, 'span, S: SpanTransformer, W> {
text: Vec<String>,
}
impl<'a, T: SpanTransformer> Writer<'a, '_, T, tt::iter::TtIter<'a, T::Span>> {
fn write_subtree(&mut self, root: tt::SubtreeView<'a, T::Span>) {
impl<'a, T: SpanTransformer<Span = span::Span>> Writer<'a, '_, T, tt::iter::TtIter<'a>> {
fn write_subtree(&mut self, root: tt::SubtreeView<'a>) {
let subtree = root.top_subtree();
self.enqueue(subtree, root.iter());
while let Some((idx, len, subtree)) = self.work.pop_front() {
@ -495,7 +495,7 @@ impl<'a, T: SpanTransformer> Writer<'a, '_, T, tt::iter::TtIter<'a, T::Span>> {
}
}
fn subtree(&mut self, idx: usize, n_tt: usize, subtree: tt::iter::TtIter<'a, T::Span>) {
fn subtree(&mut self, idx: usize, n_tt: usize, subtree: tt::iter::TtIter<'a>) {
let mut first_tt = self.token_tree.len();
self.token_tree.resize(first_tt + n_tt, !0);
@ -565,11 +565,7 @@ impl<'a, T: SpanTransformer> Writer<'a, '_, T, tt::iter::TtIter<'a, T::Span>> {
}
}
fn enqueue(
&mut self,
subtree: &'a tt::Subtree<T::Span>,
contents: tt::iter::TtIter<'a, T::Span>,
) -> u32 {
fn enqueue(&mut self, subtree: &'a tt::Subtree, contents: tt::iter::TtIter<'a>) -> u32 {
let idx = self.subtree.len();
let open = self.token_id_of(subtree.delimiter.open);
let close = self.token_id_of(subtree.delimiter.close);
@ -739,9 +735,9 @@ struct Reader<'span, S: SpanTransformer> {
span_data_table: &'span S::Table,
}
impl<T: SpanTransformer> Reader<'_, T> {
pub(crate) fn read_subtree(self) -> tt::TopSubtree<T::Span> {
let mut res: Vec<Option<(tt::Delimiter<T::Span>, Vec<tt::TokenTree<T::Span>>)>> =
impl<T: SpanTransformer<Span = span::Span>> Reader<'_, T> {
pub(crate) fn read_subtree(self) -> tt::TopSubtree {
let mut res: Vec<Option<(tt::Delimiter, Vec<tt::TokenTree>)>> =
vec![None; self.subtree.len()];
let read_span = |id| T::span_for_token_id(self.span_data_table, id);
for i in (0..self.subtree.len()).rev() {

View file

@ -192,7 +192,7 @@ impl ProcMacro {
}
/// On some server versions, the fixup ast id is different than ours. So change it to match.
fn change_fixup_to_match_old_server(&self, tt: &mut tt::TopSubtree<Span>) {
fn change_fixup_to_match_old_server(&self, tt: &mut tt::TopSubtree) {
const OLD_FIXUP_AST_ID: ErasedFileAstId = ErasedFileAstId::from_raw(!0 - 1);
let change_ast_id = |ast_id: &mut ErasedFileAstId| {
if *ast_id == FIXUP_ERASED_FILE_AST_ID_MARKER {
@ -222,15 +222,15 @@ impl ProcMacro {
/// This includes span information and environmental context.
pub fn expand(
&self,
subtree: tt::SubtreeView<'_, Span>,
attr: Option<tt::SubtreeView<'_, Span>>,
subtree: tt::SubtreeView<'_>,
attr: Option<tt::SubtreeView<'_>>,
env: Vec<(String, String)>,
def_site: Span,
call_site: Span,
mixed_site: Span,
current_dir: String,
callback: Option<SubCallback<'_>>,
) -> Result<Result<tt::TopSubtree<Span>, String>, ServerError> {
) -> Result<Result<tt::TopSubtree, String>, ServerError> {
let (mut subtree, mut attr) = (subtree, attr);
let (mut subtree_changed, mut attr_changed);
if self.needs_fixup_change() {

View file

@ -204,15 +204,15 @@ impl ProcMacroServerProcess {
pub(crate) fn expand(
&self,
proc_macro: &ProcMacro,
subtree: tt::SubtreeView<'_, Span>,
attr: Option<tt::SubtreeView<'_, Span>>,
subtree: tt::SubtreeView<'_>,
attr: Option<tt::SubtreeView<'_>>,
env: Vec<(String, String)>,
def_site: Span,
call_site: Span,
mixed_site: Span,
current_dir: String,
callback: Option<SubCallback<'_>>,
) -> Result<Result<tt::TopSubtree<Span>, String>, ServerError> {
) -> Result<Result<tt::TopSubtree, String>, ServerError> {
match self.protocol {
Protocol::LegacyJson { .. } | Protocol::LegacyPostcard { .. } => {
legacy_protocol::expand(

View file

@ -24,8 +24,6 @@ pub use syntax::Edition;
pub use text_size::{TextRange, TextSize};
pub use vfs::FileId;
pub type Span = SpanData<SyntaxContext>;
impl Span {
pub fn cover(self, other: Span) -> Span {
if self.anchor != other.anchor {
@ -61,13 +59,17 @@ impl Span {
}
Some(Span { range: self.range.cover(other.range), anchor: other.anchor, ctx: other.ctx })
}
pub fn eq_ignoring_ctx(self, other: Self) -> bool {
self.anchor == other.anchor && self.range == other.range
}
}
/// Spans represent a region of code, used by the IDE to be able link macro inputs and outputs
/// together. Positions in spans are relative to some [`SpanAnchor`] to make them more incremental
/// friendly.
#[derive(Clone, Copy, PartialEq, Eq, Hash)]
pub struct SpanData<Ctx> {
pub struct Span {
/// The text range of this span, relative to the anchor.
/// We need the anchor for incrementality, as storing absolute ranges will require
/// recomputation on every change in a file at all times.
@ -75,10 +77,10 @@ pub struct SpanData<Ctx> {
/// The anchor this span is relative to.
pub anchor: SpanAnchor,
/// The syntax context of the span.
pub ctx: Ctx,
pub ctx: SyntaxContext,
}
impl<Ctx: fmt::Debug> fmt::Debug for SpanData<Ctx> {
impl fmt::Debug for Span {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
if f.alternate() {
fmt::Debug::fmt(&self.anchor.file_id.file_id().index(), f)?;
@ -98,12 +100,6 @@ impl<Ctx: fmt::Debug> fmt::Debug for SpanData<Ctx> {
}
}
impl<Ctx: Copy> SpanData<Ctx> {
pub fn eq_ignoring_ctx(self, other: Self) -> bool {
self.anchor == other.anchor && self.range == other.range
}
}
impl fmt::Display for Span {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
fmt::Debug::fmt(&self.anchor.file_id.file_id().index(), f)?;

View file

@ -6,24 +6,21 @@ use std::{fmt, hash::Hash};
use stdx::{always, itertools::Itertools};
use crate::{
EditionedFileId, ErasedFileAstId, ROOT_ERASED_FILE_AST_ID, Span, SpanAnchor, SpanData,
SyntaxContext, TextRange, TextSize,
EditionedFileId, ErasedFileAstId, ROOT_ERASED_FILE_AST_ID, Span, SpanAnchor, SyntaxContext,
TextRange, TextSize,
};
/// Maps absolute text ranges for the corresponding file to the relevant span data.
#[derive(Debug, PartialEq, Eq, Clone, Hash)]
pub struct SpanMap<S> {
pub struct SpanMap {
/// The offset stored here is the *end* of the node.
spans: Vec<(TextSize, SpanData<S>)>,
spans: Vec<(TextSize, Span)>,
/// Index of the matched macro arm on successful expansion for declarative macros.
// FIXME: Does it make sense to have this here?
pub matched_arm: Option<u32>,
}
impl<S> SpanMap<S>
where
SpanData<S>: Copy,
{
impl SpanMap {
/// Creates a new empty [`SpanMap`].
pub fn empty() -> Self {
Self { spans: Vec::new(), matched_arm: None }
@ -40,7 +37,7 @@ where
}
/// Pushes a new span onto the [`SpanMap`].
pub fn push(&mut self, offset: TextSize, span: SpanData<S>) {
pub fn push(&mut self, offset: TextSize, span: Span) {
if cfg!(debug_assertions)
&& let Some(&(last_offset, _)) = self.spans.last()
{
@ -57,11 +54,8 @@ where
/// Note this does a linear search through the entire backing vector.
pub fn ranges_with_span_exact(
&self,
span: SpanData<S>,
) -> impl Iterator<Item = (TextRange, S)> + '_
where
S: Copy,
{
span: Span,
) -> impl Iterator<Item = (TextRange, SyntaxContext)> + '_ {
self.spans.iter().enumerate().filter_map(move |(idx, &(end, s))| {
if !s.eq_ignoring_ctx(span) {
return None;
@ -74,10 +68,10 @@ where
/// Returns all [`TextRange`]s whose spans contain the given span.
///
/// Note this does a linear search through the entire backing vector.
pub fn ranges_with_span(&self, span: SpanData<S>) -> impl Iterator<Item = (TextRange, S)> + '_
where
S: Copy,
{
pub fn ranges_with_span(
&self,
span: Span,
) -> impl Iterator<Item = (TextRange, SyntaxContext)> + '_ {
self.spans.iter().enumerate().filter_map(move |(idx, &(end, s))| {
if s.anchor != span.anchor {
return None;
@ -91,28 +85,28 @@ where
}
/// Returns the span at the given position.
pub fn span_at(&self, offset: TextSize) -> SpanData<S> {
pub fn span_at(&self, offset: TextSize) -> Span {
let entry = self.spans.partition_point(|&(it, _)| it <= offset);
self.spans[entry].1
}
/// Returns the spans associated with the given range.
/// In other words, this will return all spans that correspond to all offsets within the given range.
pub fn spans_for_range(&self, range: TextRange) -> impl Iterator<Item = SpanData<S>> + '_ {
pub fn spans_for_range(&self, range: TextRange) -> impl Iterator<Item = Span> + '_ {
let (start, end) = (range.start(), range.end());
let start_entry = self.spans.partition_point(|&(it, _)| it <= start);
let end_entry = self.spans[start_entry..].partition_point(|&(it, _)| it <= end); // FIXME: this might be wrong?
self.spans[start_entry..][..end_entry].iter().map(|&(_, s)| s)
}
pub fn iter(&self) -> impl Iterator<Item = (TextSize, SpanData<S>)> + '_ {
pub fn iter(&self) -> impl Iterator<Item = (TextSize, Span)> + '_ {
self.spans.iter().copied()
}
/// Merges this span map with another span map, where `other` is inserted at (and replaces) `other_range`.
///
/// The length of the replacement node needs to be `other_size`.
pub fn merge(&mut self, other_range: TextRange, other_size: TextSize, other: &SpanMap<S>) {
pub fn merge(&mut self, other_range: TextRange, other_size: TextSize, other: &SpanMap) {
// I find the following diagram helpful to illustrate the bounds and why we use `<` or `<=`:
// --------------------------------------------------------------------
// 1 3 5 6 7 10 11 <-- offsets we store
@ -157,7 +151,7 @@ where
}
#[cfg(not(no_salsa_async_drops))]
impl<S> Drop for SpanMap<S> {
impl Drop for SpanMap {
fn drop(&mut self) {
struct SendPtr(*mut [()]);
unsafe impl Send for SendPtr {}
@ -175,18 +169,17 @@ impl<S> Drop for SpanMap<S> {
})
.send((
unsafe {
SendPtr(std::mem::transmute::<*mut [(TextSize, SpanData<S>)], *mut [()]>(
Box::<[(TextSize, SpanData<S>)]>::into_raw(
std::mem::take(&mut self.spans).into_boxed_slice(),
),
))
SendPtr(std::mem::transmute::<*mut [(TextSize, Span)], *mut [()]>(Box::<
[(TextSize, Span)],
>::into_raw(
std::mem::take(&mut self.spans).into_boxed_slice(),
)))
},
|b: SendPtr| {
_ = unsafe {
Box::from_raw(std::mem::transmute::<
*mut [()],
*mut [(TextSize, SpanData<S>)],
>(b.0))
Box::from_raw(std::mem::transmute::<*mut [()], *mut [(TextSize, Span)]>(
b.0,
))
}
},
))

View file

@ -9,7 +9,7 @@ use std::{collections::VecDeque, fmt, hash::Hash};
use intern::Symbol;
use rustc_hash::{FxHashMap, FxHashSet};
use span::{Edition, SpanAnchor, SpanData, SpanMap};
use span::{Edition, Span, SpanAnchor, SpanMap, SyntaxContext};
use stdx::{format_to, never};
use syntax::{
AstToken, Parse, PreorderWithTokens, SmolStr, SyntaxElement,
@ -29,21 +29,18 @@ pub use ::parser::TopEntryPoint;
#[cfg(test)]
mod tests;
pub trait SpanMapper<S> {
fn span_for(&self, range: TextRange) -> S;
pub trait SpanMapper {
fn span_for(&self, range: TextRange) -> Span;
}
impl<S> SpanMapper<SpanData<S>> for SpanMap<S>
where
SpanData<S>: Copy,
{
fn span_for(&self, range: TextRange) -> SpanData<S> {
impl SpanMapper for SpanMap {
fn span_for(&self, range: TextRange) -> Span {
self.span_at(range.start())
}
}
impl<S: Copy, SM: SpanMapper<S>> SpanMapper<S> for &SM {
fn span_for(&self, range: TextRange) -> S {
impl<SM: SpanMapper> SpanMapper for &SM {
fn span_for(&self, range: TextRange) -> Span {
SM::span_for(self, range)
}
}
@ -69,7 +66,7 @@ pub mod dummy_test_span_utils {
pub struct DummyTestSpanMap;
impl SpanMapper<Span> for DummyTestSpanMap {
impl SpanMapper for DummyTestSpanMap {
fn span_for(&self, range: syntax::TextRange) -> Span {
Span {
range,
@ -97,15 +94,14 @@ pub enum DocCommentDesugarMode {
/// Converts a syntax tree to a [`tt::Subtree`] using the provided span map to populate the
/// subtree's spans.
pub fn syntax_node_to_token_tree<Ctx, SpanMap>(
pub fn syntax_node_to_token_tree<SpanMap>(
node: &SyntaxNode,
map: SpanMap,
span: SpanData<Ctx>,
span: Span,
mode: DocCommentDesugarMode,
) -> tt::TopSubtree<SpanData<Ctx>>
) -> tt::TopSubtree
where
SpanData<Ctx>: Copy + fmt::Debug,
SpanMap: SpanMapper<SpanData<Ctx>>,
SpanMap: SpanMapper,
{
let mut c =
Converter::new(node, map, Default::default(), Default::default(), span, mode, |_, _| {
@ -117,22 +113,18 @@ where
/// Converts a syntax tree to a [`tt::Subtree`] using the provided span map to populate the
/// subtree's spans. Additionally using the append and remove parameters, the additional tokens can
/// be injected or hidden from the output.
pub fn syntax_node_to_token_tree_modified<Ctx, SpanMap, OnEvent>(
pub fn syntax_node_to_token_tree_modified<SpanMap, OnEvent>(
node: &SyntaxNode,
map: SpanMap,
append: FxHashMap<SyntaxElement, Vec<tt::Leaf<SpanData<Ctx>>>>,
append: FxHashMap<SyntaxElement, Vec<tt::Leaf>>,
remove: FxHashSet<SyntaxElement>,
call_site: SpanData<Ctx>,
call_site: Span,
mode: DocCommentDesugarMode,
on_enter: OnEvent,
) -> tt::TopSubtree<SpanData<Ctx>>
) -> tt::TopSubtree
where
SpanMap: SpanMapper<SpanData<Ctx>>,
SpanData<Ctx>: Copy + fmt::Debug,
OnEvent: FnMut(
&mut PreorderWithTokens,
&WalkEvent<SyntaxElement>,
) -> (bool, Vec<tt::Leaf<SpanData<Ctx>>>),
SpanMap: SpanMapper,
OnEvent: FnMut(&mut PreorderWithTokens, &WalkEvent<SyntaxElement>) -> (bool, Vec<tt::Leaf>),
{
let mut c = Converter::new(node, map, append, remove, call_site, mode, on_enter);
convert_tokens(&mut c)
@ -152,13 +144,13 @@ where
/// Converts a [`tt::Subtree`] back to a [`SyntaxNode`].
/// The produced `SpanMap` contains a mapping from the syntax nodes offsets to the subtree's spans.
pub fn token_tree_to_syntax_node<Ctx>(
tt: &tt::TopSubtree<SpanData<Ctx>>,
pub fn token_tree_to_syntax_node(
tt: &tt::TopSubtree,
entry_point: parser::TopEntryPoint,
span_to_edition: &mut dyn FnMut(Ctx) -> Edition,
) -> (Parse<SyntaxNode>, SpanMap<Ctx>)
span_to_edition: &mut dyn FnMut(SyntaxContext) -> Edition,
) -> (Parse<SyntaxNode>, SpanMap)
where
Ctx: Copy + fmt::Debug + PartialEq + PartialEq + Eq + Hash,
SyntaxContext: Copy + fmt::Debug + PartialEq + PartialEq + Eq + Hash,
{
let buffer = tt.view().strip_invisible();
let parser_input = to_parser_input(buffer, span_to_edition);
@ -183,16 +175,12 @@ where
/// Convert a string to a `TokenTree`. The spans of the subtree will be anchored to the provided
/// anchor with the given context.
pub fn parse_to_token_tree<Ctx>(
pub fn parse_to_token_tree(
edition: Edition,
anchor: SpanAnchor,
ctx: Ctx,
ctx: SyntaxContext,
text: &str,
) -> Option<tt::TopSubtree<SpanData<Ctx>>>
where
SpanData<Ctx>: Copy + fmt::Debug,
Ctx: Copy,
{
) -> Option<tt::TopSubtree> {
let lexed = parser::LexedStr::new(edition, text);
if lexed.errors().next().is_some() {
return None;
@ -203,14 +191,11 @@ where
}
/// Convert a string to a `TokenTree`. The passed span will be used for all spans of the produced subtree.
pub fn parse_to_token_tree_static_span<S>(
pub fn parse_to_token_tree_static_span(
edition: Edition,
span: S,
span: Span,
text: &str,
) -> Option<tt::TopSubtree<S>>
where
S: Copy + fmt::Debug,
{
) -> Option<tt::TopSubtree> {
let lexed = parser::LexedStr::new(edition, text);
if lexed.errors().next().is_some() {
return None;
@ -220,10 +205,9 @@ where
Some(convert_tokens(&mut conv))
}
fn convert_tokens<S, C>(conv: &mut C) -> tt::TopSubtree<S>
fn convert_tokens<C>(conv: &mut C) -> tt::TopSubtree
where
C: TokenConverter<S>,
S: Copy + fmt::Debug,
C: TokenConverter,
C::Token: fmt::Debug,
{
let mut builder =
@ -327,7 +311,7 @@ where
.into()
};
}
let leaf: tt::Leaf<_> = match kind {
let leaf: tt::Leaf = match kind {
k if k.is_any_identifier() => {
let text = token.to_text(conv);
tt::Ident::new(&text, conv.span_for(abs_range)).into()
@ -435,11 +419,11 @@ pub fn desugar_doc_comment_text(text: &str, mode: DocCommentDesugarMode) -> (Sym
}
}
fn convert_doc_comment<S: Copy>(
fn convert_doc_comment(
token: &syntax::SyntaxToken,
span: S,
span: Span,
mode: DocCommentDesugarMode,
builder: &mut tt::TopSubtreeBuilder<S>,
builder: &mut tt::TopSubtreeBuilder,
) {
let Some(comment) = ast::Comment::cast(token.clone()) else { return };
let Some(doc) = comment.kind().doc else { return };
@ -479,92 +463,84 @@ fn convert_doc_comment<S: Copy>(
}
/// A raw token (straight from lexer) converter
struct RawConverter<'a, Ctx> {
struct RawConverter<'a> {
lexed: parser::LexedStr<'a>,
pos: usize,
anchor: SpanAnchor,
ctx: Ctx,
ctx: SyntaxContext,
mode: DocCommentDesugarMode,
}
/// A raw token (straight from lexer) converter that gives every token the same span.
struct StaticRawConverter<'a, S> {
struct StaticRawConverter<'a> {
lexed: parser::LexedStr<'a>,
pos: usize,
span: S,
span: Span,
mode: DocCommentDesugarMode,
}
trait SrcToken<Ctx, S> {
trait SrcToken<Ctx> {
fn kind(&self, ctx: &Ctx) -> SyntaxKind;
fn to_char(&self, ctx: &Ctx) -> Option<char>;
fn to_text(&self, ctx: &Ctx) -> SmolStr;
fn as_leaf(&self) -> Option<&tt::Leaf<S>> {
fn as_leaf(&self) -> Option<&tt::Leaf> {
None
}
}
trait TokenConverter<S>: Sized {
type Token: SrcToken<Self, S>;
trait TokenConverter: Sized {
type Token: SrcToken<Self>;
fn convert_doc_comment(
&self,
token: &Self::Token,
span: S,
builder: &mut tt::TopSubtreeBuilder<S>,
span: Span,
builder: &mut tt::TopSubtreeBuilder,
);
fn bump(&mut self) -> Option<(Self::Token, TextRange)>;
fn peek(&self) -> Option<Self::Token>;
fn span_for(&self, range: TextRange) -> S;
fn span_for(&self, range: TextRange) -> Span;
fn call_site(&self) -> S;
fn call_site(&self) -> Span;
}
impl<S, Ctx> SrcToken<RawConverter<'_, Ctx>, S> for usize {
fn kind(&self, ctx: &RawConverter<'_, Ctx>) -> SyntaxKind {
impl SrcToken<RawConverter<'_>> for usize {
fn kind(&self, ctx: &RawConverter<'_>) -> SyntaxKind {
ctx.lexed.kind(*self)
}
fn to_char(&self, ctx: &RawConverter<'_, Ctx>) -> Option<char> {
fn to_char(&self, ctx: &RawConverter<'_>) -> Option<char> {
ctx.lexed.text(*self).chars().next()
}
fn to_text(&self, ctx: &RawConverter<'_, Ctx>) -> SmolStr {
fn to_text(&self, ctx: &RawConverter<'_>) -> SmolStr {
ctx.lexed.text(*self).into()
}
}
impl<S: Copy> SrcToken<StaticRawConverter<'_, S>, S> for usize {
fn kind(&self, ctx: &StaticRawConverter<'_, S>) -> SyntaxKind {
impl SrcToken<StaticRawConverter<'_>> for usize {
fn kind(&self, ctx: &StaticRawConverter<'_>) -> SyntaxKind {
ctx.lexed.kind(*self)
}
fn to_char(&self, ctx: &StaticRawConverter<'_, S>) -> Option<char> {
fn to_char(&self, ctx: &StaticRawConverter<'_>) -> Option<char> {
ctx.lexed.text(*self).chars().next()
}
fn to_text(&self, ctx: &StaticRawConverter<'_, S>) -> SmolStr {
fn to_text(&self, ctx: &StaticRawConverter<'_>) -> SmolStr {
ctx.lexed.text(*self).into()
}
}
impl<Ctx: Copy> TokenConverter<SpanData<Ctx>> for RawConverter<'_, Ctx>
where
SpanData<Ctx>: Copy,
{
impl TokenConverter for RawConverter<'_> {
type Token = usize;
fn convert_doc_comment(
&self,
&token: &usize,
span: SpanData<Ctx>,
builder: &mut tt::TopSubtreeBuilder<SpanData<Ctx>>,
) {
fn convert_doc_comment(&self, &token: &usize, span: Span, builder: &mut tt::TopSubtreeBuilder) {
let text = self.lexed.text(token);
convert_doc_comment(&doc_comment(text), span, self.mode, builder);
}
@ -588,22 +564,19 @@ where
Some(self.pos)
}
fn span_for(&self, range: TextRange) -> SpanData<Ctx> {
SpanData { range, anchor: self.anchor, ctx: self.ctx }
fn span_for(&self, range: TextRange) -> Span {
Span { range, anchor: self.anchor, ctx: self.ctx }
}
fn call_site(&self) -> SpanData<Ctx> {
SpanData { range: TextRange::empty(0.into()), anchor: self.anchor, ctx: self.ctx }
fn call_site(&self) -> Span {
Span { range: TextRange::empty(0.into()), anchor: self.anchor, ctx: self.ctx }
}
}
impl<S> TokenConverter<S> for StaticRawConverter<'_, S>
where
S: Copy,
{
impl TokenConverter for StaticRawConverter<'_> {
type Token = usize;
fn convert_doc_comment(&self, &token: &usize, span: S, builder: &mut tt::TopSubtreeBuilder<S>) {
fn convert_doc_comment(&self, &token: &usize, span: Span, builder: &mut tt::TopSubtreeBuilder) {
let text = self.lexed.text(token);
convert_doc_comment(&doc_comment(text), span, self.mode, builder);
}
@ -627,40 +600,40 @@ where
Some(self.pos)
}
fn span_for(&self, _: TextRange) -> S {
fn span_for(&self, _: TextRange) -> Span {
self.span
}
fn call_site(&self) -> S {
fn call_site(&self) -> Span {
self.span
}
}
struct Converter<SpanMap, S, OnEvent> {
struct Converter<SpanMap, OnEvent> {
current: Option<SyntaxToken>,
current_leaves: VecDeque<tt::Leaf<S>>,
current_leaves: VecDeque<tt::Leaf>,
preorder: PreorderWithTokens,
range: TextRange,
punct_offset: Option<(SyntaxToken, TextSize)>,
/// Used to make the emitted text ranges in the spans relative to the span anchor.
map: SpanMap,
append: FxHashMap<SyntaxElement, Vec<tt::Leaf<S>>>,
append: FxHashMap<SyntaxElement, Vec<tt::Leaf>>,
remove: FxHashSet<SyntaxElement>,
call_site: S,
call_site: Span,
mode: DocCommentDesugarMode,
on_event: OnEvent,
}
impl<SpanMap, S, OnEvent> Converter<SpanMap, S, OnEvent>
impl<SpanMap, OnEvent> Converter<SpanMap, OnEvent>
where
OnEvent: FnMut(&mut PreorderWithTokens, &WalkEvent<SyntaxElement>) -> (bool, Vec<tt::Leaf<S>>),
OnEvent: FnMut(&mut PreorderWithTokens, &WalkEvent<SyntaxElement>) -> (bool, Vec<tt::Leaf>),
{
fn new(
node: &SyntaxNode,
map: SpanMap,
append: FxHashMap<SyntaxElement, Vec<tt::Leaf<S>>>,
append: FxHashMap<SyntaxElement, Vec<tt::Leaf>>,
remove: FxHashSet<SyntaxElement>,
call_site: S,
call_site: Span,
mode: DocCommentDesugarMode,
on_enter: OnEvent,
) -> Self {
@ -720,13 +693,13 @@ where
}
#[derive(Debug)]
enum SynToken<S> {
enum SynToken {
Ordinary(SyntaxToken),
Punct { token: SyntaxToken, offset: usize },
Leaf(tt::Leaf<S>),
Leaf(tt::Leaf),
}
impl<S> SynToken<S> {
impl SynToken {
fn token(&self) -> &SyntaxToken {
match self {
SynToken::Ordinary(it) | SynToken::Punct { token: it, offset: _ } => it,
@ -735,8 +708,8 @@ impl<S> SynToken<S> {
}
}
impl<SpanMap, S, OnEvent> SrcToken<Converter<SpanMap, S, OnEvent>, S> for SynToken<S> {
fn kind(&self, _ctx: &Converter<SpanMap, S, OnEvent>) -> SyntaxKind {
impl<SpanMap, OnEvent> SrcToken<Converter<SpanMap, OnEvent>> for SynToken {
fn kind(&self, _ctx: &Converter<SpanMap, OnEvent>) -> SyntaxKind {
match self {
SynToken::Ordinary(token) => token.kind(),
SynToken::Punct { token, offset: i } => {
@ -748,14 +721,14 @@ impl<SpanMap, S, OnEvent> SrcToken<Converter<SpanMap, S, OnEvent>, S> for SynTok
}
}
}
fn to_char(&self, _ctx: &Converter<SpanMap, S, OnEvent>) -> Option<char> {
fn to_char(&self, _ctx: &Converter<SpanMap, OnEvent>) -> Option<char> {
match self {
SynToken::Ordinary(_) => None,
SynToken::Punct { token: it, offset: i } => it.text().chars().nth(*i),
SynToken::Leaf(_) => None,
}
}
fn to_text(&self, _ctx: &Converter<SpanMap, S, OnEvent>) -> SmolStr {
fn to_text(&self, _ctx: &Converter<SpanMap, OnEvent>) -> SmolStr {
match self {
SynToken::Ordinary(token) | SynToken::Punct { token, offset: _ } => token.text().into(),
SynToken::Leaf(_) => {
@ -764,7 +737,7 @@ impl<SpanMap, S, OnEvent> SrcToken<Converter<SpanMap, S, OnEvent>, S> for SynTok
}
}
}
fn as_leaf(&self) -> Option<&tt::Leaf<S>> {
fn as_leaf(&self) -> Option<&tt::Leaf> {
match self {
SynToken::Ordinary(_) | SynToken::Punct { .. } => None,
SynToken::Leaf(it) => Some(it),
@ -772,18 +745,17 @@ impl<SpanMap, S, OnEvent> SrcToken<Converter<SpanMap, S, OnEvent>, S> for SynTok
}
}
impl<S, SpanMap, OnEvent> TokenConverter<S> for Converter<SpanMap, S, OnEvent>
impl<SpanMap, OnEvent> TokenConverter for Converter<SpanMap, OnEvent>
where
S: Copy,
SpanMap: SpanMapper<S>,
OnEvent: FnMut(&mut PreorderWithTokens, &WalkEvent<SyntaxElement>) -> (bool, Vec<tt::Leaf<S>>),
SpanMap: SpanMapper,
OnEvent: FnMut(&mut PreorderWithTokens, &WalkEvent<SyntaxElement>) -> (bool, Vec<tt::Leaf>),
{
type Token = SynToken<S>;
type Token = SynToken;
fn convert_doc_comment(
&self,
token: &Self::Token,
span: S,
builder: &mut tt::TopSubtreeBuilder<S>,
span: Span,
builder: &mut tt::TopSubtreeBuilder,
) {
convert_doc_comment(token.token(), span, self.mode, builder);
}
@ -847,30 +819,24 @@ where
Some(token)
}
fn span_for(&self, range: TextRange) -> S {
fn span_for(&self, range: TextRange) -> Span {
self.map.span_for(range)
}
fn call_site(&self) -> S {
fn call_site(&self) -> Span {
self.call_site
}
}
struct TtTreeSink<'a, Ctx>
where
SpanData<Ctx>: Copy,
{
struct TtTreeSink<'a> {
buf: String,
cursor: Cursor<'a, SpanData<Ctx>>,
cursor: Cursor<'a>,
text_pos: TextSize,
inner: SyntaxTreeBuilder,
token_map: SpanMap<Ctx>,
token_map: SpanMap,
}
impl<'a, Ctx> TtTreeSink<'a, Ctx>
where
SpanData<Ctx>: Copy,
{
fn new(cursor: Cursor<'a, SpanData<Ctx>>) -> Self {
impl<'a> TtTreeSink<'a> {
fn new(cursor: Cursor<'a>) -> Self {
TtTreeSink {
buf: String::new(),
cursor,
@ -880,7 +846,7 @@ where
}
}
fn finish(mut self) -> (Parse<SyntaxNode>, SpanMap<Ctx>) {
fn finish(mut self) -> (Parse<SyntaxNode>, SpanMap) {
self.token_map.finish();
(self.inner.finish(), self.token_map)
}
@ -898,11 +864,7 @@ fn delim_to_str(d: tt::DelimiterKind, closing: bool) -> Option<&'static str> {
Some(&texts[idx..texts.len() - (1 - idx)])
}
impl<Ctx> TtTreeSink<'_, Ctx>
where
SpanData<Ctx>: Copy + fmt::Debug,
Ctx: PartialEq,
{
impl TtTreeSink<'_> {
/// Parses a float literal as if it was a one to two name ref nodes with a dot inbetween.
/// This occurs when a float literal is used as a field access.
fn float_split(&mut self, has_pseudo_dot: bool) {
@ -1053,10 +1015,10 @@ where
self.inner.error(error, self.text_pos)
}
fn merge_spans(a: SpanData<Ctx>, b: SpanData<Ctx>) -> SpanData<Ctx> {
fn merge_spans(a: Span, b: Span) -> Span {
// We don't do what rustc does exactly, rustc does something clever when the spans have different syntax contexts
// but this runs afoul of our separation between `span` and `hir-expand`.
SpanData {
Span {
range: if a.ctx == b.ctx && a.anchor == b.anchor {
TextRange::new(
std::cmp::min(a.range.start(), b.range.start()),

View file

@ -1,16 +1,13 @@
//! Convert macro-by-example tokens which are specific to macro expansion into a
//! format that works for our parser.
use std::fmt;
use std::hash::Hash;
use rustc_hash::FxHashMap;
use span::{Edition, SpanData};
use span::{Edition, SyntaxContext};
use syntax::{SyntaxKind, SyntaxKind::*, T};
pub fn to_parser_input<Ctx: Copy + fmt::Debug + PartialEq + Eq + Hash>(
buffer: tt::TokenTreesView<'_, SpanData<Ctx>>,
span_to_edition: &mut dyn FnMut(Ctx) -> Edition,
pub fn to_parser_input(
buffer: tt::TokenTreesView<'_>,
span_to_edition: &mut dyn FnMut(SyntaxContext) -> Edition,
) -> parser::Input {
let mut res = parser::Input::with_capacity(buffer.len());

View file

@ -16,6 +16,7 @@ doctest = false
arrayvec.workspace = true
text-size.workspace = true
span = { path = "../span", version = "0.0", default-features = false }
stdx.workspace = true
intern.workspace = true
ra-ap-rustc_lexer.workspace = true

View file

@ -3,14 +3,14 @@
//! We use this as the source of tokens for parser.
use crate::{Leaf, Subtree, TokenTree, TokenTreesView};
pub struct Cursor<'a, Span> {
buffer: &'a [TokenTree<Span>],
pub struct Cursor<'a> {
buffer: &'a [TokenTree],
index: usize,
subtrees_stack: Vec<usize>,
}
impl<'a, Span: Copy> Cursor<'a, Span> {
pub fn new(buffer: &'a [TokenTree<Span>]) -> Self {
impl<'a> Cursor<'a> {
pub fn new(buffer: &'a [TokenTree]) -> Self {
Self { buffer, index: 0, subtrees_stack: Vec::new() }
}
@ -23,7 +23,7 @@ impl<'a, Span: Copy> Cursor<'a, Span> {
self.subtrees_stack.is_empty()
}
fn last_subtree(&self) -> Option<(usize, &'a Subtree<Span>)> {
fn last_subtree(&self) -> Option<(usize, &'a Subtree)> {
self.subtrees_stack.last().map(|&subtree_idx| {
let TokenTree::Subtree(subtree) = &self.buffer[subtree_idx] else {
panic!("subtree pointing to non-subtree");
@ -32,7 +32,7 @@ impl<'a, Span: Copy> Cursor<'a, Span> {
})
}
pub fn end(&mut self) -> &'a Subtree<Span> {
pub fn end(&mut self) -> &'a Subtree {
let (last_subtree_idx, last_subtree) =
self.last_subtree().expect("called `Cursor::end()` without an open subtree");
// +1 because `Subtree.len` excludes the subtree itself.
@ -46,7 +46,7 @@ impl<'a, Span: Copy> Cursor<'a, Span> {
}
/// Returns the `TokenTree` at the cursor if it is not at the end of a subtree.
pub fn token_tree(&self) -> Option<&'a TokenTree<Span>> {
pub fn token_tree(&self) -> Option<&'a TokenTree> {
if let Some((last_subtree_idx, last_subtree)) = self.last_subtree() {
// +1 because `Subtree.len` excludes the subtree itself.
if last_subtree_idx + last_subtree.usize_len() + 1 == self.index {
@ -87,7 +87,7 @@ impl<'a, Span: Copy> Cursor<'a, Span> {
self.index += 1;
}
pub fn peek_two_leaves(&self) -> Option<[&'a Leaf<Span>; 2]> {
pub fn peek_two_leaves(&self) -> Option<[&'a Leaf; 2]> {
if let Some((last_subtree_idx, last_subtree)) = self.last_subtree() {
// +1 because `Subtree.len` excludes the subtree itself.
let last_end = last_subtree_idx + last_subtree.usize_len() + 1;
@ -101,7 +101,7 @@ impl<'a, Span: Copy> Cursor<'a, Span> {
})
}
pub fn crossed(&self) -> TokenTreesView<'a, Span> {
pub fn crossed(&self) -> TokenTreesView<'a> {
assert!(self.is_root());
TokenTreesView::new(&self.buffer[..self.index])
}

View file

@ -5,31 +5,32 @@ use std::fmt;
use arrayvec::ArrayVec;
use intern::sym;
use span::Span;
use crate::{Ident, Leaf, MAX_GLUED_PUNCT_LEN, Punct, Spacing, Subtree, TokenTree, TokenTreesView};
#[derive(Clone)]
pub struct TtIter<'a, S> {
inner: std::slice::Iter<'a, TokenTree<S>>,
pub struct TtIter<'a> {
inner: std::slice::Iter<'a, TokenTree>,
}
impl<S: Copy + fmt::Debug> fmt::Debug for TtIter<'_, S> {
impl fmt::Debug for TtIter<'_> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
f.debug_struct("TtIter").field("remaining", &self.remaining()).finish()
}
}
#[derive(Clone, Copy)]
pub struct TtIterSavepoint<'a, S>(&'a [TokenTree<S>]);
pub struct TtIterSavepoint<'a>(&'a [TokenTree]);
impl<'a, S: Copy> TtIterSavepoint<'a, S> {
pub fn remaining(self) -> TokenTreesView<'a, S> {
impl<'a> TtIterSavepoint<'a> {
pub fn remaining(self) -> TokenTreesView<'a> {
TokenTreesView::new(self.0)
}
}
impl<'a, S: Copy> TtIter<'a, S> {
pub(crate) fn new(tt: &'a [TokenTree<S>]) -> TtIter<'a, S> {
impl<'a> TtIter<'a> {
pub(crate) fn new(tt: &'a [TokenTree]) -> TtIter<'a> {
TtIter { inner: tt.iter() }
}
@ -49,14 +50,14 @@ impl<'a, S: Copy> TtIter<'a, S> {
}
}
pub fn expect_subtree(&mut self) -> Result<(&'a Subtree<S>, TtIter<'a, S>), ()> {
pub fn expect_subtree(&mut self) -> Result<(&'a Subtree, TtIter<'a>), ()> {
match self.next() {
Some(TtElement::Subtree(subtree, iter)) => Ok((subtree, iter)),
_ => Err(()),
}
}
pub fn expect_leaf(&mut self) -> Result<&'a Leaf<S>, ()> {
pub fn expect_leaf(&mut self) -> Result<&'a Leaf, ()> {
match self.next() {
Some(TtElement::Leaf(it)) => Ok(it),
_ => Err(()),
@ -77,21 +78,21 @@ impl<'a, S: Copy> TtIter<'a, S> {
}
}
pub fn expect_ident(&mut self) -> Result<&'a Ident<S>, ()> {
pub fn expect_ident(&mut self) -> Result<&'a Ident, ()> {
match self.expect_leaf()? {
Leaf::Ident(it) if it.sym != sym::underscore => Ok(it),
_ => Err(()),
}
}
pub fn expect_ident_or_underscore(&mut self) -> Result<&'a Ident<S>, ()> {
pub fn expect_ident_or_underscore(&mut self) -> Result<&'a Ident, ()> {
match self.expect_leaf()? {
Leaf::Ident(it) => Ok(it),
_ => Err(()),
}
}
pub fn expect_literal(&mut self) -> Result<&'a Leaf<S>, ()> {
pub fn expect_literal(&mut self) -> Result<&'a Leaf, ()> {
let it = self.expect_leaf()?;
match it {
Leaf::Literal(_) => Ok(it),
@ -100,7 +101,7 @@ impl<'a, S: Copy> TtIter<'a, S> {
}
}
pub fn expect_single_punct(&mut self) -> Result<&'a Punct<S>, ()> {
pub fn expect_single_punct(&mut self) -> Result<&'a Punct, ()> {
match self.expect_leaf()? {
Leaf::Punct(it) => Ok(it),
_ => Err(()),
@ -111,7 +112,7 @@ impl<'a, S: Copy> TtIter<'a, S> {
///
/// This method currently may return a single quotation, which is part of lifetime ident and
/// conceptually not a punct in the context of mbe. Callers should handle this.
pub fn expect_glued_punct(&mut self) -> Result<ArrayVec<Punct<S>, MAX_GLUED_PUNCT_LEN>, ()> {
pub fn expect_glued_punct(&mut self) -> Result<ArrayVec<Punct, MAX_GLUED_PUNCT_LEN>, ()> {
let TtElement::Leaf(&Leaf::Punct(first)) = self.next().ok_or(())? else {
return Err(());
};
@ -161,11 +162,11 @@ impl<'a, S: Copy> TtIter<'a, S> {
}
/// This method won't check for subtrees, so the nth token tree may not be the nth sibling of the current tree.
fn peek_n(&self, n: usize) -> Option<&'a TokenTree<S>> {
fn peek_n(&self, n: usize) -> Option<&'a TokenTree> {
self.inner.as_slice().get(n)
}
pub fn peek(&self) -> Option<TtElement<'a, S>> {
pub fn peek(&self) -> Option<TtElement<'a>> {
match self.inner.as_slice().first()? {
TokenTree::Leaf(leaf) => Some(TtElement::Leaf(leaf)),
TokenTree::Subtree(subtree) => {
@ -181,11 +182,11 @@ impl<'a, S: Copy> TtIter<'a, S> {
self.inner.len() == 0
}
pub fn next_span(&self) -> Option<S> {
pub fn next_span(&self) -> Option<Span> {
Some(self.inner.as_slice().first()?.first_span())
}
pub fn remaining(&self) -> TokenTreesView<'a, S> {
pub fn remaining(&self) -> TokenTreesView<'a> {
TokenTreesView::new(self.inner.as_slice())
}
@ -194,17 +195,17 @@ impl<'a, S: Copy> TtIter<'a, S> {
self.inner = self.inner.as_slice()[skip..].iter();
}
pub fn savepoint(&self) -> TtIterSavepoint<'a, S> {
pub fn savepoint(&self) -> TtIterSavepoint<'a> {
TtIterSavepoint(self.inner.as_slice())
}
pub fn from_savepoint(&self, savepoint: TtIterSavepoint<'a, S>) -> TokenTreesView<'a, S> {
pub fn from_savepoint(&self, savepoint: TtIterSavepoint<'a>) -> TokenTreesView<'a> {
let len = (self.inner.as_slice().as_ptr() as usize - savepoint.0.as_ptr() as usize)
/ size_of::<TokenTree<S>>();
/ size_of::<TokenTree>();
TokenTreesView::new(&savepoint.0[..len])
}
pub fn next_as_view(&mut self) -> Option<TokenTreesView<'a, S>> {
pub fn next_as_view(&mut self) -> Option<TokenTreesView<'a>> {
let savepoint = self.savepoint();
self.next()?;
Some(self.from_savepoint(savepoint))
@ -212,12 +213,12 @@ impl<'a, S: Copy> TtIter<'a, S> {
}
#[derive(Clone)]
pub enum TtElement<'a, S> {
Leaf(&'a Leaf<S>),
Subtree(&'a Subtree<S>, TtIter<'a, S>),
pub enum TtElement<'a> {
Leaf(&'a Leaf),
Subtree(&'a Subtree, TtIter<'a>),
}
impl<S: Copy + fmt::Debug> fmt::Debug for TtElement<'_, S> {
impl fmt::Debug for TtElement<'_> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
match self {
Self::Leaf(leaf) => f.debug_tuple("Leaf").field(leaf).finish(),
@ -228,9 +229,9 @@ impl<S: Copy + fmt::Debug> fmt::Debug for TtElement<'_, S> {
}
}
impl<S: Copy> TtElement<'_, S> {
impl TtElement<'_> {
#[inline]
pub fn first_span(&self) -> S {
pub fn first_span(&self) -> Span {
match self {
TtElement::Leaf(it) => *it.span(),
TtElement::Subtree(it, _) => it.delimiter.open,
@ -238,8 +239,8 @@ impl<S: Copy> TtElement<'_, S> {
}
}
impl<'a, S> Iterator for TtIter<'a, S> {
type Item = TtElement<'a, S>;
impl<'a> Iterator for TtIter<'a> {
type Item = TtElement<'a>;
fn next(&mut self) -> Option<Self::Item> {
match self.inner.next()? {
TokenTree::Leaf(leaf) => Some(TtElement::Leaf(leaf)),

View file

@ -20,11 +20,13 @@ use std::fmt;
use buffer::Cursor;
use intern::Symbol;
use iter::{TtElement, TtIter};
use stdx::{impl_from, itertools::Itertools as _};
pub use span::Span;
pub use text_size::{TextRange, TextSize};
pub use self::iter::{TtElement, TtIter};
pub const MAX_GLUED_PUNCT_LEN: usize = 3;
#[derive(Clone, PartialEq, Debug)]
@ -77,13 +79,13 @@ pub enum LitKind {
}
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub enum TokenTree<S = u32> {
Leaf(Leaf<S>),
Subtree(Subtree<S>),
pub enum TokenTree {
Leaf(Leaf),
Subtree(Subtree),
}
impl_from!(Leaf<S>, Subtree<S> for TokenTree);
impl<S: Copy> TokenTree<S> {
pub fn first_span(&self) -> S {
impl_from!(Leaf, Subtree for TokenTree);
impl TokenTree {
pub fn first_span(&self) -> Span {
match self {
TokenTree::Leaf(l) => *l.span(),
TokenTree::Subtree(s) => s.delimiter.open,
@ -92,14 +94,14 @@ impl<S: Copy> TokenTree<S> {
}
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub enum Leaf<S> {
Literal(Literal<S>),
Punct(Punct<S>),
Ident(Ident<S>),
pub enum Leaf {
Literal(Literal),
Punct(Punct),
Ident(Ident),
}
impl<S> Leaf<S> {
pub fn span(&self) -> &S {
impl Leaf {
pub fn span(&self) -> &Span {
match self {
Leaf::Literal(it) => &it.span,
Leaf::Punct(it) => &it.span,
@ -107,81 +109,81 @@ impl<S> Leaf<S> {
}
}
}
impl_from!(Literal<S>, Punct<S>, Ident<S> for Leaf);
impl_from!(Literal, Punct, Ident for Leaf);
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub struct Subtree<S> {
pub delimiter: Delimiter<S>,
pub struct Subtree {
pub delimiter: Delimiter,
/// Number of following token trees that belong to this subtree, excluding this subtree.
pub len: u32,
}
impl<S> Subtree<S> {
impl Subtree {
pub fn usize_len(&self) -> usize {
self.len as usize
}
}
#[derive(Clone, PartialEq, Eq, Hash)]
pub struct TopSubtree<S>(pub Box<[TokenTree<S>]>);
pub struct TopSubtree(pub Box<[TokenTree]>);
impl<S: Copy> TopSubtree<S> {
pub fn empty(span: DelimSpan<S>) -> Self {
impl TopSubtree {
pub fn empty(span: DelimSpan) -> Self {
Self(Box::new([TokenTree::Subtree(Subtree {
delimiter: Delimiter::invisible_delim_spanned(span),
len: 0,
})]))
}
pub fn invisible_from_leaves<const N: usize>(delim_span: S, leaves: [Leaf<S>; N]) -> Self {
pub fn invisible_from_leaves<const N: usize>(delim_span: Span, leaves: [Leaf; N]) -> Self {
let mut builder = TopSubtreeBuilder::new(Delimiter::invisible_spanned(delim_span));
builder.extend(leaves);
builder.build()
}
pub fn from_token_trees(delimiter: Delimiter<S>, token_trees: TokenTreesView<'_, S>) -> Self {
pub fn from_token_trees(delimiter: Delimiter, token_trees: TokenTreesView<'_>) -> Self {
let mut builder = TopSubtreeBuilder::new(delimiter);
builder.extend_with_tt(token_trees);
builder.build()
}
pub fn from_subtree(subtree: SubtreeView<'_, S>) -> Self {
pub fn from_subtree(subtree: SubtreeView<'_>) -> Self {
Self(subtree.0.into())
}
pub fn view(&self) -> SubtreeView<'_, S> {
pub fn view(&self) -> SubtreeView<'_> {
SubtreeView::new(&self.0)
}
pub fn iter(&self) -> TtIter<'_, S> {
pub fn iter(&self) -> TtIter<'_> {
self.view().iter()
}
pub fn top_subtree(&self) -> &Subtree<S> {
pub fn top_subtree(&self) -> &Subtree {
self.view().top_subtree()
}
pub fn top_subtree_delimiter_mut(&mut self) -> &mut Delimiter<S> {
pub fn top_subtree_delimiter_mut(&mut self) -> &mut Delimiter {
let TokenTree::Subtree(subtree) = &mut self.0[0] else {
unreachable!("the first token tree is always the top subtree");
};
&mut subtree.delimiter
}
pub fn token_trees(&self) -> TokenTreesView<'_, S> {
pub fn token_trees(&self) -> TokenTreesView<'_> {
self.view().token_trees()
}
}
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub struct TopSubtreeBuilder<S> {
pub struct TopSubtreeBuilder {
unclosed_subtree_indices: Vec<usize>,
token_trees: Vec<TokenTree<S>>,
token_trees: Vec<TokenTree>,
last_closed_subtree: Option<usize>,
}
impl<S: Copy> TopSubtreeBuilder<S> {
pub fn new(top_delimiter: Delimiter<S>) -> Self {
impl TopSubtreeBuilder {
pub fn new(top_delimiter: Delimiter) -> Self {
let mut result = Self {
unclosed_subtree_indices: Vec::new(),
token_trees: Vec::new(),
@ -192,7 +194,7 @@ impl<S: Copy> TopSubtreeBuilder<S> {
result
}
pub fn open(&mut self, delimiter_kind: DelimiterKind, open_span: S) {
pub fn open(&mut self, delimiter_kind: DelimiterKind, open_span: Span) {
self.unclosed_subtree_indices.push(self.token_trees.len());
self.token_trees.push(TokenTree::Subtree(Subtree {
delimiter: Delimiter {
@ -204,7 +206,7 @@ impl<S: Copy> TopSubtreeBuilder<S> {
}));
}
pub fn close(&mut self, close_span: S) {
pub fn close(&mut self, close_span: Span) {
let last_unclosed_index = self
.unclosed_subtree_indices
.pop()
@ -231,26 +233,26 @@ impl<S: Copy> TopSubtreeBuilder<S> {
}
}
pub fn push(&mut self, leaf: Leaf<S>) {
pub fn push(&mut self, leaf: Leaf) {
self.token_trees.push(TokenTree::Leaf(leaf));
}
pub fn extend(&mut self, leaves: impl IntoIterator<Item = Leaf<S>>) {
pub fn extend(&mut self, leaves: impl IntoIterator<Item = Leaf>) {
self.token_trees.extend(leaves.into_iter().map(TokenTree::Leaf));
}
/// This does not check the token trees are valid, beware!
pub fn extend_tt_dangerous(&mut self, tt: impl IntoIterator<Item = TokenTree<S>>) {
pub fn extend_tt_dangerous(&mut self, tt: impl IntoIterator<Item = TokenTree>) {
self.token_trees.extend(tt);
}
pub fn extend_with_tt(&mut self, tt: TokenTreesView<'_, S>) {
pub fn extend_with_tt(&mut self, tt: TokenTreesView<'_>) {
self.token_trees.extend(tt.0.iter().cloned());
}
/// Like [`Self::extend_with_tt()`], but makes sure the new tokens will never be
/// joint with whatever comes after them.
pub fn extend_with_tt_alone(&mut self, tt: TokenTreesView<'_, S>) {
pub fn extend_with_tt_alone(&mut self, tt: TokenTreesView<'_>) {
if let Some((last, before_last)) = tt.0.split_last() {
self.token_trees.reserve(tt.0.len());
self.token_trees.extend(before_last.iter().cloned());
@ -265,7 +267,7 @@ impl<S: Copy> TopSubtreeBuilder<S> {
}
}
pub fn expected_delimiters(&self) -> impl Iterator<Item = &Delimiter<S>> {
pub fn expected_delimiters(&self) -> impl Iterator<Item = &Delimiter> {
self.unclosed_subtree_indices.iter().rev().map(|&subtree_idx| {
let TokenTree::Subtree(subtree) = &self.token_trees[subtree_idx] else {
unreachable!("unclosed token tree is always a subtree")
@ -275,7 +277,7 @@ impl<S: Copy> TopSubtreeBuilder<S> {
}
/// Builds, and remove the top subtree if it has only one subtree child.
pub fn build_skip_top_subtree(mut self) -> TopSubtree<S> {
pub fn build_skip_top_subtree(mut self) -> TopSubtree {
let top_tts = TokenTreesView::new(&self.token_trees[1..]);
match top_tts.try_into_subtree() {
Some(_) => {
@ -289,7 +291,7 @@ impl<S: Copy> TopSubtreeBuilder<S> {
}
}
pub fn build(mut self) -> TopSubtree<S> {
pub fn build(mut self) -> TopSubtree {
assert!(
self.unclosed_subtree_indices.is_empty(),
"attempt to build an unbalanced `TopSubtreeBuilder`"
@ -325,10 +327,10 @@ pub struct SubtreeBuilderRestorePoint {
}
#[derive(Clone, Copy)]
pub struct TokenTreesView<'a, S>(&'a [TokenTree<S>]);
pub struct TokenTreesView<'a>(&'a [TokenTree]);
impl<'a, S: Copy> TokenTreesView<'a, S> {
pub fn new(tts: &'a [TokenTree<S>]) -> Self {
impl<'a> TokenTreesView<'a> {
pub fn new(tts: &'a [TokenTree]) -> Self {
if cfg!(debug_assertions) {
tts.iter().enumerate().for_each(|(idx, tt)| {
if let TokenTree::Subtree(tt) = &tt {
@ -343,11 +345,11 @@ impl<'a, S: Copy> TokenTreesView<'a, S> {
Self(tts)
}
pub fn iter(&self) -> TtIter<'a, S> {
pub fn iter(&self) -> TtIter<'a> {
TtIter::new(self.0)
}
pub fn cursor(&self) -> Cursor<'a, S> {
pub fn cursor(&self) -> Cursor<'a> {
Cursor::new(self.0)
}
@ -359,7 +361,7 @@ impl<'a, S: Copy> TokenTreesView<'a, S> {
self.0.is_empty()
}
pub fn try_into_subtree(self) -> Option<SubtreeView<'a, S>> {
pub fn try_into_subtree(self) -> Option<SubtreeView<'a>> {
if let Some(TokenTree::Subtree(subtree)) = self.0.first()
&& subtree.usize_len() == (self.0.len() - 1)
{
@ -368,21 +370,21 @@ impl<'a, S: Copy> TokenTreesView<'a, S> {
None
}
pub fn strip_invisible(self) -> TokenTreesView<'a, S> {
pub fn strip_invisible(self) -> TokenTreesView<'a> {
self.try_into_subtree().map(|subtree| subtree.strip_invisible()).unwrap_or(self)
}
/// This returns a **flat** structure of tokens (subtrees will be represented by a single node
/// preceding their children), so it isn't suited for most use cases, only for matching leaves
/// at the beginning/end with no subtrees before them. If you need a structured pass, use [`TtIter`].
pub fn flat_tokens(&self) -> &'a [TokenTree<S>] {
pub fn flat_tokens(&self) -> &'a [TokenTree] {
self.0
}
pub fn split(
self,
mut split_fn: impl FnMut(TtElement<'a, S>) -> bool,
) -> impl Iterator<Item = TokenTreesView<'a, S>> {
mut split_fn: impl FnMut(TtElement<'a>) -> bool,
) -> impl Iterator<Item = TokenTreesView<'a>> {
let mut subtree_iter = self.iter();
let mut need_to_yield_even_if_empty = true;
@ -406,7 +408,7 @@ impl<'a, S: Copy> TokenTreesView<'a, S> {
}
}
impl<S: fmt::Debug + Copy> fmt::Debug for TokenTreesView<'_, S> {
impl fmt::Debug for TokenTreesView<'_> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
let mut iter = self.iter();
while let Some(tt) = iter.next() {
@ -419,14 +421,14 @@ impl<S: fmt::Debug + Copy> fmt::Debug for TokenTreesView<'_, S> {
}
}
impl<S: Copy> fmt::Display for TokenTreesView<'_, S> {
impl fmt::Display for TokenTreesView<'_> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
return token_trees_display(f, self.iter());
fn subtree_display<S>(
subtree: &Subtree<S>,
fn subtree_display(
subtree: &Subtree,
f: &mut fmt::Formatter<'_>,
iter: TtIter<'_, S>,
iter: TtIter<'_>,
) -> fmt::Result {
let (l, r) = match subtree.delimiter.kind {
DelimiterKind::Parenthesis => ("(", ")"),
@ -440,7 +442,7 @@ impl<S: Copy> fmt::Display for TokenTreesView<'_, S> {
Ok(())
}
fn token_trees_display<S>(f: &mut fmt::Formatter<'_>, iter: TtIter<'_, S>) -> fmt::Result {
fn token_trees_display(f: &mut fmt::Formatter<'_>, iter: TtIter<'_>) -> fmt::Result {
let mut needs_space = false;
for child in iter {
if needs_space {
@ -466,10 +468,10 @@ impl<S: Copy> fmt::Display for TokenTreesView<'_, S> {
#[derive(Clone, Copy)]
// Invariant: always starts with `Subtree` that covers the entire thing.
pub struct SubtreeView<'a, S>(&'a [TokenTree<S>]);
pub struct SubtreeView<'a>(&'a [TokenTree]);
impl<'a, S: Copy> SubtreeView<'a, S> {
pub fn new(tts: &'a [TokenTree<S>]) -> Self {
impl<'a> SubtreeView<'a> {
pub fn new(tts: &'a [TokenTree]) -> Self {
if cfg!(debug_assertions) {
let TokenTree::Subtree(subtree) = &tts[0] else {
panic!("first token tree must be a subtree in `SubtreeView`");
@ -483,22 +485,22 @@ impl<'a, S: Copy> SubtreeView<'a, S> {
Self(tts)
}
pub fn as_token_trees(self) -> TokenTreesView<'a, S> {
pub fn as_token_trees(self) -> TokenTreesView<'a> {
TokenTreesView::new(self.0)
}
pub fn iter(&self) -> TtIter<'a, S> {
pub fn iter(&self) -> TtIter<'a> {
TtIter::new(&self.0[1..])
}
pub fn top_subtree(&self) -> &'a Subtree<S> {
pub fn top_subtree(&self) -> &'a Subtree {
let TokenTree::Subtree(subtree) = &self.0[0] else {
unreachable!("the first token tree is always the top subtree");
};
subtree
}
pub fn strip_invisible(&self) -> TokenTreesView<'a, S> {
pub fn strip_invisible(&self) -> TokenTreesView<'a> {
if self.top_subtree().delimiter.kind == DelimiterKind::Invisible {
TokenTreesView::new(&self.0[1..])
} else {
@ -506,30 +508,30 @@ impl<'a, S: Copy> SubtreeView<'a, S> {
}
}
pub fn token_trees(&self) -> TokenTreesView<'a, S> {
pub fn token_trees(&self) -> TokenTreesView<'a> {
TokenTreesView::new(&self.0[1..])
}
}
impl<S: fmt::Debug + Copy> fmt::Debug for SubtreeView<'_, S> {
impl fmt::Debug for SubtreeView<'_> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
fmt::Debug::fmt(&TokenTreesView(self.0), f)
}
}
impl<S: Copy> fmt::Display for SubtreeView<'_, S> {
impl fmt::Display for SubtreeView<'_> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
fmt::Display::fmt(&TokenTreesView(self.0), f)
}
}
#[derive(Debug, Copy, Clone, PartialEq)]
pub struct DelimSpan<S> {
pub open: S,
pub close: S,
pub struct DelimSpan {
pub open: Span,
pub close: Span,
}
impl<Span: Copy> DelimSpan<Span> {
impl DelimSpan {
pub fn from_single(sp: Span) -> Self {
DelimSpan { open: sp, close: sp }
}
@ -539,22 +541,22 @@ impl<Span: Copy> DelimSpan<Span> {
}
}
#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
pub struct Delimiter<S> {
pub open: S,
pub close: S,
pub struct Delimiter {
pub open: Span,
pub close: Span,
pub kind: DelimiterKind,
}
impl<S: Copy> Delimiter<S> {
pub const fn invisible_spanned(span: S) -> Self {
impl Delimiter {
pub const fn invisible_spanned(span: Span) -> Self {
Delimiter { open: span, close: span, kind: DelimiterKind::Invisible }
}
pub const fn invisible_delim_spanned(span: DelimSpan<S>) -> Self {
pub const fn invisible_delim_spanned(span: DelimSpan) -> Self {
Delimiter { open: span.open, close: span.close, kind: DelimiterKind::Invisible }
}
pub fn delim_span(&self) -> DelimSpan<S> {
pub fn delim_span(&self) -> DelimSpan {
DelimSpan { open: self.open, close: self.close }
}
}
@ -568,17 +570,17 @@ pub enum DelimiterKind {
}
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub struct Literal<S> {
pub struct Literal {
// escaped
pub symbol: Symbol,
pub span: S,
pub span: Span,
pub kind: LitKind,
pub suffix: Option<Symbol>,
}
pub fn token_to_literal<S>(text: &str, span: S) -> Literal<S>
pub fn token_to_literal(text: &str, span: Span) -> Literal
where
S: Copy,
Span: Copy,
{
use rustc_lexer::LiteralKind;
@ -641,10 +643,10 @@ where
}
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub struct Punct<S> {
pub struct Punct {
pub char: char,
pub spacing: Spacing,
pub span: S,
pub span: Span,
}
/// Indicates whether a token can join with the following token to form a
@ -709,25 +711,25 @@ pub enum Spacing {
/// Identifier or keyword.
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub struct Ident<S> {
pub struct Ident {
pub sym: Symbol,
pub span: S,
pub span: Span,
pub is_raw: IdentIsRaw,
}
impl<S> Ident<S> {
pub fn new(text: &str, span: S) -> Self {
impl Ident {
pub fn new(text: &str, span: Span) -> Self {
// let raw_stripped = IdentIsRaw::split_from_symbol(text.as_ref());
let (is_raw, text) = IdentIsRaw::split_from_symbol(text);
Ident { sym: Symbol::intern(text), span, is_raw }
}
}
fn print_debug_subtree<S: fmt::Debug>(
fn print_debug_subtree(
f: &mut fmt::Formatter<'_>,
subtree: &Subtree<S>,
subtree: &Subtree,
level: usize,
iter: TtIter<'_, S>,
iter: TtIter<'_>,
) -> fmt::Result {
let align = " ".repeat(level);
@ -751,11 +753,7 @@ fn print_debug_subtree<S: fmt::Debug>(
Ok(())
}
fn print_debug_token<S: fmt::Debug>(
f: &mut fmt::Formatter<'_>,
level: usize,
tt: TtElement<'_, S>,
) -> fmt::Result {
fn print_debug_token(f: &mut fmt::Formatter<'_>, level: usize, tt: TtElement<'_>) -> fmt::Result {
let align = " ".repeat(level);
match tt {
@ -800,19 +798,19 @@ fn print_debug_token<S: fmt::Debug>(
Ok(())
}
impl<S: fmt::Debug + Copy> fmt::Debug for TopSubtree<S> {
impl fmt::Debug for TopSubtree {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
fmt::Debug::fmt(&self.view(), f)
}
}
impl<S: fmt::Display + Copy> fmt::Display for TopSubtree<S> {
impl fmt::Display for TopSubtree {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
fmt::Display::fmt(&self.view(), f)
}
}
impl<S> fmt::Display for Leaf<S> {
impl fmt::Display for Leaf {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
match self {
Leaf::Ident(it) => fmt::Display::fmt(it, f),
@ -822,14 +820,14 @@ impl<S> fmt::Display for Leaf<S> {
}
}
impl<S> fmt::Display for Ident<S> {
impl fmt::Display for Ident {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
fmt::Display::fmt(&self.is_raw.as_str(), f)?;
fmt::Display::fmt(&self.sym, f)
}
}
impl<S> fmt::Display for Literal<S> {
impl fmt::Display for Literal {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
match self.kind {
LitKind::Byte => write!(f, "b'{}'", self.symbol),
@ -873,26 +871,26 @@ impl<S> fmt::Display for Literal<S> {
}
}
impl<S> fmt::Display for Punct<S> {
impl fmt::Display for Punct {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
fmt::Display::fmt(&self.char, f)
}
}
impl<S> Subtree<S> {
impl Subtree {
/// Count the number of tokens recursively
pub fn count(&self) -> usize {
self.usize_len()
}
}
impl<S> TopSubtree<S> {
impl TopSubtree {
/// A simple line string used for debugging
pub fn subtree_as_debug_string(&self, subtree_idx: usize) -> String {
fn debug_subtree<S>(
fn debug_subtree(
output: &mut String,
subtree: &Subtree<S>,
iter: &mut std::slice::Iter<'_, TokenTree<S>>,
subtree: &Subtree,
iter: &mut std::slice::Iter<'_, TokenTree>,
) {
let delim = match subtree.delimiter.kind {
DelimiterKind::Brace => ("{", "}"),
@ -914,11 +912,11 @@ impl<S> TopSubtree<S> {
output.push_str(delim.1);
}
fn debug_token_tree<S>(
fn debug_token_tree(
output: &mut String,
tt: &TokenTree<S>,
last: Option<&TokenTree<S>>,
iter: &mut std::slice::Iter<'_, TokenTree<S>>,
tt: &TokenTree,
last: Option<&TokenTree>,
iter: &mut std::slice::Iter<'_, TokenTree>,
) {
match tt {
TokenTree::Leaf(it) => {
@ -958,8 +956,8 @@ impl<S> TopSubtree<S> {
}
}
pub fn pretty<S>(mut tkns: &[TokenTree<S>]) -> String {
fn tokentree_to_text<S>(tkn: &TokenTree<S>, tkns: &mut &[TokenTree<S>]) -> String {
pub fn pretty(mut tkns: &[TokenTree]) -> String {
fn tokentree_to_text(tkn: &TokenTree, tkns: &mut &[TokenTree]) -> String {
match tkn {
TokenTree::Leaf(Leaf::Ident(ident)) => {
format!("{}{}", ident.is_raw.as_str(), ident.sym)