Merge #8977
8977: internal: minor `TokenMap` cleanups r=jonas-schievink a=jonas-schievink bors r+ Co-authored-by: Jonas Schievink <jonasschievink@gmail.com>
This commit is contained in:
commit
f5f24a9a2c
7 changed files with 101 additions and 90 deletions
|
|
@ -155,7 +155,7 @@ pub fn expand_speculative(
|
|||
mbe::token_tree_to_syntax_node(&speculative_expansion.value, fragment_kind).ok()?;
|
||||
|
||||
let token_id = macro_def.map_id_down(token_id);
|
||||
let range = tmap_2.range_by_token(token_id)?.by_kind(token_to_map.kind())?;
|
||||
let range = tmap_2.range_by_token(token_id, token_to_map.kind())?;
|
||||
let token = node.syntax_node().covering_element(range).into_token()?;
|
||||
Some((node.syntax_node(), token))
|
||||
}
|
||||
|
|
|
|||
|
|
@ -154,7 +154,7 @@ impl HygieneInfo {
|
|||
},
|
||||
};
|
||||
|
||||
let range = token_map.range_by_token(token_id)?.by_kind(SyntaxKind::IDENT)?;
|
||||
let range = token_map.range_by_token(token_id, SyntaxKind::IDENT)?;
|
||||
Some((tt.with_value(range + tt.value), origin))
|
||||
}
|
||||
}
|
||||
|
|
|
|||
|
|
@ -329,7 +329,7 @@ impl ExpansionInfo {
|
|||
let token_id = self.macro_arg.1.token_by_range(range)?;
|
||||
let token_id = self.macro_def.map_id_down(token_id);
|
||||
|
||||
let range = self.exp_map.range_by_token(token_id)?.by_kind(token.value.kind())?;
|
||||
let range = self.exp_map.range_by_token(token_id, token.value.kind())?;
|
||||
|
||||
let token = self.expanded.value.covering_element(range).into_token()?;
|
||||
|
||||
|
|
@ -354,7 +354,7 @@ impl ExpansionInfo {
|
|||
},
|
||||
};
|
||||
|
||||
let range = token_map.range_by_token(token_id)?.by_kind(token.value.kind())?;
|
||||
let range = token_map.range_by_token(token_id, token.value.kind())?;
|
||||
let token =
|
||||
tt.value.covering_element(range + tt.value.text_range().start()).into_token()?;
|
||||
Some((tt.with_value(token), origin))
|
||||
|
|
|
|||
|
|
@ -14,6 +14,7 @@ mod tests;
|
|||
|
||||
#[cfg(test)]
|
||||
mod benchmark;
|
||||
mod token_map;
|
||||
|
||||
use std::fmt;
|
||||
|
||||
|
|
@ -63,9 +64,12 @@ impl fmt::Display for ExpandError {
|
|||
}
|
||||
}
|
||||
|
||||
pub use crate::syntax_bridge::{
|
||||
ast_to_token_tree, parse_exprs_with_sep, parse_to_token_tree, syntax_node_to_token_tree,
|
||||
token_tree_to_syntax_node, TokenMap,
|
||||
pub use crate::{
|
||||
syntax_bridge::{
|
||||
ast_to_token_tree, parse_exprs_with_sep, parse_to_token_tree, syntax_node_to_token_tree,
|
||||
token_tree_to_syntax_node,
|
||||
},
|
||||
token_map::TokenMap,
|
||||
};
|
||||
|
||||
/// This struct contains AST for a single `macro_rules` definition. What might
|
||||
|
|
|
|||
|
|
@ -10,36 +10,8 @@ use syntax::{
|
|||
};
|
||||
use tt::buffer::{Cursor, TokenBuffer};
|
||||
|
||||
use crate::ExpandError;
|
||||
use crate::{subtree_source::SubtreeTokenSource, tt_iter::TtIter};
|
||||
|
||||
/// How a `TokenMap` entry locates its token: either the range of an ordinary
/// token, or the combined range of a matched delimiter pair.
#[derive(Debug, PartialEq, Eq, Clone, Copy)]
pub enum TokenTextRange {
    /// Range of a plain (non-delimiter) token.
    Token(TextRange),
    /// Range covering a whole delimited group, from the opening bracket
    /// through the closing one.
    Delimiter(TextRange),
}
|
||||
|
||||
impl TokenTextRange {
    /// Resolves this entry to a concrete range for a token of `kind`.
    ///
    /// A `Token` entry is returned as-is, regardless of `kind`. A `Delimiter`
    /// entry spans the whole delimited group, so `kind` selects which bracket
    /// to return: an opening-bracket kind yields the first byte, a
    /// closing-bracket kind the last byte, and any other kind yields `None`.
    pub fn by_kind(self, kind: SyntaxKind) -> Option<TextRange> {
        match self {
            TokenTextRange::Token(it) => Some(it),
            TokenTextRange::Delimiter(it) => match kind {
                T!['{'] | T!['('] | T!['['] => Some(TextRange::at(it.start(), 1.into())),
                T!['}'] | T![')'] | T![']'] => {
                    // All close delimiters are one byte long, so `'}'` stands
                    // in for whichever bracket actually ends the range.
                    Some(TextRange::at(it.end() - TextSize::of('}'), 1.into()))
                }
                _ => None,
            },
        }
    }
}
|
||||
|
||||
/// Maps `tt::TokenId` to the relative range of the original token.
#[derive(Debug, PartialEq, Eq, Clone, Default)]
pub struct TokenMap {
    /// Maps `tt::TokenId` to the *relative* source range.
    // Entries are appended in insertion order; lookups are linear scans over
    // this Vec.
    entries: Vec<(tt::TokenId, TokenTextRange)>,
}
|
||||
use crate::{ExpandError, TokenMap};
|
||||
|
||||
/// Convert the syntax tree (what user has written) to a `TokenTree` (what macro
|
||||
/// will consume).
|
||||
|
|
@ -53,7 +25,7 @@ pub fn syntax_node_to_token_tree(node: &SyntaxNode) -> (tt::Subtree, TokenMap) {
|
|||
let global_offset = node.text_range().start();
|
||||
let mut c = Convertor::new(node, global_offset);
|
||||
let subtree = c.go();
|
||||
c.id_alloc.map.entries.shrink_to_fit();
|
||||
c.id_alloc.map.shrink_to_fit();
|
||||
(subtree, c.id_alloc.map)
|
||||
}
|
||||
|
||||
|
|
@ -149,55 +121,6 @@ pub fn parse_exprs_with_sep(tt: &tt::Subtree, sep: char) -> Vec<tt::Subtree> {
|
|||
res
|
||||
}
|
||||
|
||||
impl TokenMap {
    /// Finds the id of the token whose source occupies exactly
    /// `relative_range`. A delimiter entry matches if the range equals either
    /// its one-byte opening bracket or its one-byte closing bracket.
    pub fn token_by_range(&self, relative_range: TextRange) -> Option<tt::TokenId> {
        let &(token_id, _) = self.entries.iter().find(|(_, range)| match range {
            TokenTextRange::Token(it) => *it == relative_range,
            TokenTextRange::Delimiter(it) => {
                // `it` covers the whole delimited group; compare against the
                // one-byte brackets at either end.
                let open = TextRange::at(it.start(), 1.into());
                let close = TextRange::at(it.end() - TextSize::of('}'), 1.into());
                open == relative_range || close == relative_range
            }
        })?;
        Some(token_id)
    }

    /// Returns the recorded position for `token_id`, if any (linear scan).
    pub fn range_by_token(&self, token_id: tt::TokenId) -> Option<TokenTextRange> {
        let &(_, range) = self.entries.iter().find(|(tid, _)| *tid == token_id)?;
        Some(range)
    }

    /// Records an ordinary (non-delimiter) token at `relative_range`.
    fn insert(&mut self, token_id: tt::TokenId, relative_range: TextRange) {
        self.entries.push((token_id, TokenTextRange::Token(relative_range)));
    }

    /// Records a delimiter pair as one entry covering both brackets, and
    /// returns the entry's index so it can later be adjusted with
    /// `update_close_delim` or dropped with `remove_delim`.
    fn insert_delim(
        &mut self,
        token_id: tt::TokenId,
        open_relative_range: TextRange,
        close_relative_range: TextRange,
    ) -> usize {
        let res = self.entries.len();
        let cover = open_relative_range.cover(close_relative_range);

        self.entries.push((token_id, TokenTextRange::Delimiter(cover)));
        res
    }

    /// Extends the delimiter entry at `idx` to also cover
    /// `close_relative_range`. No-op if the entry is not a delimiter.
    fn update_close_delim(&mut self, idx: usize, close_relative_range: TextRange) {
        let (_, token_text_range) = &mut self.entries[idx];
        if let TokenTextRange::Delimiter(dim) = token_text_range {
            let cover = dim.cover(close_relative_range);
            *token_text_range = TokenTextRange::Delimiter(cover);
        }
    }

    /// Removes the entry at `idx`, shifting all later entries down.
    fn remove_delim(&mut self, idx: usize) {
        // FIXME: This could be accidentally quadratic
        self.entries.remove(idx);
    }
}
|
||||
|
||||
/// Returns the textual content of a doc comment block as a quoted string
|
||||
/// That is, strips leading `///` (or `/**`, etc)
|
||||
/// and strips the ending `*/`
|
||||
|
|
@ -634,7 +557,7 @@ impl<'a> TtTreeSink<'a> {
|
|||
}
|
||||
|
||||
fn finish(mut self) -> (Parse<SyntaxNode>, TokenMap) {
|
||||
self.token_map.entries.shrink_to_fit();
|
||||
self.token_map.shrink_to_fit();
|
||||
(self.inner.finish(), self.token_map)
|
||||
}
|
||||
}
|
||||
|
|
|
|||
|
|
@ -58,9 +58,8 @@ macro_rules! foobar {
|
|||
let (node, token_map) = token_tree_to_syntax_node(&expanded, FragmentKind::Items).unwrap();
|
||||
let content = node.syntax_node().to_string();
|
||||
|
||||
let get_text = |id, kind| -> String {
|
||||
content[token_map.range_by_token(id).unwrap().by_kind(kind).unwrap()].to_string()
|
||||
};
|
||||
let get_text =
|
||||
|id, kind| -> String { content[token_map.range_by_token(id, kind).unwrap()].to_string() };
|
||||
|
||||
assert_eq!(expanded.token_trees.len(), 4);
|
||||
// {($e:ident) => { fn $e() {} }}
|
||||
|
|
|
|||
85
crates/mbe/src/token_map.rs
Normal file
85
crates/mbe/src/token_map.rs
Normal file
|
|
@ -0,0 +1,85 @@
|
|||
//! Mapping between `TokenId`s and the token's position in macro definitions or inputs.
|
||||
|
||||
use parser::{SyntaxKind, T};
|
||||
use syntax::{TextRange, TextSize};
|
||||
|
||||
/// How a `TokenMap` entry locates its token: either the range of an ordinary
/// token, or the combined range of a matched delimiter pair.
#[derive(Debug, PartialEq, Eq, Clone, Copy)]
enum TokenTextRange {
    /// Range of a plain (non-delimiter) token.
    Token(TextRange),
    /// Range covering a whole delimited group, from the opening bracket
    /// through the closing one.
    Delimiter(TextRange),
}
|
||||
|
||||
impl TokenTextRange {
|
||||
fn by_kind(self, kind: SyntaxKind) -> Option<TextRange> {
|
||||
match self {
|
||||
TokenTextRange::Token(it) => Some(it),
|
||||
TokenTextRange::Delimiter(it) => match kind {
|
||||
T!['{'] | T!['('] | T!['['] => Some(TextRange::at(it.start(), 1.into())),
|
||||
T!['}'] | T![')'] | T![']'] => {
|
||||
Some(TextRange::at(it.end() - TextSize::of('}'), 1.into()))
|
||||
}
|
||||
_ => None,
|
||||
},
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// Maps `tt::TokenId` to the relative range of the original token.
#[derive(Debug, PartialEq, Eq, Clone, Default)]
pub struct TokenMap {
    /// Maps `tt::TokenId` to the *relative* source range.
    // Entries are appended in insertion order; lookups are linear scans over
    // this Vec.
    entries: Vec<(tt::TokenId, TokenTextRange)>,
}
|
||||
|
||||
impl TokenMap {
|
||||
pub fn token_by_range(&self, relative_range: TextRange) -> Option<tt::TokenId> {
|
||||
let &(token_id, _) = self.entries.iter().find(|(_, range)| match range {
|
||||
TokenTextRange::Token(it) => *it == relative_range,
|
||||
TokenTextRange::Delimiter(it) => {
|
||||
let open = TextRange::at(it.start(), 1.into());
|
||||
let close = TextRange::at(it.end() - TextSize::of('}'), 1.into());
|
||||
open == relative_range || close == relative_range
|
||||
}
|
||||
})?;
|
||||
Some(token_id)
|
||||
}
|
||||
|
||||
pub fn range_by_token(&self, token_id: tt::TokenId, kind: SyntaxKind) -> Option<TextRange> {
|
||||
let &(_, range) = self.entries.iter().find(|(tid, _)| *tid == token_id)?;
|
||||
range.by_kind(kind)
|
||||
}
|
||||
|
||||
pub(crate) fn shrink_to_fit(&mut self) {
|
||||
self.entries.shrink_to_fit();
|
||||
}
|
||||
|
||||
pub(crate) fn insert(&mut self, token_id: tt::TokenId, relative_range: TextRange) {
|
||||
self.entries.push((token_id, TokenTextRange::Token(relative_range)));
|
||||
}
|
||||
|
||||
pub(crate) fn insert_delim(
|
||||
&mut self,
|
||||
token_id: tt::TokenId,
|
||||
open_relative_range: TextRange,
|
||||
close_relative_range: TextRange,
|
||||
) -> usize {
|
||||
let res = self.entries.len();
|
||||
let cover = open_relative_range.cover(close_relative_range);
|
||||
|
||||
self.entries.push((token_id, TokenTextRange::Delimiter(cover)));
|
||||
res
|
||||
}
|
||||
|
||||
pub(crate) fn update_close_delim(&mut self, idx: usize, close_relative_range: TextRange) {
|
||||
let (_, token_text_range) = &mut self.entries[idx];
|
||||
if let TokenTextRange::Delimiter(dim) = token_text_range {
|
||||
let cover = dim.cover(close_relative_range);
|
||||
*token_text_range = TokenTextRange::Delimiter(cover);
|
||||
}
|
||||
}
|
||||
|
||||
pub(crate) fn remove_delim(&mut self, idx: usize) {
|
||||
// FIXME: This could be accidentally quadratic
|
||||
self.entries.remove(idx);
|
||||
}
|
||||
}
|
||||
Loading…
Add table
Add a link
Reference in a new issue