Add always-disabled `gen` parse support

This commit is contained in:
Lukas Wirth 2024-07-17 10:49:12 +02:00
parent 373e84fb28
commit 0bffb1345b
32 changed files with 376 additions and 55 deletions

View file

@ -301,7 +301,10 @@ impl ExprCollector<'_> {
result_expr_id
})
}
None => self.collect_block(e),
// FIXME
Some(ast::BlockModifier::AsyncGen(_)) | Some(ast::BlockModifier::Gen(_)) | None => {
self.collect_block(e)
}
},
ast::Expr::LoopExpr(e) => {
let label = e.label().map(|label| self.collect_label(label));

View file

@ -728,6 +728,8 @@ fn include_expand(
}
};
match parse_to_token_tree(
// FIXME
Edition::CURRENT,
SpanAnchor { file_id, ast_id: ROOT_ERASED_FILE_AST_ID },
SyntaxContextId::ROOT,
&db.file_text(file_id),

View file

@ -3,7 +3,7 @@ use syntax::{
ast::{self, edit::IndentLevel, edit_in_place::Indent, make, AstNode, HasName},
ted, NodeOrToken,
SyntaxKind::{BLOCK_EXPR, BREAK_EXPR, COMMENT, LOOP_EXPR, MATCH_GUARD, PATH_EXPR, RETURN_EXPR},
SyntaxNode,
SyntaxNode, T,
};
use crate::{utils::suggest_name, AssistContext, AssistId, AssistKind, Assists};
@ -26,8 +26,8 @@ use crate::{utils::suggest_name, AssistContext, AssistId, AssistKind, Assists};
// ```
pub(crate) fn extract_variable(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> {
let node = if ctx.has_empty_selection() {
if let Some(expr_stmt) = ctx.find_node_at_offset::<ast::ExprStmt>() {
expr_stmt.syntax().clone()
if let Some(t) = ctx.token_at_offset().find(|it| it.kind() == T![;]) {
t.parent().and_then(ast::ExprStmt::cast)?.syntax().clone()
} else if let Some(expr) = ctx.find_node_at_offset::<ast::Expr>() {
expr.syntax().ancestors().find_map(valid_target_expr)?.syntax().clone()
} else {

View file

@ -184,7 +184,7 @@ fn normalize(name: &str) -> Option<String> {
fn is_valid_name(name: &str) -> bool {
matches!(
ide_db::syntax_helpers::LexedStr::single_token(name),
ide_db::syntax_helpers::LexedStr::single_token(syntax::Edition::CURRENT, name),
Some((syntax::SyntaxKind::IDENT, _error))
)
}

View file

@ -25,7 +25,7 @@ use std::fmt;
use base_db::{AnchoredPathBuf, FileId, FileRange};
use either::Either;
use hir::{FieldSource, HirFileIdExt, InFile, ModuleSource, Semantics};
use span::SyntaxContextId;
use span::{Edition, SyntaxContextId};
use stdx::{never, TupleExt};
use syntax::{
ast::{self, HasName},
@ -227,7 +227,8 @@ fn rename_mod(
module: hir::Module,
new_name: &str,
) -> Result<SourceChange> {
if IdentifierKind::classify(new_name)? != IdentifierKind::Ident {
if IdentifierKind::classify(module.krate().edition(sema.db), new_name)? != IdentifierKind::Ident
{
bail!("Invalid name `{0}`: cannot rename module to {0}", new_name);
}
@ -313,7 +314,12 @@ fn rename_reference(
def: Definition,
new_name: &str,
) -> Result<SourceChange> {
let ident_kind = IdentifierKind::classify(new_name)?;
let ident_kind = IdentifierKind::classify(
def.krate(sema.db)
.ok_or_else(|| RenameError("definition has no krate?".into()))?
.edition(sema.db),
new_name,
)?;
if matches!(
def,
@ -605,8 +611,8 @@ pub enum IdentifierKind {
}
impl IdentifierKind {
pub fn classify(new_name: &str) -> Result<IdentifierKind> {
match parser::LexedStr::single_token(new_name) {
pub fn classify(edition: Edition, new_name: &str) -> Result<IdentifierKind> {
match parser::LexedStr::single_token(edition, new_name) {
Some(res) => match res {
(SyntaxKind::IDENT, _) => {
if let Some(inner) = new_name.strip_prefix("r#") {

View file

@ -277,6 +277,8 @@ pub fn for_each_tail_expr(expr: &ast::Expr, cb: &mut dyn FnMut(&ast::Expr)) {
});
}
Some(ast::BlockModifier::Unsafe(_)) => (),
Some(ast::BlockModifier::Gen(_)) => (),
Some(ast::BlockModifier::AsyncGen(_)) => (),
None => (),
}
if let Some(stmt_list) = b.stmt_list() {

View file

@ -255,7 +255,7 @@ fn validate_rule(rule: &SsrRule) -> Result<(), SsrError> {
}
fn tokenize(source: &str) -> Result<Vec<Token>, SsrError> {
let lexed = parser::LexedStr::new(source);
let lexed = parser::LexedStr::new(parser::Edition::CURRENT, source);
if let Some((_, first_error)) = lexed.errors().next() {
bail!("Failed to parse pattern: {}", first_error);
}

View file

@ -13,6 +13,7 @@ use ide_db::{
RootDatabase,
};
use itertools::Itertools;
use span::Edition;
use stdx::{always, never};
use syntax::{
ast, utils::is_raw_identifier, AstNode, SmolStr, SyntaxKind, SyntaxNode, TextRange, TextSize,
@ -99,7 +100,7 @@ pub(crate) fn rename(
// FIXME: This can use the `ide_db::rename_reference` (or def.rename) method once we can
// properly find "direct" usages/references.
.map(|(.., def)| {
match IdentifierKind::classify(new_name)? {
match IdentifierKind::classify(Edition::CURRENT, new_name)? {
IdentifierKind::Ident => (),
IdentifierKind::Lifetime => {
bail!("Cannot alias reference to a lifetime identifier")
@ -391,7 +392,7 @@ fn rename_self_to_param(
return Ok(SourceChange::default());
}
let identifier_kind = IdentifierKind::classify(new_name)?;
let identifier_kind = IdentifierKind::classify(Edition::CURRENT, new_name)?;
let InFile { file_id, value: self_param } =
sema.source(self_param).ok_or_else(|| format_err!("cannot find function source"))?;

View file

@ -169,6 +169,7 @@ where
/// Convert a string to a `TokenTree`. The spans of the subtree will be anchored to the provided
/// anchor with the given context.
pub fn parse_to_token_tree<Ctx>(
edition: Edition,
anchor: SpanAnchor,
ctx: Ctx,
text: &str,
@ -177,7 +178,7 @@ where
SpanData<Ctx>: Copy + fmt::Debug,
Ctx: Copy,
{
let lexed = parser::LexedStr::new(text);
let lexed = parser::LexedStr::new(edition, text);
if lexed.errors().next().is_some() {
return None;
}
@ -187,11 +188,15 @@ where
}
/// Convert a string to a `TokenTree`. The passed span will be used for all spans of the produced subtree.
pub fn parse_to_token_tree_static_span<S>(span: S, text: &str) -> Option<tt::Subtree<S>>
pub fn parse_to_token_tree_static_span<S>(
edition: Edition,
span: S,
text: &str,
) -> Option<tt::Subtree<S>>
where
S: Copy + fmt::Debug,
{
let lexed = parser::LexedStr::new(text);
let lexed = parser::LexedStr::new(edition, text);
if lexed.errors().next().is_some() {
return None;
}

View file

@ -12,8 +12,13 @@ pub enum Edition {
}
impl Edition {
/// The current latest stable edition, note this is usually not the right choice in code.
pub const CURRENT: Edition = Edition::Edition2021;
pub const DEFAULT: Edition = Edition::Edition2015;
pub fn at_least_2024(self) -> bool {
self >= Edition::Edition2024
}
}
#[derive(Debug)]

View file

@ -51,6 +51,7 @@ pub(super) const ATOM_EXPR_FIRST: TokenSet =
T![const],
T![continue],
T![do],
T![gen],
T![for],
T![if],
T![let],
@ -138,15 +139,37 @@ pub(super) fn atom_expr(
// fn f() { const { } }
// fn f() { async { } }
// fn f() { async move { } }
T![const] | T![unsafe] | T![async] if la == T!['{'] => {
T![const] | T![unsafe] | T![async] | T![gen] if la == T!['{'] => {
let m = p.start();
p.bump_any();
stmt_list(p);
m.complete(p, BLOCK_EXPR)
}
T![async] if la == T![move] && p.nth(2) == T!['{'] => {
// test_err gen_blocks
// pub fn main() {
// gen { yield ""; };
// async gen { yield ""; };
// gen move { yield ""; };
// async gen move { yield ""; };
// }
T![async] if la == T![gen] && p.nth(2) == T!['{'] => {
let m = p.start();
p.bump(T![async]);
p.eat(T![gen]);
stmt_list(p);
m.complete(p, BLOCK_EXPR)
}
T![async] | T![gen] if la == T![move] && p.nth(2) == T!['{'] => {
let m = p.start();
p.bump_any();
p.bump(T![move]);
stmt_list(p);
m.complete(p, BLOCK_EXPR)
}
T![async] if la == T![gen] && p.nth(2) == T![move] && p.nth(3) == T!['{'] => {
let m = p.start();
p.bump(T![async]);
p.bump(T![gen]);
p.bump(T![move]);
stmt_list(p);
m.complete(p, BLOCK_EXPR)
@ -355,6 +378,7 @@ fn closure_expr(p: &mut Parser<'_>) -> CompletedMarker {
p.eat(T![const]);
p.eat(T![static]);
p.eat(T![async]);
p.eat(T![gen]);
p.eat(T![move]);
if !p.at(T![|]) {

View file

@ -112,11 +112,22 @@ pub(super) fn opt_item(p: &mut Parser<'_>, m: Marker) -> Result<(), Marker> {
// test_err async_without_semicolon
// fn foo() { let _ = async {} }
if p.at(T![async]) && !matches!(p.nth(1), T!['{'] | T![move] | T![|]) {
if p.at(T![async])
&& (!matches!(p.nth(1), T!['{'] | T![gen] | T![move] | T![|])
|| matches!((p.nth(1), p.nth(2)), (T![gen], T![fn])))
{
p.eat(T![async]);
has_mods = true;
}
// test_err gen_fn
// gen fn gen_fn() {}
// async gen fn async_gen_fn() {}
if p.at(T![gen]) && p.nth(1) == T![fn] {
p.eat(T![gen]);
has_mods = true;
}
// test_err unsafe_block_in_mod
// fn foo(){} unsafe { } fn bar(){}
if p.at(T![unsafe]) && p.nth(1) != T!['{'] {

View file

@ -13,6 +13,7 @@ use std::ops;
use rustc_lexer::unescape::{EscapeError, Mode};
use crate::{
Edition,
SyntaxKind::{self, *},
T,
};
@ -30,9 +31,9 @@ struct LexError {
}
impl<'a> LexedStr<'a> {
pub fn new(text: &'a str) -> LexedStr<'a> {
pub fn new(edition: Edition, text: &'a str) -> LexedStr<'a> {
let _p = tracing::info_span!("LexedStr::new").entered();
let mut conv = Converter::new(text);
let mut conv = Converter::new(edition, text);
if let Some(shebang_len) = rustc_lexer::strip_shebang(text) {
conv.res.push(SHEBANG, conv.offset);
conv.offset = shebang_len;
@ -47,7 +48,7 @@ impl<'a> LexedStr<'a> {
conv.finalize_with_eof()
}
pub fn single_token(text: &'a str) -> Option<(SyntaxKind, Option<String>)> {
pub fn single_token(edition: Edition, text: &'a str) -> Option<(SyntaxKind, Option<String>)> {
if text.is_empty() {
return None;
}
@ -57,7 +58,7 @@ impl<'a> LexedStr<'a> {
return None;
}
let mut conv = Converter::new(text);
let mut conv = Converter::new(edition, text);
conv.extend_token(&token.kind, text);
match &*conv.res.kind {
[kind] => Some((*kind, conv.res.error.pop().map(|it| it.msg))),
@ -129,13 +130,15 @@ impl<'a> LexedStr<'a> {
struct Converter<'a> {
res: LexedStr<'a>,
offset: usize,
edition: Edition,
}
impl<'a> Converter<'a> {
fn new(text: &'a str) -> Self {
fn new(edition: Edition, text: &'a str) -> Self {
Self {
res: LexedStr { text, kind: Vec::new(), start: Vec::new(), error: Vec::new() },
offset: 0,
edition,
}
}
@ -175,6 +178,11 @@ impl<'a> Converter<'a> {
rustc_lexer::TokenKind::Whitespace => WHITESPACE,
rustc_lexer::TokenKind::Ident if token_text == "_" => UNDERSCORE,
rustc_lexer::TokenKind::Ident
if token_text == "gen" && !self.edition.at_least_2024() =>
{
IDENT
}
rustc_lexer::TokenKind::Ident => {
SyntaxKind::from_keyword(token_text).unwrap_or(IDENT)
}

File diff suppressed because one or more lines are too long

View file

@ -9,7 +9,7 @@ use std::{
use expect_test::expect_file;
use crate::{LexedStr, TopEntryPoint};
use crate::{Edition, LexedStr, TopEntryPoint};
#[test]
fn lex_ok() {
@ -30,7 +30,7 @@ fn lex_err() {
}
fn lex(text: &str) -> String {
let lexed = LexedStr::new(text);
let lexed = LexedStr::new(Edition::CURRENT, text);
let mut res = String::new();
for i in 0..lexed.len() {
@ -85,9 +85,9 @@ fn parse_inline_err() {
}
fn parse(entry: TopEntryPoint, text: &str) -> (String, bool) {
let lexed = LexedStr::new(text);
let lexed = LexedStr::new(Edition::CURRENT, text);
let input = lexed.to_input();
let output = entry.parse(&input, crate::Edition::CURRENT);
let output = entry.parse(&input, Edition::CURRENT);
let mut buf = String::new();
let mut errors = Vec::new();

View file

@ -1,4 +1,4 @@
use crate::{LexedStr, PrefixEntryPoint, Step};
use crate::{Edition, LexedStr, PrefixEntryPoint, Step};
#[test]
fn vis() {
@ -82,11 +82,11 @@ fn meta_item() {
#[track_caller]
fn check(entry: PrefixEntryPoint, input: &str, prefix: &str) {
let lexed = LexedStr::new(input);
let lexed = LexedStr::new(Edition::CURRENT, input);
let input = lexed.to_input();
let mut n_tokens = 0;
for step in entry.parse(&input, crate::Edition::CURRENT).iter() {
for step in entry.parse(&input, Edition::CURRENT).iter() {
match step {
Step::Token { n_input_tokens, .. } => n_tokens += n_input_tokens as usize,
Step::FloatSplit { .. } => n_tokens += 1,

View file

@ -10,4 +10,4 @@ SOURCE_FILE
IDENT "Foo"
SEMICOLON ";"
WHITESPACE "\n"
error 6: expected existential, fn, trait or impl
error 6: expected fn, trait or impl

View file

@ -69,7 +69,7 @@ SOURCE_FILE
WHITESPACE "\n"
R_CURLY "}"
WHITESPACE "\n"
error 24: expected existential, fn, trait or impl
error 41: expected existential, fn, trait or impl
error 24: expected fn, trait or impl
error 41: expected fn, trait or impl
error 56: expected a block
error 75: expected a loop or block

View file

@ -39,7 +39,7 @@ SOURCE_FILE
L_CURLY "{"
R_CURLY "}"
WHITESPACE "\n"
error 6: expected existential, fn, trait or impl
error 6: expected fn, trait or impl
error 38: expected a name
error 40: missing type for `const` or `static`
error 40: expected SEMICOLON

View file

@ -11,5 +11,5 @@ SOURCE_FILE
WHITESPACE " "
STRING "\"C\""
WHITESPACE "\n"
error 10: expected existential, fn, trait or impl
error 21: expected existential, fn, trait or impl
error 10: expected fn, trait or impl
error 21: expected fn, trait or impl

View file

@ -0,0 +1,139 @@
SOURCE_FILE
FN
VISIBILITY
PUB_KW "pub"
WHITESPACE " "
FN_KW "fn"
WHITESPACE " "
NAME
IDENT "main"
PARAM_LIST
L_PAREN "("
R_PAREN ")"
WHITESPACE " "
BLOCK_EXPR
STMT_LIST
L_CURLY "{"
WHITESPACE "\n "
EXPR_STMT
RECORD_EXPR
PATH
PATH_SEGMENT
NAME_REF
IDENT "gen"
WHITESPACE " "
RECORD_EXPR_FIELD_LIST
L_CURLY "{"
WHITESPACE " "
ERROR
YIELD_KW "yield"
WHITESPACE " "
ERROR
STRING "\"\""
ERROR
SEMICOLON ";"
WHITESPACE " "
R_CURLY "}"
SEMICOLON ";"
WHITESPACE "\n "
ERROR
ASYNC_KW "async"
WHITESPACE " "
EXPR_STMT
RECORD_EXPR
PATH
PATH_SEGMENT
NAME_REF
IDENT "gen"
WHITESPACE " "
RECORD_EXPR_FIELD_LIST
L_CURLY "{"
WHITESPACE " "
ERROR
YIELD_KW "yield"
WHITESPACE " "
ERROR
STRING "\"\""
ERROR
SEMICOLON ";"
WHITESPACE " "
R_CURLY "}"
SEMICOLON ";"
WHITESPACE "\n "
EXPR_STMT
PATH_EXPR
PATH
PATH_SEGMENT
NAME_REF
IDENT "gen"
WHITESPACE " "
EXPR_STMT
CLOSURE_EXPR
MOVE_KW "move"
WHITESPACE " "
EXPR_STMT
BLOCK_EXPR
STMT_LIST
L_CURLY "{"
WHITESPACE " "
EXPR_STMT
YIELD_EXPR
YIELD_KW "yield"
WHITESPACE " "
LITERAL
STRING "\"\""
SEMICOLON ";"
WHITESPACE " "
R_CURLY "}"
SEMICOLON ";"
WHITESPACE "\n "
ERROR
ASYNC_KW "async"
WHITESPACE " "
EXPR_STMT
PATH_EXPR
PATH
PATH_SEGMENT
NAME_REF
IDENT "gen"
WHITESPACE " "
EXPR_STMT
CLOSURE_EXPR
MOVE_KW "move"
WHITESPACE " "
EXPR_STMT
BLOCK_EXPR
STMT_LIST
L_CURLY "{"
WHITESPACE " "
EXPR_STMT
YIELD_EXPR
YIELD_KW "yield"
WHITESPACE " "
LITERAL
STRING "\"\""
SEMICOLON ";"
WHITESPACE " "
R_CURLY "}"
SEMICOLON ";"
WHITESPACE "\n"
R_CURLY "}"
WHITESPACE "\n"
error 26: expected identifier
error 31: expected COMMA
error 32: expected identifier
error 34: expected COMMA
error 34: expected identifier
error 48: expected fn, trait or impl
error 55: expected identifier
error 60: expected COMMA
error 61: expected identifier
error 63: expected COMMA
error 63: expected identifier
error 75: expected SEMICOLON
error 80: expected `|`
error 80: expected SEMICOLON
error 105: expected fn, trait or impl
error 109: expected SEMICOLON
error 114: expected `|`
error 114: expected SEMICOLON

View file

@ -0,0 +1,6 @@
pub fn main() {
gen { yield ""; };
async gen { yield ""; };
gen move { yield ""; };
async gen move { yield ""; };
}

View file

@ -0,0 +1,51 @@
SOURCE_FILE
MACRO_CALL
PATH
PATH_SEGMENT
NAME_REF
IDENT "gen"
WHITESPACE " "
FN
FN_KW "fn"
WHITESPACE " "
NAME
IDENT "gen_fn"
PARAM_LIST
L_PAREN "("
R_PAREN ")"
WHITESPACE " "
BLOCK_EXPR
STMT_LIST
L_CURLY "{"
R_CURLY "}"
WHITESPACE "\n"
ERROR
ASYNC_KW "async"
WHITESPACE " "
MACRO_CALL
PATH
PATH_SEGMENT
NAME_REF
IDENT "gen"
WHITESPACE " "
FN
FN_KW "fn"
WHITESPACE " "
NAME
IDENT "async_gen_fn"
PARAM_LIST
L_PAREN "("
R_PAREN ")"
WHITESPACE " "
BLOCK_EXPR
STMT_LIST
L_CURLY "{"
R_CURLY "}"
WHITESPACE "\n"
error 3: expected BANG
error 3: expected `{`, `[`, `(`
error 3: expected SEMICOLON
error 24: expected fn, trait or impl
error 28: expected BANG
error 28: expected `{`, `[`, `(`
error 28: expected SEMICOLON

View file

@ -0,0 +1,2 @@
gen fn gen_fn() {}
async gen fn async_gen_fn() {}

View file

@ -127,7 +127,8 @@ pub(super) mod token_stream {
impl<S: Copy + fmt::Debug> TokenStream<S> {
pub(crate) fn from_str(src: &str, call_site: S) -> Result<TokenStream<S>, String> {
let subtree =
mbe::parse_to_token_tree_static_span(call_site, src).ok_or("lexing error")?;
mbe::parse_to_token_tree_static_span(span::Edition::CURRENT, call_site, src)
.ok_or("lexing error")?;
Ok(TokenStream::with_subtree(subtree))
}

View file

@ -9,7 +9,7 @@ use crate::{dylib, proc_macro_test_dylib_path, EnvSnapshot, ProcMacroSrv};
fn parse_string(call_site: TokenId, src: &str) -> crate::server_impl::TokenStream<TokenId> {
crate::server_impl::TokenStream::with_subtree(
mbe::parse_to_token_tree_static_span(call_site, src).unwrap(),
mbe::parse_to_token_tree_static_span(span::Edition::CURRENT, call_site, src).unwrap(),
)
}
@ -19,7 +19,7 @@ fn parse_string_spanned(
src: &str,
) -> crate::server_impl::TokenStream<Span> {
crate::server_impl::TokenStream::with_subtree(
mbe::parse_to_token_tree(anchor, call_site, src).unwrap(),
mbe::parse_to_token_tree(span::Edition::CURRENT, anchor, call_site, src).unwrap(),
)
}

View file

@ -10,6 +10,7 @@
// Name = -- non-terminal definition
// 'ident' -- keyword or punct token (terminal)
// '?ident' -- contextual keyword (terminal)
// '?ident' -- contextual keyword (terminal; may be a proper keyword in later editions too)
// '#ident' -- generic token (terminal)
// '@ident' -- literal token (terminal)
// A B -- sequence
@ -430,7 +431,7 @@ TryExpr =
Attr* Expr '?'
BlockExpr =
Attr* Label? ('try' | 'unsafe' | 'async' | 'const') StmtList
Attr* Label? ('try' | 'unsafe' | ('async' 'move'?) | ('gen' 'move'?) | 'const') StmtList
PrefixExpr =
Attr* op:('-' | '!' | '*') Expr
@ -490,9 +491,12 @@ FieldExpr =
Attr* Expr '.' NameRef
ClosureExpr =
Attr* ('for' GenericParamList)? 'const'? 'static'? 'async'? 'move'? ParamList RetType?
Attr* ClosureBinder? 'const'? 'static'? 'async'? 'gen'? 'move'? ParamList RetType?
body:Expr
ClosureBinder =
'for' GenericParamList
IfExpr =
Attr* 'if' condition:Expr then_branch:BlockExpr
('else' else_branch:(IfExpr | BlockExpr))?

View file

@ -352,13 +352,22 @@ pub enum BlockModifier {
Unsafe(SyntaxToken),
Try(SyntaxToken),
Const(SyntaxToken),
AsyncGen(SyntaxToken),
Gen(SyntaxToken),
Label(ast::Label),
}
impl ast::BlockExpr {
pub fn modifier(&self) -> Option<BlockModifier> {
self.async_token()
.map(BlockModifier::Async)
self.gen_token()
.map(|v| {
if self.async_token().is_some() {
BlockModifier::AsyncGen(v)
} else {
BlockModifier::Gen(v)
}
})
.or_else(|| self.async_token().map(BlockModifier::Async))
.or_else(|| self.unsafe_token().map(BlockModifier::Unsafe))
.or_else(|| self.try_token().map(BlockModifier::Try))
.or_else(|| self.const_token().map(BlockModifier::Const))

View file

@ -184,6 +184,10 @@ impl BlockExpr {
#[inline]
pub fn const_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![const]) }
#[inline]
pub fn gen_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![gen]) }
#[inline]
pub fn move_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![move]) }
#[inline]
pub fn try_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![try]) }
#[inline]
pub fn unsafe_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![unsafe]) }
@ -239,6 +243,17 @@ impl CastExpr {
pub fn as_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![as]) }
}
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub struct ClosureBinder {
pub(crate) syntax: SyntaxNode,
}
impl ClosureBinder {
#[inline]
pub fn generic_param_list(&self) -> Option<GenericParamList> { support::child(&self.syntax) }
#[inline]
pub fn for_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![for]) }
}
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub struct ClosureExpr {
pub(crate) syntax: SyntaxNode,
@ -246,7 +261,7 @@ pub struct ClosureExpr {
impl ast::HasAttrs for ClosureExpr {}
impl ClosureExpr {
#[inline]
pub fn generic_param_list(&self) -> Option<GenericParamList> { support::child(&self.syntax) }
pub fn closure_binder(&self) -> Option<ClosureBinder> { support::child(&self.syntax) }
#[inline]
pub fn param_list(&self) -> Option<ParamList> { support::child(&self.syntax) }
#[inline]
@ -256,7 +271,7 @@ impl ClosureExpr {
#[inline]
pub fn const_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![const]) }
#[inline]
pub fn for_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![for]) }
pub fn gen_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![gen]) }
#[inline]
pub fn move_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![move]) }
#[inline]
@ -2446,6 +2461,20 @@ impl AstNode for CastExpr {
#[inline]
fn syntax(&self) -> &SyntaxNode { &self.syntax }
}
impl AstNode for ClosureBinder {
#[inline]
fn can_cast(kind: SyntaxKind) -> bool { kind == CLOSURE_BINDER }
#[inline]
fn cast(syntax: SyntaxNode) -> Option<Self> {
if Self::can_cast(syntax.kind()) {
Some(Self { syntax })
} else {
None
}
}
#[inline]
fn syntax(&self) -> &SyntaxNode { &self.syntax }
}
impl AstNode for ClosureExpr {
#[inline]
fn can_cast(kind: SyntaxKind) -> bool { kind == CLOSURE_EXPR }
@ -5512,6 +5541,11 @@ impl std::fmt::Display for CastExpr {
std::fmt::Display::fmt(self.syntax(), f)
}
}
impl std::fmt::Display for ClosureBinder {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
std::fmt::Display::fmt(self.syntax(), f)
}
}
impl std::fmt::Display for ClosureExpr {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
std::fmt::Display::fmt(self.syntax(), f)

View file

@ -11,7 +11,7 @@ pub(crate) use crate::parsing::reparsing::incremental_reparse;
pub(crate) fn parse_text(text: &str, edition: parser::Edition) -> (GreenNode, Vec<SyntaxError>) {
let _p = tracing::info_span!("parse_text").entered();
let lexed = parser::LexedStr::new(text);
let lexed = parser::LexedStr::new(edition, text);
let parser_input = lexed.to_input();
let parser_output = parser::TopEntryPoint::SourceFile.parse(&parser_input, edition);
let (node, errors, _eof) = build_tree(lexed, parser_output);
@ -24,7 +24,7 @@ pub(crate) fn parse_text_at(
edition: parser::Edition,
) -> (GreenNode, Vec<SyntaxError>) {
let _p = tracing::info_span!("parse_text_at").entered();
let lexed = parser::LexedStr::new(text);
let lexed = parser::LexedStr::new(edition, text);
let parser_input = lexed.to_input();
let parser_output = entry.parse(&parser_input, edition);
let (node, errors, _eof) = build_tree(lexed, parser_output);

View file

@ -6,7 +6,7 @@
//! - otherwise, we search for the nearest `{}` block which contains the edit
//! and try to parse only this block.
use parser::Reparser;
use parser::{Edition, Reparser};
use text_edit::Indel;
use crate::{
@ -51,7 +51,8 @@ fn reparse_token(
}
let mut new_text = get_text_after_edit(prev_token.clone().into(), edit);
let (new_token_kind, new_err) = parser::LexedStr::single_token(&new_text)?;
let (new_token_kind, new_err) =
parser::LexedStr::single_token(Edition::CURRENT, &new_text)?;
if new_token_kind != prev_token_kind
|| (new_token_kind == IDENT && is_contextual_kw(&new_text))
@ -64,7 +65,8 @@ fn reparse_token(
// `b` no longer remains an identifier, but becomes a part of byte string literal
if let Some(next_char) = root.text().char_at(prev_token.text_range().end()) {
new_text.push(next_char);
let token_with_next_char = parser::LexedStr::single_token(&new_text);
let token_with_next_char =
parser::LexedStr::single_token(Edition::CURRENT, &new_text);
if let Some((_kind, _error)) = token_with_next_char {
return None;
}
@ -91,7 +93,7 @@ fn reparse_block(
let (node, reparser) = find_reparsable_node(root, edit.delete)?;
let text = get_text_after_edit(node.clone().into(), edit);
let lexed = parser::LexedStr::new(text.as_str());
let lexed = parser::LexedStr::new(Edition::CURRENT, text.as_str());
let parser_input = lexed.to_input();
if !is_balanced(&lexed) {
return None;

View file

@ -77,6 +77,7 @@ const RESERVED: &[&str] = &[
"abstract", "become", "box", "do", "final", "macro", "override", "priv", "typeof", "unsized",
"virtual", "yield", "try",
];
const CONTEXTUAL_RESERVED: &[&str] = &[];
pub(crate) fn generate_kind_src(
nodes: &[AstNodeSrc],
@ -123,6 +124,9 @@ pub(crate) fn generate_kind_src(
keywords.extend(RESERVED.iter().copied());
keywords.sort();
keywords.dedup();
contextual_keywords.extend(CONTEXTUAL_RESERVED.iter().copied());
contextual_keywords.sort();
contextual_keywords.dedup();
// we leak things here for simplicity, that way we don't have to deal with lifetimes
// The execution is a one shot job so thats fine
@ -137,9 +141,7 @@ pub(crate) fn generate_kind_src(
let nodes = Vec::leak(nodes);
nodes.sort();
let keywords = Vec::leak(keywords);
keywords.sort();
let contextual_keywords = Vec::leak(contextual_keywords);
contextual_keywords.sort();
let literals = Vec::leak(literals);
literals.sort();
let tokens = Vec::leak(tokens);