diff --git a/src/librustc_errors/emitter.rs b/src/librustc_errors/emitter.rs index 25d09a33c154..f04bd7b8f021 100644 --- a/src/librustc_errors/emitter.rs +++ b/src/librustc_errors/emitter.rs @@ -674,8 +674,8 @@ impl EmitterWriter { // | | something about `foo` // | something about `fn foo()` annotations_position.sort_by(|a, b| { - // Decreasing order - a.1.len().cmp(&b.1.len()).reverse() + // Decreasing order. When `a` and `b` are the same length, prefer `Primary`. + (a.1.len(), !a.1.is_primary).cmp(&(b.1.len(), !b.1.is_primary)).reverse() }); // Write the underlines. diff --git a/src/librustc_metadata/cstore_impl.rs b/src/librustc_metadata/cstore_impl.rs index e61229db86dd..ecbc900215f7 100644 --- a/src/librustc_metadata/cstore_impl.rs +++ b/src/librustc_metadata/cstore_impl.rs @@ -436,7 +436,13 @@ impl cstore::CStore { let source_file = sess.parse_sess.source_map().new_source_file(source_name, def.body); let local_span = Span::new(source_file.start_pos, source_file.end_pos, NO_EXPANSION); - let body = source_file_to_stream(&sess.parse_sess, source_file, None); + let (body, errors) = source_file_to_stream(&sess.parse_sess, source_file, None); + for err in errors { + sess.struct_span_err( + err.found_span, + "unclosed delimiter cstore", + ).emit(); + } // Mark the attrs as used let attrs = data.get_item_attrs(id.index, sess); diff --git a/src/libsyntax/parse/lexer/mod.rs b/src/libsyntax/parse/lexer/mod.rs index 2e3233c8ed8f..d3fc1c03634e 100644 --- a/src/libsyntax/parse/lexer/mod.rs +++ b/src/libsyntax/parse/lexer/mod.rs @@ -33,6 +33,15 @@ impl Default for TokenAndSpan { } } +#[derive(Clone, Debug)] +pub struct UnmatchedBrace { + pub expected_delim: token::DelimToken, + pub found_delim: token::DelimToken, + pub found_span: Span, + pub unclosed_span: Option<Span>, + pub candidate_span: Option<Span>, +} + pub struct StringReader<'a> { pub sess: &'a ParseSess, /// The absolute offset within the source_map of the next character to read @@ -58,6 +67,7 @@ pub struct StringReader<'a>
{ span_src_raw: Span, /// Stack of open delimiters and their spans. Used for error message. open_braces: Vec<(token::DelimToken, Span)>, + crate unmatched_braces: Vec<UnmatchedBrace>, /// The type and spans for all braces /// /// Used only for error recovery when arriving to EOF with mismatched braces. @@ -222,6 +232,7 @@ impl<'a> StringReader<'a> { span: syntax_pos::DUMMY_SP, span_src_raw: syntax_pos::DUMMY_SP, open_braces: Vec::new(), + unmatched_braces: Vec::new(), matching_delim_spans: Vec::new(), override_span, last_unclosed_found_span: None, diff --git a/src/libsyntax/parse/lexer/tokentrees.rs b/src/libsyntax/parse/lexer/tokentrees.rs index 7699d9eab222..0db36c84cdfe 100644 --- a/src/libsyntax/parse/lexer/tokentrees.rs +++ b/src/libsyntax/parse/lexer/tokentrees.rs @@ -1,5 +1,5 @@ use crate::print::pprust::token_to_string; -use crate::parse::lexer::StringReader; +use crate::parse::lexer::{StringReader, UnmatchedBrace}; use crate::parse::{token, PResult}; use crate::tokenstream::{DelimSpan, IsJoint::*, TokenStream, TokenTree, TreeAndJoint}; @@ -101,38 +101,38 @@ impl<'a> StringReader<'a> { } // Incorrect delimiter. token::CloseDelim(other) => { - let token_str = token_to_string(&self.token); + let mut unclosed_delimiter = None; + let mut candidate = None; if self.last_unclosed_found_span != Some(self.span) { // do not complain about the same unclosed delimiter multiple times self.last_unclosed_found_span = Some(self.span); - let msg = format!("incorrect close delimiter: `{}`", token_str); - let mut err = self.sess.span_diagnostic.struct_span_err( - self.span, - &msg, - ); - err.span_label(self.span, "incorrect close delimiter"); // This is a conservative error: only report the last unclosed // delimiter. The previous unclosed delimiters could actually be // closed! The parser just hasn't gotten to them yet.
if let Some(&(_, sp)) = self.open_braces.last() { - err.span_label(sp, "un-closed delimiter"); + unclosed_delimiter = Some(sp); }; if let Some(current_padding) = sm.span_to_margin(self.span) { for (brace, brace_span) in &self.open_braces { if let Some(padding) = sm.span_to_margin(*brace_span) { // high likelihood of these two corresponding if current_padding == padding && brace == &other { - err.span_label( - *brace_span, - "close delimiter possibly meant for this", - ); + candidate = Some(*brace_span); } } } } - err.emit(); + let (tok, _) = self.open_braces.pop().unwrap(); + self.unmatched_braces.push(UnmatchedBrace { + expected_delim: tok, + found_delim: other, + found_span: self.span, + unclosed_span: unclosed_delimiter, + candidate_span: candidate, + }); + } else { + self.open_braces.pop(); } - self.open_braces.pop().unwrap(); // If the incorrect delimiter matches an earlier opening // delimiter, then don't consume it (it can be used to diff --git a/src/libsyntax/parse/mod.rs b/src/libsyntax/parse/mod.rs index c723d591f2fb..52c7e774ab64 100644 --- a/src/libsyntax/parse/mod.rs +++ b/src/libsyntax/parse/mod.rs @@ -9,6 +9,7 @@ use crate::parse::parser::Parser; use crate::symbol::Symbol; use crate::tokenstream::{TokenStream, TokenTree}; use crate::diagnostics::plugin::ErrorMap; +use crate::print::pprust::token_to_string; use rustc_data_structures::sync::{Lrc, Lock}; use syntax_pos::{Span, SourceFile, FileName, MultiSpan}; @@ -136,15 +137,17 @@ pub fn parse_crate_attrs_from_source_str(name: FileName, source: String, sess: & new_parser_from_source_str(sess, name, source).parse_inner_attributes() } -pub fn parse_stream_from_source_str(name: FileName, source: String, sess: &ParseSess, - override_span: Option<Span>) - -> TokenStream { +pub fn parse_stream_from_source_str( + name: FileName, + source: String, + sess: &ParseSess, + override_span: Option<Span>, +) -> (TokenStream, Vec<UnmatchedBrace>) { source_file_to_stream(sess, sess.source_map().new_source_file(name, source), override_span) } /// 
Create a new parser from a source string -pub fn new_parser_from_source_str(sess: &ParseSess, name: FileName, source: String) - -> Parser<'_> { +pub fn new_parser_from_source_str(sess: &ParseSess, name: FileName, source: String) -> Parser<'_> { panictry_buffer!(&sess.span_diagnostic, maybe_new_parser_from_source_str(sess, name, source)) } @@ -195,12 +198,14 @@ fn source_file_to_parser(sess: &ParseSess, source_file: Lrc) -> Pars /// Given a source_file and config, return a parser. Returns any buffered errors from lexing the /// initial token stream. -fn maybe_source_file_to_parser(sess: &ParseSess, source_file: Lrc) - -> Result, Vec> -{ +fn maybe_source_file_to_parser( + sess: &ParseSess, + source_file: Lrc, +) -> Result, Vec> { let end_pos = source_file.end_pos; - let mut parser = stream_to_parser(sess, maybe_file_to_stream(sess, source_file, None)?); - + let (stream, unclosed_delims) = maybe_file_to_stream(sess, source_file, None)?; + let mut parser = stream_to_parser(sess, stream); + parser.unclosed_delims = unclosed_delims; if parser.token == token::Eof && parser.span.is_dummy() { parser.span = Span::new(end_pos, end_pos, parser.span.ctxt()); } @@ -247,25 +252,43 @@ fn file_to_source_file(sess: &ParseSess, path: &Path, spanopt: Option) } /// Given a source_file, produce a sequence of token-trees -pub fn source_file_to_stream(sess: &ParseSess, - source_file: Lrc, - override_span: Option) -> TokenStream { +pub fn source_file_to_stream( + sess: &ParseSess, + source_file: Lrc, + override_span: Option, +) -> (TokenStream, Vec) { panictry_buffer!(&sess.span_diagnostic, maybe_file_to_stream(sess, source_file, override_span)) } /// Given a source file, produce a sequence of token-trees. Returns any buffered errors from /// parsing the token tream. 
-pub fn maybe_file_to_stream(sess: &ParseSess, - source_file: Lrc, - override_span: Option) -> Result> { +pub fn maybe_file_to_stream( + sess: &ParseSess, + source_file: Lrc, + override_span: Option, +) -> Result<(TokenStream, Vec), Vec> { let mut srdr = lexer::StringReader::new_or_buffered_errs(sess, source_file, override_span)?; srdr.real_token(); match srdr.parse_all_token_trees() { - Ok(stream) => Ok(stream), + Ok(stream) => Ok((stream, srdr.unmatched_braces)), Err(err) => { let mut buffer = Vec::with_capacity(1); err.buffer(&mut buffer); + for unmatched in srdr.unmatched_braces { + let mut db = sess.span_diagnostic.struct_span_err(unmatched.found_span, &format!( + "incorrect close delimiter: `{}`", + token_to_string(&token::Token::CloseDelim(unmatched.found_delim)), + )); + db.span_label(unmatched.found_span, "incorrect close delimiter"); + if let Some(sp) = unmatched.candidate_span { + db.span_label(sp, "close delimiter possibly meant for this"); + } + if let Some(sp) = unmatched.unclosed_span { + db.span_label(sp, "un-closed delimiter"); + } + db.buffer(&mut buffer); + } Err(buffer) } } diff --git a/src/libsyntax/parse/parser.rs b/src/libsyntax/parse/parser.rs index cacdab980fac..2e605ab6544d 100644 --- a/src/libsyntax/parse/parser.rs +++ b/src/libsyntax/parse/parser.rs @@ -35,7 +35,7 @@ use crate::ext::base::DummyResult; use crate::source_map::{self, SourceMap, Spanned, respan}; use crate::errors::{self, Applicability, DiagnosticBuilder, DiagnosticId}; use crate::parse::{self, SeqSep, classify, token}; -use crate::parse::lexer::TokenAndSpan; +use crate::parse::lexer::{TokenAndSpan, UnmatchedBrace}; use crate::parse::lexer::comments::{doc_comment_style, strip_doc_comment_decoration}; use crate::parse::token::DelimToken; use crate::parse::{new_sub_parser_from_file, ParseSess, Directory, DirectoryOwnership}; @@ -251,6 +251,8 @@ pub struct Parser<'a> { /// /// See the comments in the `parse_path_segment` function for more details. 
crate unmatched_angle_bracket_count: u32, + crate max_angle_bracket_count: u32, + crate unclosed_delims: Vec<UnmatchedBrace>, } @@ -573,6 +575,8 @@ impl<'a> Parser<'a> { desugar_doc_comments, cfg_mods: true, unmatched_angle_bracket_count: 0, + max_angle_bracket_count: 0, + unclosed_delims: Vec::new(), }; let tok = parser.next_tok(); @@ -642,11 +646,11 @@ impl<'a> Parser<'a> { /// Expect and consume the token t. Signal an error if /// the next token is not t. - pub fn expect(&mut self, t: &token::Token) -> PResult<'a, ()> { + pub fn expect(&mut self, t: &token::Token) -> PResult<'a, bool /* recovered */> { if self.expected_tokens.is_empty() { if self.token == *t { self.bump(); - Ok(()) + Ok(false) } else { let token_str = pprust::token_to_string(t); let this_token_str = self.this_token_descr(); @@ -661,6 +665,12 @@ impl<'a> Parser<'a> { self.sess.source_map().next_point(self.prev_span) }; let label_exp = format!("expected `{}`", token_str); + match self.recover_closing_delimiter(&[t.clone()], err) { + Err(e) => err = e, + Ok(recovered) => { + return Ok(recovered); + } + } let cm = self.sess.source_map(); match (cm.lookup_line(self.span.lo()), cm.lookup_line(sp.lo())) { (Ok(ref a), Ok(ref b)) if a.line == b.line => { @@ -680,12 +690,62 @@ impl<'a> Parser<'a> { } } + fn recover_closing_delimiter( + &mut self, + tokens: &[token::Token], + mut err: DiagnosticBuilder<'a>, + ) -> PResult<'a, bool> { + let mut pos = None; + // we want to use the last closing delim that would apply + for (i, unmatched) in self.unclosed_delims.iter().enumerate().rev() { + if tokens.contains(&token::CloseDelim(unmatched.expected_delim)) + && Some(self.span) > unmatched.unclosed_span + { + pos = Some(i); + } + } + match pos { + Some(pos) => { + // Recover and assume that the detected unclosed delimiter was meant for + // this location. Emit the diagnostic and act as if the delimiter was + // present for the parser's sake. + + // Don't attempt to recover from this unclosed delimiter more than once. 
+ let unmatched = self.unclosed_delims.remove(pos); + let delim = TokenType::Token(token::CloseDelim(unmatched.expected_delim)); + + // We want to suggest the inclusion of the closing delimiter where it makes + // the most sense, which is immediately after the last token: + // + // {foo(bar {}} + // - ^ help: `)` may belong here + // | + // in order to close this... + if let Some(sp) = unmatched.unclosed_span { + err.span_label(sp, "in order to close this..."); + } + err.span_suggestion_short_with_applicability( + self.sess.source_map().next_point(self.prev_span), + &format!("{} may belong here", delim.to_string()), + delim.to_string(), + Applicability::MaybeIncorrect, + ); + err.emit(); + // self.expected_tokens.clear(); // reduce errors + Ok(true) + } + _ => Err(err), + } + } + /// Expect next token to be edible or inedible token. If edible, /// then consume it; if inedible, then return without consuming /// anything. Signal a fatal error if next token is unexpected. - pub fn expect_one_of(&mut self, - edible: &[token::Token], - inedible: &[token::Token]) -> PResult<'a, ()>{ + pub fn expect_one_of( + &mut self, + edible: &[token::Token], + inedible: &[token::Token], + ) -> PResult<'a, bool /* recovered */> { fn tokens_to_string(tokens: &[TokenType]) -> String { let mut i = tokens.iter(); // This might be a sign we need a connect method on Iterator. 
@@ -705,10 +765,10 @@ impl<'a> Parser<'a> { } if edible.contains(&self.token) { self.bump(); - Ok(()) + Ok(false) } else if inedible.contains(&self.token) { // leave it in the input - Ok(()) + Ok(false) } else { let mut expected = edible.iter() .map(|x| TokenType::Token(x.clone())) @@ -759,6 +819,15 @@ impl<'a> Parser<'a> { } else { label_sp }; + match self.recover_closing_delimiter(&expected.iter().filter_map(|tt| match tt { + TokenType::Token(t) => Some(t.clone()), + _ => None, + }).collect::>(), err) { + Err(e) => err = e, + Ok(recovered) => { + return Ok(recovered); + } + } let cm = self.sess.source_map(); match (cm.lookup_line(self.span.lo()), cm.lookup_line(sp.lo())) { @@ -1053,6 +1122,7 @@ impl<'a> Parser<'a> { if ate { // See doc comment for `unmatched_angle_bracket_count`. self.unmatched_angle_bracket_count += 1; + self.max_angle_bracket_count += 1; debug!("eat_lt: (increment) count={:?}", self.unmatched_angle_bracket_count); } @@ -1093,14 +1163,30 @@ impl<'a> Parser<'a> { }; match ate { - Some(x) => { + Some(_) => { // See doc comment for `unmatched_angle_bracket_count`. 
self.unmatched_angle_bracket_count -= 1; debug!("expect_gt: (decrement) count={:?}", self.unmatched_angle_bracket_count); - Ok(x) + Ok(()) }, - None => self.unexpected(), + None => { + match ( + &self.token, + self.unmatched_angle_bracket_count, + self.max_angle_bracket_count > 1, + ) { + // (token::OpenDelim(_), 1, true) | (token::Semi, 1, true) => { + // self.struct_span_err( + // self.span, + // &format!("expected `>`, found `{}`", self.this_token_to_string()), + // // ).span_suggestion_short_with_applicability( + // ).emit(); + // Ok(()) + // } + _ => self.unexpected(), + } + } } } @@ -1127,19 +1213,22 @@ impl<'a> Parser<'a> { -> PResult<'a, Vec> where F: FnMut(&mut Parser<'a>) -> PResult<'a, T>, { - let val = self.parse_seq_to_before_end(ket, sep, f)?; - self.bump(); + let (val, recovered) = self.parse_seq_to_before_end(ket, sep, f)?; + if !recovered { + self.bump(); + } Ok(val) } /// Parse a sequence, not including the closing delimiter. The function /// f must consume tokens until reaching the next separator or /// closing bracket. 
- pub fn parse_seq_to_before_end(&mut self, - ket: &token::Token, - sep: SeqSep, - f: F) - -> PResult<'a, Vec> + pub fn parse_seq_to_before_end( + &mut self, + ket: &token::Token, + sep: SeqSep, + f: F, + ) -> PResult<'a, (Vec, bool)> where F: FnMut(&mut Parser<'a>) -> PResult<'a, T> { self.parse_seq_to_before_tokens(&[ket], sep, TokenExpectType::Expect, f) @@ -1151,10 +1240,11 @@ impl<'a> Parser<'a> { sep: SeqSep, expect: TokenExpectType, mut f: F, - ) -> PResult<'a, Vec> + ) -> PResult<'a, (Vec, bool /* recovered */)> where F: FnMut(&mut Parser<'a>) -> PResult<'a, T> { - let mut first: bool = true; + let mut first = true; + let mut recovered = false; let mut v = vec![]; while !kets.iter().any(|k| { match expect { @@ -1170,23 +1260,30 @@ impl<'a> Parser<'a> { if first { first = false; } else { - if let Err(mut e) = self.expect(t) { - // Attempt to keep parsing if it was a similar separator - if let Some(ref tokens) = t.similar_tokens() { - if tokens.contains(&self.token) { - self.bump(); - } + match self.expect(t) { + Ok(false) => {} + Ok(true) => { + recovered = true; + break; } - e.emit(); - // Attempt to keep parsing if it was an omitted separator - match f(self) { - Ok(t) => { - v.push(t); - continue; - }, - Err(mut e) => { - e.cancel(); - break; + Err(mut e) => { + // Attempt to keep parsing if it was a similar separator + if let Some(ref tokens) = t.similar_tokens() { + if tokens.contains(&self.token) { + self.bump(); + } + } + e.emit(); + // Attempt to keep parsing if it was an omitted separator + match f(self) { + Ok(t) => { + v.push(t); + continue; + }, + Err(mut e) => { + e.cancel(); + break; + } } } } @@ -1205,23 +1302,26 @@ impl<'a> Parser<'a> { v.push(t); } - Ok(v) + Ok((v, recovered)) } /// Parse a sequence, including the closing delimiter. The function /// f must consume tokens until reaching the next separator or /// closing bracket. 
- fn parse_unspanned_seq(&mut self, - bra: &token::Token, - ket: &token::Token, - sep: SeqSep, - f: F) - -> PResult<'a, Vec> where + fn parse_unspanned_seq( + &mut self, + bra: &token::Token, + ket: &token::Token, + sep: SeqSep, + f: F, + ) -> PResult<'a, Vec> where F: FnMut(&mut Parser<'a>) -> PResult<'a, T>, { self.expect(bra)?; - let result = self.parse_seq_to_before_end(ket, sep, f)?; - self.eat(ket); + let (result, recovered) = self.parse_seq_to_before_end(ket, sep, f)?; + if !recovered { + self.eat(ket); + } Ok(result) } @@ -2273,7 +2373,10 @@ impl<'a> Parser<'a> { // We use `style == PathStyle::Expr` to check if this is in a recursion or not. If // it isn't, then we reset the unmatched angle bracket count as we're about to start // parsing a new path. - if style == PathStyle::Expr { self.unmatched_angle_bracket_count = 0; } + if style == PathStyle::Expr { + self.unmatched_angle_bracket_count = 0; + self.max_angle_bracket_count = 0; + } let args = if self.eat_lt() { // `<'a, T, A = U>` @@ -2285,12 +2388,14 @@ impl<'a> Parser<'a> { } else { // `(T, U) -> R` self.bump(); // `(` - let inputs = self.parse_seq_to_before_tokens( + let (inputs, recovered) = self.parse_seq_to_before_tokens( &[&token::CloseDelim(token::Paren)], SeqSep::trailing_allowed(token::Comma), TokenExpectType::Expect, |p| p.parse_ty())?; - self.bump(); // `)` + if !recovered { + self.bump(); // `)` + } let span = lo.to(self.prev_span); let output = if self.eat(&token::RArrow) { Some(self.parse_ty_common(false, false)?) 
@@ -2496,9 +2601,13 @@ impl<'a> Parser<'a> { // (e,) is a tuple with only one field, e let mut es = vec![]; let mut trailing_comma = false; + let mut recovered = false; while self.token != token::CloseDelim(token::Paren) { es.push(self.parse_expr()?); - self.expect_one_of(&[], &[token::Comma, token::CloseDelim(token::Paren)])?; + recovered = self.expect_one_of( + &[], + &[token::Comma, token::CloseDelim(token::Paren)], + )?; if self.eat(&token::Comma) { trailing_comma = true; } else { @@ -2506,7 +2615,9 @@ impl<'a> Parser<'a> { break; } } - self.bump(); + if !recovered { + self.bump(); + } hi = self.prev_span; ex = if es.len() == 1 && !trailing_comma { @@ -2802,7 +2913,7 @@ impl<'a> Parser<'a> { match self.expect_one_of(&[token::Comma], &[token::CloseDelim(token::Brace)]) { - Ok(()) => if let Some(f) = parsed_field.or(recovery_field) { + Ok(_) => if let Some(f) = parsed_field.or(recovery_field) { // only include the field if there's no parse error for the field name fields.push(f); } @@ -6011,7 +6122,7 @@ impl<'a> Parser<'a> { let sp = self.span; let mut variadic = false; - let args: Vec> = + let (args, recovered): (Vec>, bool) = self.parse_seq_to_before_end( &token::CloseDelim(token::Paren), SeqSep::trailing_allowed(token::Comma), @@ -6059,7 +6170,9 @@ impl<'a> Parser<'a> { } )?; - self.eat(&token::CloseDelim(token::Paren)); + if !recovered { + self.eat(&token::CloseDelim(token::Paren)); + } let args: Vec<_> = args.into_iter().filter_map(|x| x).collect(); @@ -6204,15 +6317,15 @@ impl<'a> Parser<'a> { // Parse the rest of the function parameter list. 
let sep = SeqSep::trailing_allowed(token::Comma); - let fn_inputs = if let Some(self_arg) = self_arg { + let (fn_inputs, recovered) = if let Some(self_arg) = self_arg { if self.check(&token::CloseDelim(token::Paren)) { - vec![self_arg] + (vec![self_arg], false) } else if self.eat(&token::Comma) { let mut fn_inputs = vec![self_arg]; - fn_inputs.append(&mut self.parse_seq_to_before_end( - &token::CloseDelim(token::Paren), sep, parse_arg_fn)? - ); - fn_inputs + let (mut input, recovered) = self.parse_seq_to_before_end( + &token::CloseDelim(token::Paren), sep, parse_arg_fn)?; + fn_inputs.append(&mut input); + (fn_inputs, recovered) } else { return self.unexpected(); } @@ -6220,8 +6333,10 @@ impl<'a> Parser<'a> { self.parse_seq_to_before_end(&token::CloseDelim(token::Paren), sep, parse_arg_fn)? }; - // Parse closing paren and return type. - self.expect(&token::CloseDelim(token::Paren))?; + if !recovered { + // Parse closing paren and return type. + self.expect(&token::CloseDelim(token::Paren))?; + } Ok(P(FnDecl { inputs: fn_inputs, output: self.parse_ret_ty(true)?, @@ -6241,7 +6356,7 @@ impl<'a> Parser<'a> { SeqSep::trailing_allowed(token::Comma), TokenExpectType::NoExpect, |p| p.parse_fn_block_arg() - )?; + )?.0; self.expect_or()?; args } @@ -8238,7 +8353,7 @@ impl<'a> Parser<'a> { // eat a matched-delimiter token tree: let (delim, tts) = self.expect_delimited_token_tree()?; if delim != MacDelimiter::Brace { - self.expect(&token::Semi)? + self.expect(&token::Semi)?; } Ok(Some(respan(lo.to(self.prev_span), Mac_ { path: pth, tts, delim }))) @@ -8383,11 +8498,27 @@ impl<'a> Parser<'a> { /// entry point for the parser. 
pub fn parse_crate_mod(&mut self) -> PResult<'a, Crate> { let lo = self.span; - Ok(ast::Crate { + let krate = Ok(ast::Crate { attrs: self.parse_inner_attributes()?, module: self.parse_mod_items(&token::Eof, lo)?, span: lo.to(self.span), - }) + }); + for unmatched in &self.unclosed_delims { + let mut err = self.struct_span_err(unmatched.found_span, &format!( + "incorrect close delimiter: `{}`", + pprust::token_to_string(&token::Token::CloseDelim(unmatched.found_delim)), + )); + err.span_label(unmatched.found_span, "incorrect close delimiter"); + if let Some(sp) = unmatched.candidate_span { + err.span_label(sp, "close delimiter possibly meant for this"); + } + if let Some(sp) = unmatched.unclosed_span { + err.span_label(sp, "un-closed delimiter"); + } + err.emit(); + } + self.unclosed_delims.clear(); + krate } pub fn parse_optional_str(&mut self) -> Option<(Symbol, ast::StrStyle, Option)> { diff --git a/src/libsyntax/parse/token.rs b/src/libsyntax/parse/token.rs index 3b1fa5ea01f5..a0f3113a1cb7 100644 --- a/src/libsyntax/parse/token.rs +++ b/src/libsyntax/parse/token.rs @@ -487,8 +487,8 @@ impl Token { /// Enables better error recovery when the wrong token is found. 
crate fn similar_tokens(&self) -> Option> { match *self { - Comma => Some(vec![Dot, Lt]), - Semi => Some(vec![Colon]), + Comma => Some(vec![Dot, Lt, Semi]), + Semi => Some(vec![Colon, Comma]), _ => None } } @@ -545,7 +545,15 @@ impl Token { // FIXME(#43081): Avoid this pretty-print + reparse hack let source = pprust::token_to_string(self); let filename = FileName::macro_expansion_source_code(&source); - parse_stream_from_source_str(filename, source, sess, Some(span)) + let (tokens, errors) = parse_stream_from_source_str( + filename, source, sess, Some(span)); + for err in errors { + sess.span_diagnostic.struct_span_err( + err.found_span, + "unclosed delimiter for_real", + ).emit(); + } + tokens }); // During early phases of the compiler the AST could get modified @@ -786,12 +794,18 @@ fn prepend_attrs(sess: &ParseSess, let source = pprust::attr_to_string(attr); let macro_filename = FileName::macro_expansion_source_code(&source); if attr.is_sugared_doc { - let stream = parse_stream_from_source_str( + let (stream, errors) = parse_stream_from_source_str( macro_filename, source, sess, Some(span), ); + for err in errors { + sess.span_diagnostic.struct_span_err( + err.found_span, + "unclosed delimiter attrs", + ).emit(); + } builder.push(stream); continue } @@ -808,12 +822,18 @@ fn prepend_attrs(sess: &ParseSess, // ... and for more complicated paths, fall back to a reparse hack that // should eventually be removed. 
} else { - let stream = parse_stream_from_source_str( + let (stream, errors) = parse_stream_from_source_str( macro_filename, source, sess, Some(span), ); + for err in errors { + sess.span_diagnostic.struct_span_err( + err.found_span, + "unclosed delimiter attrs 2", + ).emit(); + } brackets.push(stream); } diff --git a/src/libsyntax_ext/proc_macro_server.rs b/src/libsyntax_ext/proc_macro_server.rs index 7de9b9343a8f..ef291e2102b4 100644 --- a/src/libsyntax_ext/proc_macro_server.rs +++ b/src/libsyntax_ext/proc_macro_server.rs @@ -413,7 +413,7 @@ impl server::TokenStream for Rustc<'_> { src.to_string(), self.sess, Some(self.call_site), - ) + ).0 } fn to_string(&mut self, stream: &Self::TokenStream) -> String { stream.to_string() diff --git a/src/test/ui/parser-recovery-2.stderr b/src/test/ui/parser-recovery-2.stderr index 92d8cbc100a0..76f7af38e776 100644 --- a/src/test/ui/parser-recovery-2.stderr +++ b/src/test/ui/parser-recovery-2.stderr @@ -1,3 +1,9 @@ +error: unexpected token: `;` + --> $DIR/parser-recovery-2.rs:12:15 + | +LL | let x = y.; //~ ERROR unexpected token + | ^ + error: incorrect close delimiter: `)` --> $DIR/parser-recovery-2.rs:8:5 | @@ -7,12 +13,6 @@ LL | let x = foo(); //~ ERROR cannot find function `foo` in this scope LL | ) //~ ERROR incorrect close delimiter: `)` | ^ incorrect close delimiter -error: unexpected token: `;` - --> $DIR/parser-recovery-2.rs:12:15 - | -LL | let x = y.; //~ ERROR unexpected token - | ^ - error[E0425]: cannot find function `foo` in this scope --> $DIR/parser-recovery-2.rs:7:17 | diff --git a/src/test/ui/parser/issue-10636-2.rs b/src/test/ui/parser/issue-10636-2.rs index a02fd41b349c..6fb63639d5f6 100644 --- a/src/test/ui/parser/issue-10636-2.rs +++ b/src/test/ui/parser/issue-10636-2.rs @@ -5,7 +5,7 @@ pub fn trace_option(option: Option) { option.map(|some| 42; //~^ ERROR: expected one of -} //~ ERROR: incorrect close delimiter +} //~^ ERROR: expected expression, found `)` fn main() {} diff --git 
a/src/test/ui/parser/issue-10636-2.stderr b/src/test/ui/parser/issue-10636-2.stderr index 9b3115cb3f4b..670a116eb51f 100644 --- a/src/test/ui/parser/issue-10636-2.stderr +++ b/src/test/ui/parser/issue-10636-2.stderr @@ -1,25 +1,17 @@ -error: incorrect close delimiter: `}` - --> $DIR/issue-10636-2.rs:8:1 - | -LL | pub fn trace_option(option: Option) { - | - close delimiter possibly meant for this -LL | option.map(|some| 42; - | - un-closed delimiter -... -LL | } //~ ERROR: incorrect close delimiter - | ^ incorrect close delimiter - error: expected one of `)`, `,`, `.`, `?`, or an operator, found `;` --> $DIR/issue-10636-2.rs:5:25 | LL | option.map(|some| 42; - | ^ expected one of `)`, `,`, `.`, `?`, or an operator here + | - ^ + | | | + | | help: `)` may belong here + | in order to close this... error: expected expression, found `)` --> $DIR/issue-10636-2.rs:8:1 | -LL | } //~ ERROR: incorrect close delimiter +LL | } | ^ expected expression -error: aborting due to 3 previous errors +error: aborting due to 2 previous errors diff --git a/src/test/ui/parser/issue-2354.rs b/src/test/ui/parser/issue-2354.rs index 565f84822f7d..b383bc00f911 100644 --- a/src/test/ui/parser/issue-2354.rs +++ b/src/test/ui/parser/issue-2354.rs @@ -1,4 +1,5 @@ -fn foo() { //~ NOTE un-closed delimiter +fn foo() { + //~^ NOTE un-closed delimiter match Some(10) { //~^ NOTE this delimiter might not be properly closed... Some(y) => { panic!(); } diff --git a/src/test/ui/parser/issue-2354.stderr b/src/test/ui/parser/issue-2354.stderr index 0f4cd5724ce1..f1b0905d8660 100644 --- a/src/test/ui/parser/issue-2354.stderr +++ b/src/test/ui/parser/issue-2354.stderr @@ -1,8 +1,9 @@ error: this file contains an un-closed delimiter - --> $DIR/issue-2354.rs:15:66 + --> $DIR/issue-2354.rs:16:66 | -LL | fn foo() { //~ NOTE un-closed delimiter +LL | fn foo() { | - un-closed delimiter +LL | //~^ NOTE un-closed delimiter LL | match Some(10) { | - this delimiter might not be properly closed... ... 
@@ -16,7 +17,7 @@ error[E0601]: `main` function not found in crate `issue_2354` | = note: the main function must be defined at the crate level but you have one or more functions named 'main' that are not defined at the crate level. Either move the definition or attach the `#[main]` attribute to override this behavior. note: here is a function named 'main' - --> $DIR/issue-2354.rs:14:1 + --> $DIR/issue-2354.rs:15:1 | LL | fn main() {} //~ NOTE here is a function named 'main' | ^^^^^^^^^^^^ diff --git a/src/test/ui/parser/macro-mismatched-delim-paren-brace.stderr b/src/test/ui/parser/macro-mismatched-delim-paren-brace.stderr index 805ba8b6baa6..abb082097953 100644 --- a/src/test/ui/parser/macro-mismatched-delim-paren-brace.stderr +++ b/src/test/ui/parser/macro-mismatched-delim-paren-brace.stderr @@ -1,3 +1,9 @@ +error: unexpected close delimiter: `}` + --> $DIR/macro-mismatched-delim-paren-brace.rs:5:1 + | +LL | } //~ ERROR unexpected close delimiter: `}` + | ^ unexpected close delimiter + error: incorrect close delimiter: `}` --> $DIR/macro-mismatched-delim-paren-brace.rs:4:5 | @@ -7,11 +13,5 @@ LL | bar, "baz", 1, 2.0 LL | } //~ ERROR incorrect close delimiter | ^ incorrect close delimiter -error: unexpected close delimiter: `}` - --> $DIR/macro-mismatched-delim-paren-brace.rs:5:1 - | -LL | } //~ ERROR unexpected close delimiter: `}` - | ^ unexpected close delimiter - error: aborting due to 2 previous errors diff --git a/src/test/ui/resolve/token-error-correct-3.rs b/src/test/ui/resolve/token-error-correct-3.rs index 86cf71117a6f..b1ca0bbfc57c 100644 --- a/src/test/ui/resolve/token-error-correct-3.rs +++ b/src/test/ui/resolve/token-error-correct-3.rs @@ -17,7 +17,7 @@ pub mod raw { //~| expected type `()` //~| found type `std::result::Result` //~| expected one of - } else { //~ ERROR: incorrect close delimiter: `}` + } else { //~^ ERROR: expected one of //~| unexpected token Ok(false); diff --git a/src/test/ui/resolve/token-error-correct-3.stderr 
b/src/test/ui/resolve/token-error-correct-3.stderr index 2164d27a0517..fcc1c34d1fc5 100644 --- a/src/test/ui/resolve/token-error-correct-3.stderr +++ b/src/test/ui/resolve/token-error-correct-3.stderr @@ -1,19 +1,11 @@ -error: incorrect close delimiter: `}` - --> $DIR/token-error-correct-3.rs:20:9 - | -LL | if !is_directory(path.as_ref()) { //~ ERROR: cannot find function `is_directory` - | - close delimiter possibly meant for this -LL | callback(path.as_ref(); //~ ERROR expected one of - | - un-closed delimiter -... -LL | } else { //~ ERROR: incorrect close delimiter: `}` - | ^ incorrect close delimiter - error: expected one of `)`, `,`, `.`, `?`, or an operator, found `;` --> $DIR/token-error-correct-3.rs:14:35 | LL | callback(path.as_ref(); //~ ERROR expected one of - | ^ expected one of `)`, `,`, `.`, `?`, or an operator here + | - ^ + | | | + | | help: `)` may belong here + | in order to close this... error: expected one of `.`, `;`, `?`, `}`, or an operator, found `)` --> $DIR/token-error-correct-3.rs:20:9 @@ -21,7 +13,7 @@ error: expected one of `.`, `;`, `?`, `}`, or an operator, found `)` LL | fs::create_dir_all(path.as_ref()).map(|()| true) //~ ERROR: mismatched types | - expected one of `.`, `;`, `?`, `}`, or an operator here ... -LL | } else { //~ ERROR: incorrect close delimiter: `}` +LL | } else { | ^ unexpected token error[E0425]: cannot find function `is_directory` in this scope @@ -41,7 +33,7 @@ LL | fs::create_dir_all(path.as_ref()).map(|()| true) //~ ERROR: mis = note: expected type `()` found type `std::result::Result` -error: aborting due to 5 previous errors +error: aborting due to 4 previous errors Some errors occurred: E0308, E0425. For more information about an error, try `rustc --explain E0308`. 
diff --git a/src/test/ui/resolve/token-error-correct.stderr b/src/test/ui/resolve/token-error-correct.stderr index 0a4590461b5a..48a247a5898e 100644 --- a/src/test/ui/resolve/token-error-correct.stderr +++ b/src/test/ui/resolve/token-error-correct.stderr @@ -1,3 +1,9 @@ +error: expected expression, found `;` + --> $DIR/token-error-correct.rs:4:13 + | +LL | foo(bar(; + | ^ expected expression + error: incorrect close delimiter: `}` --> $DIR/token-error-correct.rs:6:1 | @@ -9,11 +15,5 @@ LL | //~^ ERROR: expected expression, found `;` LL | } | ^ incorrect close delimiter -error: expected expression, found `;` - --> $DIR/token-error-correct.rs:4:13 - | -LL | foo(bar(; - | ^ expected expression - error: aborting due to 2 previous errors