Rollup merge of #143708 - epage:pretty, r=compiler-errors
fix: Include frontmatter in -Zunpretty output

In the implementation (rust-lang/rust#140035), this was left as an open question for the tracking issue (rust-lang/rust#136889). My assumption is that the frontmatter should be carried over into the -Zunpretty output. The test was carried over from rust-lang/rust#137193, which was superseded by rust-lang/rust#140035. Either way, thankfully, `-Zunpretty` is unstable, so we can still change this behavior even if frontmatter is stabilized.
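For context, frontmatter is the metadata block that can open a Rust source file, tracked in rust-lang/rust#136889. A minimal sketch of such a file, assuming the cargo-script style `---` fence syntax from that tracking issue (the dependency shown is only illustrative); with this change, pretty-printing via `-Zunpretty` keeps the block instead of dropping it from the output:

---
[dependencies]
regex = "1"
---

fn main() {
    println!("the frontmatter above survives -Zunpretty");
}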
Commit: a1844ec6c0

6 changed files with 17 additions and 20 deletions
@@ -9,7 +9,7 @@ use clippy_utils::visitors::{Descend, for_each_expr};
 use hir::HirId;
 use rustc_hir as hir;
 use rustc_hir::{Block, BlockCheckMode, ItemKind, Node, UnsafeSource};
-use rustc_lexer::{TokenKind, tokenize};
+use rustc_lexer::{FrontmatterAllowed, TokenKind, tokenize};
 use rustc_lint::{LateContext, LateLintPass, LintContext};
 use rustc_session::impl_lint_pass;
 use rustc_span::{BytePos, Pos, RelativeBytePos, Span, SyntaxContext};
@@ -746,7 +746,7 @@ fn text_has_safety_comment(src: &str, line_starts: &[RelativeBytePos], start_pos
     loop {
         if line.starts_with("/*") {
             let src = &src[line_start..line_starts.last().unwrap().to_usize()];
-            let mut tokens = tokenize(src);
+            let mut tokens = tokenize(src, FrontmatterAllowed::No);
             return (src[..tokens.next().unwrap().len as usize]
                 .to_ascii_uppercase()
                 .contains("SAFETY:")
@@ -3,7 +3,7 @@ use clippy_utils::source::SpanRangeExt;
 use itertools::Itertools;
 use rustc_ast::{Crate, Expr, ExprKind, FormatArgs};
 use rustc_data_structures::fx::FxHashMap;
-use rustc_lexer::{TokenKind, tokenize};
+use rustc_lexer::{FrontmatterAllowed, TokenKind, tokenize};
 use rustc_lint::{EarlyContext, EarlyLintPass};
 use rustc_session::impl_lint_pass;
 use rustc_span::{Span, hygiene};
@@ -82,7 +82,7 @@ fn has_span_from_proc_macro(cx: &EarlyContext<'_>, args: &FormatArgs) -> bool {
         .all(|sp| {
             sp.check_source_text(cx, |src| {
                 // text should be either `, name` or `, name =`
-                let mut iter = tokenize(src).filter(|t| {
+                let mut iter = tokenize(src, FrontmatterAllowed::No).filter(|t| {
                     !matches!(
                         t.kind,
                         TokenKind::LineComment { .. } | TokenKind::BlockComment { .. } | TokenKind::Whitespace
@@ -15,7 +15,7 @@ use rustc_hir::def::{DefKind, Res};
 use rustc_hir::{
     BinOpKind, Block, ConstBlock, Expr, ExprKind, HirId, Item, ItemKind, Node, PatExpr, PatExprKind, QPath, UnOp,
 };
-use rustc_lexer::tokenize;
+use rustc_lexer::{FrontmatterAllowed, tokenize};
 use rustc_lint::LateContext;
 use rustc_middle::mir::ConstValue;
 use rustc_middle::mir::interpret::{Scalar, alloc_range};
@@ -304,9 +304,7 @@ pub fn lit_to_mir_constant<'tcx>(lit: &LitKind, ty: Option<Ty<'tcx>>) -> Constan
     match *lit {
         LitKind::Str(ref is, _) => Constant::Str(is.to_string()),
         LitKind::Byte(b) => Constant::Int(u128::from(b)),
-        LitKind::ByteStr(ref s, _) | LitKind::CStr(ref s, _) => {
-            Constant::Binary(s.as_byte_str().to_vec())
-        }
+        LitKind::ByteStr(ref s, _) | LitKind::CStr(ref s, _) => Constant::Binary(s.as_byte_str().to_vec()),
         LitKind::Char(c) => Constant::Char(c),
         LitKind::Int(n, _) => Constant::Int(n.get()),
         LitKind::Float(ref is, LitFloatType::Suffixed(fty)) => match fty {
@@ -568,9 +566,7 @@ impl<'tcx> ConstEvalCtxt<'tcx> {
         } else {
             match &lit.node {
                 LitKind::Str(is, _) => Some(is.is_empty()),
-                LitKind::ByteStr(s, _) | LitKind::CStr(s, _) => {
-                    Some(s.as_byte_str().is_empty())
-                }
+                LitKind::ByteStr(s, _) | LitKind::CStr(s, _) => Some(s.as_byte_str().is_empty()),
                 _ => None,
             }
         }
@@ -715,7 +711,7 @@ impl<'tcx> ConstEvalCtxt<'tcx> {
             && let Some(src) = src.as_str()
         {
             use rustc_lexer::TokenKind::{BlockComment, LineComment, OpenBrace, Semi, Whitespace};
-            if !tokenize(src)
+            if !tokenize(src, FrontmatterAllowed::No)
                 .map(|t| t.kind)
                 .filter(|t| !matches!(t, Whitespace | LineComment { .. } | BlockComment { .. } | Semi))
                 .eq([OpenBrace])
@@ -12,7 +12,7 @@ use rustc_hir::{
     Pat, PatExpr, PatExprKind, PatField, PatKind, Path, PathSegment, PrimTy, QPath, Stmt, StmtKind, StructTailExpr,
     TraitBoundModifiers, Ty, TyKind, TyPat, TyPatKind,
 };
-use rustc_lexer::{TokenKind, tokenize};
+use rustc_lexer::{FrontmatterAllowed, TokenKind, tokenize};
 use rustc_lint::LateContext;
 use rustc_middle::ty::TypeckResults;
 use rustc_span::{BytePos, ExpnKind, MacroKind, Symbol, SyntaxContext, sym};
@@ -686,7 +686,7 @@ fn reduce_exprkind<'hir>(cx: &LateContext<'_>, kind: &'hir ExprKind<'hir>) -> &'
         // `{}` => `()`
         ([], None)
             if block.span.check_source_text(cx, |src| {
-                tokenize(src)
+                tokenize(src, FrontmatterAllowed::No)
                     .map(|t| t.kind)
                     .filter(|t| {
                         !matches!(
@@ -106,7 +106,7 @@ use rustc_hir::{
     Param, Pat, PatExpr, PatExprKind, PatKind, Path, PathSegment, QPath, Stmt, StmtKind, TraitFn, TraitItem,
     TraitItemKind, TraitRef, TyKind, UnOp, def,
 };
-use rustc_lexer::{TokenKind, tokenize};
+use rustc_lexer::{FrontmatterAllowed, TokenKind, tokenize};
 use rustc_lint::{LateContext, Level, Lint, LintContext};
 use rustc_middle::hir::nested_filter;
 use rustc_middle::hir::place::PlaceBase;
@@ -2764,7 +2764,7 @@ pub fn expr_use_ctxt<'tcx>(cx: &LateContext<'tcx>, e: &Expr<'tcx>) -> ExprUseCtx
 /// Tokenizes the input while keeping the text associated with each token.
 pub fn tokenize_with_text(s: &str) -> impl Iterator<Item = (TokenKind, &str, InnerSpan)> {
     let mut pos = 0;
-    tokenize(s).map(move |t| {
+    tokenize(s, FrontmatterAllowed::No).map(move |t| {
         let end = pos + t.len;
         let range = pos as usize..end as usize;
         let inner = InnerSpan::new(range.start, range.end);
@@ -2779,7 +2779,7 @@ pub fn span_contains_comment(sm: &SourceMap, span: Span) -> bool {
     let Ok(snippet) = sm.span_to_snippet(span) else {
         return false;
     };
-    return tokenize(&snippet).any(|token| {
+    return tokenize(&snippet, FrontmatterAllowed::No).any(|token| {
         matches!(
             token.kind,
             TokenKind::BlockComment { .. } | TokenKind::LineComment { .. }
@@ -7,7 +7,7 @@ use std::sync::Arc;
 use rustc_ast::{LitKind, StrStyle};
 use rustc_errors::Applicability;
 use rustc_hir::{BlockCheckMode, Expr, ExprKind, UnsafeSource};
-use rustc_lexer::{LiteralKind, TokenKind, tokenize};
+use rustc_lexer::{FrontmatterAllowed, LiteralKind, TokenKind, tokenize};
 use rustc_lint::{EarlyContext, LateContext};
 use rustc_middle::ty::TyCtxt;
 use rustc_session::Session;
@@ -277,7 +277,7 @@ fn map_range(
 }
 
 fn ends_with_line_comment_or_broken(text: &str) -> bool {
-    let Some(last) = tokenize(text).last() else {
+    let Some(last) = tokenize(text, FrontmatterAllowed::No).last() else {
         return false;
     };
     match last.kind {
@@ -310,7 +310,8 @@ fn with_leading_whitespace_inner(lines: &[RelativeBytePos], src: &str, range: Ra
         && ends_with_line_comment_or_broken(&start[prev_start..])
         && let next_line = lines.partition_point(|&pos| pos.to_usize() < range.end)
         && let next_start = lines.get(next_line).map_or(src.len(), |&x| x.to_usize())
-        && tokenize(src.get(range.end..next_start)?).any(|t| !matches!(t.kind, TokenKind::Whitespace))
+        && tokenize(src.get(range.end..next_start)?, FrontmatterAllowed::No)
+            .any(|t| !matches!(t.kind, TokenKind::Whitespace))
     {
         Some(range.start)
     } else {
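Every Clippy call site in this diff passes FrontmatterAllowed::No, i.e. these lexer calls keep treating their input as plain Rust text; only whole-file lexing opts into frontmatter. As a rough, self-contained illustration of what such a flag controls (a toy model under assumed names; split_frontmatter and its behavior are not part of rustc_lexer), a frontmatter-aware front end peels off the leading `---` block before the rest of the source is lexed:

// Toy model of a frontmatter-aware lexer front end. Everything here is
// illustrative; it is not the rustc_lexer implementation.

#[derive(Clone, Copy, PartialEq)]
enum FrontmatterAllowed {
    Yes,
    No,
}

/// Split a leading `---` ... `---` frontmatter block off `src` when allowed,
/// returning (frontmatter, remaining_source). With `FrontmatterAllowed::No`
/// the input is returned untouched, which is what the call sites above rely on.
fn split_frontmatter(src: &str, allowed: FrontmatterAllowed) -> (Option<&str>, &str) {
    if allowed == FrontmatterAllowed::No || !src.starts_with("---") {
        return (None, src);
    }
    // Locate the end of the opening fence line, then the closing `---` line.
    if let Some(open_end) = src.find('\n') {
        if let Some(close) = src[open_end + 1..].find("\n---") {
            let close_start = open_end + 1 + close + 1; // first byte of the closing fence
            let close_end = src[close_start..]
                .find('\n')
                .map_or(src.len(), |i| close_start + i + 1);
            return (Some(&src[..close_end]), &src[close_end..]);
        }
    }
    (None, src)
}

fn main() {
    let script = "---\n[dependencies]\nregex = \"1\"\n---\nfn main() {}\n";

    // As in the diff: ordinary lexing, the frontmatter fence is not special.
    let (fm, rest) = split_frontmatter(script, FrontmatterAllowed::No);
    assert!(fm.is_none());
    assert_eq!(rest, script);

    // Frontmatter-aware mode: the metadata block is separated from the Rust code.
    let (fm, rest) = split_frontmatter(script, FrontmatterAllowed::Yes);
    assert_eq!(fm, Some("---\n[dependencies]\nregex = \"1\"\n---\n"));
    assert_eq!(rest, "fn main() {}\n");
    println!("frontmatter: {fm:?}");
}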