clippy_dev: Rename RustSearcher to Cursor and move it to its own module.

This commit is contained in:
Jason Newcomb 2025-10-10 17:49:50 -04:00
parent 4f403f39b5
commit 88c0674bc8
5 changed files with 277 additions and 234 deletions

View file

@@ -1,4 +1,4 @@
use crate::parse::{RustSearcher, Token};
use crate::parse::cursor::{self, Cursor};
use crate::utils::Version;
use clap::ValueEnum;
use indoc::{formatdoc, writedoc};
@@ -517,21 +517,21 @@ fn setup_mod_file(path: &Path, lint: &LintData<'_>) -> io::Result<&'static str>
// Find both the last lint declaration (declare_clippy_lint!) and the lint pass impl
fn parse_mod_file(path: &Path, contents: &str) -> (&'static str, usize) {
#[allow(clippy::enum_glob_use)]
use Token::*;
use cursor::Pat::*;
let mut context = None;
let mut decl_end = None;
let mut searcher = RustSearcher::new(contents);
while let Some(name) = searcher.find_capture_token(CaptureIdent) {
let mut cursor = Cursor::new(contents);
while let Some(name) = cursor.find_capture_pat(CaptureIdent) {
match name {
"declare_clippy_lint" => {
if searcher.match_tokens(&[Bang, OpenBrace], &mut []) && searcher.find_token(CloseBrace) {
decl_end = Some(searcher.pos());
if cursor.match_all(&[Bang, OpenBrace], &mut []) && cursor.find_pat(CloseBrace) {
decl_end = Some(cursor.pos());
}
},
"impl" => {
let mut capture = "";
if searcher.match_tokens(&[Lt, Lifetime, Gt, CaptureIdent], &mut [&mut capture]) {
if cursor.match_all(&[Lt, Lifetime, Gt, CaptureIdent], &mut [&mut capture]) {
match capture {
"LateLintPass" => context = Some("LateContext"),
"EarlyLintPass" => context = Some("EarlyContext"),

View file

@@ -1,187 +1,12 @@
pub mod cursor;
use self::cursor::Cursor;
use crate::utils::{ErrAction, File, expect_action};
use core::ops::Range;
use core::slice;
use rustc_lexer::{self as lexer, FrontmatterAllowed};
use std::fs;
use std::path::{Path, PathBuf};
use walkdir::{DirEntry, WalkDir};
#[derive(Clone, Copy)]
pub enum Token<'a> {
/// Matches any number of comments / doc comments.
AnyComment,
Ident(&'a str),
CaptureIdent,
LitStr,
CaptureLitStr,
Bang,
CloseBrace,
CloseBracket,
CloseParen,
/// This will consume the first colon even if the second doesn't exist.
DoubleColon,
Comma,
Eq,
Lifetime,
Lt,
Gt,
OpenBrace,
OpenBracket,
OpenParen,
Pound,
Semi,
}
pub struct RustSearcher<'txt> {
text: &'txt str,
cursor: lexer::Cursor<'txt>,
pos: u32,
next_token: lexer::Token,
}
impl<'txt> RustSearcher<'txt> {
#[must_use]
#[expect(clippy::inconsistent_struct_constructor)]
pub fn new(text: &'txt str) -> Self {
let mut cursor = lexer::Cursor::new(text, FrontmatterAllowed::Yes);
Self {
text,
pos: 0,
next_token: cursor.advance_token(),
cursor,
}
}
#[must_use]
pub fn peek_text(&self) -> &'txt str {
&self.text[self.pos as usize..(self.pos + self.next_token.len) as usize]
}
#[must_use]
pub fn peek_len(&self) -> u32 {
self.next_token.len
}
#[must_use]
pub fn peek(&self) -> lexer::TokenKind {
self.next_token.kind
}
#[must_use]
pub fn pos(&self) -> u32 {
self.pos
}
#[must_use]
pub fn at_end(&self) -> bool {
self.next_token.kind == lexer::TokenKind::Eof
}
pub fn step(&mut self) {
// `next_token.len` is zero for the eof marker.
self.pos += self.next_token.len;
self.next_token = self.cursor.advance_token();
}
/// Consumes the next token if it matches the requested value and captures the value if
/// requested. Returns true if a token was matched.
fn read_token(&mut self, token: Token<'_>, captures: &mut slice::IterMut<'_, &mut &'txt str>) -> bool {
loop {
match (token, self.next_token.kind) {
(_, lexer::TokenKind::Whitespace)
| (
Token::AnyComment,
lexer::TokenKind::BlockComment { terminated: true, .. } | lexer::TokenKind::LineComment { .. },
) => self.step(),
(Token::AnyComment, _) => return true,
(Token::Bang, lexer::TokenKind::Bang)
| (Token::CloseBrace, lexer::TokenKind::CloseBrace)
| (Token::CloseBracket, lexer::TokenKind::CloseBracket)
| (Token::CloseParen, lexer::TokenKind::CloseParen)
| (Token::Comma, lexer::TokenKind::Comma)
| (Token::Eq, lexer::TokenKind::Eq)
| (Token::Lifetime, lexer::TokenKind::Lifetime { .. })
| (Token::Lt, lexer::TokenKind::Lt)
| (Token::Gt, lexer::TokenKind::Gt)
| (Token::OpenBrace, lexer::TokenKind::OpenBrace)
| (Token::OpenBracket, lexer::TokenKind::OpenBracket)
| (Token::OpenParen, lexer::TokenKind::OpenParen)
| (Token::Pound, lexer::TokenKind::Pound)
| (Token::Semi, lexer::TokenKind::Semi)
| (
Token::LitStr,
lexer::TokenKind::Literal {
kind: lexer::LiteralKind::Str { terminated: true } | lexer::LiteralKind::RawStr { .. },
..
},
) => {
self.step();
return true;
},
(Token::Ident(x), lexer::TokenKind::Ident) if x == self.peek_text() => {
self.step();
return true;
},
(Token::DoubleColon, lexer::TokenKind::Colon) => {
self.step();
if !self.at_end() && matches!(self.next_token.kind, lexer::TokenKind::Colon) {
self.step();
return true;
}
return false;
},
#[rustfmt::skip]
(
Token::CaptureLitStr,
lexer::TokenKind::Literal {
kind:
lexer::LiteralKind::Str { terminated: true }
| lexer::LiteralKind::RawStr { n_hashes: Some(_) },
..
},
)
| (Token::CaptureIdent, lexer::TokenKind::Ident) => {
**captures.next().unwrap() = self.peek_text();
self.step();
return true;
},
_ => return false,
}
}
}
#[must_use]
pub fn find_token(&mut self, token: Token<'_>) -> bool {
let mut capture = [].iter_mut();
while !self.read_token(token, &mut capture) {
self.step();
if self.at_end() {
return false;
}
}
true
}
#[must_use]
pub fn find_capture_token(&mut self, token: Token<'_>) -> Option<&'txt str> {
let mut res = "";
let mut capture = &mut res;
let mut capture = slice::from_mut(&mut capture).iter_mut();
while !self.read_token(token, &mut capture) {
self.step();
if self.at_end() {
return None;
}
}
Some(res)
}
#[must_use]
pub fn match_tokens(&mut self, tokens: &[Token<'_>], captures: &mut [&mut &'txt str]) -> bool {
let mut captures = captures.iter_mut();
tokens.iter().all(|&t| self.read_token(t, &mut captures))
}
}
pub struct Lint {
pub name: String,
pub group: String,
@@ -265,9 +90,9 @@ fn read_src_with_module(src_root: &Path) -> impl use<'_> + Iterator<Item = (DirE
/// Parse a source file looking for `declare_clippy_lint` macro invocations.
fn parse_clippy_lint_decls(path: &Path, contents: &str, module: &str, lints: &mut Vec<Lint>) {
#[allow(clippy::enum_glob_use)]
use Token::*;
use cursor::Pat::*;
#[rustfmt::skip]
static DECL_TOKENS: &[Token<'_>] = &[
static DECL_TOKENS: &[cursor::Pat<'_>] = &[
// !{ /// docs
Bang, OpenBrace, AnyComment,
// #[clippy::version = "version"]
@@ -276,17 +101,17 @@ fn parse_clippy_lint_decls(path: &Path, contents: &str, module: &str, lints: &mu
Ident("pub"), CaptureIdent, Comma, AnyComment, CaptureIdent, Comma,
];
let mut searcher = RustSearcher::new(contents);
while searcher.find_token(Ident("declare_clippy_lint")) {
let start = searcher.pos() as usize - "declare_clippy_lint".len();
let mut cursor = Cursor::new(contents);
while cursor.find_pat(Ident("declare_clippy_lint")) {
let start = cursor.pos() as usize - "declare_clippy_lint".len();
let (mut name, mut group) = ("", "");
if searcher.match_tokens(DECL_TOKENS, &mut [&mut name, &mut group]) && searcher.find_token(CloseBrace) {
if cursor.match_all(DECL_TOKENS, &mut [&mut name, &mut group]) && cursor.find_pat(CloseBrace) {
lints.push(Lint {
name: name.to_lowercase(),
group: group.into(),
module: module.into(),
path: path.into(),
declaration_range: start..searcher.pos() as usize,
declaration_range: start..cursor.pos() as usize,
});
}
}
@@ -295,21 +120,21 @@ fn parse_clippy_lint_decls(path: &Path, contents: &str, module: &str, lints: &mu
#[must_use]
pub fn read_deprecated_lints() -> (Vec<DeprecatedLint>, Vec<RenamedLint>) {
#[allow(clippy::enum_glob_use)]
use Token::*;
use cursor::Pat::*;
#[rustfmt::skip]
static DECL_TOKENS: &[Token<'_>] = &[
static DECL_TOKENS: &[cursor::Pat<'_>] = &[
// #[clippy::version = "version"]
Pound, OpenBracket, Ident("clippy"), DoubleColon, Ident("version"), Eq, CaptureLitStr, CloseBracket,
// ("first", "second"),
OpenParen, CaptureLitStr, Comma, CaptureLitStr, CloseParen, Comma,
];
#[rustfmt::skip]
static DEPRECATED_TOKENS: &[Token<'_>] = &[
static DEPRECATED_TOKENS: &[cursor::Pat<'_>] = &[
// !{ DEPRECATED(DEPRECATED_VERSION) = [
Bang, OpenBrace, Ident("DEPRECATED"), OpenParen, Ident("DEPRECATED_VERSION"), CloseParen, Eq, OpenBracket,
];
#[rustfmt::skip]
static RENAMED_TOKENS: &[Token<'_>] = &[
static RENAMED_TOKENS: &[cursor::Pat<'_>] = &[
// !{ RENAMED(RENAMED_VERSION) = [
Bang, OpenBrace, Ident("RENAMED"), OpenParen, Ident("RENAMED_VERSION"), CloseParen, Eq, OpenBracket,
];
@@ -320,19 +145,19 @@ pub fn read_deprecated_lints() -> (Vec<DeprecatedLint>, Vec<RenamedLint>) {
let mut contents = String::new();
File::open_read_to_cleared_string(path, &mut contents);
let mut searcher = RustSearcher::new(&contents);
let mut cursor = Cursor::new(&contents);
// First instance is the macro definition.
assert!(
searcher.find_token(Ident("declare_with_version")),
cursor.find_pat(Ident("declare_with_version")),
"error reading deprecated lints"
);
if searcher.find_token(Ident("declare_with_version")) && searcher.match_tokens(DEPRECATED_TOKENS, &mut []) {
if cursor.find_pat(Ident("declare_with_version")) && cursor.match_all(DEPRECATED_TOKENS, &mut []) {
let mut version = "";
let mut name = "";
let mut reason = "";
while searcher.match_tokens(DECL_TOKENS, &mut [&mut version, &mut name, &mut reason]) {
while cursor.match_all(DECL_TOKENS, &mut [&mut version, &mut name, &mut reason]) {
deprecated.push(DeprecatedLint {
name: parse_str_single_line(path.as_ref(), name),
reason: parse_str_single_line(path.as_ref(), reason),
@@ -343,11 +168,11 @@ pub fn read_deprecated_lints() -> (Vec<DeprecatedLint>, Vec<RenamedLint>) {
panic!("error reading deprecated lints");
}
if searcher.find_token(Ident("declare_with_version")) && searcher.match_tokens(RENAMED_TOKENS, &mut []) {
if cursor.find_pat(Ident("declare_with_version")) && cursor.match_all(RENAMED_TOKENS, &mut []) {
let mut version = "";
let mut old_name = "";
let mut new_name = "";
while searcher.match_tokens(DECL_TOKENS, &mut [&mut version, &mut old_name, &mut new_name]) {
while cursor.match_all(DECL_TOKENS, &mut [&mut version, &mut old_name, &mut new_name]) {
renamed.push(RenamedLint {
old_name: parse_str_single_line(path.as_ref(), old_name),
new_name: parse_str_single_line(path.as_ref(), new_name),

View file

@@ -0,0 +1,212 @@
use core::slice;
use rustc_lexer::{self as lex, LiteralKind, Token, TokenKind};
/// A token pattern used for searching and matching by the [`Cursor`].
///
/// In the event that a pattern is a multi-token sequence, earlier tokens will be consumed
/// even if the pattern ultimately isn't matched. e.g. With the sequence `:*` matching
/// `DoubleColon` will consume the first `:` and then fail to match, leaving the cursor at
/// the `*`.
#[derive(Clone, Copy)]
pub enum Pat<'a> {
    /// Matches any number of comments and doc comments.
    ///
    /// This can match zero tokens, so it always succeeds.
    AnyComment,
    /// Matches a single identifier with exactly this text.
    Ident(&'a str),
    /// Matches any identifier and captures its text.
    CaptureIdent,
    /// Matches a terminated string literal or any raw string literal.
    LitStr,
    /// Matches a terminated string literal or a valid raw string literal,
    /// capturing its text (including quotes).
    CaptureLitStr,
    Bang,
    CloseBrace,
    CloseBracket,
    CloseParen,
    Comma,
    /// Matches `::`. Consumes the first `:` even if the second is absent.
    DoubleColon,
    Eq,
    Lifetime,
    Lt,
    Gt,
    OpenBrace,
    OpenBracket,
    OpenParen,
    Pound,
    Semi,
}
/// A unidirectional cursor over a token stream that is lexed on demand.
pub struct Cursor<'txt> {
    // The token the cursor is currently positioned at; `TokenKind::Eof` once the
    // input is exhausted.
    next_token: Token,
    // Byte offset of `next_token` within `text`.
    pos: u32,
    // The underlying lexer, advanced one token ahead of `pos`.
    inner: lex::Cursor<'txt>,
    // The full source text being lexed.
    text: &'txt str,
}
impl<'txt> Cursor<'txt> {
    /// Creates a cursor positioned at the start of the given text.
    #[must_use]
    pub fn new(text: &'txt str) -> Self {
        let mut inner = lex::Cursor::new(text, lex::FrontmatterAllowed::Yes);
        Self {
            // Pre-lex the first token so `peek`/`peek_text` work immediately.
            next_token: inner.advance_token(),
            pos: 0,
            inner,
            text,
        }
    }

    /// Gets the text that makes up the next token in the stream, or the empty string if
    /// stream is exhausted.
    #[must_use]
    pub fn peek_text(&self) -> &'txt str {
        &self.text[self.pos as usize..(self.pos + self.next_token.len) as usize]
    }

    /// Gets the length of the next token in bytes, or zero if the stream is exhausted.
    #[must_use]
    pub fn peek_len(&self) -> u32 {
        self.next_token.len
    }

    /// Gets the next token in the stream, or [`TokenKind::Eof`] if the stream is
    /// exhausted.
    #[must_use]
    pub fn peek(&self) -> TokenKind {
        self.next_token.kind
    }

    /// Gets the offset of the next token in the source string, or the string's length if
    /// the stream is exhausted.
    #[must_use]
    pub fn pos(&self) -> u32 {
        self.pos
    }

    /// Gets whether the cursor has exhausted its input.
    #[must_use]
    pub fn at_end(&self) -> bool {
        self.next_token.kind == TokenKind::Eof
    }

    /// Advances the cursor to the next token. If the stream is exhausted this will set
    /// the next token to [`TokenKind::Eof`].
    pub fn step(&mut self) {
        // `next_token.len` is zero for the eof marker.
        self.pos += self.next_token.len;
        self.next_token = self.inner.advance_token();
    }

    /// Consumes tokens until the given pattern is either fully matched or fails to match.
    /// Returns whether the pattern was fully matched.
    ///
    /// For each capture made by the pattern one item will be taken from the capture
    /// sequence with the result placed inside.
    fn match_pat(&mut self, pat: Pat<'_>, captures: &mut slice::IterMut<'_, &mut &'txt str>) -> bool {
        loop {
            match (pat, self.next_token.kind) {
                // Whitespace is always skipped; comments are skipped only when matching
                // `AnyComment`. Both loop back around to retry the pattern.
                #[rustfmt::skip] // rustfmt bug: https://github.com/rust-lang/rustfmt/issues/6697
                (_, TokenKind::Whitespace)
                | (
                    Pat::AnyComment,
                    TokenKind::BlockComment { terminated: true, .. } | TokenKind::LineComment { .. },
                ) => self.step(),
                // `AnyComment` matches zero or more comments, so it succeeds on any
                // other token without consuming it.
                (Pat::AnyComment, _) => return true,
                // Single-token patterns: consume the token and report a match.
                (Pat::Bang, TokenKind::Bang)
                | (Pat::CloseBrace, TokenKind::CloseBrace)
                | (Pat::CloseBracket, TokenKind::CloseBracket)
                | (Pat::CloseParen, TokenKind::CloseParen)
                | (Pat::Comma, TokenKind::Comma)
                | (Pat::Eq, TokenKind::Eq)
                | (Pat::Lifetime, TokenKind::Lifetime { .. })
                | (Pat::Lt, TokenKind::Lt)
                | (Pat::Gt, TokenKind::Gt)
                | (Pat::OpenBrace, TokenKind::OpenBrace)
                | (Pat::OpenBracket, TokenKind::OpenBracket)
                | (Pat::OpenParen, TokenKind::OpenParen)
                | (Pat::Pound, TokenKind::Pound)
                | (Pat::Semi, TokenKind::Semi)
                | (
                    Pat::LitStr,
                    TokenKind::Literal {
                        kind: LiteralKind::Str { terminated: true } | LiteralKind::RawStr { .. },
                        ..
                    },
                ) => {
                    self.step();
                    return true;
                },
                // An identifier pattern only matches when the token's text is identical.
                (Pat::Ident(x), TokenKind::Ident) if x == self.peek_text() => {
                    self.step();
                    return true;
                },
                // `::` is lexed as two `Colon` tokens. The first is consumed even if the
                // second doesn't follow, leaving the cursor just past it on failure.
                (Pat::DoubleColon, TokenKind::Colon) => {
                    self.step();
                    if !self.at_end() && matches!(self.next_token.kind, TokenKind::Colon) {
                        self.step();
                        return true;
                    }
                    return false;
                },
                // Capturing patterns: store the matched token's text into the next
                // capture slot. Panics if the caller supplied too few slots.
                #[rustfmt::skip]
                (
                    Pat::CaptureLitStr,
                    TokenKind::Literal {
                        kind:
                            LiteralKind::Str { terminated: true }
                            | LiteralKind::RawStr { n_hashes: Some(_) },
                        ..
                    },
                )
                | (Pat::CaptureIdent, TokenKind::Ident) => {
                    **captures.next().unwrap() = self.peek_text();
                    self.step();
                    return true;
                },
                _ => return false,
            }
        }
    }

    /// Continually attempt to match the pattern on subsequent tokens until a match is
    /// found. Returns whether the pattern was successfully matched.
    ///
    /// Not generally suitable for multi-token patterns or patterns that can match
    /// nothing.
    #[must_use]
    pub fn find_pat(&mut self, token: Pat<'_>) -> bool {
        // No capture slots are needed here, so pass an empty iterator.
        let mut capture = [].iter_mut();
        while !self.match_pat(token, &mut capture) {
            self.step();
            if self.at_end() {
                return false;
            }
        }
        true
    }

    /// The same as [`Self::find_pat`], but returns a capture as well.
    #[must_use]
    pub fn find_capture_pat(&mut self, token: Pat<'_>) -> Option<&'txt str> {
        let mut res = "";
        // Build a single-slot capture iterator over `res`.
        let mut capture = &mut res;
        let mut capture = slice::from_mut(&mut capture).iter_mut();
        while !self.match_pat(token, &mut capture) {
            self.step();
            if self.at_end() {
                return None;
            }
        }
        Some(res)
    }

    /// Attempts to match a sequence of patterns at the current position. Returns whether
    /// the match is successful.
    ///
    /// Captures will be written to the given slice in the order they're matched. If a
    /// capture is matched, but there are no more capture slots this will panic. If the
    /// match is completed without filling all the capture slots they will be left
    /// unmodified.
    ///
    /// If the match fails the cursor will be positioned at the first failing token.
    #[must_use]
    pub fn match_all(&mut self, tokens: &[Pat<'_>], captures: &mut [&mut &'txt str]) -> bool {
        let mut captures = captures.iter_mut();
        // `all` short-circuits on the first pattern that fails to match.
        tokens.iter().all(|&t| self.match_pat(t, &mut captures))
    }
}

View file

@@ -1,4 +1,5 @@
use crate::parse::{RenamedLint, RustSearcher, Token, find_lint_decls, read_deprecated_lints};
use crate::parse::cursor::{self, Cursor};
use crate::parse::{RenamedLint, find_lint_decls, read_deprecated_lints};
use crate::update_lints::generate_lint_files;
use crate::utils::{
ErrAction, FileUpdater, UpdateMode, UpdateStatus, Version, delete_dir_if_exists, delete_file_if_exists,
@@ -279,47 +280,49 @@ fn file_update_fn<'a, 'b>(
move |_, src, dst| {
let mut copy_pos = 0u32;
let mut changed = false;
let mut searcher = RustSearcher::new(src);
let mut cursor = Cursor::new(src);
let mut capture = "";
loop {
match searcher.peek() {
match cursor.peek() {
TokenKind::Eof => break,
TokenKind::Ident => {
let match_start = searcher.pos();
let text = searcher.peek_text();
searcher.step();
let match_start = cursor.pos();
let text = cursor.peek_text();
cursor.step();
match text {
// clippy::line_name or clippy::lint-name
"clippy" => {
if searcher.match_tokens(&[Token::DoubleColon, Token::CaptureIdent], &mut [&mut capture])
&& capture == old_name
if cursor.match_all(
&[cursor::Pat::DoubleColon, cursor::Pat::CaptureIdent],
&mut [&mut capture],
) && capture == old_name
{
dst.push_str(&src[copy_pos as usize..searcher.pos() as usize - capture.len()]);
dst.push_str(&src[copy_pos as usize..cursor.pos() as usize - capture.len()]);
dst.push_str(new_name);
copy_pos = searcher.pos();
copy_pos = cursor.pos();
changed = true;
}
},
// mod lint_name
"mod" => {
if !matches!(mod_edit, ModEdit::None)
&& searcher.match_tokens(&[Token::CaptureIdent], &mut [&mut capture])
&& cursor.match_all(&[cursor::Pat::CaptureIdent], &mut [&mut capture])
&& capture == old_name
{
match mod_edit {
ModEdit::Rename => {
dst.push_str(&src[copy_pos as usize..searcher.pos() as usize - capture.len()]);
dst.push_str(&src[copy_pos as usize..cursor.pos() as usize - capture.len()]);
dst.push_str(new_name);
copy_pos = searcher.pos();
copy_pos = cursor.pos();
changed = true;
},
ModEdit::Delete if searcher.match_tokens(&[Token::Semi], &mut []) => {
ModEdit::Delete if cursor.match_all(&[cursor::Pat::Semi], &mut []) => {
let mut start = &src[copy_pos as usize..match_start as usize];
if start.ends_with("\n\n") {
start = &start[..start.len() - 1];
}
dst.push_str(start);
copy_pos = searcher.pos();
copy_pos = cursor.pos();
if src[copy_pos as usize..].starts_with("\n\n") {
copy_pos += 1;
}
@@ -331,8 +334,8 @@ fn file_update_fn<'a, 'b>(
},
// lint_name::
name if matches!(mod_edit, ModEdit::Rename) && name == old_name => {
let name_end = searcher.pos();
if searcher.match_tokens(&[Token::DoubleColon], &mut []) {
let name_end = cursor.pos();
if cursor.match_all(&[cursor::Pat::DoubleColon], &mut []) {
dst.push_str(&src[copy_pos as usize..match_start as usize]);
dst.push_str(new_name);
copy_pos = name_end;
@@ -350,36 +353,38 @@ fn file_update_fn<'a, 'b>(
};
dst.push_str(&src[copy_pos as usize..match_start as usize]);
dst.push_str(replacement);
copy_pos = searcher.pos();
copy_pos = cursor.pos();
changed = true;
},
}
},
// //~ lint_name
TokenKind::LineComment { doc_style: None } => {
let text = searcher.peek_text();
let text = cursor.peek_text();
if text.starts_with("//~")
&& let Some(text) = text.strip_suffix(old_name)
&& !text.ends_with(|c| matches!(c, 'a'..='z' | 'A'..='Z' | '0'..='9' | '_'))
{
dst.push_str(&src[copy_pos as usize..searcher.pos() as usize + text.len()]);
dst.push_str(&src[copy_pos as usize..cursor.pos() as usize + text.len()]);
dst.push_str(new_name);
copy_pos = searcher.pos() + searcher.peek_len();
copy_pos = cursor.pos() + cursor.peek_len();
changed = true;
}
searcher.step();
cursor.step();
},
// ::lint_name
TokenKind::Colon
if searcher.match_tokens(&[Token::DoubleColon, Token::CaptureIdent], &mut [&mut capture])
&& capture == old_name =>
if cursor.match_all(
&[cursor::Pat::DoubleColon, cursor::Pat::CaptureIdent],
&mut [&mut capture],
) && capture == old_name =>
{
dst.push_str(&src[copy_pos as usize..searcher.pos() as usize - capture.len()]);
dst.push_str(&src[copy_pos as usize..cursor.pos() as usize - capture.len()]);
dst.push_str(new_name);
copy_pos = searcher.pos();
copy_pos = cursor.pos();
changed = true;
},
_ => searcher.step(),
_ => cursor.step(),
}
}

View file

@@ -1,4 +1,5 @@
use crate::parse::{DeprecatedLint, Lint, RenamedLint, RustSearcher, Token, find_lint_decls, read_deprecated_lints};
use crate::parse::cursor::{self, Cursor};
use crate::parse::{DeprecatedLint, Lint, RenamedLint, find_lint_decls, read_deprecated_lints};
use crate::utils::{FileUpdater, UpdateMode, UpdateStatus, update_text_region_fn};
use itertools::Itertools;
use std::collections::HashSet;
@@ -75,13 +76,13 @@ pub fn generate_lint_files(
update_mode,
"clippy_lints/src/deprecated_lints.rs",
&mut |_, src, dst| {
let mut searcher = RustSearcher::new(src);
let mut cursor = Cursor::new(src);
assert!(
searcher.find_token(Token::Ident("declare_with_version"))
&& searcher.find_token(Token::Ident("declare_with_version")),
cursor.find_pat(cursor::Pat::Ident("declare_with_version"))
&& cursor.find_pat(cursor::Pat::Ident("declare_with_version")),
"error reading deprecated lints"
);
dst.push_str(&src[..searcher.pos() as usize]);
dst.push_str(&src[..cursor.pos() as usize]);
dst.push_str("! { DEPRECATED(DEPRECATED_VERSION) = [\n");
for lint in deprecated {
write!(