Implement incremental caching for derive macro expansions

This commit is contained in:
Felix Rath 2025-08-13 13:05:07 +02:00 committed by Jakub Beránek
parent a6acf0f07f
commit 8fa2f693bb
No known key found for this signature in database
GPG key ID: 909CD0D26483516B
21 changed files with 284 additions and 72 deletions

View file

@ -3887,11 +3887,13 @@ dependencies = [
"rustc_lexer",
"rustc_lint_defs",
"rustc_macros",
"rustc_middle",
"rustc_parse",
"rustc_proc_macro",
"rustc_serialize",
"rustc_session",
"rustc_span",
"scoped-tls",
"smallvec",
"thin-vec",
"tracing",

View file

@ -3348,7 +3348,8 @@ impl UseTree {
/// Distinguishes between `Attribute`s that decorate items and Attributes that
/// are contained as statements within items. These two cases need to be
/// distinguished for pretty-printing.
#[derive(Clone, PartialEq, Encodable, Decodable, Debug, Copy, HashStable_Generic, Walkable)]
#[derive(Clone, PartialEq, Eq, Hash, Debug, Copy)]
#[derive(Encodable, Decodable, HashStable_Generic, Walkable)]
pub enum AttrStyle {
Outer,
Inner,

View file

@ -40,13 +40,13 @@ impl DocFragmentKind {
}
}
#[derive(Clone, Copy, PartialEq, Eq, Encodable, Decodable, Debug, HashStable_Generic)]
#[derive(Clone, Copy, PartialEq, Eq, Hash, Encodable, Decodable, Debug, HashStable_Generic)]
pub enum CommentKind {
Line,
Block,
}
#[derive(Copy, Clone, PartialEq, Debug, Encodable, Decodable, HashStable_Generic)]
#[derive(Copy, Clone, PartialEq, Eq, Hash, Debug, Encodable, Decodable, HashStable_Generic)]
pub enum InvisibleOrigin {
// From the expansion of a metavariable in a declarative macro.
MetaVar(MetaVarKind),
@ -123,7 +123,7 @@ impl fmt::Display for MetaVarKind {
/// Describes how a sequence of token trees is delimited.
/// Cannot use `proc_macro::Delimiter` directly because this
/// structure should implement some additional traits.
#[derive(Copy, Clone, Debug, PartialEq, Encodable, Decodable, HashStable_Generic)]
#[derive(Copy, Clone, Debug, PartialEq, Eq, Hash, Encodable, Decodable, HashStable_Generic)]
pub enum Delimiter {
/// `( ... )`
Parenthesis,
@ -186,7 +186,7 @@ impl Delimiter {
// type. This means that float literals like `1f32` are classified by this type
// as `Int`. Only upon conversion to `ast::LitKind` will such a literal be
// given the `Float` kind.
#[derive(Clone, Copy, PartialEq, Encodable, Decodable, Debug, HashStable_Generic)]
#[derive(Clone, Copy, PartialEq, Eq, Hash, Encodable, Decodable, Debug, HashStable_Generic)]
pub enum LitKind {
Bool, // AST only, must never appear in a `Token`
Byte,
@ -203,7 +203,7 @@ pub enum LitKind {
}
/// A literal token.
#[derive(Clone, Copy, PartialEq, Encodable, Decodable, Debug, HashStable_Generic)]
#[derive(Clone, Copy, PartialEq, Eq, Hash, Encodable, Decodable, Debug, HashStable_Generic)]
pub struct Lit {
pub kind: LitKind,
pub symbol: Symbol,
@ -349,7 +349,7 @@ fn ident_can_begin_type(name: Symbol, span: Span, is_raw: IdentIsRaw) -> bool {
.contains(&name)
}
#[derive(PartialEq, Encodable, Decodable, Debug, Copy, Clone, HashStable_Generic)]
#[derive(PartialEq, Eq, Encodable, Decodable, Hash, Debug, Copy, Clone, HashStable_Generic)]
pub enum IdentIsRaw {
No,
Yes,
@ -376,7 +376,7 @@ impl From<bool> for IdentIsRaw {
}
}
#[derive(Clone, Copy, PartialEq, Encodable, Decodable, Debug, HashStable_Generic)]
#[derive(Clone, Copy, PartialEq, Eq, Hash, Encodable, Decodable, Debug, HashStable_Generic)]
pub enum TokenKind {
/* Expression-operator symbols. */
/// `=`
@ -526,7 +526,7 @@ pub enum TokenKind {
Eof,
}
#[derive(Clone, Copy, PartialEq, Encodable, Decodable, Debug, HashStable_Generic)]
#[derive(Clone, Copy, PartialEq, Eq, Hash, Encodable, Decodable, Debug, HashStable_Generic)]
pub struct Token {
pub kind: TokenKind,
pub span: Span,

View file

@ -5,6 +5,7 @@
//! which are themselves a single [`Token`] or a `Delimited` subsequence of tokens.
use std::borrow::Cow;
use std::hash::Hash;
use std::ops::Range;
use std::sync::Arc;
use std::{cmp, fmt, iter, mem};
@ -22,7 +23,7 @@ use crate::token::{self, Delimiter, Token, TokenKind};
use crate::{AttrVec, Attribute};
/// Part of a `TokenStream`.
#[derive(Debug, Clone, PartialEq, Encodable, Decodable, HashStable_Generic)]
#[derive(Debug, Clone, PartialEq, Eq, Hash, Encodable, Decodable, HashStable_Generic)]
pub enum TokenTree {
/// A single token. Should never be `OpenDelim` or `CloseDelim`, because
/// delimiters are implicitly represented by `Delimited`.
@ -538,7 +539,7 @@ pub struct AttrsTarget {
/// compound token. Used for conversions to `proc_macro::Spacing`. Also used to
/// guide pretty-printing, which is where the `JointHidden` value (which isn't
/// part of `proc_macro::Spacing`) comes in useful.
#[derive(Clone, Copy, Debug, PartialEq, Encodable, Decodable, HashStable_Generic)]
#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, Encodable, Decodable, HashStable_Generic)]
pub enum Spacing {
/// The token cannot join with the following token to form a compound
/// token.
@ -595,7 +596,7 @@ pub enum Spacing {
}
/// A `TokenStream` is an abstract sequence of tokens, organized into [`TokenTree`]s.
#[derive(Clone, Debug, Default, Encodable, Decodable)]
#[derive(Clone, Debug, Default, PartialEq, Eq, Hash, Encodable, Decodable)]
pub struct TokenStream(pub(crate) Arc<Vec<TokenTree>>);
impl TokenStream {
@ -811,14 +812,6 @@ impl TokenStream {
}
}
impl PartialEq<TokenStream> for TokenStream {
fn eq(&self, other: &TokenStream) -> bool {
self.iter().eq(other.iter())
}
}
impl Eq for TokenStream {}
impl FromIterator<TokenTree> for TokenStream {
fn from_iter<I: IntoIterator<Item = TokenTree>>(iter: I) -> Self {
TokenStream::new(iter.into_iter().collect::<Vec<TokenTree>>())
@ -970,7 +963,8 @@ impl TokenCursor {
}
}
#[derive(Debug, Copy, Clone, PartialEq, Encodable, Decodable, HashStable_Generic, Walkable)]
#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
#[derive(Encodable, Decodable, HashStable_Generic, Walkable)]
pub struct DelimSpan {
pub open: Span,
pub close: Span,
@ -994,7 +988,7 @@ impl DelimSpan {
}
}
#[derive(Copy, Clone, Debug, PartialEq, Encodable, Decodable, HashStable_Generic)]
#[derive(Copy, Clone, Debug, PartialEq, Eq, Hash, Encodable, Decodable, HashStable_Generic)]
pub struct DelimSpacing {
pub open: Spacing,
pub close: Spacing,

View file

@ -21,6 +21,7 @@ rustc_hir = { path = "../rustc_hir" }
rustc_lexer = { path = "../rustc_lexer" }
rustc_lint_defs = { path = "../rustc_lint_defs" }
rustc_macros = { path = "../rustc_macros" }
rustc_middle = { path = "../rustc_middle" }
rustc_parse = { path = "../rustc_parse" }
# We must use the proc_macro version that we will compile proc-macros against,
# not the one from our own sysroot.
@ -28,6 +29,7 @@ rustc_proc_macro = { path = "../rustc_proc_macro" }
rustc_serialize = { path = "../rustc_serialize" }
rustc_session = { path = "../rustc_session" }
rustc_span = { path = "../rustc_span" }
scoped-tls = "1.0"
smallvec = { version = "1.8.1", features = ["union", "may_dangle"] }
thin-vec = "0.2.12"
tracing = "0.1"

View file

@ -29,4 +29,8 @@ pub mod module;
#[allow(rustc::untranslatable_diagnostic)]
pub mod proc_macro;
/// Installs this crate's query providers into the global provider table.
///
/// Currently this only wires up the `derive_macro_expansion` query, which
/// caches the output token stream of derive proc macro invocations.
pub fn provide(providers: &mut rustc_middle::query::Providers) {
    providers.derive_macro_expansion = proc_macro::provide_derive_macro_expansion;
}
rustc_fluent_macro::fluent_messages! { "../messages.ftl" }

View file

@ -1,9 +1,11 @@
use rustc_ast::tokenstream::TokenStream;
use rustc_errors::ErrorGuaranteed;
use rustc_middle::ty::{self, TyCtxt};
use rustc_parse::parser::{ForceCollect, Parser};
use rustc_session::Session;
use rustc_session::config::ProcMacroExecutionStrategy;
use rustc_span::Span;
use rustc_span::profiling::SpannedEventArgRecorder;
use rustc_span::{LocalExpnId, Span};
use {rustc_ast as ast, rustc_proc_macro as pm};
use crate::base::{self, *};
@ -30,9 +32,9 @@ impl<T> pm::bridge::server::MessagePipe<T> for MessagePipe<T> {
}
}
fn exec_strategy(ecx: &ExtCtxt<'_>) -> impl pm::bridge::server::ExecutionStrategy + 'static {
fn exec_strategy(sess: &Session) -> impl pm::bridge::server::ExecutionStrategy + 'static {
pm::bridge::server::MaybeCrossThread::<MessagePipe<_>>::new(
ecx.sess.opts.unstable_opts.proc_macro_execution_strategy
sess.opts.unstable_opts.proc_macro_execution_strategy
== ProcMacroExecutionStrategy::CrossThread,
)
}
@ -54,7 +56,7 @@ impl base::BangProcMacro for BangProcMacro {
});
let proc_macro_backtrace = ecx.ecfg.proc_macro_backtrace;
let strategy = exec_strategy(ecx);
let strategy = exec_strategy(ecx.sess);
let server = proc_macro_server::Rustc::new(ecx);
self.client.run(&strategy, server, input, proc_macro_backtrace).map_err(|e| {
ecx.dcx().emit_err(errors::ProcMacroPanicked {
@ -85,7 +87,7 @@ impl base::AttrProcMacro for AttrProcMacro {
});
let proc_macro_backtrace = ecx.ecfg.proc_macro_backtrace;
let strategy = exec_strategy(ecx);
let strategy = exec_strategy(ecx.sess);
let server = proc_macro_server::Rustc::new(ecx);
self.client.run(&strategy, server, annotation, annotated, proc_macro_backtrace).map_err(
|e| {
@ -101,7 +103,7 @@ impl base::AttrProcMacro for AttrProcMacro {
}
pub struct DeriveProcMacro {
pub client: pm::bridge::client::Client<pm::TokenStream, pm::TokenStream>,
pub client: DeriveClient,
}
impl MultiItemModifier for DeriveProcMacro {
@ -113,6 +115,13 @@ impl MultiItemModifier for DeriveProcMacro {
item: Annotatable,
_is_derive_const: bool,
) -> ExpandResult<Vec<Annotatable>, Annotatable> {
let _timer = ecx.sess.prof.generic_activity_with_arg_recorder(
"expand_derive_proc_macro_outer",
|recorder| {
recorder.record_arg_with_span(ecx.sess.source_map(), ecx.expansion_descr(), span);
},
);
// We need special handling for statement items
// (e.g. `fn foo() { #[derive(Debug)] struct Bar; }`)
let is_stmt = matches!(item, Annotatable::Stmt(..));
@ -123,36 +132,31 @@ impl MultiItemModifier for DeriveProcMacro {
// altogether. See #73345.
crate::base::ann_pretty_printing_compatibility_hack(&item, &ecx.sess.psess);
let input = item.to_tokens();
let stream = {
let _timer =
ecx.sess.prof.generic_activity_with_arg_recorder("expand_proc_macro", |recorder| {
recorder.record_arg_with_span(
ecx.sess.source_map(),
ecx.expansion_descr(),
span,
);
});
let proc_macro_backtrace = ecx.ecfg.proc_macro_backtrace;
let strategy = exec_strategy(ecx);
let server = proc_macro_server::Rustc::new(ecx);
match self.client.run(&strategy, server, input, proc_macro_backtrace) {
Ok(stream) => stream,
Err(e) => {
ecx.dcx().emit_err({
errors::ProcMacroDerivePanicked {
span,
message: e.as_str().map(|message| {
errors::ProcMacroDerivePanickedHelp { message: message.into() }
}),
}
});
return ExpandResult::Ready(vec![]);
}
}
let invoc_id = ecx.current_expansion.id;
let res = if ecx.sess.opts.incremental.is_some()
&& ecx.sess.opts.unstable_opts.cache_proc_macros
{
ty::tls::with(|tcx| {
let input = &*tcx.arena.alloc(input);
let key: (LocalExpnId, &TokenStream) = (invoc_id, input);
QueryDeriveExpandCtx::enter(ecx, self.client, move || {
tcx.derive_macro_expansion(key).cloned()
})
})
} else {
expand_derive_macro(invoc_id, input, ecx, self.client)
};
let Ok(output) = res else {
// error will already have been emitted
return ExpandResult::Ready(vec![]);
};
let error_count_before = ecx.dcx().err_count();
let mut parser = Parser::new(&ecx.sess.psess, stream, Some("proc-macro derive"));
let mut parser = Parser::new(&ecx.sess.psess, output, Some("proc-macro derive"));
let mut items = vec![];
loop {
@ -180,3 +184,101 @@ impl MultiItemModifier for DeriveProcMacro {
ExpandResult::Ready(items)
}
}
/// Provide a query for computing the output of a derive macro.
///
/// The key is the macro invocation id plus the input token stream; the result
/// is the expanded token stream, arena-allocated so it can be returned by
/// reference from the query system (and cached on disk across sessions).
///
/// Returns `Err(())` when expansion failed; the error diagnostic has already
/// been emitted by `expand_derive_macro` in that case.
pub(super) fn provide_derive_macro_expansion<'tcx>(
    tcx: TyCtxt<'tcx>,
    key: (LocalExpnId, &'tcx TokenStream),
) -> Result<&'tcx TokenStream, ()> {
    let (invoc_id, input) = key;
    // Make sure that we invalidate the query when the crate defining the proc macro changes
    let _ = tcx.crate_hash(invoc_id.expn_data().macro_def_id.unwrap().krate);
    // The `ExtCtxt` and proc-macro client cannot be part of the query key, so
    // they are recovered from the scoped thread-local set up by
    // `QueryDeriveExpandCtx::enter` at the query's call site.
    QueryDeriveExpandCtx::with(|ecx, client| {
        expand_derive_macro(invoc_id, input.clone(), ecx, client).map(|ts| &*tcx.arena.alloc(ts))
    })
}
type DeriveClient = pm::bridge::client::Client<pm::TokenStream, pm::TokenStream>;
/// Runs the derive proc-macro `client` on `input` and returns the expanded
/// token stream.
///
/// On panic inside the proc macro, emits a `ProcMacroDerivePanicked`
/// diagnostic at the invocation's call site and returns `Err(())`; callers
/// must not emit a second error.
fn expand_derive_macro(
    invoc_id: LocalExpnId,
    input: TokenStream,
    ecx: &mut ExtCtxt<'_>,
    client: DeriveClient,
) -> Result<TokenStream, ()> {
    // The recorder closure only runs when self-profiling is enabled, so the
    // `expn_data` lookup stays off the fast path.
    let _timer =
        ecx.sess.prof.generic_activity_with_arg_recorder("expand_proc_macro", |recorder| {
            let invoc_expn_data = invoc_id.expn_data();
            let span = invoc_expn_data.call_site;
            let event_arg = invoc_expn_data.kind.descr();
            // `event_arg` is not used again, so it can be moved into the
            // recorder without the previous redundant `.clone()`.
            recorder.record_arg_with_span(ecx.sess.source_map(), event_arg, span);
        });

    let proc_macro_backtrace = ecx.ecfg.proc_macro_backtrace;
    let strategy = exec_strategy(ecx.sess);
    let server = proc_macro_server::Rustc::new(ecx);

    match client.run(&strategy, server, input, proc_macro_backtrace) {
        Ok(stream) => Ok(stream),
        Err(e) => {
            let invoc_expn_data = invoc_id.expn_data();
            let span = invoc_expn_data.call_site;
            ecx.dcx().emit_err({
                errors::ProcMacroDerivePanicked {
                    span,
                    message: e.as_str().map(|message| errors::ProcMacroDerivePanickedHelp {
                        message: message.into(),
                    }),
                }
            });
            Err(())
        }
    }
}
/// Stores the context necessary to expand a derive proc macro via a query.
///
/// The query provider cannot receive the `ExtCtxt` or the proc-macro client
/// through its key, so they are smuggled to it through a scoped thread-local
/// (`DERIVE_EXPAND_CTX`) for the duration of the query invocation.
struct QueryDeriveExpandCtx {
    /// Type-erased version of `&mut ExtCtxt`. The lifetime is erased so the
    /// pointer can be stored in the `static` thread-local; `enter`/`with`
    /// guarantee it is only dereferenced while the borrow is live.
    expansion_ctx: *mut (),
    client: DeriveClient,
}

impl QueryDeriveExpandCtx {
    /// Store the extension context and the client into the thread local value.
    /// It will be accessible via the `with` method while `f` is active.
    fn enter<F, R>(ecx: &mut ExtCtxt<'_>, client: DeriveClient, f: F) -> R
    where
        F: FnOnce() -> R,
    {
        // We need erasure to get rid of the lifetime
        let ctx = Self { expansion_ctx: ecx as *mut _ as *mut (), client };
        // Pass `f` directly instead of the previous redundant `|| f()` closure.
        DERIVE_EXPAND_CTX.set(&ctx, f)
    }

    /// Accesses the thread local value of the derive expansion context.
    /// Must be called while the `enter` function is active.
    fn with<F, R>(f: F) -> R
    where
        F: for<'a, 'b> FnOnce(&'b mut ExtCtxt<'a>, DeriveClient) -> R,
    {
        DERIVE_EXPAND_CTX.with(|ctx| {
            let ectx = {
                let casted = ctx.expansion_ctx.cast::<ExtCtxt<'_>>();
                // SAFETY: We can only get the value from `with` while the `enter` function
                // is active (on the callstack), and that function's signature ensures that the
                // lifetime is valid.
                // If `with` is called at some other time, it will panic due to usage of
                // `scoped_tls::with`.
                unsafe { casted.as_mut().unwrap() }
            };
            f(ectx, ctx.client)
        })
    }
}

// When we invoke a query to expand a derive proc macro, we need to provide it with the expansion
// context and derive Client. We do that using a thread-local.
scoped_tls::scoped_thread_local!(static DERIVE_EXPAND_CTX: QueryDeriveExpandCtx);

View file

@ -26,7 +26,7 @@ use rustc_hir::def_id::LocalDefId;
use rustc_hir::{
Attribute, ImplItemKind, ItemKind as HirItem, Node as HirNode, TraitItemKind, intravisit,
};
use rustc_middle::dep_graph::{DepNode, DepNodeExt, label_strs};
use rustc_middle::dep_graph::{DepNode, DepNodeExt, dep_kind_from_label, label_strs};
use rustc_middle::hir::nested_filter;
use rustc_middle::ty::TyCtxt;
use rustc_span::{Span, Symbol, sym};
@ -357,17 +357,6 @@ impl<'tcx> DirtyCleanVisitor<'tcx> {
}
}
fn assert_loaded_from_disk(&self, item_span: Span, dep_node: DepNode) {
debug!("assert_loaded_from_disk({:?})", dep_node);
if !self.tcx.dep_graph.debug_was_loaded_from_disk(dep_node) {
let dep_node_str = self.dep_node_str(&dep_node);
self.tcx
.dcx()
.emit_err(errors::NotLoaded { span: item_span, dep_node_str: &dep_node_str });
}
}
fn check_item(&mut self, item_id: LocalDefId) {
let item_span = self.tcx.def_span(item_id.to_def_id());
let def_path_hash = self.tcx.def_path_hash(item_id.to_def_id());
@ -385,8 +374,27 @@ impl<'tcx> DirtyCleanVisitor<'tcx> {
self.assert_dirty(item_span, dep_node);
}
for label in assertion.loaded_from_disk.items().into_sorted_stable_ord() {
let dep_node = DepNode::from_label_string(self.tcx, label, def_path_hash).unwrap();
self.assert_loaded_from_disk(item_span, dep_node);
match DepNode::from_label_string(self.tcx, label, def_path_hash) {
Ok(dep_node) => {
if !self.tcx.dep_graph.debug_was_loaded_from_disk(dep_node) {
let dep_node_str = self.dep_node_str(&dep_node);
self.tcx.dcx().emit_err(errors::NotLoaded {
span: item_span,
dep_node_str: &dep_node_str,
});
}
}
// Opaque/unit hash, we only know the dep kind
Err(()) => {
let dep_kind = dep_kind_from_label(label);
if !self.tcx.dep_graph.debug_dep_kind_was_loaded_from_disk(dep_kind) {
self.tcx.dcx().emit_err(errors::NotLoaded {
span: item_span,
dep_node_str: &label,
});
}
}
}
}
}
}

View file

@ -889,6 +889,7 @@ pub static DEFAULT_QUERY_PROVIDERS: LazyLock<Providers> = LazyLock::new(|| {
providers.queries.env_var_os = env_var_os;
limits::provide(&mut providers.queries);
proc_macro_decls::provide(&mut providers.queries);
rustc_expand::provide(&mut providers.queries);
rustc_const_eval::provide(providers);
rustc_middle::hir::provide(&mut providers.queries);
rustc_borrowck::provide(&mut providers.queries);

View file

@ -119,6 +119,7 @@ macro_rules! arena_types {
[decode] specialization_graph: rustc_middle::traits::specialization_graph::Graph,
[] crate_inherent_impls: rustc_middle::ty::CrateInherentImpls,
[] hir_owner_nodes: rustc_hir::OwnerNodes<'tcx>,
[decode] token_stream: rustc_ast::tokenstream::TokenStream,
]);
)
}

View file

@ -176,6 +176,12 @@ impl DepNodeExt for DepNode {
}
}
/// Maps a query label to its DepKind. Panics if a query with the given label does not exist.
pub fn dep_kind_from_label(label: &str) -> DepKind {
    // Same lookup as `dep_kind_from_label_string`, but with the failure case
    // spelled out explicitly rather than funnelled through `unwrap_or_else`.
    match dep_kind_from_label_string(label) {
        Ok(kind) => kind,
        Err(_) => panic!("Query label {label} does not exist"),
    }
}
impl<'tcx> DepNodeParams<TyCtxt<'tcx>> for () {
#[inline(always)]
fn fingerprint_style() -> FingerprintStyle {

View file

@ -8,7 +8,7 @@ use crate::ty::{self, TyCtxt};
#[macro_use]
mod dep_node;
pub use dep_node::{DepKind, DepNode, DepNodeExt, dep_kinds, label_strs};
pub use dep_node::{DepKind, DepNode, DepNodeExt, dep_kind_from_label, dep_kinds, label_strs};
pub(crate) use dep_node::{make_compile_codegen_unit, make_compile_mono_item, make_metadata};
pub use rustc_query_system::dep_graph::debug::{DepNodeFilter, EdgeFilter};
pub use rustc_query_system::dep_graph::{

View file

@ -2,6 +2,7 @@ use std::ffi::OsStr;
use std::intrinsics::transmute_unchecked;
use std::mem::MaybeUninit;
use rustc_ast::tokenstream::TokenStream;
use rustc_span::ErrorGuaranteed;
use rustc_span::source_map::Spanned;
@ -188,6 +189,10 @@ impl EraseType
>()];
}
// `Result<&TokenStream, ()>` is the return type of the `derive_macro_expansion`
// query; the query system requires every result type to be erasable to a
// fixed-size byte array of the same size.
impl EraseType for Result<&'_ TokenStream, ()> {
    type Result = [u8; size_of::<Result<&'static TokenStream, ()>>()];
}
impl<T> EraseType for Option<&'_ T> {
type Result = [u8; size_of::<Option<&'static ()>>()];
}

View file

@ -2,11 +2,12 @@
use std::ffi::OsStr;
use rustc_ast::tokenstream::TokenStream;
use rustc_hir::def_id::{CrateNum, DefId, LOCAL_CRATE, LocalDefId, LocalModDefId, ModDefId};
use rustc_hir::hir_id::{HirId, OwnerId};
use rustc_query_system::dep_graph::DepNodeIndex;
use rustc_query_system::query::{DefIdCache, DefaultCache, SingleCache, VecCache};
use rustc_span::{DUMMY_SP, Ident, Span, Symbol};
use rustc_span::{DUMMY_SP, Ident, LocalExpnId, Span, Symbol};
use crate::infer::canonical::CanonicalQueryInput;
use crate::mir::mono::CollectionMode;
@ -616,6 +617,19 @@ impl Key for (LocalDefId, HirId) {
}
}
// Query key for `derive_macro_expansion`: the macro invocation id paired with
// the input token stream fed to the derive macro.
impl<'tcx> Key for (LocalExpnId, &'tcx TokenStream) {
    type Cache<V> = DefaultCache<Self, V>;

    // Point diagnostics for this query at the macro invocation site.
    fn default_span(&self, _tcx: TyCtxt<'_>) -> Span {
        self.0.expn_data().call_site
    }

    #[inline(always)]
    fn key_as_def_id(&self) -> Option<DefId> {
        // The key identifies an expansion, not an item definition.
        None
    }
}
impl<'tcx> Key for (ValidityRequirement, ty::PseudoCanonicalInput<'tcx, Ty<'tcx>>) {
type Cache<V> = DefaultCache<Self, V>;

View file

@ -70,6 +70,7 @@ use std::sync::Arc;
use rustc_abi::Align;
use rustc_arena::TypedArena;
use rustc_ast::expand::allocator::AllocatorKind;
use rustc_ast::tokenstream::TokenStream;
use rustc_data_structures::fx::{FxIndexMap, FxIndexSet};
use rustc_data_structures::sorted_map::SortedMap;
use rustc_data_structures::steal::Steal;
@ -96,7 +97,7 @@ use rustc_session::cstore::{
use rustc_session::lint::LintExpectationId;
use rustc_span::def_id::LOCAL_CRATE;
use rustc_span::source_map::Spanned;
use rustc_span::{DUMMY_SP, Span, Symbol};
use rustc_span::{DUMMY_SP, LocalExpnId, Span, Symbol};
use rustc_target::spec::PanicStrategy;
use {rustc_abi as abi, rustc_ast as ast, rustc_hir as hir};
@ -163,6 +164,17 @@ pub mod plumbing;
// Queries marked with `fatal_cycle` do not need the latter implementation,
// as they will raise an fatal error on query cycles instead.
rustc_queries! {
/// Caches the expansion of a derive proc macro, e.g. `#[derive(Serialize)]`.
/// The key is:
/// - A unique key corresponding to the invocation of a macro.
/// - Token stream which serves as an input to the macro.
///
/// The output is the token stream generated by the proc macro.
query derive_macro_expansion(key: (LocalExpnId, &'tcx TokenStream)) -> Result<&'tcx TokenStream, ()> {
desc { "expanding a derive (proc) macro" }
cache_on_disk_if { true }
}
/// This exists purely for testing the interactions between delayed bugs and incremental.
query trigger_delayed_bug(key: DefId) {
desc { "triggering a delayed bug for testing incremental" }

View file

@ -785,6 +785,13 @@ impl<'a, 'tcx> Decodable<CacheDecoder<'a, 'tcx>>
}
}
// Token streams cached on disk (derive macro expansions) are decoded via the
// arena-backed `RefDecodable` path so the cache can hand out `&'tcx` references.
impl<'a, 'tcx> Decodable<CacheDecoder<'a, 'tcx>> for &'tcx rustc_ast::tokenstream::TokenStream {
    #[inline]
    fn decode(d: &mut CacheDecoder<'a, 'tcx>) -> Self {
        RefDecodable::decode(d)
    }
}
macro_rules! impl_ref_decoder {
(<$tcx:tt> $($ty:ty,)*) => {
$(impl<'a, $tcx> Decodable<CacheDecoder<'a, $tcx>> for &$tcx [$ty] {

View file

@ -795,6 +795,18 @@ impl<D: Deps> DepGraph<D> {
self.data.as_ref().unwrap().debug_loaded_from_disk.lock().contains(&dep_node)
}
/// Debug helper: returns whether any dep node of the given kind was loaded
/// from the on-disk cache. Used by `#[rustc_clean(loaded_from_disk = ...)]`
/// assertions for queries whose full dep node cannot be reconstructed from a
/// label (only the kind is known).
pub fn debug_dep_kind_was_loaded_from_disk(&self, dep_kind: DepKind) -> bool {
    let data = self.data.as_ref().unwrap();
    let loaded_from_disk = data.debug_loaded_from_disk.lock();
    // We only check if we have a dep node corresponding to the given dep kind.
    #[allow(rustc::potential_query_instability)]
    loaded_from_disk.iter().any(|node| node.kind == dep_kind)
}
#[cfg(debug_assertions)]
#[inline(always)]
pub(crate) fn register_dep_node_debug_str<F>(&self, dep_node: DepNode, debug_str_gen: F)

View file

@ -2278,6 +2278,8 @@ options! {
"set options for branch target identification and pointer authentication on AArch64"),
build_sdylib_interface: bool = (false, parse_bool, [UNTRACKED],
"whether the stable interface is being built"),
cache_proc_macros: bool = (false, parse_bool, [TRACKED],
"cache the results of derive proc macro invocations (potentially unsound!) (default: no"),
cf_protection: CFProtection = (CFProtection::None, parse_cfprotection, [TRACKED],
"instrument control-flow architecture protection"),
check_cfg_all_expected: bool = (false, parse_bool, [UNTRACKED],

View file

@ -1571,3 +1571,9 @@ impl<CTX: HashStableContext> HashStable<CTX> for ExpnId {
hash.hash_stable(ctx, hasher);
}
}
// Needed so `LocalExpnId` can appear in the `derive_macro_expansion` query key:
// hash via the crate-global `ExpnId`, which already hashes stably.
impl<CTX: HashStableContext> HashStable<CTX> for LocalExpnId {
    fn hash_stable(&self, hcx: &mut CTX, hasher: &mut StableHasher) {
        self.to_expn_id().hash_stable(hcx, hasher);
    }
}

View file

@ -0,0 +1,15 @@
extern crate proc_macro;
use proc_macro::TokenStream;
// Test derive macro: ignores its input entirely and always expands to the same
// fixed module. The incremental test only observes whether this expansion was
// re-executed or loaded from the cache, so the output's content is irrelevant.
#[proc_macro_derive(Nothing)]
pub fn derive(_input: TokenStream) -> TokenStream {
    return r#"
pub mod nothing_mod {
pub fn nothing() {
eprintln!("nothing");
}
}
"#
    .parse()
    .unwrap();
}

View file

@ -0,0 +1,18 @@
// This test tests that derive proc macro execution is cached.
//@ proc-macro:derive_nothing.rs
//@ revisions:rpass1 rpass2
//@ compile-flags: -Zquery-dep-graph -Zcache-proc-macros
//@ ignore-backends: gcc
#![feature(rustc_attrs)]
#[macro_use]
extern crate derive_nothing;
#[cfg(any(rpass1, rpass2))]
#[rustc_clean(cfg = "rpass2", loaded_from_disk = "derive_macro_expansion")]
#[derive(Nothing)]
pub struct Foo;
fn main() {}