Auto merge of #142794 - tgross35:rollup-iae7okj, r=tgross35

Rollup of 9 pull requests

Successful merges:

 - rust-lang/rust#142331 (Add `trim_prefix` and `trim_suffix` methods for both `slice` and `str` types.)
 - rust-lang/rust#142491 (Rework #[cold] attribute parser)
 - rust-lang/rust#142494 (Fix missing docs in `rustc_attr_parsing`)
 - rust-lang/rust#142495 (Better template for `#[repr]` attributes)
 - rust-lang/rust#142497 (Fix random failure when JS code is executed when the whole file was not read yet)
 - rust-lang/rust#142575 (Ensure copy* intrinsics also perform the static self-init checks)
 - rust-lang/rust#142650 (Refactor Translator)
 - rust-lang/rust#142713 (mbe: Refactor transcription)
 - rust-lang/rust#142755 (rustdoc: Remove `FormatRenderer::cache`)

r? `@ghost`
`@rustbot` modify labels: rollup
This commit is contained in:
bors 2025-06-20 23:09:48 +00:00
commit 15c701fbc9
46 changed files with 995 additions and 755 deletions

View file

@ -202,6 +202,9 @@ pub enum AttributeKind {
span: Span,
},
/// Represents `#[cold]`.
Cold(Span),
/// Represents `#[rustc_confusables]`.
Confusables {
symbols: ThinVec<Symbol>,

View file

@ -38,3 +38,21 @@ impl<S: Stage> SingleAttributeParser<S> for OptimizeParser {
Some(AttributeKind::Optimize(res, cx.attr_span))
}
}
/// Parser for the `#[cold]` attribute: accepts only the bare word form
/// (`#[cold]` with no arguments) and produces [`AttributeKind::Cold`].
pub(crate) struct ColdParser;
impl<S: Stage> SingleAttributeParser<S> for ColdParser {
const PATH: &[rustc_span::Symbol] = &[sym::cold];
// When `#[cold]` appears more than once, the last occurrence wins...
const ATTRIBUTE_ORDER: AttributeOrder = AttributeOrder::KeepLast;
// ...and the duplicates only produce a warning, not a hard error.
const ON_DUPLICATE: OnDuplicate<S> = OnDuplicate::Warn;
// `#[cold]` takes no arguments, so the diagnostic template is the bare word.
const TEMPLATE: AttributeTemplate = template!(Word);
fn convert(cx: &mut AcceptContext<'_, '_, S>, args: &ArgParser<'_>) -> Option<AttributeKind> {
// Any arguments are an error: report them (pointing at the args span if
// available, otherwise the whole attribute) and drop the attribute.
if !args.no_args() {
cx.expected_no_args(args.span().unwrap_or(cx.attr_span));
return None;
};
Some(AttributeKind::Cold(cx.attr_span))
}
}

View file

@ -87,8 +87,19 @@ pub(crate) trait AttributeParser<S: Stage>: Default + 'static {
/// [`SingleAttributeParser`] can only convert attributes one-to-one, and cannot combine multiple
/// attributes together like is necessary for `#[stable()]` and `#[unstable()]` for example.
pub(crate) trait SingleAttributeParser<S: Stage>: 'static {
/// The single path of the attribute this parser accepts.
///
/// If you need the parser to accept more than one path, use [`AttributeParser`] instead
const PATH: &[Symbol];
/// Configures the precedence of attributes with the same `PATH` on a syntax node.
const ATTRIBUTE_ORDER: AttributeOrder;
/// Configures what to do when the same attribute is
/// applied more than once on the same syntax node.
///
/// [`ATTRIBUTE_ORDER`](Self::ATTRIBUTE_ORDER) specifies which one is assumed to be correct,
/// and this specifies whether to, for example, warn or error on the other one.
const ON_DUPLICATE: OnDuplicate<S>;
/// The template this attribute parser should implement. Used for diagnostics.
@ -98,6 +109,8 @@ pub(crate) trait SingleAttributeParser<S: Stage>: 'static {
fn convert(cx: &mut AcceptContext<'_, '_, S>, args: &ArgParser<'_>) -> Option<AttributeKind>;
}
/// Use in combination with [`SingleAttributeParser`].
/// `Single<T: SingleAttributeParser>` implements [`AttributeParser`].
pub(crate) struct Single<T: SingleAttributeParser<S>, S: Stage>(
PhantomData<(S, T)>,
Option<(AttributeKind, Span)>,
@ -230,6 +243,10 @@ pub(crate) trait CombineAttributeParser<S: Stage>: 'static {
const PATH: &[rustc_span::Symbol];
type Item;
/// A function that converts individual items (of type [`Item`](Self::Item)) into the final attribute.
///
/// For example, individual representations from `#[repr(...)]` attributes into an `AttributeKind::Repr(x)`,
/// where `x` is a vec of these individual reprs.
const CONVERT: ConvertFn<Self::Item>;
/// The template this attribute parser should implement. Used for diagnostics.
@ -242,6 +259,8 @@ pub(crate) trait CombineAttributeParser<S: Stage>: 'static {
) -> impl IntoIterator<Item = Self::Item> + 'c;
}
/// Use in combination with [`CombineAttributeParser`].
/// `Combine<T: CombineAttributeParser>` implements [`AttributeParser`].
pub(crate) struct Combine<T: CombineAttributeParser<S>, S: Stage>(
PhantomData<(S, T)>,
ThinVec<<T as CombineAttributeParser<S>>::Item>,

View file

@ -25,7 +25,8 @@ impl<S: Stage> CombineAttributeParser<S> for ReprParser {
const PATH: &[Symbol] = &[sym::repr];
const CONVERT: ConvertFn<Self::Item> = AttributeKind::Repr;
// FIXME(jdonszelmann): never used
const TEMPLATE: AttributeTemplate = template!(List: "C");
const TEMPLATE: AttributeTemplate =
template!(List: "C | Rust | align(...) | packed(...) | <integer type> | transparent");
fn extend<'c>(
cx: &'c mut AcceptContext<'_, '_, S>,

View file

@ -15,7 +15,7 @@ use rustc_session::Session;
use rustc_span::{DUMMY_SP, ErrorGuaranteed, Span, Symbol, sym};
use crate::attributes::allow_unstable::{AllowConstFnUnstableParser, AllowInternalUnstableParser};
use crate::attributes::codegen_attrs::OptimizeParser;
use crate::attributes::codegen_attrs::{ColdParser, OptimizeParser};
use crate::attributes::confusables::ConfusablesParser;
use crate::attributes::deprecation::DeprecationParser;
use crate::attributes::inline::{InlineParser, RustcForceInlineParser};
@ -106,6 +106,7 @@ attribute_parsers!(
// tidy-alphabetical-start
Single<AsPtrParser>,
Single<ColdParser>,
Single<ConstStabilityIndirectParser>,
Single<DeprecationParser>,
Single<InlineParser>,
@ -234,6 +235,16 @@ impl<'f, 'sess: 'f, S: Stage> AcceptContext<'f, 'sess, S> {
})
}
/// Emit an error that this attribute takes no arguments, pointing at the span
/// where arguments were (unexpectedly) found.
pub(crate) fn expected_no_args(&self, args_span: Span) -> ErrorGuaranteed {
self.emit_err(AttributeParseError {
span: args_span,
attr_span: self.attr_span,
template: self.template.clone(),
attribute: self.attr_path.clone(),
reason: AttributeParseErrorReason::ExpectedNoArgs,
})
}
/// emit an error that a `name = value` pair was expected at this span. The symbol can be given for
/// a nicer error message talking about the specific name that was found lacking a value.
pub(crate) fn expected_name_value(&self, span: Span, name: Option<Symbol>) -> ErrorGuaranteed {

View file

@ -474,6 +474,7 @@ pub(crate) struct UnrecognizedReprHint {
}
pub(crate) enum AttributeParseErrorReason {
ExpectedNoArgs,
ExpectedStringLiteral { byte_string: Option<Span> },
ExpectedSingleArgument,
ExpectedList,
@ -529,6 +530,10 @@ impl<'a, G: EmissionGuarantee> Diagnostic<'a, G> for AttributeParseError {
diag.span_label(self.span, format!("didn't expect a literal here"));
diag.code(E0565);
}
AttributeParseErrorReason::ExpectedNoArgs => {
diag.span_label(self.span, format!("didn't expect any arguments here"));
diag.code(E0565);
}
AttributeParseErrorReason::ExpectedNameValue(None) => {
diag.span_label(
self.span,

View file

@ -14,10 +14,10 @@ use rustc_data_structures::jobserver::{self, Acquired};
use rustc_data_structures::memmap::Mmap;
use rustc_data_structures::profiling::{SelfProfilerRef, VerboseTimingGuard};
use rustc_errors::emitter::Emitter;
use rustc_errors::translation::Translate;
use rustc_errors::translation::Translator;
use rustc_errors::{
Diag, DiagArgMap, DiagCtxt, DiagMessage, ErrCode, FatalError, FluentBundle, Level, MultiSpan,
Style, Suggestions,
Diag, DiagArgMap, DiagCtxt, DiagMessage, ErrCode, FatalError, Level, MultiSpan, Style,
Suggestions,
};
use rustc_fs_util::link_or_copy;
use rustc_hir::def_id::{CrateNum, LOCAL_CRATE};
@ -1889,16 +1889,6 @@ impl SharedEmitter {
}
}
impl Translate for SharedEmitter {
fn fluent_bundle(&self) -> Option<&FluentBundle> {
None
}
fn fallback_fluent_bundle(&self) -> &FluentBundle {
panic!("shared emitter attempted to translate a diagnostic");
}
}
impl Emitter for SharedEmitter {
fn emit_diagnostic(
&mut self,
@ -1932,6 +1922,10 @@ impl Emitter for SharedEmitter {
fn source_map(&self) -> Option<&SourceMap> {
None
}
fn translator(&self) -> &Translator {
panic!("shared emitter attempted to translate a diagnostic");
}
}
impl SharedEmitterMain {

View file

@ -4,7 +4,7 @@ use rustc_abi::ExternAbi;
use rustc_ast::expand::autodiff_attrs::{AutoDiffAttrs, DiffActivity, DiffMode};
use rustc_ast::{LitKind, MetaItem, MetaItemInner, attr};
use rustc_attr_data_structures::{
AttributeKind, InlineAttr, InstructionSetAttr, OptimizeAttr, find_attr,
AttributeKind, InlineAttr, InstructionSetAttr, OptimizeAttr, ReprAttr, find_attr,
};
use rustc_hir::def::DefKind;
use rustc_hir::def_id::{DefId, LOCAL_CRATE, LocalDefId};
@ -110,8 +110,20 @@ fn codegen_fn_attrs(tcx: TyCtxt<'_>, did: LocalDefId) -> CodegenFnAttrs {
}
};
if let hir::Attribute::Parsed(AttributeKind::Align { align, .. }) = attr {
codegen_fn_attrs.alignment = Some(*align);
if let hir::Attribute::Parsed(p) = attr {
match p {
AttributeKind::Repr(reprs) => {
codegen_fn_attrs.alignment = reprs
.iter()
.filter_map(
|(r, _)| if let ReprAttr::ReprAlign(x) = r { Some(*x) } else { None },
)
.max();
}
AttributeKind::Cold(_) => codegen_fn_attrs.flags |= CodegenFnAttrFlags::COLD,
AttributeKind::Align { align, .. } => codegen_fn_attrs.alignment = Some(*align),
_ => {}
}
}
let Some(Ident { name, .. }) = attr.ident() else {
@ -119,7 +131,6 @@ fn codegen_fn_attrs(tcx: TyCtxt<'_>, did: LocalDefId) -> CodegenFnAttrs {
};
match name {
sym::cold => codegen_fn_attrs.flags |= CodegenFnAttrFlags::COLD,
sym::rustc_allocator => codegen_fn_attrs.flags |= CodegenFnAttrFlags::ALLOCATOR,
sym::ffi_pure => codegen_fn_attrs.flags |= CodegenFnAttrFlags::FFI_PURE,
sym::ffi_const => codegen_fn_attrs.flags |= CodegenFnAttrFlags::FFI_CONST,

View file

@ -1412,8 +1412,13 @@ impl<'tcx, M: Machine<'tcx>> InterpCx<'tcx, M> {
let src_alloc = self.get_alloc_raw(src_alloc_id)?;
let src_range = alloc_range(src_offset, size);
assert!(!self.memory.validation_in_progress.get(), "we can't be copying during validation");
// For the overlapping case, it is crucial that we trigger the read hook
// Trigger read hooks.
// For the overlapping case, it is crucial that we trigger the read hooks
// before the write hook -- the aliasing model cares about the order.
if let Ok((alloc_id, ..)) = self.ptr_try_get_alloc_id(src, size.bytes() as i64) {
M::before_alloc_read(self, alloc_id)?;
}
M::before_memory_read(
tcx,
&self.machine,

View file

@ -38,6 +38,7 @@ use rustc_data_structures::profiling::{
};
use rustc_errors::emitter::stderr_destination;
use rustc_errors::registry::Registry;
use rustc_errors::translation::Translator;
use rustc_errors::{ColorConfig, DiagCtxt, ErrCode, FatalError, PResult, markdown};
use rustc_feature::find_gated_cfg;
// This avoids a false positive with `-Wunused_crate_dependencies`.
@ -109,6 +110,10 @@ use crate::session_diagnostics::{
rustc_fluent_macro::fluent_messages! { "../messages.ftl" }
/// Build the default `Translator`: no user-requested locale bundle, with the
/// fallback built lazily from the compiler's default locale resources
/// (directionality markers disabled).
pub fn default_translator() -> Translator {
Translator::with_fallback_bundle(DEFAULT_LOCALE_RESOURCES.to_vec(), false)
}
pub static DEFAULT_LOCALE_RESOURCES: &[&str] = &[
// tidy-alphabetical-start
crate::DEFAULT_LOCALE_RESOURCE,
@ -1413,11 +1418,10 @@ fn report_ice(
extra_info: fn(&DiagCtxt),
using_internal_features: &AtomicBool,
) {
let fallback_bundle =
rustc_errors::fallback_fluent_bundle(crate::DEFAULT_LOCALE_RESOURCES.to_vec(), false);
let translator = default_translator();
let emitter = Box::new(rustc_errors::emitter::HumanEmitter::new(
stderr_destination(rustc_errors::ColorConfig::Auto),
fallback_bundle,
translator,
));
let dcx = rustc_errors::DiagCtxt::new(emitter);
let dcx = dcx.handle();

View file

@ -18,7 +18,7 @@ pub use fluent_bundle::{self, FluentArgs, FluentError, FluentValue};
use fluent_syntax::parser::ParserError;
use icu_provider_adapters::fallback::{LocaleFallbackProvider, LocaleFallbacker};
use intl_memoizer::concurrent::IntlLangMemoizer;
use rustc_data_structures::sync::IntoDynSyncSend;
use rustc_data_structures::sync::{DynSend, IntoDynSyncSend};
use rustc_macros::{Decodable, Encodable};
use rustc_span::Span;
use smallvec::SmallVec;
@ -204,16 +204,16 @@ fn register_functions(bundle: &mut FluentBundle) {
/// Type alias for the result of `fallback_fluent_bundle` - a reference-counted pointer to a lazily
/// evaluated fluent bundle.
pub type LazyFallbackBundle = Arc<LazyLock<FluentBundle, impl FnOnce() -> FluentBundle>>;
pub type LazyFallbackBundle =
Arc<LazyLock<FluentBundle, Box<dyn FnOnce() -> FluentBundle + DynSend>>>;
/// Return the default `FluentBundle` with standard "en-US" diagnostic messages.
#[instrument(level = "trace", skip(resources))]
#[define_opaque(LazyFallbackBundle)]
pub fn fallback_fluent_bundle(
resources: Vec<&'static str>,
with_directionality_markers: bool,
) -> LazyFallbackBundle {
Arc::new(LazyLock::new(move || {
Arc::new(LazyLock::new(Box::new(move || {
let mut fallback_bundle = new_bundle(vec![langid!("en-US")]);
register_functions(&mut fallback_bundle);
@ -228,7 +228,7 @@ pub fn fallback_fluent_bundle(
}
fallback_bundle
}))
})))
}
/// Identifier for the Fluent message/attribute corresponding to a diagnostic message.

View file

@ -15,17 +15,15 @@ use rustc_span::source_map::SourceMap;
use crate::emitter::FileWithAnnotatedLines;
use crate::registry::Registry;
use crate::snippet::Line;
use crate::translation::{Translate, to_fluent_args};
use crate::translation::{Translator, to_fluent_args};
use crate::{
CodeSuggestion, DiagInner, DiagMessage, Emitter, ErrCode, FluentBundle, LazyFallbackBundle,
Level, MultiSpan, Style, Subdiag,
CodeSuggestion, DiagInner, DiagMessage, Emitter, ErrCode, Level, MultiSpan, Style, Subdiag,
};
/// Generates diagnostics using annotate-snippet
pub struct AnnotateSnippetEmitter {
source_map: Option<Arc<SourceMap>>,
fluent_bundle: Option<Arc<FluentBundle>>,
fallback_bundle: LazyFallbackBundle,
translator: Translator,
/// If true, hides the longer explanation text
short_message: bool,
@ -35,16 +33,6 @@ pub struct AnnotateSnippetEmitter {
macro_backtrace: bool,
}
impl Translate for AnnotateSnippetEmitter {
fn fluent_bundle(&self) -> Option<&FluentBundle> {
self.fluent_bundle.as_deref()
}
fn fallback_fluent_bundle(&self) -> &FluentBundle {
&self.fallback_bundle
}
}
impl Emitter for AnnotateSnippetEmitter {
/// The entry point for the diagnostics generation
fn emit_diagnostic(&mut self, mut diag: DiagInner, _registry: &Registry) {
@ -78,6 +66,10 @@ impl Emitter for AnnotateSnippetEmitter {
fn should_show_explain(&self) -> bool {
!self.short_message
}
fn translator(&self) -> &Translator {
&self.translator
}
}
/// Provides the source string for the given `line` of `file`
@ -104,19 +96,11 @@ fn annotation_level_for_level(level: Level) -> annotate_snippets::Level {
impl AnnotateSnippetEmitter {
pub fn new(
source_map: Option<Arc<SourceMap>>,
fluent_bundle: Option<Arc<FluentBundle>>,
fallback_bundle: LazyFallbackBundle,
translator: Translator,
short_message: bool,
macro_backtrace: bool,
) -> Self {
Self {
source_map,
fluent_bundle,
fallback_bundle,
short_message,
ui_testing: false,
macro_backtrace,
}
Self { source_map, translator, short_message, ui_testing: false, macro_backtrace }
}
/// Allows to modify `Self` to enable or disable the `ui_testing` flag.
@ -137,7 +121,7 @@ impl AnnotateSnippetEmitter {
_children: &[Subdiag],
_suggestions: &[CodeSuggestion],
) {
let message = self.translate_messages(messages, args);
let message = self.translator.translate_messages(messages, args);
if let Some(source_map) = &self.source_map {
// Make sure our primary file comes first
let primary_lo = if let Some(primary_span) = msp.primary_span().as_ref() {

View file

@ -35,10 +35,10 @@ use crate::snippet::{
};
use crate::styled_buffer::StyledBuffer;
use crate::timings::TimingRecord;
use crate::translation::{Translate, to_fluent_args};
use crate::translation::{Translator, to_fluent_args};
use crate::{
CodeSuggestion, DiagInner, DiagMessage, ErrCode, FluentBundle, LazyFallbackBundle, Level,
MultiSpan, Subdiag, SubstitutionHighlight, SuggestionStyle, TerminalUrl,
CodeSuggestion, DiagInner, DiagMessage, ErrCode, Level, MultiSpan, Subdiag,
SubstitutionHighlight, SuggestionStyle, TerminalUrl,
};
/// Default column width, used in tests and when terminal dimensions cannot be determined.
@ -175,7 +175,7 @@ const ANONYMIZED_LINE_NUM: &str = "LL";
pub type DynEmitter = dyn Emitter + DynSend;
/// Emitter trait for emitting errors and other structured information.
pub trait Emitter: Translate {
pub trait Emitter {
/// Emit a structured diagnostic.
fn emit_diagnostic(&mut self, diag: DiagInner, registry: &Registry);
@ -212,6 +212,8 @@ pub trait Emitter: Translate {
fn source_map(&self) -> Option<&SourceMap>;
fn translator(&self) -> &Translator;
/// Formats the substitutions of the primary_span
///
/// There are a lot of conditions to this method, but in short:
@ -224,13 +226,17 @@ pub trait Emitter: Translate {
/// * If the current `DiagInner` has multiple suggestions,
/// we leave `primary_span` and the suggestions untouched.
fn primary_span_formatted(
&mut self,
&self,
primary_span: &mut MultiSpan,
suggestions: &mut Vec<CodeSuggestion>,
fluent_args: &FluentArgs<'_>,
) {
if let Some((sugg, rest)) = suggestions.split_first() {
let msg = self.translate_message(&sugg.msg, fluent_args).map_err(Report::new).unwrap();
let msg = self
.translator()
.translate_message(&sugg.msg, fluent_args)
.map_err(Report::new)
.unwrap();
if rest.is_empty()
// ^ if there is only one suggestion
// don't display multi-suggestions as labels
@ -491,16 +497,6 @@ pub trait Emitter: Translate {
}
}
impl Translate for HumanEmitter {
fn fluent_bundle(&self) -> Option<&FluentBundle> {
self.fluent_bundle.as_deref()
}
fn fallback_fluent_bundle(&self) -> &FluentBundle {
&self.fallback_bundle
}
}
impl Emitter for HumanEmitter {
fn source_map(&self) -> Option<&SourceMap> {
self.sm.as_deref()
@ -538,25 +534,41 @@ impl Emitter for HumanEmitter {
fn supports_color(&self) -> bool {
self.dst.supports_color()
}
fn translator(&self) -> &Translator {
&self.translator
}
}
/// An emitter that does nothing when emitting a non-fatal diagnostic.
/// Fatal diagnostics are forwarded to `fatal_emitter` to avoid silent
/// failures of rustc, as witnessed e.g. in issue #89358.
pub struct SilentEmitter {
pub struct FatalOnlyEmitter {
pub fatal_emitter: Box<dyn Emitter + DynSend>,
pub fatal_note: Option<String>,
pub emit_fatal_diagnostic: bool,
}
impl Translate for SilentEmitter {
fn fluent_bundle(&self) -> Option<&FluentBundle> {
impl Emitter for FatalOnlyEmitter {
fn source_map(&self) -> Option<&SourceMap> {
None
}
fn fallback_fluent_bundle(&self) -> &FluentBundle {
self.fatal_emitter.fallback_fluent_bundle()
fn emit_diagnostic(&mut self, mut diag: DiagInner, registry: &Registry) {
if diag.level == Level::Fatal {
if let Some(fatal_note) = &self.fatal_note {
diag.sub(Level::Note, fatal_note.clone(), MultiSpan::new());
}
self.fatal_emitter.emit_diagnostic(diag, registry);
}
}
fn translator(&self) -> &Translator {
self.fatal_emitter.translator()
}
}
pub struct SilentEmitter {
pub translator: Translator,
}
impl Emitter for SilentEmitter {
@ -564,13 +576,10 @@ impl Emitter for SilentEmitter {
None
}
fn emit_diagnostic(&mut self, mut diag: DiagInner, registry: &Registry) {
if self.emit_fatal_diagnostic && diag.level == Level::Fatal {
if let Some(fatal_note) = &self.fatal_note {
diag.sub(Level::Note, fatal_note.clone(), MultiSpan::new());
}
self.fatal_emitter.emit_diagnostic(diag, registry);
}
fn emit_diagnostic(&mut self, _diag: DiagInner, _registry: &Registry) {}
fn translator(&self) -> &Translator {
&self.translator
}
}
@ -615,9 +624,8 @@ pub struct HumanEmitter {
#[setters(skip)]
dst: IntoDynSyncSend<Destination>,
sm: Option<Arc<SourceMap>>,
fluent_bundle: Option<Arc<FluentBundle>>,
#[setters(skip)]
fallback_bundle: LazyFallbackBundle,
translator: Translator,
short_message: bool,
ui_testing: bool,
ignored_directories_in_source_blocks: Vec<String>,
@ -637,12 +645,11 @@ pub(crate) struct FileWithAnnotatedLines {
}
impl HumanEmitter {
pub fn new(dst: Destination, fallback_bundle: LazyFallbackBundle) -> HumanEmitter {
pub fn new(dst: Destination, translator: Translator) -> HumanEmitter {
HumanEmitter {
dst: IntoDynSyncSend(dst),
sm: None,
fluent_bundle: None,
fallback_bundle,
translator,
short_message: false,
ui_testing: false,
ignored_directories_in_source_blocks: Vec::new(),
@ -1433,7 +1440,7 @@ impl HumanEmitter {
// very *weird* formats
// see?
for (text, style) in msgs.iter() {
let text = self.translate_message(text, args).map_err(Report::new).unwrap();
let text = self.translator.translate_message(text, args).map_err(Report::new).unwrap();
let text = &normalize_whitespace(&text);
let lines = text.split('\n').collect::<Vec<_>>();
if lines.len() > 1 {
@ -1528,7 +1535,8 @@ impl HumanEmitter {
}
let mut line = 0;
for (text, style) in msgs.iter() {
let text = self.translate_message(text, args).map_err(Report::new).unwrap();
let text =
self.translator.translate_message(text, args).map_err(Report::new).unwrap();
// Account for newlines to align output to its label.
for text in normalize_whitespace(&text).lines() {
buffer.append(
@ -1560,7 +1568,7 @@ impl HumanEmitter {
.into_iter()
.filter_map(|label| match label.label {
Some(msg) if label.is_primary => {
let text = self.translate_message(&msg, args).ok()?;
let text = self.translator.translate_message(&msg, args).ok()?;
if !text.trim().is_empty() { Some(text.to_string()) } else { None }
}
_ => None,
@ -3104,7 +3112,11 @@ impl FileWithAnnotatedLines {
let label = label.as_ref().map(|m| {
normalize_whitespace(
&emitter.translate_message(m, args).map_err(Report::new).unwrap(),
&emitter
.translator()
.translate_message(m, args)
.map_err(Report::new)
.unwrap(),
)
});

View file

@ -32,11 +32,8 @@ use crate::emitter::{
};
use crate::registry::Registry;
use crate::timings::{TimingRecord, TimingSection};
use crate::translation::{Translate, to_fluent_args};
use crate::{
CodeSuggestion, FluentBundle, LazyFallbackBundle, MultiSpan, SpanLabel, Subdiag, Suggestions,
TerminalUrl,
};
use crate::translation::{Translator, to_fluent_args};
use crate::{CodeSuggestion, MultiSpan, SpanLabel, Subdiag, Suggestions, TerminalUrl};
#[cfg(test)]
mod tests;
@ -47,9 +44,8 @@ pub struct JsonEmitter {
dst: IntoDynSyncSend<Box<dyn Write + Send>>,
#[setters(skip)]
sm: Option<Arc<SourceMap>>,
fluent_bundle: Option<Arc<FluentBundle>>,
#[setters(skip)]
fallback_bundle: LazyFallbackBundle,
translator: Translator,
#[setters(skip)]
pretty: bool,
ui_testing: bool,
@ -67,7 +63,7 @@ impl JsonEmitter {
pub fn new(
dst: Box<dyn Write + Send>,
sm: Option<Arc<SourceMap>>,
fallback_bundle: LazyFallbackBundle,
translator: Translator,
pretty: bool,
json_rendered: HumanReadableErrorType,
color_config: ColorConfig,
@ -75,8 +71,7 @@ impl JsonEmitter {
JsonEmitter {
dst: IntoDynSyncSend(dst),
sm,
fluent_bundle: None,
fallback_bundle,
translator,
pretty,
ui_testing: false,
ignored_directories_in_source_blocks: Vec::new(),
@ -110,16 +105,6 @@ enum EmitTyped<'a> {
UnusedExtern(UnusedExterns<'a>),
}
impl Translate for JsonEmitter {
fn fluent_bundle(&self) -> Option<&FluentBundle> {
self.fluent_bundle.as_deref()
}
fn fallback_fluent_bundle(&self) -> &FluentBundle {
&self.fallback_bundle
}
}
impl Emitter for JsonEmitter {
fn emit_diagnostic(&mut self, diag: crate::DiagInner, registry: &Registry) {
let data = Diagnostic::from_errors_diagnostic(diag, self, registry);
@ -194,6 +179,10 @@ impl Emitter for JsonEmitter {
fn should_show_explain(&self) -> bool {
!self.json_rendered.short()
}
fn translator(&self) -> &Translator {
&self.translator
}
}
// The following data types are provided just for serialisation.
@ -324,7 +313,7 @@ impl Diagnostic {
let args = to_fluent_args(diag.args.iter());
let sugg_to_diag = |sugg: &CodeSuggestion| {
let translated_message =
je.translate_message(&sugg.msg, &args).map_err(Report::new).unwrap();
je.translator.translate_message(&sugg.msg, &args).map_err(Report::new).unwrap();
Diagnostic {
message: translated_message.to_string(),
code: None,
@ -368,7 +357,7 @@ impl Diagnostic {
}
}
let translated_message = je.translate_messages(&diag.messages, &args);
let translated_message = je.translator.translate_messages(&diag.messages, &args);
let code = if let Some(code) = diag.code {
Some(DiagnosticCode {
@ -396,10 +385,9 @@ impl Diagnostic {
ColorConfig::Always | ColorConfig::Auto => dst = Box::new(termcolor::Ansi::new(dst)),
ColorConfig::Never => {}
}
HumanEmitter::new(dst, Arc::clone(&je.fallback_bundle))
HumanEmitter::new(dst, je.translator.clone())
.short_message(short)
.sm(je.sm.clone())
.fluent_bundle(je.fluent_bundle.clone())
.diagnostic_width(je.diagnostic_width)
.macro_backtrace(je.macro_backtrace)
.track_diagnostics(je.track_diagnostics)
@ -430,7 +418,7 @@ impl Diagnostic {
args: &FluentArgs<'_>,
je: &JsonEmitter,
) -> Diagnostic {
let translated_message = je.translate_messages(&subdiag.messages, args);
let translated_message = je.translator.translate_messages(&subdiag.messages, args);
Diagnostic {
message: translated_message.to_string(),
code: None,
@ -454,7 +442,7 @@ impl DiagnosticSpan {
span.is_primary,
span.label
.as_ref()
.map(|m| je.translate_message(m, args).unwrap())
.map(|m| je.translator.translate_message(m, args).unwrap())
.map(|m| m.to_string()),
suggestion,
je,

View file

@ -41,14 +41,14 @@ fn test_positions(code: &str, span: (u32, u32), expected_output: SpanTestData) {
rustc_span::create_default_session_globals_then(|| {
let sm = Arc::new(SourceMap::new(FilePathMapping::empty()));
sm.new_source_file(Path::new("test.rs").to_owned().into(), code.to_owned());
let fallback_bundle =
crate::fallback_fluent_bundle(vec![crate::DEFAULT_LOCALE_RESOURCE], false);
let translator =
Translator::with_fallback_bundle(vec![crate::DEFAULT_LOCALE_RESOURCE], false);
let output = Arc::new(Mutex::new(Vec::new()));
let je = JsonEmitter::new(
Box::new(Shared { data: output.clone() }),
Some(sm),
fallback_bundle,
translator,
true, // pretty
HumanReadableErrorType::Short,
ColorConfig::Never,

View file

@ -748,40 +748,10 @@ impl DiagCtxt {
Self { inner: Lock::new(DiagCtxtInner::new(emitter)) }
}
pub fn make_silent(&self, fatal_note: Option<String>, emit_fatal_diagnostic: bool) {
// An empty type that implements `Emitter` to temporarily swap in place of the real one,
// which will be used in constructing its replacement.
struct FalseEmitter;
impl Emitter for FalseEmitter {
fn emit_diagnostic(&mut self, _: DiagInner, _: &Registry) {
unimplemented!("false emitter must only used during `make_silent`")
}
fn source_map(&self) -> Option<&SourceMap> {
unimplemented!("false emitter must only used during `make_silent`")
}
}
impl translation::Translate for FalseEmitter {
fn fluent_bundle(&self) -> Option<&FluentBundle> {
unimplemented!("false emitter must only used during `make_silent`")
}
fn fallback_fluent_bundle(&self) -> &FluentBundle {
unimplemented!("false emitter must only used during `make_silent`")
}
}
pub fn make_silent(&self) {
let mut inner = self.inner.borrow_mut();
let mut prev_emitter = Box::new(FalseEmitter) as Box<dyn Emitter + DynSend>;
std::mem::swap(&mut inner.emitter, &mut prev_emitter);
let new_emitter = Box::new(emitter::SilentEmitter {
fatal_emitter: prev_emitter,
fatal_note,
emit_fatal_diagnostic,
});
inner.emitter = new_emitter;
let translator = inner.emitter.translator().clone();
inner.emitter = Box::new(emitter::SilentEmitter { translator });
}
pub fn set_emitter(&self, emitter: Box<dyn Emitter + DynSend>) {
@ -1771,7 +1741,12 @@ impl DiagCtxtInner {
args: impl Iterator<Item = DiagArg<'a>>,
) -> String {
let args = crate::translation::to_fluent_args(args);
self.emitter.translate_message(&message, &args).map_err(Report::new).unwrap().to_string()
self.emitter
.translator()
.translate_message(&message, &args)
.map_err(Report::new)
.unwrap()
.to_string()
}
fn eagerly_translate_for_subdiag(

View file

@ -1,3 +1,5 @@
use std::sync::{Arc, LazyLock};
use rustc_data_structures::sync::IntoDynSyncSend;
use rustc_error_messages::fluent_bundle::resolver::errors::{ReferenceKind, ResolverError};
use rustc_error_messages::{DiagMessage, langid};
@ -5,23 +7,9 @@ use rustc_error_messages::{DiagMessage, langid};
use crate::FluentBundle;
use crate::error::{TranslateError, TranslateErrorKind};
use crate::fluent_bundle::*;
use crate::translation::Translate;
use crate::translation::Translator;
struct Dummy {
bundle: FluentBundle,
}
impl Translate for Dummy {
fn fluent_bundle(&self) -> Option<&FluentBundle> {
None
}
fn fallback_fluent_bundle(&self) -> &FluentBundle {
&self.bundle
}
}
fn make_dummy(ftl: &'static str) -> Dummy {
fn make_translator(ftl: &'static str) -> Translator {
let resource = FluentResource::try_new(ftl.into()).expect("Failed to parse an FTL string.");
let langid_en = langid!("en-US");
@ -33,12 +21,15 @@ fn make_dummy(ftl: &'static str) -> Dummy {
bundle.add_resource(resource).expect("Failed to add FTL resources to the bundle.");
Dummy { bundle }
Translator {
fluent_bundle: None,
fallback_fluent_bundle: Arc::new(LazyLock::new(Box::new(|| bundle))),
}
}
#[test]
fn wellformed_fluent() {
let dummy = make_dummy("mir_build_borrow_of_moved_value = borrow of moved value
let translator = make_translator("mir_build_borrow_of_moved_value = borrow of moved value
.label = value moved into `{$name}` here
.occurs_because_label = move occurs because `{$name}` has type `{$ty}` which does not implement the `Copy` trait
.value_borrowed_label = value borrowed here after move
@ -54,7 +45,7 @@ fn wellformed_fluent() {
);
assert_eq!(
dummy.translate_message(&message, &args).unwrap(),
translator.translate_message(&message, &args).unwrap(),
"borrow this binding in the pattern to avoid moving the value"
);
}
@ -66,7 +57,7 @@ fn wellformed_fluent() {
);
assert_eq!(
dummy.translate_message(&message, &args).unwrap(),
translator.translate_message(&message, &args).unwrap(),
"value borrowed here after move"
);
}
@ -78,7 +69,7 @@ fn wellformed_fluent() {
);
assert_eq!(
dummy.translate_message(&message, &args).unwrap(),
translator.translate_message(&message, &args).unwrap(),
"move occurs because `\u{2068}Foo\u{2069}` has type `\u{2068}std::string::String\u{2069}` which does not implement the `Copy` trait"
);
@ -89,7 +80,7 @@ fn wellformed_fluent() {
);
assert_eq!(
dummy.translate_message(&message, &args).unwrap(),
translator.translate_message(&message, &args).unwrap(),
"value moved into `\u{2068}Foo\u{2069}` here"
);
}
@ -98,7 +89,7 @@ fn wellformed_fluent() {
#[test]
fn misformed_fluent() {
let dummy = make_dummy("mir_build_borrow_of_moved_value = borrow of moved value
let translator = make_translator("mir_build_borrow_of_moved_value = borrow of moved value
.label = value moved into `{name}` here
.occurs_because_label = move occurs because `{$oops}` has type `{$ty}` which does not implement the `Copy` trait
.suggestion = borrow this binding in the pattern to avoid moving the value");
@ -112,7 +103,7 @@ fn misformed_fluent() {
Some("value_borrowed_label".into()),
);
let err = dummy.translate_message(&message, &args).unwrap_err();
let err = translator.translate_message(&message, &args).unwrap_err();
assert!(
matches!(
&err,
@ -141,7 +132,7 @@ fn misformed_fluent() {
Some("label".into()),
);
let err = dummy.translate_message(&message, &args).unwrap_err();
let err = translator.translate_message(&message, &args).unwrap_err();
if let TranslateError::Two {
primary: box TranslateError::One { kind: TranslateErrorKind::PrimaryBundleMissing, .. },
fallback: box TranslateError::One { kind: TranslateErrorKind::Fluent { errs }, .. },
@ -168,7 +159,7 @@ fn misformed_fluent() {
Some("occurs_because_label".into()),
);
let err = dummy.translate_message(&message, &args).unwrap_err();
let err = translator.translate_message(&message, &args).unwrap_err();
if let TranslateError::Two {
primary: box TranslateError::One { kind: TranslateErrorKind::PrimaryBundleMissing, .. },
fallback: box TranslateError::One { kind: TranslateErrorKind::Fluent { errs }, .. },

View file

@ -1,8 +1,9 @@
use std::borrow::Cow;
use std::env;
use std::error::Report;
use std::sync::Arc;
pub use rustc_error_messages::FluentArgs;
pub use rustc_error_messages::{FluentArgs, LazyFallbackBundle};
use tracing::{debug, trace};
use crate::error::{TranslateError, TranslateErrorKind};
@ -28,19 +29,33 @@ pub fn to_fluent_args<'iter>(iter: impl Iterator<Item = DiagArg<'iter>>) -> Flue
args
}
pub trait Translate {
/// Return `FluentBundle` with localized diagnostics for the locale requested by the user. If no
/// language was requested by the user then this will be `None` and `fallback_fluent_bundle`
/// should be used.
fn fluent_bundle(&self) -> Option<&FluentBundle>;
#[derive(Clone)]
pub struct Translator {
/// Localized diagnostics for the locale requested by the user. If no language was requested by
/// the user then this will be `None` and `fallback_fluent_bundle` should be used.
pub fluent_bundle: Option<Arc<FluentBundle>>,
/// Return `FluentBundle` with localized diagnostics for the default locale of the compiler.
/// Used when the user has not requested a specific language or when a localized diagnostic is
/// unavailable for the requested locale.
fn fallback_fluent_bundle(&self) -> &FluentBundle;
pub fallback_fluent_bundle: LazyFallbackBundle,
}
impl Translator {
/// Creates a `Translator` that has no user-requested primary bundle and
/// relies entirely on the fallback bundle built from `resources`.
///
/// `with_directionality_markers` is forwarded to
/// `crate::fallback_fluent_bundle` when constructing the fallback bundle.
pub fn with_fallback_bundle(
    resources: Vec<&'static str>,
    with_directionality_markers: bool,
) -> Translator {
    Translator {
        // No locale was requested by the user, so there is no primary bundle;
        // every translation will go through `fallback_fluent_bundle`.
        fluent_bundle: None,
        fallback_fluent_bundle: crate::fallback_fluent_bundle(
            resources,
            with_directionality_markers,
        ),
    }
}
/// Convert `DiagMessage`s to a string, performing translation if necessary.
fn translate_messages(
pub fn translate_messages(
&self,
messages: &[(DiagMessage, Style)],
args: &FluentArgs<'_>,
@ -54,7 +69,7 @@ pub trait Translate {
}
/// Convert a `DiagMessage` to a string, performing translation if necessary.
fn translate_message<'a>(
pub fn translate_message<'a>(
&'a self,
message: &'a DiagMessage,
args: &'a FluentArgs<'_>,
@ -91,7 +106,7 @@ pub trait Translate {
};
try {
match self.fluent_bundle().map(|b| translate_with_bundle(b)) {
match self.fluent_bundle.as_ref().map(|b| translate_with_bundle(b)) {
// The primary bundle was present and translation succeeded
Some(Ok(t)) => t,
@ -102,7 +117,7 @@ pub trait Translate {
primary @ TranslateError::One {
kind: TranslateErrorKind::MessageMissing, ..
},
)) => translate_with_bundle(self.fallback_fluent_bundle())
)) => translate_with_bundle(&self.fallback_fluent_bundle)
.map_err(|fallback| primary.and(fallback))?,
// Always yeet out for errors on debug (unless
@ -118,11 +133,11 @@ pub trait Translate {
// ..otherwise, for end users, an error about this wouldn't be useful or actionable, so
// just hide it and try with the fallback bundle.
Some(Err(primary)) => translate_with_bundle(self.fallback_fluent_bundle())
Some(Err(primary)) => translate_with_bundle(&self.fallback_fluent_bundle)
.map_err(|fallback| primary.and(fallback))?,
// The primary bundle is missing, proceed to the fallback bundle
None => translate_with_bundle(self.fallback_fluent_bundle())
None => translate_with_bundle(&self.fallback_fluent_bundle)
.map_err(|fallback| TranslateError::primary(identifier, args).and(fallback))?,
}
}

View file

@ -9,7 +9,7 @@ use rustc_data_structures::fx::FxHashMap;
use rustc_errors::{Diag, DiagCtxtHandle, PResult, pluralize};
use rustc_parse::lexer::nfc_normalize;
use rustc_parse::parser::ParseNtResult;
use rustc_session::parse::{ParseSess, SymbolGallery};
use rustc_session::parse::ParseSess;
use rustc_span::hygiene::{LocalExpnId, Transparency};
use rustc_span::{
Ident, MacroRulesNormalizedIdent, Span, Symbol, SyntaxContext, sym, with_metavar_spans,
@ -25,20 +25,77 @@ use crate::mbe::macro_parser::NamedMatch::*;
use crate::mbe::metavar_expr::{MetaVarExprConcatElem, RAW_IDENT_ERR};
use crate::mbe::{self, KleeneOp, MetaVarExpr};
// A Marker adds the given mark to the syntax context.
struct Marker(LocalExpnId, Transparency, FxHashMap<SyntaxContext, SyntaxContext>);
/// Context needed to perform transcription of metavariable expressions.
struct TranscrCtx<'psess, 'itp> {
    /// Parse session; used to obtain the diagnostic context (`dcx()`) and the
    /// symbol gallery for `${concat(..)}`-generated identifiers.
    psess: &'psess ParseSess,

    /// Map from metavars to matched tokens
    interp: &'itp FxHashMap<MacroRulesNormalizedIdent, NamedMatch>,

    /// Allow marking spans.
    marker: Marker,

    /// The stack of things yet to be completely expanded.
    ///
    /// We descend into the RHS (`src`), expanding things as we go. This stack contains the things
    /// we have yet to expand/are still expanding. We start the stack off with the whole RHS. The
    /// choice of spacing values doesn't matter.
    stack: SmallVec<[Frame<'itp>; 1]>,

    /// A stack of where we are in the repeat expansion.
    ///
    /// As we descend in the RHS, we will need to be able to match nested sequences of matchers.
    /// `repeats` keeps track of where we are in matching at each level, with the last element
    /// being the most deeply nested sequence. This is used as a stack.
    repeats: Vec<(usize, usize)>,

    /// The resulting token stream from the `TokenTree` we just finished processing.
    ///
    /// At the end, this will contain the full result of transcription, but at arbitrary points
    /// during `transcribe`, `result` will contain subsets of the final result.
    ///
    /// Specifically, as we descend into each TokenTree, we will push the existing results onto the
    /// `result_stack` and clear `results`. We will then produce the results of transcribing the
    /// TokenTree into `results`. Then, as we unwind back out of the `TokenTree`, we will pop the
    /// `result_stack` and append `results` too it to produce the new `results` up to that point.
    ///
    /// Thus, if we try to pop the `result_stack` and it is empty, we have reached the top-level
    /// again, and we are done transcribing.
    result: Vec<TokenTree>,

    /// The in-progress `result` lives at the top of this stack. Each entered `TokenTree` adds a
    /// new entry.
    result_stack: Vec<Vec<TokenTree>>,
}
impl<'psess> TranscrCtx<'psess, '_> {
    /// Span marked with the correct expansion and transparency.
    ///
    /// Takes the span covering the entire delimited group and applies the
    /// current expansion mark via `Marker::mark_span` before returning it.
    fn visited_dspan(&mut self, dspan: DelimSpan) -> Span {
        let mut span = dspan.entire();
        self.marker.mark_span(&mut span);
        span
    }
}
/// A Marker adds the given mark to the syntax context.
struct Marker {
    /// Expansion ID applied when marking a span's syntax context.
    expand_id: LocalExpnId,
    /// Transparency applied together with `expand_id`.
    transparency: Transparency,
    /// Cache mapping unmarked syntax contexts to their marked versions.
    /// Most tokens in a macro body share one syntactic context, so this
    /// cache typically hits nearly always after the first mark.
    cache: FxHashMap<SyntaxContext, SyntaxContext>,
}
impl Marker {
/// Mark a span with the stored expansion ID and transparency.
fn mark_span(&mut self, span: &mut Span) {
// `apply_mark` is a relatively expensive operation, both due to taking hygiene lock, and
// by itself. All tokens in a macro body typically have the same syntactic context, unless
// it's some advanced case with macro-generated macros. So if we cache the marked version
// of that context once, we'll typically have a 100% cache hit rate after that.
let Marker(expn_id, transparency, ref mut cache) = *self;
*span = span.map_ctxt(|ctxt| {
*cache
*self
.cache
.entry(ctxt)
.or_insert_with(|| ctxt.apply_mark(expn_id.to_expn_id(), transparency))
.or_insert_with(|| ctxt.apply_mark(self.expand_id.to_expn_id(), self.transparency))
});
}
}
@ -116,52 +173,36 @@ pub(super) fn transcribe<'a>(
return Ok(TokenStream::default());
}
// We descend into the RHS (`src`), expanding things as we go. This stack contains the things
// we have yet to expand/are still expanding. We start the stack off with the whole RHS. The
// choice of spacing values doesn't matter.
let mut stack: SmallVec<[Frame<'_>; 1]> = smallvec![Frame::new_delimited(
src,
src_span,
DelimSpacing::new(Spacing::Alone, Spacing::Alone)
)];
let mut tscx = TranscrCtx {
psess,
interp,
marker: Marker { expand_id, transparency, cache: Default::default() },
repeats: Vec::new(),
stack: smallvec![Frame::new_delimited(
src,
src_span,
DelimSpacing::new(Spacing::Alone, Spacing::Alone)
)],
result: Vec::new(),
result_stack: Vec::new(),
};
// As we descend in the RHS, we will need to be able to match nested sequences of matchers.
// `repeats` keeps track of where we are in matching at each level, with the last element being
// the most deeply nested sequence. This is used as a stack.
let mut repeats: Vec<(usize, usize)> = Vec::new();
// `result` contains resulting token stream from the TokenTree we just finished processing. At
// the end, this will contain the full result of transcription, but at arbitrary points during
// `transcribe`, `result` will contain subsets of the final result.
//
// Specifically, as we descend into each TokenTree, we will push the existing results onto the
// `result_stack` and clear `results`. We will then produce the results of transcribing the
// TokenTree into `results`. Then, as we unwind back out of the `TokenTree`, we will pop the
// `result_stack` and append `results` too it to produce the new `results` up to that point.
//
// Thus, if we try to pop the `result_stack` and it is empty, we have reached the top-level
// again, and we are done transcribing.
let mut result: Vec<TokenTree> = Vec::new();
let mut result_stack = Vec::new();
let mut marker = Marker(expand_id, transparency, Default::default());
let dcx = psess.dcx();
loop {
// Look at the last frame on the stack.
// If it still has a TokenTree we have not looked at yet, use that tree.
let Some(tree) = stack.last_mut().unwrap().next() else {
let Some(tree) = tscx.stack.last_mut().unwrap().next() else {
// This else-case never produces a value for `tree` (it `continue`s or `return`s).
// Otherwise, if we have just reached the end of a sequence and we can keep repeating,
// go back to the beginning of the sequence.
let frame = stack.last_mut().unwrap();
let frame = tscx.stack.last_mut().unwrap();
if let FrameKind::Sequence { sep, .. } = &frame.kind {
let (repeat_idx, repeat_len) = repeats.last_mut().unwrap();
let (repeat_idx, repeat_len) = tscx.repeats.last_mut().unwrap();
*repeat_idx += 1;
if repeat_idx < repeat_len {
frame.idx = 0;
if let Some(sep) = sep {
result.push(TokenTree::Token(*sep, Spacing::Alone));
tscx.result.push(TokenTree::Token(*sep, Spacing::Alone));
}
continue;
}
@ -170,10 +211,10 @@ pub(super) fn transcribe<'a>(
// We are done with the top of the stack. Pop it. Depending on what it was, we do
// different things. Note that the outermost item must be the delimited, wrapped RHS
// that was passed in originally to `transcribe`.
match stack.pop().unwrap().kind {
match tscx.stack.pop().unwrap().kind {
// Done with a sequence. Pop from repeats.
FrameKind::Sequence { .. } => {
repeats.pop();
tscx.repeats.pop();
}
// We are done processing a Delimited. If this is the top-level delimited, we are
@ -185,15 +226,16 @@ pub(super) fn transcribe<'a>(
if delim == Delimiter::Bracket {
spacing.close = Spacing::Alone;
}
if result_stack.is_empty() {
if tscx.result_stack.is_empty() {
// No results left to compute! We are back at the top-level.
return Ok(TokenStream::new(result));
return Ok(TokenStream::new(tscx.result));
}
// Step back into the parent Delimited.
let tree = TokenTree::Delimited(span, spacing, delim, TokenStream::new(result));
result = result_stack.pop().unwrap();
result.push(tree);
let tree =
TokenTree::Delimited(span, spacing, delim, TokenStream::new(tscx.result));
tscx.result = tscx.result_stack.pop().unwrap();
tscx.result.push(tree);
}
}
continue;
@ -202,223 +244,19 @@ pub(super) fn transcribe<'a>(
// At this point, we know we are in the middle of a TokenTree (the last one on `stack`).
// `tree` contains the next `TokenTree` to be processed.
match tree {
// We are descending into a sequence. We first make sure that the matchers in the RHS
// and the matches in `interp` have the same shape. Otherwise, either the caller or the
// macro writer has made a mistake.
// Replace the sequence with its expansion.
seq @ mbe::TokenTree::Sequence(_, seq_rep) => {
match lockstep_iter_size(seq, interp, &repeats) {
LockstepIterSize::Unconstrained => {
return Err(dcx.create_err(NoSyntaxVarsExprRepeat { span: seq.span() }));
}
LockstepIterSize::Contradiction(msg) => {
// FIXME: this really ought to be caught at macro definition time... It
// happens when two meta-variables are used in the same repetition in a
// sequence, but they come from different sequence matchers and repeat
// different amounts.
return Err(
dcx.create_err(MetaVarsDifSeqMatchers { span: seq.span(), msg })
);
}
LockstepIterSize::Constraint(len, _) => {
// We do this to avoid an extra clone above. We know that this is a
// sequence already.
let mbe::TokenTree::Sequence(sp, seq) = seq else { unreachable!() };
// Is the repetition empty?
if len == 0 {
if seq.kleene.op == KleeneOp::OneOrMore {
// FIXME: this really ought to be caught at macro definition
// time... It happens when the Kleene operator in the matcher and
// the body for the same meta-variable do not match.
return Err(dcx.create_err(MustRepeatOnce { span: sp.entire() }));
}
} else {
// 0 is the initial counter (we have done 0 repetitions so far). `len`
// is the total number of repetitions we should generate.
repeats.push((0, len));
// The first time we encounter the sequence we push it to the stack. It
// then gets reused (see the beginning of the loop) until we are done
// repeating.
stack.push(Frame::new_sequence(
seq_rep,
seq.separator.clone(),
seq.kleene.op,
));
}
}
}
transcribe_sequence(&mut tscx, seq, seq_rep)?;
}
// Replace the meta-var with the matched token tree from the invocation.
&mbe::TokenTree::MetaVar(mut sp, mut original_ident) => {
// Find the matched nonterminal from the macro invocation, and use it to replace
// the meta-var.
//
// We use `Spacing::Alone` everywhere here, because that's the conservative choice
// and spacing of declarative macros is tricky. E.g. in this macro:
// ```
// macro_rules! idents {
// ($($a:ident,)*) => { stringify!($($a)*) }
// }
// ```
// `$a` has no whitespace after it and will be marked `JointHidden`. If you then
// call `idents!(x,y,z,)`, each of `x`, `y`, and `z` will be marked as `Joint`. So
// if you choose to use `$x`'s spacing or the identifier's spacing, you'll end up
// producing "xyz", which is bad because it effectively merges tokens.
// `Spacing::Alone` is the safer option. Fortunately, `space_between` will avoid
// some of the unnecessary whitespace.
let ident = MacroRulesNormalizedIdent::new(original_ident);
if let Some(cur_matched) = lookup_cur_matched(ident, interp, &repeats) {
// We wrap the tokens in invisible delimiters, unless they are already wrapped
// in invisible delimiters with the same `MetaVarKind`. Because some proc
// macros can't handle multiple layers of invisible delimiters of the same
// `MetaVarKind`. This loses some span info, though it hopefully won't matter.
let mut mk_delimited = |mk_span, mv_kind, mut stream: TokenStream| {
if stream.len() == 1 {
let tree = stream.iter().next().unwrap();
if let TokenTree::Delimited(_, _, delim, inner) = tree
&& let Delimiter::Invisible(InvisibleOrigin::MetaVar(mvk)) = delim
&& mv_kind == *mvk
{
stream = inner.clone();
}
}
// Emit as a token stream within `Delimiter::Invisible` to maintain
// parsing priorities.
marker.mark_span(&mut sp);
with_metavar_spans(|mspans| mspans.insert(mk_span, sp));
// Both the open delim and close delim get the same span, which covers the
// `$foo` in the decl macro RHS.
TokenTree::Delimited(
DelimSpan::from_single(sp),
DelimSpacing::new(Spacing::Alone, Spacing::Alone),
Delimiter::Invisible(InvisibleOrigin::MetaVar(mv_kind)),
stream,
)
};
let tt = match cur_matched {
MatchedSingle(ParseNtResult::Tt(tt)) => {
// `tt`s are emitted into the output stream directly as "raw tokens",
// without wrapping them into groups. Other variables are emitted into
// the output stream as groups with `Delimiter::Invisible` to maintain
// parsing priorities.
maybe_use_metavar_location(psess, &stack, sp, tt, &mut marker)
}
MatchedSingle(ParseNtResult::Ident(ident, is_raw)) => {
marker.mark_span(&mut sp);
with_metavar_spans(|mspans| mspans.insert(ident.span, sp));
let kind = token::NtIdent(*ident, *is_raw);
TokenTree::token_alone(kind, sp)
}
MatchedSingle(ParseNtResult::Lifetime(ident, is_raw)) => {
marker.mark_span(&mut sp);
with_metavar_spans(|mspans| mspans.insert(ident.span, sp));
let kind = token::NtLifetime(*ident, *is_raw);
TokenTree::token_alone(kind, sp)
}
MatchedSingle(ParseNtResult::Item(item)) => {
mk_delimited(item.span, MetaVarKind::Item, TokenStream::from_ast(item))
}
MatchedSingle(ParseNtResult::Block(block)) => mk_delimited(
block.span,
MetaVarKind::Block,
TokenStream::from_ast(block),
),
MatchedSingle(ParseNtResult::Stmt(stmt)) => {
let stream = if let StmtKind::Empty = stmt.kind {
// FIXME: Properly collect tokens for empty statements.
TokenStream::token_alone(token::Semi, stmt.span)
} else {
TokenStream::from_ast(stmt)
};
mk_delimited(stmt.span, MetaVarKind::Stmt, stream)
}
MatchedSingle(ParseNtResult::Pat(pat, pat_kind)) => mk_delimited(
pat.span,
MetaVarKind::Pat(*pat_kind),
TokenStream::from_ast(pat),
),
MatchedSingle(ParseNtResult::Expr(expr, kind)) => {
let (can_begin_literal_maybe_minus, can_begin_string_literal) =
match &expr.kind {
ExprKind::Lit(_) => (true, true),
ExprKind::Unary(UnOp::Neg, e)
if matches!(&e.kind, ExprKind::Lit(_)) =>
{
(true, false)
}
_ => (false, false),
};
mk_delimited(
expr.span,
MetaVarKind::Expr {
kind: *kind,
can_begin_literal_maybe_minus,
can_begin_string_literal,
},
TokenStream::from_ast(expr),
)
}
MatchedSingle(ParseNtResult::Literal(lit)) => {
mk_delimited(lit.span, MetaVarKind::Literal, TokenStream::from_ast(lit))
}
MatchedSingle(ParseNtResult::Ty(ty)) => {
let is_path = matches!(&ty.kind, TyKind::Path(None, _path));
mk_delimited(
ty.span,
MetaVarKind::Ty { is_path },
TokenStream::from_ast(ty),
)
}
MatchedSingle(ParseNtResult::Meta(attr_item)) => {
let has_meta_form = attr_item.meta_kind().is_some();
mk_delimited(
attr_item.span(),
MetaVarKind::Meta { has_meta_form },
TokenStream::from_ast(attr_item),
)
}
MatchedSingle(ParseNtResult::Path(path)) => {
mk_delimited(path.span, MetaVarKind::Path, TokenStream::from_ast(path))
}
MatchedSingle(ParseNtResult::Vis(vis)) => {
mk_delimited(vis.span, MetaVarKind::Vis, TokenStream::from_ast(vis))
}
MatchedSeq(..) => {
// We were unable to descend far enough. This is an error.
return Err(dcx.create_err(VarStillRepeating { span: sp, ident }));
}
};
result.push(tt)
} else {
// If we aren't able to match the meta-var, we push it back into the result but
// with modified syntax context. (I believe this supports nested macros).
marker.mark_span(&mut sp);
marker.mark_span(&mut original_ident.span);
result.push(TokenTree::token_joint_hidden(token::Dollar, sp));
result.push(TokenTree::Token(
Token::from_ast_ident(original_ident),
Spacing::Alone,
));
}
&mbe::TokenTree::MetaVar(sp, original_ident) => {
transcribe_metavar(&mut tscx, sp, original_ident)?;
}
// Replace meta-variable expressions with the result of their expansion.
mbe::TokenTree::MetaVarExpr(sp, expr) => {
transcribe_metavar_expr(
dcx,
expr,
interp,
&mut marker,
&repeats,
&mut result,
sp,
&psess.symbol_gallery,
)?;
mbe::TokenTree::MetaVarExpr(dspan, expr) => {
transcribe_metavar_expr(&mut tscx, *dspan, expr)?;
}
// If we are entering a new delimiter, we push its contents to the `stack` to be
@ -427,21 +265,21 @@ pub(super) fn transcribe<'a>(
// jump back out of the Delimited, pop the result_stack and add the new results back to
// the previous results (from outside the Delimited).
&mbe::TokenTree::Delimited(mut span, ref spacing, ref delimited) => {
marker.mark_span(&mut span.open);
marker.mark_span(&mut span.close);
stack.push(Frame::new_delimited(delimited, span, *spacing));
result_stack.push(mem::take(&mut result));
tscx.marker.mark_span(&mut span.open);
tscx.marker.mark_span(&mut span.close);
tscx.stack.push(Frame::new_delimited(delimited, span, *spacing));
tscx.result_stack.push(mem::take(&mut tscx.result));
}
// Nothing much to do here. Just push the token to the result, being careful to
// preserve syntax context.
&mbe::TokenTree::Token(mut token) => {
marker.mark_span(&mut token.span);
tscx.marker.mark_span(&mut token.span);
if let token::NtIdent(ident, _) | token::NtLifetime(ident, _) = &mut token.kind {
marker.mark_span(&mut ident.span);
tscx.marker.mark_span(&mut ident.span);
}
let tt = TokenTree::Token(token, Spacing::Alone);
result.push(tt);
tscx.result.push(tt);
}
// There should be no meta-var declarations in the invocation of a macro.
@ -450,6 +288,305 @@ pub(super) fn transcribe<'a>(
}
}
/// Turn `$(...)*` sequences into tokens.
///
/// Validates the repetition against the matched metavariables and, when the
/// repetition is non-empty, pushes a new sequence frame onto `tscx.stack` and
/// a `(0, len)` counter onto `tscx.repeats` so the main `transcribe` loop can
/// expand it. Returns an error for unconstrained, contradictory, or
/// `+`-with-zero-repetitions sequences.
fn transcribe_sequence<'tx, 'itp>(
    tscx: &mut TranscrCtx<'tx, 'itp>,
    seq: &mbe::TokenTree,
    seq_rep: &'itp mbe::SequenceRepetition,
) -> PResult<'tx, ()> {
    let dcx = tscx.psess.dcx();

    // We are descending into a sequence. We first make sure that the matchers in the RHS
    // and the matches in `interp` have the same shape. Otherwise, either the caller or the
    // macro writer has made a mistake.
    match lockstep_iter_size(seq, tscx.interp, &tscx.repeats) {
        LockstepIterSize::Unconstrained => {
            // No metavariable in the repetition constrains its length.
            return Err(dcx.create_err(NoSyntaxVarsExprRepeat { span: seq.span() }));
        }

        LockstepIterSize::Contradiction(msg) => {
            // FIXME: this really ought to be caught at macro definition time... It
            // happens when two meta-variables are used in the same repetition in a
            // sequence, but they come from different sequence matchers and repeat
            // different amounts.
            return Err(dcx.create_err(MetaVarsDifSeqMatchers { span: seq.span(), msg }));
        }

        LockstepIterSize::Constraint(len, _) => {
            // We do this to avoid an extra clone above. We know that this is a
            // sequence already.
            let mbe::TokenTree::Sequence(sp, seq) = seq else { unreachable!() };

            // Is the repetition empty?
            if len == 0 {
                if seq.kleene.op == KleeneOp::OneOrMore {
                    // FIXME: this really ought to be caught at macro definition
                    // time... It happens when the Kleene operator in the matcher and
                    // the body for the same meta-variable do not match.
                    return Err(dcx.create_err(MustRepeatOnce { span: sp.entire() }));
                }
            } else {
                // 0 is the initial counter (we have done 0 repetitions so far). `len`
                // is the total number of repetitions we should generate.
                tscx.repeats.push((0, len));

                // The first time we encounter the sequence we push it to the stack. It
                // then gets reused (see the beginning of the loop) until we are done
                // repeating.
                tscx.stack.push(Frame::new_sequence(seq_rep, seq.separator.clone(), seq.kleene.op));
            }
        }
    }

    Ok(())
}
/// Find the matched nonterminal from the macro invocation, and use it to replace
/// the meta-var.
///
/// We use `Spacing::Alone` everywhere here, because that's the conservative choice
/// and spacing of declarative macros is tricky. E.g. in this macro:
/// ```
/// macro_rules! idents {
///     ($($a:ident,)*) => { stringify!($($a)*) }
/// }
/// ```
/// `$a` has no whitespace after it and will be marked `JointHidden`. If you then
/// call `idents!(x,y,z,)`, each of `x`, `y`, and `z` will be marked as `Joint`. So
/// if you choose to use `$x`'s spacing or the identifier's spacing, you'll end up
/// producing "xyz", which is bad because it effectively merges tokens.
/// `Spacing::Alone` is the safer option. Fortunately, `space_between` will avoid
/// some of the unnecessary whitespace.
fn transcribe_metavar<'tx>(
    tscx: &mut TranscrCtx<'tx, '_>,
    mut sp: Span,
    mut original_ident: Ident,
) -> PResult<'tx, ()> {
    let dcx = tscx.psess.dcx();

    let ident = MacroRulesNormalizedIdent::new(original_ident);
    let Some(cur_matched) = lookup_cur_matched(ident, tscx.interp, &tscx.repeats) else {
        // If we aren't able to match the meta-var, we push it back into the result but
        // with modified syntax context. (I believe this supports nested macros).
        tscx.marker.mark_span(&mut sp);
        tscx.marker.mark_span(&mut original_ident.span);
        tscx.result.push(TokenTree::token_joint_hidden(token::Dollar, sp));
        tscx.result.push(TokenTree::Token(Token::from_ast_ident(original_ident), Spacing::Alone));
        return Ok(());
    };

    // We wrap the tokens in invisible delimiters, unless they are already wrapped
    // in invisible delimiters with the same `MetaVarKind`. Because some proc
    // macros can't handle multiple layers of invisible delimiters of the same
    // `MetaVarKind`. This loses some span info, though it hopefully won't matter.
    let mut mk_delimited = |mk_span, mv_kind, mut stream: TokenStream| {
        if stream.len() == 1 {
            let tree = stream.iter().next().unwrap();
            if let TokenTree::Delimited(_, _, delim, inner) = tree
                && let Delimiter::Invisible(InvisibleOrigin::MetaVar(mvk)) = delim
                && mv_kind == *mvk
            {
                // Already wrapped with the same kind; reuse the inner stream.
                stream = inner.clone();
            }
        }

        // Emit as a token stream within `Delimiter::Invisible` to maintain
        // parsing priorities.
        tscx.marker.mark_span(&mut sp);
        with_metavar_spans(|mspans| mspans.insert(mk_span, sp));
        // Both the open delim and close delim get the same span, which covers the
        // `$foo` in the decl macro RHS.
        TokenTree::Delimited(
            DelimSpan::from_single(sp),
            DelimSpacing::new(Spacing::Alone, Spacing::Alone),
            Delimiter::Invisible(InvisibleOrigin::MetaVar(mv_kind)),
            stream,
        )
    };

    // Dispatch on the kind of nonterminal that matched; each arm produces the
    // single `TokenTree` that replaces the metavariable in the output.
    let tt = match cur_matched {
        MatchedSingle(ParseNtResult::Tt(tt)) => {
            // `tt`s are emitted into the output stream directly as "raw tokens",
            // without wrapping them into groups. Other variables are emitted into
            // the output stream as groups with `Delimiter::Invisible` to maintain
            // parsing priorities.
            maybe_use_metavar_location(tscx.psess, &tscx.stack, sp, tt, &mut tscx.marker)
        }
        MatchedSingle(ParseNtResult::Ident(ident, is_raw)) => {
            tscx.marker.mark_span(&mut sp);
            with_metavar_spans(|mspans| mspans.insert(ident.span, sp));
            let kind = token::NtIdent(*ident, *is_raw);
            TokenTree::token_alone(kind, sp)
        }
        MatchedSingle(ParseNtResult::Lifetime(ident, is_raw)) => {
            tscx.marker.mark_span(&mut sp);
            with_metavar_spans(|mspans| mspans.insert(ident.span, sp));
            let kind = token::NtLifetime(*ident, *is_raw);
            TokenTree::token_alone(kind, sp)
        }
        MatchedSingle(ParseNtResult::Item(item)) => {
            mk_delimited(item.span, MetaVarKind::Item, TokenStream::from_ast(item))
        }
        MatchedSingle(ParseNtResult::Block(block)) => {
            mk_delimited(block.span, MetaVarKind::Block, TokenStream::from_ast(block))
        }
        MatchedSingle(ParseNtResult::Stmt(stmt)) => {
            let stream = if let StmtKind::Empty = stmt.kind {
                // FIXME: Properly collect tokens for empty statements.
                TokenStream::token_alone(token::Semi, stmt.span)
            } else {
                TokenStream::from_ast(stmt)
            };
            mk_delimited(stmt.span, MetaVarKind::Stmt, stream)
        }
        MatchedSingle(ParseNtResult::Pat(pat, pat_kind)) => {
            mk_delimited(pat.span, MetaVarKind::Pat(*pat_kind), TokenStream::from_ast(pat))
        }
        MatchedSingle(ParseNtResult::Expr(expr, kind)) => {
            // Record whether the expression can start a (possibly negated)
            // literal, so downstream parsing can make the same decisions it
            // would for the original tokens.
            let (can_begin_literal_maybe_minus, can_begin_string_literal) = match &expr.kind {
                ExprKind::Lit(_) => (true, true),
                ExprKind::Unary(UnOp::Neg, e) if matches!(&e.kind, ExprKind::Lit(_)) => {
                    (true, false)
                }
                _ => (false, false),
            };
            mk_delimited(
                expr.span,
                MetaVarKind::Expr {
                    kind: *kind,
                    can_begin_literal_maybe_minus,
                    can_begin_string_literal,
                },
                TokenStream::from_ast(expr),
            )
        }
        MatchedSingle(ParseNtResult::Literal(lit)) => {
            mk_delimited(lit.span, MetaVarKind::Literal, TokenStream::from_ast(lit))
        }
        MatchedSingle(ParseNtResult::Ty(ty)) => {
            let is_path = matches!(&ty.kind, TyKind::Path(None, _path));
            mk_delimited(ty.span, MetaVarKind::Ty { is_path }, TokenStream::from_ast(ty))
        }
        MatchedSingle(ParseNtResult::Meta(attr_item)) => {
            let has_meta_form = attr_item.meta_kind().is_some();
            mk_delimited(
                attr_item.span(),
                MetaVarKind::Meta { has_meta_form },
                TokenStream::from_ast(attr_item),
            )
        }
        MatchedSingle(ParseNtResult::Path(path)) => {
            mk_delimited(path.span, MetaVarKind::Path, TokenStream::from_ast(path))
        }
        MatchedSingle(ParseNtResult::Vis(vis)) => {
            mk_delimited(vis.span, MetaVarKind::Vis, TokenStream::from_ast(vis))
        }
        MatchedSeq(..) => {
            // We were unable to descend far enough. This is an error.
            return Err(dcx.create_err(VarStillRepeating { span: sp, ident }));
        }
    };
    tscx.result.push(tt);

    Ok(())
}
/// Turn `${expr(...)}` metavariable expressions into tokens.
///
/// Handles `${concat(..)}`, `${count(..)}`, `${ignore(..)}`, `${index(..)}`,
/// and `${len(..)}`; the produced token (if any) is pushed onto `tscx.result`.
fn transcribe_metavar_expr<'tx>(
    tscx: &mut TranscrCtx<'tx, '_>,
    dspan: DelimSpan,
    expr: &MetaVarExpr,
) -> PResult<'tx, ()> {
    let dcx = tscx.psess.dcx();
    let tt = match *expr {
        MetaVarExpr::Concat(ref elements) => metavar_expr_concat(tscx, dspan, elements)?,
        MetaVarExpr::Count(original_ident, depth) => {
            // Count the repetitions of the named metavariable at `depth` and
            // emit the count as an integer literal token.
            let matched = matched_from_ident(dcx, original_ident, tscx.interp)?;
            let count = count_repetitions(dcx, depth, matched, &tscx.repeats, &dspan)?;
            TokenTree::token_alone(
                TokenKind::lit(token::Integer, sym::integer(count), None),
                tscx.visited_dspan(dspan),
            )
        }
        MetaVarExpr::Ignore(original_ident) => {
            // Used to ensure that `original_ident` is present in the LHS
            let _ = matched_from_ident(dcx, original_ident, tscx.interp)?;
            return Ok(());
        }
        MetaVarExpr::Index(depth) => match tscx.repeats.iter().nth_back(depth) {
            // Current iteration index of the repetition `depth` levels up.
            Some((index, _)) => TokenTree::token_alone(
                TokenKind::lit(token::Integer, sym::integer(*index), None),
                tscx.visited_dspan(dspan),
            ),
            None => {
                return Err(out_of_bounds_err(dcx, tscx.repeats.len(), dspan.entire(), "index"));
            }
        },
        MetaVarExpr::Len(depth) => match tscx.repeats.iter().nth_back(depth) {
            // Total length of the repetition `depth` levels up.
            Some((_, length)) => TokenTree::token_alone(
                TokenKind::lit(token::Integer, sym::integer(*length), None),
                tscx.visited_dspan(dspan),
            ),
            None => {
                return Err(out_of_bounds_err(dcx, tscx.repeats.len(), dspan.entire(), "len"));
            }
        },
    };
    tscx.result.push(tt);
    Ok(())
}
/// Handle the `${concat(...)}` metavariable expression.
///
/// Concatenates the string forms of all `elements` (identifiers, literals, and
/// matched metavariables), NFC-normalizes the result, and returns it as a
/// single identifier token. Errors if the concatenation does not form a valid
/// identifier or if a `$var` element cannot be resolved.
fn metavar_expr_concat<'tx>(
    tscx: &mut TranscrCtx<'tx, '_>,
    dspan: DelimSpan,
    elements: &[MetaVarExprConcatElem],
) -> PResult<'tx, TokenTree> {
    let dcx = tscx.psess.dcx();
    let mut concatenated = String::new();
    for element in elements.into_iter() {
        let symbol = match element {
            MetaVarExprConcatElem::Ident(elem) => elem.name,
            MetaVarExprConcatElem::Literal(elem) => *elem,
            MetaVarExprConcatElem::Var(ident) => {
                match matched_from_ident(dcx, *ident, tscx.interp)? {
                    NamedMatch::MatchedSeq(named_matches) => {
                        // Inside a repetition: pick the match for the current
                        // iteration of the innermost repeat.
                        let Some((curr_idx, _)) = tscx.repeats.last() else {
                            return Err(dcx.struct_span_err(dspan.entire(), "invalid syntax"));
                        };
                        match &named_matches[*curr_idx] {
                            // FIXME(c410-f3r) Nested repetitions are unimplemented
                            MatchedSeq(_) => unimplemented!(),
                            MatchedSingle(pnr) => extract_symbol_from_pnr(dcx, pnr, ident.span)?,
                        }
                    }
                    NamedMatch::MatchedSingle(pnr) => {
                        extract_symbol_from_pnr(dcx, pnr, ident.span)?
                    }
                }
            }
        };
        concatenated.push_str(symbol.as_str());
    }
    let symbol = nfc_normalize(&concatenated);
    let concatenated_span = tscx.visited_dspan(dspan);
    if !rustc_lexer::is_ident(symbol.as_str()) {
        return Err(dcx.struct_span_err(
            concatenated_span,
            "`${concat(..)}` is not generating a valid identifier",
        ));
    }
    tscx.psess.symbol_gallery.insert(symbol, concatenated_span);

    // The current implementation marks the span as coming from the macro regardless of
    // contexts of the concatenated identifiers but this behavior may change in the
    // future.
    Ok(TokenTree::Token(
        Token::from_ast_ident(Ident::new(symbol, concatenated_span)),
        Spacing::Alone,
    ))
}
/// Store the metavariable span for this original span into a side table.
/// FIXME: Try to put the metavariable span into `SpanData` instead of a side table (#118517).
/// An optimal encoding for inlined spans will need to be selected to minimize regressions.
@ -671,13 +808,13 @@ fn lockstep_iter_size(
/// * `[ $( ${count(foo, 0)} ),* ]` will be the same as `[ $( ${count(foo)} ),* ]`
/// * `[ $( ${count(foo, 1)} ),* ]` will return an error because `${count(foo, 1)}` is
/// declared inside a single repetition and the index `1` implies two nested repetitions.
fn count_repetitions<'a>(
dcx: DiagCtxtHandle<'a>,
fn count_repetitions<'dx>(
dcx: DiagCtxtHandle<'dx>,
depth_user: usize,
mut matched: &NamedMatch,
repeats: &[(usize, usize)],
sp: &DelimSpan,
) -> PResult<'a, usize> {
) -> PResult<'dx, usize> {
// Recursively count the number of matches in `matched` at given depth
// (or at the top-level of `matched` if no depth is given).
fn count<'a>(depth_curr: usize, depth_max: usize, matched: &NamedMatch) -> PResult<'a, usize> {
@ -762,102 +899,6 @@ fn out_of_bounds_err<'a>(dcx: DiagCtxtHandle<'a>, max: usize, span: Span, ty: &s
dcx.struct_span_err(span, msg)
}
/// Expands a metavariable expression (`${...}`) during macro transcription,
/// pushing any produced tokens onto `result`.
///
/// Supported expressions (one arm per `MetaVarExpr` variant below):
/// * `${concat(..)}` — concatenates identifiers, literals, and metavariable
///   bindings into a single new identifier token.
/// * `${count(ident, depth?)}` — emits an integer token with the number of
///   repetitions of `ident` at the given depth.
/// * `${ignore(ident)}` — validates that `ident` was bound in the LHS but
///   emits nothing.
/// * `${index(depth?)}` / `${len(depth?)}` — emit the current repetition
///   index or the repetition length at the given depth.
fn transcribe_metavar_expr<'a>(
    dcx: DiagCtxtHandle<'a>,
    expr: &MetaVarExpr,
    interp: &FxHashMap<MacroRulesNormalizedIdent, NamedMatch>,
    marker: &mut Marker,
    repeats: &[(usize, usize)],
    result: &mut Vec<TokenTree>,
    sp: &DelimSpan,
    symbol_gallery: &SymbolGallery,
) -> PResult<'a, ()> {
    // Computes the span for emitted tokens, applying the transcription marker
    // so the tokens are attributed to the macro expansion context.
    let mut visited_span = || {
        let mut span = sp.entire();
        marker.mark_span(&mut span);
        span
    };
    match *expr {
        MetaVarExpr::Concat(ref elements) => {
            let mut concatenated = String::new();
            for element in elements.into_iter() {
                let symbol = match element {
                    MetaVarExprConcatElem::Ident(elem) => elem.name,
                    MetaVarExprConcatElem::Literal(elem) => *elem,
                    MetaVarExprConcatElem::Var(ident) => {
                        match matched_from_ident(dcx, *ident, interp)? {
                            NamedMatch::MatchedSeq(named_matches) => {
                                // A sequence binding is only usable here while
                                // inside a repetition: pick the match for the
                                // current iteration.
                                let Some((curr_idx, _)) = repeats.last() else {
                                    return Err(dcx.struct_span_err(sp.entire(), "invalid syntax"));
                                };
                                match &named_matches[*curr_idx] {
                                    // FIXME(c410-f3r) Nested repetitions are unimplemented
                                    MatchedSeq(_) => unimplemented!(),
                                    MatchedSingle(pnr) => {
                                        extract_symbol_from_pnr(dcx, pnr, ident.span)?
                                    }
                                }
                            }
                            NamedMatch::MatchedSingle(pnr) => {
                                extract_symbol_from_pnr(dcx, pnr, ident.span)?
                            }
                        }
                    }
                };
                concatenated.push_str(symbol.as_str());
            }
            // NFC-normalize so the result compares equal to the same identifier
            // written directly in source code.
            let symbol = nfc_normalize(&concatenated);
            let concatenated_span = visited_span();
            if !rustc_lexer::is_ident(symbol.as_str()) {
                return Err(dcx.struct_span_err(
                    concatenated_span,
                    "`${concat(..)}` is not generating a valid identifier",
                ));
            }
            symbol_gallery.insert(symbol, concatenated_span);
            // The current implementation marks the span as coming from the macro regardless of
            // contexts of the concatenated identifiers but this behavior may change in the
            // future.
            result.push(TokenTree::Token(
                Token::from_ast_ident(Ident::new(symbol, concatenated_span)),
                Spacing::Alone,
            ));
        }
        MetaVarExpr::Count(original_ident, depth) => {
            let matched = matched_from_ident(dcx, original_ident, interp)?;
            let count = count_repetitions(dcx, depth, matched, repeats, sp)?;
            let tt = TokenTree::token_alone(
                TokenKind::lit(token::Integer, sym::integer(count), None),
                visited_span(),
            );
            result.push(tt);
        }
        MetaVarExpr::Ignore(original_ident) => {
            // Used to ensure that `original_ident` is present in the LHS
            let _ = matched_from_ident(dcx, original_ident, interp)?;
        }
        MetaVarExpr::Index(depth) => match repeats.iter().nth_back(depth) {
            Some((index, _)) => {
                result.push(TokenTree::token_alone(
                    TokenKind::lit(token::Integer, sym::integer(*index), None),
                    visited_span(),
                ));
            }
            None => return Err(out_of_bounds_err(dcx, repeats.len(), sp.entire(), "index")),
        },
        MetaVarExpr::Len(depth) => match repeats.iter().nth_back(depth) {
            Some((_, length)) => {
                result.push(TokenTree::token_alone(
                    TokenKind::lit(token::Integer, sym::integer(*length), None),
                    visited_span(),
                ));
            }
            None => return Err(out_of_bounds_err(dcx, repeats.len(), sp.entire(), "len")),
        },
    }
    Ok(())
}
/// Extracts a metavariable symbol that can be an identifier, a token tree or a literal.
fn extract_symbol_from_pnr<'a>(
dcx: DiagCtxtHandle<'a>,

View file

@ -52,10 +52,9 @@ pub struct Compiler {
pub(crate) fn parse_cfg(dcx: DiagCtxtHandle<'_>, cfgs: Vec<String>) -> Cfg {
cfgs.into_iter()
.map(|s| {
let psess = ParseSess::with_silent_emitter(
let psess = ParseSess::with_fatal_emitter(
vec![crate::DEFAULT_LOCALE_RESOURCE, rustc_parse::DEFAULT_LOCALE_RESOURCE],
format!("this error occurred on the command line: `--cfg={s}`"),
true,
);
let filename = FileName::cfg_spec_source_code(&s);
@ -116,10 +115,9 @@ pub(crate) fn parse_check_cfg(dcx: DiagCtxtHandle<'_>, specs: Vec<String>) -> Ch
let mut check_cfg = CheckCfg { exhaustive_names, exhaustive_values, ..CheckCfg::default() };
for s in specs {
let psess = ParseSess::with_silent_emitter(
let psess = ParseSess::with_fatal_emitter(
vec![crate::DEFAULT_LOCALE_RESOURCE, rustc_parse::DEFAULT_LOCALE_RESOURCE],
format!("this error occurred on the command line: `--check-cfg={s}`"),
true,
);
let filename = FileName::cfg_spec_source_code(&s);

View file

@ -14,6 +14,7 @@ use rustc_ast::tokenstream::{DelimSpacing, DelimSpan, Spacing, TokenStream, Toke
use rustc_ast::{self as ast, PatKind, visit};
use rustc_ast_pretty::pprust::item_to_string;
use rustc_errors::emitter::{HumanEmitter, OutputTheme};
use rustc_errors::translation::Translator;
use rustc_errors::{DiagCtxt, MultiSpan, PResult};
use rustc_session::parse::ParseSess;
use rustc_span::source_map::{FilePathMapping, SourceMap};
@ -41,9 +42,8 @@ fn string_to_parser(psess: &ParseSess, source_str: String) -> Parser<'_> {
fn create_test_handler(theme: OutputTheme) -> (DiagCtxt, Arc<SourceMap>, Arc<Mutex<Vec<u8>>>) {
let output = Arc::new(Mutex::new(Vec::new()));
let source_map = Arc::new(SourceMap::new(FilePathMapping::empty()));
let fallback_bundle =
rustc_errors::fallback_fluent_bundle(vec![crate::DEFAULT_LOCALE_RESOURCE], false);
let mut emitter = HumanEmitter::new(Box::new(Shared { data: output.clone() }), fallback_bundle)
let translator = Translator::with_fallback_bundle(vec![crate::DEFAULT_LOCALE_RESOURCE], false);
let mut emitter = HumanEmitter::new(Box::new(Shared { data: output.clone() }), translator)
.sm(Some(source_map.clone()))
.diagnostic_width(Some(140));
emitter = emitter.theme(theme);

View file

@ -292,6 +292,7 @@ fn emit_malformed_attribute(
| sym::align
| sym::deprecated
| sym::optimize
| sym::cold
) {
return;
}

View file

@ -149,10 +149,12 @@ impl<'tcx> CheckAttrVisitor<'tcx> {
}
Attribute::Parsed(AttributeKind::Repr(_)) => { /* handled below this loop and elsewhere */
}
Attribute::Parsed(AttributeKind::Cold(attr_span)) => {
self.check_cold(hir_id, *attr_span, span, target)
}
Attribute::Parsed(AttributeKind::Align { align, span: repr_span }) => {
self.check_align(span, target, *align, *repr_span)
}
Attribute::Parsed(
AttributeKind::BodyStability { .. }
| AttributeKind::ConstStabilityIndirect
@ -245,7 +247,6 @@ impl<'tcx> CheckAttrVisitor<'tcx> {
[sym::ffi_pure, ..] => self.check_ffi_pure(attr.span(), attrs, target),
[sym::ffi_const, ..] => self.check_ffi_const(attr.span(), target),
[sym::link_ordinal, ..] => self.check_link_ordinal(attr, span, target),
[sym::cold, ..] => self.check_cold(hir_id, attr, span, target),
[sym::link, ..] => self.check_link(hir_id, attr, span, target),
[sym::link_name, ..] => self.check_link_name(hir_id, attr, span, target),
[sym::link_section, ..] => self.check_link_section(hir_id, attr, span, target),
@ -651,8 +652,6 @@ impl<'tcx> CheckAttrVisitor<'tcx> {
sym::repr,
sym::align,
sym::rustc_std_internal_symbol,
// code generation
sym::cold,
// documentation
sym::doc,
];
@ -688,7 +687,8 @@ impl<'tcx> CheckAttrVisitor<'tcx> {
Attribute::Parsed(
AttributeKind::Deprecation { .. }
| AttributeKind::Repr { .. }
| AttributeKind::Align { .. },
| AttributeKind::Align { .. }
| AttributeKind::Cold(..),
) => {
continue;
}
@ -1640,7 +1640,7 @@ impl<'tcx> CheckAttrVisitor<'tcx> {
}
/// Checks if `#[cold]` is applied to a non-function.
fn check_cold(&self, hir_id: HirId, attr: &Attribute, span: Span, target: Target) {
fn check_cold(&self, hir_id: HirId, attr_span: Span, span: Span, target: Target) {
match target {
Target::Fn | Target::Method(..) | Target::ForeignFn | Target::Closure => {}
// FIXME(#80564): We permit struct fields, match arms and macro defs to have an
@ -1648,7 +1648,7 @@ impl<'tcx> CheckAttrVisitor<'tcx> {
// erroneously allowed it and some crates used it accidentally, to be compatible
// with crates depending on them, we can't throw an error here.
Target::Field | Target::Arm | Target::MacroDef => {
self.inline_attr_str_error_with_macro_def(hir_id, attr.span(), "cold");
self.inline_attr_str_error_with_macro_def(hir_id, attr_span, "cold");
}
_ => {
// FIXME: #[cold] was previously allowed on non-functions and some crates used
@ -1656,7 +1656,7 @@ impl<'tcx> CheckAttrVisitor<'tcx> {
self.tcx.emit_node_span_lint(
UNUSED_ATTRIBUTES,
hir_id,
attr.span(),
attr_span,
errors::Cold { span, on_crate: hir_id == CRATE_HIR_ID },
);
}

View file

@ -8,10 +8,11 @@ use rustc_ast::attr::AttrIdGenerator;
use rustc_ast::node_id::NodeId;
use rustc_data_structures::fx::{FxHashMap, FxIndexMap, FxIndexSet};
use rustc_data_structures::sync::{AppendOnlyVec, Lock};
use rustc_errors::emitter::{HumanEmitter, SilentEmitter, stderr_destination};
use rustc_errors::emitter::{FatalOnlyEmitter, HumanEmitter, stderr_destination};
use rustc_errors::translation::Translator;
use rustc_errors::{
ColorConfig, Diag, DiagCtxt, DiagCtxtHandle, DiagMessage, EmissionGuarantee, MultiSpan,
StashKey, fallback_fluent_bundle,
StashKey,
};
use rustc_feature::{GateIssue, UnstableFeatures, find_feature_issue};
use rustc_span::edition::Edition;
@ -242,10 +243,10 @@ pub struct ParseSess {
impl ParseSess {
/// Used for testing.
pub fn new(locale_resources: Vec<&'static str>) -> Self {
let fallback_bundle = fallback_fluent_bundle(locale_resources, false);
let translator = Translator::with_fallback_bundle(locale_resources, false);
let sm = Arc::new(SourceMap::new(FilePathMapping::empty()));
let emitter = Box::new(
HumanEmitter::new(stderr_destination(ColorConfig::Auto), fallback_bundle)
HumanEmitter::new(stderr_destination(ColorConfig::Auto), translator)
.sm(Some(Arc::clone(&sm))),
);
let dcx = DiagCtxt::new(emitter);
@ -274,19 +275,14 @@ impl ParseSess {
}
}
pub fn with_silent_emitter(
locale_resources: Vec<&'static str>,
fatal_note: String,
emit_fatal_diagnostic: bool,
) -> Self {
let fallback_bundle = fallback_fluent_bundle(locale_resources, false);
pub fn with_fatal_emitter(locale_resources: Vec<&'static str>, fatal_note: String) -> Self {
let translator = Translator::with_fallback_bundle(locale_resources, false);
let sm = Arc::new(SourceMap::new(FilePathMapping::empty()));
let fatal_emitter =
Box::new(HumanEmitter::new(stderr_destination(ColorConfig::Auto), fallback_bundle));
let dcx = DiagCtxt::new(Box::new(SilentEmitter {
Box::new(HumanEmitter::new(stderr_destination(ColorConfig::Auto), translator));
let dcx = DiagCtxt::new(Box::new(FatalOnlyEmitter {
fatal_emitter,
fatal_note: Some(fatal_note),
emit_fatal_diagnostic,
}))
.disable_warnings();
ParseSess::with_dcx(dcx, sm)

View file

@ -19,9 +19,10 @@ use rustc_errors::emitter::{
};
use rustc_errors::json::JsonEmitter;
use rustc_errors::timings::TimingSectionHandler;
use rustc_errors::translation::Translator;
use rustc_errors::{
Diag, DiagCtxt, DiagCtxtHandle, DiagMessage, Diagnostic, ErrorGuaranteed, FatalAbort,
FluentBundle, LazyFallbackBundle, TerminalUrl, fallback_fluent_bundle,
TerminalUrl, fallback_fluent_bundle,
};
use rustc_macros::HashStable_Generic;
pub use rustc_span::def_id::StableCrateId;
@ -948,8 +949,7 @@ impl Session {
fn default_emitter(
sopts: &config::Options,
source_map: Arc<SourceMap>,
bundle: Option<Arc<FluentBundle>>,
fallback_bundle: LazyFallbackBundle,
translator: Translator,
) -> Box<DynEmitter> {
let macro_backtrace = sopts.unstable_opts.macro_backtrace;
let track_diagnostics = sopts.unstable_opts.track_diagnostics;
@ -974,17 +974,11 @@ fn default_emitter(
let short = kind.short();
if let HumanReadableErrorType::AnnotateSnippet = kind {
let emitter = AnnotateSnippetEmitter::new(
source_map,
bundle,
fallback_bundle,
short,
macro_backtrace,
);
let emitter =
AnnotateSnippetEmitter::new(source_map, translator, short, macro_backtrace);
Box::new(emitter.ui_testing(sopts.unstable_opts.ui_testing))
} else {
let emitter = HumanEmitter::new(stderr_destination(color_config), fallback_bundle)
.fluent_bundle(bundle)
let emitter = HumanEmitter::new(stderr_destination(color_config), translator)
.sm(source_map)
.short_message(short)
.diagnostic_width(sopts.diagnostic_width)
@ -1006,12 +1000,11 @@ fn default_emitter(
JsonEmitter::new(
Box::new(io::BufWriter::new(io::stderr())),
source_map,
fallback_bundle,
translator,
pretty,
json_rendered,
color_config,
)
.fluent_bundle(bundle)
.ui_testing(sopts.unstable_opts.ui_testing)
.ignored_directories_in_source_blocks(
sopts.unstable_opts.ignore_directory_in_diagnostics_source_blocks.clone(),
@ -1030,7 +1023,7 @@ fn default_emitter(
pub fn build_session(
sopts: config::Options,
io: CompilerIO,
bundle: Option<Arc<rustc_errors::FluentBundle>>,
fluent_bundle: Option<Arc<rustc_errors::FluentBundle>>,
registry: rustc_errors::registry::Registry,
fluent_resources: Vec<&'static str>,
driver_lint_caps: FxHashMap<lint::LintId, lint::Level>,
@ -1052,12 +1045,15 @@ pub fn build_session(
let cap_lints_allow = sopts.lint_cap.is_some_and(|cap| cap == lint::Allow);
let can_emit_warnings = !(warnings_allow || cap_lints_allow);
let fallback_bundle = fallback_fluent_bundle(
fluent_resources,
sopts.unstable_opts.translate_directionality_markers,
);
let translator = Translator {
fluent_bundle,
fallback_fluent_bundle: fallback_fluent_bundle(
fluent_resources,
sopts.unstable_opts.translate_directionality_markers,
),
};
let source_map = rustc_span::source_map::get_source_map().unwrap();
let emitter = default_emitter(&sopts, Arc::clone(&source_map), bundle, fallback_bundle);
let emitter = default_emitter(&sopts, Arc::clone(&source_map), translator);
let mut dcx = DiagCtxt::new(emitter)
.with_flags(sopts.unstable_opts.dcx_flags(can_emit_warnings))
@ -1500,13 +1496,13 @@ impl EarlyDiagCtxt {
fn mk_emitter(output: ErrorOutputType) -> Box<DynEmitter> {
// FIXME(#100717): early errors aren't translated at the moment, so this is fine, but it will
// need to reference every crate that might emit an early error for translation to work.
let fallback_bundle =
fallback_fluent_bundle(vec![rustc_errors::DEFAULT_LOCALE_RESOURCE], false);
let translator =
Translator::with_fallback_bundle(vec![rustc_errors::DEFAULT_LOCALE_RESOURCE], false);
let emitter: Box<DynEmitter> = match output {
config::ErrorOutputType::HumanReadable { kind, color_config } => {
let short = kind.short();
Box::new(
HumanEmitter::new(stderr_destination(color_config), fallback_bundle)
HumanEmitter::new(stderr_destination(color_config), translator)
.theme(if let HumanReadableErrorType::Unicode = kind {
OutputTheme::Unicode
} else {
@ -1519,7 +1515,7 @@ fn mk_emitter(output: ErrorOutputType) -> Box<DynEmitter> {
Box::new(JsonEmitter::new(
Box::new(io::BufWriter::new(io::stderr())),
Some(Arc::new(SourceMap::new(FilePathMapping::empty()))),
fallback_bundle,
translator,
pretty,
json_rendered,
color_config,

View file

@ -2764,6 +2764,89 @@ impl<T> [T] {
None
}
/// Returns a subslice with the optional prefix removed.
///
/// If the slice starts with `prefix`, returns the subslice after the prefix. If `prefix`
/// is empty or the slice does not start with `prefix`, simply returns the original slice.
/// If `prefix` is equal to the original slice, returns an empty slice.
///
/// # Examples
///
/// ```
/// #![feature(trim_prefix_suffix)]
///
/// let v = &[10, 40, 30];
///
/// // Prefix present - removes it
/// assert_eq!(v.trim_prefix(&[10]), &[40, 30][..]);
/// assert_eq!(v.trim_prefix(&[10, 40]), &[30][..]);
/// assert_eq!(v.trim_prefix(&[10, 40, 30]), &[][..]);
///
/// // Prefix absent - returns original slice
/// assert_eq!(v.trim_prefix(&[50]), &[10, 40, 30][..]);
/// assert_eq!(v.trim_prefix(&[10, 50]), &[10, 40, 30][..]);
///
/// let prefix: &str = "he";
/// assert_eq!(b"hello".trim_prefix(prefix.as_bytes()), b"llo".as_ref());
/// ```
#[must_use = "returns the subslice without modifying the original"]
#[unstable(feature = "trim_prefix_suffix", issue = "142312")]
pub fn trim_prefix<P: SlicePattern<Item = T> + ?Sized>(&self, prefix: &P) -> &[T]
where
    T: PartialEq,
{
    // This function will need rewriting if and when SlicePattern becomes more sophisticated.
    let prefix = prefix.as_slice();
    // `starts_with` already handles prefixes longer than `self` (it returns
    // `false`), and an empty prefix trivially matches, leaving `self` intact.
    if self.starts_with(prefix) { &self[prefix.len()..] } else { self }
}
/// Returns a subslice with the optional suffix removed.
///
/// If the slice ends with `suffix`, returns the subslice before the suffix. If `suffix`
/// is empty or the slice does not end with `suffix`, simply returns the original slice.
/// If `suffix` is equal to the original slice, returns an empty slice.
///
/// # Examples
///
/// ```
/// #![feature(trim_prefix_suffix)]
///
/// let v = &[10, 40, 30];
///
/// // Suffix present - removes it
/// assert_eq!(v.trim_suffix(&[30]), &[10, 40][..]);
/// assert_eq!(v.trim_suffix(&[40, 30]), &[10][..]);
/// assert_eq!(v.trim_suffix(&[10, 40, 30]), &[][..]);
///
/// // Suffix absent - returns original slice
/// assert_eq!(v.trim_suffix(&[50]), &[10, 40, 30][..]);
/// assert_eq!(v.trim_suffix(&[50, 30]), &[10, 40, 30][..]);
/// ```
#[must_use = "returns the subslice without modifying the original"]
#[unstable(feature = "trim_prefix_suffix", issue = "142312")]
pub fn trim_suffix<P: SlicePattern<Item = T> + ?Sized>(&self, suffix: &P) -> &[T]
where
    T: PartialEq,
{
    // This function will need rewriting if and when SlicePattern becomes more sophisticated.
    let suffix = suffix.as_slice();
    // When `ends_with` succeeds, `suffix.len() <= self.len()` is guaranteed,
    // so the subtraction below cannot underflow.
    if self.ends_with(suffix) { &self[..self.len() - suffix.len()] } else { self }
}
/// Binary searches this slice for a given element.
/// If the slice is not sorted, the returned result is unspecified and
/// meaningless.

View file

@ -2426,6 +2426,83 @@ impl str {
suffix.strip_suffix_of(self)
}
/// Returns a string slice with the optional prefix removed.
///
/// If the string starts with the pattern `prefix`, returns the substring after the prefix.
/// Unlike [`strip_prefix`], this method always returns `&str` for easy method chaining,
/// instead of returning [`Option<&str>`].
///
/// If the string does not start with `prefix`, returns the original string unchanged.
///
/// The [pattern] can be a `&str`, [`char`], a slice of [`char`]s, or a
/// function or closure that determines if a character matches.
///
/// [`char`]: prim@char
/// [pattern]: self::pattern
/// [`strip_prefix`]: Self::strip_prefix
///
/// # Examples
///
/// ```
/// #![feature(trim_prefix_suffix)]
///
/// // Prefix present - removes it
/// assert_eq!("foo:bar".trim_prefix("foo:"), "bar");
/// assert_eq!("foofoo".trim_prefix("foo"), "foo");
///
/// // Prefix absent - returns original string
/// assert_eq!("foo:bar".trim_prefix("bar"), "foo:bar");
///
/// // Method chaining example
/// assert_eq!("<https://example.com/>".trim_prefix('<').trim_suffix('>'), "https://example.com/");
/// ```
#[must_use = "this returns the remaining substring as a new slice, \
              without modifying the original"]
#[unstable(feature = "trim_prefix_suffix", issue = "142312")]
pub fn trim_prefix<P: Pattern>(&self, prefix: P) -> &str {
    // Fall back to the untouched string when the prefix does not match.
    match prefix.strip_prefix_of(self) {
        Some(stripped) => stripped,
        None => self,
    }
}
/// Returns a string slice with the optional suffix removed.
///
/// If the string ends with the pattern `suffix`, returns the substring before the suffix.
/// Unlike [`strip_suffix`], this method always returns `&str` for easy method chaining,
/// instead of returning [`Option<&str>`].
///
/// If the string does not end with `suffix`, returns the original string unchanged.
///
/// The [pattern] can be a `&str`, [`char`], a slice of [`char`]s, or a
/// function or closure that determines if a character matches.
///
/// [`char`]: prim@char
/// [pattern]: self::pattern
/// [`strip_suffix`]: Self::strip_suffix
///
/// # Examples
///
/// ```
/// #![feature(trim_prefix_suffix)]
///
/// // Suffix present - removes it
/// assert_eq!("bar:foo".trim_suffix(":foo"), "bar");
/// assert_eq!("foofoo".trim_suffix("foo"), "foo");
///
/// // Suffix absent - returns original string
/// assert_eq!("bar:foo".trim_suffix("bar"), "bar:foo");
///
/// // Method chaining example
/// assert_eq!("<https://example.com/>".trim_prefix('<').trim_suffix('>'), "https://example.com/");
/// ```
#[must_use = "this returns the remaining substring as a new slice, \
              without modifying the original"]
#[unstable(feature = "trim_prefix_suffix", issue = "142312")]
pub fn trim_suffix<P: Pattern>(&self, suffix: P) -> &str
where
    for<'a> P::Searcher<'a>: ReverseSearcher<'a>,
{
    // Fall back to the untouched string when the suffix does not match.
    match suffix.strip_suffix_of(self) {
        Some(stripped) => stripped,
        None => self,
    }
}
/// Returns a string slice with all suffixes that match a pattern
/// repeatedly removed.
///

View file

@ -149,15 +149,12 @@ pub(crate) fn new_dcx(
diagnostic_width: Option<usize>,
unstable_opts: &UnstableOptions,
) -> rustc_errors::DiagCtxt {
let fallback_bundle = rustc_errors::fallback_fluent_bundle(
rustc_driver::DEFAULT_LOCALE_RESOURCES.to_vec(),
false,
);
let translator = rustc_driver::default_translator();
let emitter: Box<DynEmitter> = match error_format {
ErrorOutputType::HumanReadable { kind, color_config } => {
let short = kind.short();
Box::new(
HumanEmitter::new(stderr_destination(color_config), fallback_bundle)
HumanEmitter::new(stderr_destination(color_config), translator)
.sm(source_map.map(|sm| sm as _))
.short_message(short)
.diagnostic_width(diagnostic_width)
@ -178,7 +175,7 @@ pub(crate) fn new_dcx(
JsonEmitter::new(
Box::new(io::BufWriter::new(io::stderr())),
Some(source_map),
fallback_bundle,
translator,
pretty,
json_rendered,
color_config,

View file

@ -456,16 +456,13 @@ fn parse_source(
let filename = FileName::anon_source_code(&wrapped_source);
let sm = Arc::new(SourceMap::new(FilePathMapping::empty()));
let fallback_bundle = rustc_errors::fallback_fluent_bundle(
rustc_driver::DEFAULT_LOCALE_RESOURCES.to_vec(),
false,
);
let translator = rustc_driver::default_translator();
info.supports_color =
HumanEmitter::new(stderr_destination(ColorConfig::Auto), fallback_bundle.clone())
HumanEmitter::new(stderr_destination(ColorConfig::Auto), translator.clone())
.supports_color();
// Any errors in parsing should also appear when the doctest is compiled for real, so just
// send all the errors that the parser emits directly into a `Sink` instead of stderr.
let emitter = HumanEmitter::new(Box::new(io::sink()), fallback_bundle);
let emitter = HumanEmitter::new(Box::new(io::sink()), translator);
// FIXME(misdreavus): pass `-Z treat-err-as-bug` to the doctest parser
let dcx = DiagCtxt::new(Box::new(emitter)).disable_warnings();

View file

@ -68,8 +68,6 @@ pub(crate) trait FormatRenderer<'tcx>: Sized {
/// Post processing hook for cleanup and dumping output to files.
fn after_krate(self) -> Result<(), Error>;
fn cache(&self) -> &Cache;
}
fn run_format_inner<'tcx, T: FormatRenderer<'tcx>>(

View file

@ -875,8 +875,4 @@ impl<'tcx> FormatRenderer<'tcx> for Context<'tcx> {
Ok(())
}
fn cache(&self) -> &Cache {
&self.shared.cache
}
}

View file

@ -5394,43 +5394,6 @@ function updateCrate(ev) {
search(true);
}
// Entry point for the rustdoc search subsystem: stores the raw search index
// and constructs the `DocSearch` driver. In a browser it also wires up the
// DOM event handlers and immediately runs any search requested via the URL
// query string; under Node it only populates `exports` so tests can call
// into the search code.
// @ts-expect-error
function initSearch(searchIndx) {
    rawSearchIndex = searchIndx;
    if (typeof window !== "undefined") {
        // @ts-expect-error
        docSearch = new DocSearch(rawSearchIndex, ROOT_PATH, searchState);
        registerSearchEvents();
        // If there's a search term in the URL, execute the search now.
        if (window.searchState.getQueryStringParams().search) {
            search();
        }
    } else if (typeof exports !== "undefined") {
        // @ts-expect-error
        docSearch = new DocSearch(rawSearchIndex, ROOT_PATH, searchState);
        exports.docSearch = docSearch;
        exports.parseQuery = DocSearch.parseQuery;
    }
}
// Export for consumers that `require` this file (e.g. the search test harness).
if (typeof exports !== "undefined") {
    exports.initSearch = initSearch;
}
if (typeof window !== "undefined") {
    // @ts-expect-error
    window.initSearch = initSearch;
    // @ts-expect-error
    if (window.searchIndex !== undefined) {
        // @ts-expect-error
        initSearch(window.searchIndex);
    }
} else {
    // Running in Node, not a browser. Run initSearch just to produce the
    // exports.
    initSearch(new Map());
}
// Parts of this code are based on Lucene, which is licensed under the
// Apache/2.0 license.
// More information found here:
@ -5909,3 +5872,44 @@ Lev1TParametricDescription.prototype.toStates3 = /*3 bits per value */ new Int32
Lev1TParametricDescription.prototype.offsetIncrs3 = /*2 bits per value */ new Int32Array([
0xa0fc0000,0x5555ba08,0x55555555,
]);
// ====================
// WARNING: Nothing should be added below this comment: we need the `initSearch` function to
// be called ONLY when the whole file has been parsed and loaded.
// Entry point for the rustdoc search subsystem: stores the raw search index
// and constructs the `DocSearch` driver. In a browser it also wires up the
// DOM event handlers and immediately runs any search requested via the URL
// query string; under Node it only populates `exports` so tests can call
// into the search code.
// @ts-expect-error
function initSearch(searchIndx) {
    rawSearchIndex = searchIndx;
    if (typeof window !== "undefined") {
        // @ts-expect-error
        docSearch = new DocSearch(rawSearchIndex, ROOT_PATH, searchState);
        registerSearchEvents();
        // If there's a search term in the URL, execute the search now.
        if (window.searchState.getQueryStringParams().search) {
            search();
        }
    } else if (typeof exports !== "undefined") {
        // @ts-expect-error
        docSearch = new DocSearch(rawSearchIndex, ROOT_PATH, searchState);
        exports.docSearch = docSearch;
        exports.parseQuery = DocSearch.parseQuery;
    }
}
// Export for consumers that `require` this file (e.g. the search test harness).
if (typeof exports !== "undefined") {
    exports.initSearch = initSearch;
}
if (typeof window !== "undefined") {
    // @ts-expect-error
    window.initSearch = initSearch;
    // @ts-expect-error
    if (window.searchIndex !== undefined) {
        // @ts-expect-error
        initSearch(window.searchIndex);
    }
} else {
    // Running in Node, not a browser. Run initSearch just to produce the
    // exports.
    initSearch(new Map());
}

View file

@ -16,7 +16,6 @@ use rustdoc_json_types::*;
use thin_vec::ThinVec;
use crate::clean::{self, ItemId};
use crate::formats::FormatRenderer;
use crate::formats::item_type::ItemType;
use crate::json::JsonRenderer;
use crate::passes::collect_intra_doc_links::UrlFragment;
@ -41,7 +40,7 @@ impl JsonRenderer<'_> {
})
.collect();
let docs = item.opt_doc_value();
let attrs = item.attributes_and_repr(self.tcx, self.cache(), true);
let attrs = item.attributes_and_repr(self.tcx, &self.cache, true);
let span = item.span(self.tcx);
let visibility = item.visibility(self.tcx);
let clean::ItemInner { name, item_id, .. } = *item.inner;

View file

@ -376,8 +376,4 @@ impl<'tcx> FormatRenderer<'tcx> for JsonRenderer<'tcx> {
self.serialize_and_write(output_crate, BufWriter::new(stdout().lock()), "<stdout>")
}
}
fn cache(&self) -> &Cache {
&self.cache
}
}

View file

@ -6,8 +6,8 @@ use std::sync::Arc;
use rustc_data_structures::sync::Lock;
use rustc_errors::emitter::Emitter;
use rustc_errors::registry::Registry;
use rustc_errors::translation::{Translate, to_fluent_args};
use rustc_errors::{Applicability, DiagCtxt, DiagInner, LazyFallbackBundle};
use rustc_errors::translation::{Translator, to_fluent_args};
use rustc_errors::{Applicability, DiagCtxt, DiagInner};
use rustc_parse::{source_str_to_stream, unwrap_or_emit_fatal};
use rustc_resolve::rustdoc::source_span_for_markdown_range;
use rustc_session::parse::ParseSess;
@ -36,11 +36,8 @@ fn check_rust_syntax(
code_block: RustCodeBlock,
) {
let buffer = Arc::new(Lock::new(Buffer::default()));
let fallback_bundle = rustc_errors::fallback_fluent_bundle(
rustc_driver::DEFAULT_LOCALE_RESOURCES.to_vec(),
false,
);
let emitter = BufferEmitter { buffer: Arc::clone(&buffer), fallback_bundle };
let translator = rustc_driver::default_translator();
let emitter = BufferEmitter { buffer: Arc::clone(&buffer), translator };
let sm = Arc::new(SourceMap::new(FilePathMapping::empty()));
let dcx = DiagCtxt::new(Box::new(emitter)).disable_warnings();
@ -149,17 +146,7 @@ struct Buffer {
struct BufferEmitter {
buffer: Arc<Lock<Buffer>>,
fallback_bundle: LazyFallbackBundle,
}
impl Translate for BufferEmitter {
fn fluent_bundle(&self) -> Option<&rustc_errors::FluentBundle> {
None
}
fn fallback_fluent_bundle(&self) -> &rustc_errors::FluentBundle {
&self.fallback_bundle
}
translator: Translator,
}
impl Emitter for BufferEmitter {
@ -168,6 +155,7 @@ impl Emitter for BufferEmitter {
let fluent_args = to_fluent_args(diag.args.iter());
let translated_main_message = self
.translator
.translate_message(&diag.messages[0].0, &fluent_args)
.unwrap_or_else(|e| panic!("{e}"));
@ -180,4 +168,8 @@ impl Emitter for BufferEmitter {
fn source_map(&self) -> Option<&SourceMap> {
None
}
fn translator(&self) -> &Translator {
&self.translator
}
}

View file

@ -37,8 +37,8 @@ pub type FxHashMap<K, V> = HashMap<K, V>; // re-export for use in src/librustdoc
// will instead cause conflicts. See #94591 for more. (This paragraph and the "Latest feature" line
// are deliberately not in a doc comment, because they need not be in public docs.)
//
// Latest feature: Pretty printing of optimize attributes changed
pub const FORMAT_VERSION: u32 = 49;
// Latest feature: Pretty printing of cold attributes changed
pub const FORMAT_VERSION: u32 = 50;
/// The root of the emitted JSON blob.
///

View file

@ -42,9 +42,8 @@ pub fn check(
let mut test_attr_spans = vec![];
let filename = FileName::anon_source_code(&code);
let fallback_bundle =
rustc_errors::fallback_fluent_bundle(rustc_driver::DEFAULT_LOCALE_RESOURCES.to_vec(), false);
let emitter = HumanEmitter::new(Box::new(io::sink()), fallback_bundle);
let translator = rustc_driver::default_translator();
let emitter = HumanEmitter::new(Box::new(io::sink()), translator);
let dcx = DiagCtxt::new(Box::new(emitter)).disable_warnings();
#[expect(clippy::arc_with_non_send_sync)] // `Arc` is expected by with_dcx
let sm = Arc::new(SourceMap::new(FilePathMapping::empty()));

View file

@ -5,7 +5,7 @@ use std::sync::atomic::{AtomicBool, Ordering};
use rustc_data_structures::sync::IntoDynSyncSend;
use rustc_errors::emitter::{DynEmitter, Emitter, HumanEmitter, SilentEmitter, stderr_destination};
use rustc_errors::registry::Registry;
use rustc_errors::translation::Translate;
use rustc_errors::translation::Translator;
use rustc_errors::{ColorConfig, Diag, DiagCtxt, DiagInner, Level as DiagnosticLevel};
use rustc_session::parse::ParseSess as RawParseSess;
use rustc_span::{
@ -47,16 +47,6 @@ impl SilentOnIgnoredFilesEmitter {
}
}
impl Translate for SilentOnIgnoredFilesEmitter {
fn fluent_bundle(&self) -> Option<&rustc_errors::FluentBundle> {
self.emitter.fluent_bundle()
}
fn fallback_fluent_bundle(&self) -> &rustc_errors::FluentBundle {
self.emitter.fallback_fluent_bundle()
}
}
impl Emitter for SilentOnIgnoredFilesEmitter {
fn source_map(&self) -> Option<&SourceMap> {
None
@ -84,6 +74,10 @@ impl Emitter for SilentOnIgnoredFilesEmitter {
}
self.handle_non_ignoreable_error(diag, registry);
}
fn translator(&self) -> &Translator {
self.emitter.translator()
}
}
impl From<Color> for ColorConfig {
@ -110,23 +104,15 @@ fn default_dcx(
ColorConfig::Never
};
let fallback_bundle = rustc_errors::fallback_fluent_bundle(
rustc_driver::DEFAULT_LOCALE_RESOURCES.to_vec(),
false,
);
let emitter = Box::new(
HumanEmitter::new(stderr_destination(emit_color), fallback_bundle)
.sm(Some(source_map.clone())),
);
let translator = rustc_driver::default_translator();
let emitter: Box<DynEmitter> = if !show_parse_errors {
Box::new(SilentEmitter {
fatal_emitter: emitter,
fatal_note: None,
emit_fatal_diagnostic: false,
})
let emitter: Box<DynEmitter> = if show_parse_errors {
Box::new(
HumanEmitter::new(stderr_destination(emit_color), translator)
.sm(Some(source_map.clone())),
)
} else {
emitter
Box::new(SilentEmitter { translator })
};
DiagCtxt::new(Box::new(SilentOnIgnoredFilesEmitter {
has_non_ignorable_parser_errors: false,
@ -205,7 +191,7 @@ impl ParseSess {
}
pub(crate) fn set_silent_emitter(&mut self) {
self.raw_psess.dcx().make_silent(None, false);
self.raw_psess.dcx().make_silent();
}
pub(crate) fn span_to_filename(&self, span: Span) -> FileName {
@ -335,16 +321,6 @@ mod tests {
num_emitted_errors: Arc<AtomicU32>,
}
impl Translate for TestEmitter {
fn fluent_bundle(&self) -> Option<&rustc_errors::FluentBundle> {
None
}
fn fallback_fluent_bundle(&self) -> &rustc_errors::FluentBundle {
panic!("test emitter attempted to translate a diagnostic");
}
}
impl Emitter for TestEmitter {
fn source_map(&self) -> Option<&SourceMap> {
None
@ -353,6 +329,10 @@ mod tests {
fn emit_diagnostic(&mut self, _diag: DiagInner, _registry: &Registry) {
self.num_emitted_errors.fetch_add(1, Ordering::Release);
}
fn translator(&self) -> &Translator {
panic!("test emitter attempted to translate a diagnostic");
}
}
fn build_diagnostic(level: DiagnosticLevel, span: Option<MultiSpan>) -> DiagInner {

View file

@ -0,0 +1,3 @@
#[cold = true]
//~^ ERROR malformed `cold` attribute input [E0565]
fn main() {}

View file

@ -0,0 +1,12 @@
error[E0565]: malformed `cold` attribute input
--> $DIR/expected-word.rs:1:1
|
LL | #[cold = true]
| ^^^^^^^------^
| | |
| | didn't expect any arguments here
| help: must be of the form: `#[cold]`
error: aborting due to 1 previous error
For more information about this error, try `rustc --explain E0565`.

View file

@ -379,14 +379,6 @@ warning: `#[proc_macro_derive]` only has an effect on functions
LL | #![proc_macro_derive()]
| ^^^^^^^^^^^^^^^^^^^^^^^
warning: attribute should be applied to a function definition
--> $DIR/issue-43106-gating-of-builtin-attrs.rs:62:1
|
LL | #![cold]
| ^^^^^^^^ cannot be applied to crates
|
= warning: this was previously accepted by the compiler but is being phased out; it will become a hard error in a future release!
warning: attribute should be applied to an `extern` block with non-Rust ABI
--> $DIR/issue-43106-gating-of-builtin-attrs.rs:64:1
|
@ -417,6 +409,14 @@ warning: `#[must_use]` has no effect when applied to a module
LL | #![must_use]
| ^^^^^^^^^^^^
warning: attribute should be applied to a function definition
--> $DIR/issue-43106-gating-of-builtin-attrs.rs:62:1
|
LL | #![cold]
| ^^^^^^^^ cannot be applied to crates
|
= warning: this was previously accepted by the compiler but is being phased out; it will become a hard error in a future release!
warning: `#[macro_use]` only has an effect on `extern crate` and modules
--> $DIR/issue-43106-gating-of-builtin-attrs.rs:176:5
|

View file

@ -38,7 +38,7 @@ LL | #[repr]
| ^^^^^^^
| |
| expected this to be a list
| help: must be of the form: `#[repr(C)]`
| help: must be of the form: `#[repr(C | Rust | align(...) | packed(...) | <integer type> | transparent)]`
error[E0539]: malformed `inline` attribute input
--> $DIR/issue-43988.rs:30:5
@ -64,7 +64,7 @@ LL | let _z = #[repr] 1;
| ^^^^^^^
| |
| expected this to be a list
| help: must be of the form: `#[repr(C)]`
| help: must be of the form: `#[repr(C | Rust | align(...) | packed(...) | <integer type> | transparent)]`
error[E0518]: attribute should be applied to function or closure
--> $DIR/issue-43988.rs:5:5

View file

@ -102,18 +102,6 @@ note: attribute also specified here
LL | #[automatically_derived]
| ^^^^^^^^^^^^^^^^^^^^^^^^
error: unused attribute
--> $DIR/unused-attr-duplicate.rs:77:1
|
LL | #[cold]
| ^^^^^^^ help: remove this attribute
|
note: attribute also specified here
--> $DIR/unused-attr-duplicate.rs:76:1
|
LL | #[cold]
| ^^^^^^^
error: unused attribute
--> $DIR/unused-attr-duplicate.rs:79:1
|
@ -289,5 +277,17 @@ LL | #[inline(always)]
| ^^^^^^^^^^^^^^^^^
= warning: this was previously accepted by the compiler but is being phased out; it will become a hard error in a future release!
error: unused attribute
--> $DIR/unused-attr-duplicate.rs:77:1
|
LL | #[cold]
| ^^^^^^^ help: remove this attribute
|
note: attribute also specified here
--> $DIR/unused-attr-duplicate.rs:76:1
|
LL | #[cold]
| ^^^^^^^
error: aborting due to 23 previous errors

View file

@ -5,7 +5,7 @@ LL | #[repr]
| ^^^^^^^
| |
| expected this to be a list
| help: must be of the form: `#[repr(C)]`
| help: must be of the form: `#[repr(C | Rust | align(...) | packed(...) | <integer type> | transparent)]`
error[E0539]: malformed `repr` attribute input
--> $DIR/repr.rs:4:1
@ -14,7 +14,7 @@ LL | #[repr = "B"]
| ^^^^^^^^^^^^^
| |
| expected this to be a list
| help: must be of the form: `#[repr(C)]`
| help: must be of the form: `#[repr(C | Rust | align(...) | packed(...) | <integer type> | transparent)]`
error[E0539]: malformed `repr` attribute input
--> $DIR/repr.rs:7:1
@ -23,7 +23,7 @@ LL | #[repr = "C"]
| ^^^^^^^^^^^^^
| |
| expected this to be a list
| help: must be of the form: `#[repr(C)]`
| help: must be of the form: `#[repr(C | Rust | align(...) | packed(...) | <integer type> | transparent)]`
error: aborting due to 3 previous errors

View file

@ -0,0 +1,22 @@
//! This test checks the one code path that does not go through
//! the regular CTFE memory access (as an optimization). We forgot
//! to duplicate the static item self-initialization check, allowing
//! reading from the uninitialized static memory before it was
//! initialized at the end of the static initializer.
//!
//! https://github.com/rust-lang/rust/issues/142532
use std::mem::MaybeUninit;
pub static X: (i32, MaybeUninit<i32>) = (1, foo(&X.0));
//~^ ERROR: encountered static that tried to initialize itself with itself
const fn foo(x: &i32) -> MaybeUninit<i32> {
let mut temp = MaybeUninit::<i32>::uninit();
unsafe {
std::ptr::copy(x, temp.as_mut_ptr(), 1);
}
temp
}
fn main() {}

View file

@ -0,0 +1,17 @@
error[E0080]: encountered static that tried to initialize itself with itself
--> $DIR/read_before_init.rs:11:45
|
LL | pub static X: (i32, MaybeUninit<i32>) = (1, foo(&X.0));
| ^^^^^^^^^ evaluation of `X` failed inside this call
|
note: inside `foo`
--> $DIR/read_before_init.rs:17:9
|
LL | std::ptr::copy(x, temp.as_mut_ptr(), 1);
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
note: inside `std::ptr::copy::<i32>`
--> $SRC_DIR/core/src/ptr/mod.rs:LL:COL
error: aborting due to 1 previous error
For more information about this error, try `rustc --explain E0080`.