Merge pull request #4846 from rust-lang/rustup-2026-02-04

Automatic Rustup
This commit is contained in:
Ralf Jung 2026-02-04 07:27:42 +00:00 committed by GitHub
commit 1f73d3f30b
No known key found for this signature in database
GPG key ID: B5690EEEBB952194
444 changed files with 15140 additions and 4256 deletions

View file

@ -3543,7 +3543,6 @@ dependencies = [
"rustc_ast_pretty",
"rustc_errors",
"rustc_feature",
"rustc_fluent_macro",
"rustc_hir",
"rustc_lexer",
"rustc_macros",
@ -3684,7 +3683,6 @@ dependencies = [
"rustc_macros",
"rustc_metadata",
"rustc_middle",
"rustc_query_system",
"rustc_serialize",
"rustc_session",
"rustc_span",
@ -3780,7 +3778,6 @@ dependencies = [
"rustc_ast_lowering",
"rustc_ast_passes",
"rustc_ast_pretty",
"rustc_attr_parsing",
"rustc_borrowck",
"rustc_builtin_macros",
"rustc_codegen_ssa",
@ -3789,13 +3786,11 @@ dependencies = [
"rustc_errors",
"rustc_expand",
"rustc_feature",
"rustc_fluent_macro",
"rustc_hir_analysis",
"rustc_hir_pretty",
"rustc_hir_typeck",
"rustc_incremental",
"rustc_index",
"rustc_infer",
"rustc_interface",
"rustc_lexer",
"rustc_lint",
@ -3812,7 +3807,6 @@ dependencies = [
"rustc_pattern_analysis",
"rustc_privacy",
"rustc_public",
"rustc_query_system",
"rustc_resolve",
"rustc_session",
"rustc_span",
@ -4093,7 +4087,6 @@ version = "0.0.0"
dependencies = [
"rustc_data_structures",
"rustc_errors",
"rustc_fluent_macro",
"rustc_hir",
"rustc_index",
"rustc_macros",
@ -4229,6 +4222,8 @@ dependencies = [
name = "rustc_macros"
version = "0.0.0"
dependencies = [
"fluent-bundle",
"fluent-syntax",
"proc-macro2",
"quote",
"syn 2.0.110",
@ -4336,11 +4331,11 @@ dependencies = [
"polonius-engine",
"regex",
"rustc_abi",
"rustc_ast",
"rustc_data_structures",
"rustc_errors",
"rustc_fluent_macro",
"rustc_graphviz",
"rustc_hir",
"rustc_index",
"rustc_macros",
"rustc_middle",
@ -4570,7 +4565,6 @@ dependencies = [
"rustc_data_structures",
"rustc_errors",
"rustc_feature",
"rustc_fluent_macro",
"rustc_hashes",
"rustc_hir",
"rustc_index",

View file

@ -354,7 +354,13 @@ fn make_attr_token_stream(
FrameData { open_delim_sp: Some((delim, span, spacing)), inner: vec![] },
));
} else if let Some(delim) = kind.close_delim() {
let frame_data = mem::replace(&mut stack_top, stack_rest.pop().unwrap());
// If there's no matching opening delimiter, the token stream is malformed,
// likely due to improper delimiter positions in the source code.
// It's not a delimiter mismatch, and the lexer cannot detect it, so we just ignore it here.
let Some(frame) = stack_rest.pop() else {
return AttrTokenStream::new(stack_top.inner);
};
let frame_data = mem::replace(&mut stack_top, frame);
let (open_delim, open_sp, open_spacing) = frame_data.open_delim_sp.unwrap();
assert!(
open_delim.eq_ignoring_invisible_origin(&delim),

View file

@ -10,7 +10,6 @@ rustc_ast = { path = "../rustc_ast" }
rustc_ast_pretty = { path = "../rustc_ast_pretty" }
rustc_errors = { path = "../rustc_errors" }
rustc_feature = { path = "../rustc_feature" }
rustc_fluent_macro = { path = "../rustc_fluent_macro" }
rustc_hir = { path = "../rustc_hir" }
rustc_lexer = { path = "../rustc_lexer" }
rustc_macros = { path = "../rustc_macros" }

View file

@ -1,246 +0,0 @@
attr_parsing_as_needed_compatibility =
linking modifier `as-needed` is only compatible with `dylib`, `framework` and `raw-dylib` linking kinds
attr_parsing_bundle_needs_static =
linking modifier `bundle` is only compatible with `static` linking kind
attr_parsing_cfg_attr_bad_delim = wrong `cfg_attr` delimiters
attr_parsing_deprecated_item_suggestion =
suggestions on deprecated items are unstable
.help = add `#![feature(deprecated_suggestion)]` to the crate root
.note = see #94785 for more details
attr_parsing_doc_alias_bad_char =
{$char_} character isn't allowed in {$attr_str}
attr_parsing_doc_alias_empty =
{$attr_str} attribute cannot have empty value
attr_parsing_doc_alias_malformed =
doc alias attribute expects a string `#[doc(alias = "a")]` or a list of strings `#[doc(alias("a", "b"))]`
attr_parsing_doc_alias_start_end =
{$attr_str} cannot start or end with ' '
attr_parsing_doc_attr_not_crate_level =
`#![doc({$attr_name} = "...")]` isn't allowed as a crate-level attribute
attr_parsing_doc_attribute_not_attribute =
nonexistent builtin attribute `{$attribute}` used in `#[doc(attribute = "...")]`
.help = only existing builtin attributes are allowed in core/std
attr_parsing_doc_keyword_not_keyword =
nonexistent keyword `{$keyword}` used in `#[doc(keyword = "...")]`
.help = only existing keywords are allowed in core/std
attr_parsing_empty_confusables =
expected at least one confusable name
attr_parsing_empty_link_name =
link name must not be empty
.label = empty link name
attr_parsing_expected_single_version_literal =
expected single version literal
attr_parsing_expected_version_literal =
expected a version literal
attr_parsing_expects_feature_list =
`{$name}` expects a list of feature names
attr_parsing_expects_features =
`{$name}` expects feature names
attr_parsing_import_name_type_raw =
import name type can only be used with link kind `raw-dylib`
attr_parsing_import_name_type_x86 =
import name type is only supported on x86
attr_parsing_incompatible_wasm_link =
`wasm_import_module` is incompatible with other arguments in `#[link]` attributes
attr_parsing_incorrect_repr_format_align_one_arg =
incorrect `repr(align)` attribute format: `align` takes exactly one argument in parentheses
attr_parsing_incorrect_repr_format_expect_literal_integer =
incorrect `repr(align)` attribute format: `align` expects a literal integer as argument
attr_parsing_incorrect_repr_format_generic =
incorrect `repr({$repr_arg})` attribute format
.suggestion = use parentheses instead
attr_parsing_incorrect_repr_format_packed_expect_integer =
incorrect `repr(packed)` attribute format: `packed` expects a literal integer as argument
attr_parsing_incorrect_repr_format_packed_one_or_zero_arg =
incorrect `repr(packed)` attribute format: `packed` takes exactly one parenthesized argument, or no parentheses at all
attr_parsing_invalid_alignment_value =
invalid alignment value: {$error_part}
attr_parsing_invalid_attr_unsafe = `{$name}` is not an unsafe attribute
.label = this is not an unsafe attribute
.suggestion = remove the `unsafe(...)`
.note = extraneous unsafe is not allowed in attributes
attr_parsing_invalid_issue_string =
`issue` must be a non-zero numeric string or "none"
.must_not_be_zero = `issue` must not be "0", use "none" instead
.empty = cannot parse integer from empty string
.invalid_digit = invalid digit found in string
.pos_overflow = number too large to fit in target type
.neg_overflow = number too small to fit in target type
attr_parsing_invalid_link_modifier =
invalid linking modifier syntax, expected '+' or '-' prefix before one of: bundle, verbatim, whole-archive, as-needed
attr_parsing_invalid_meta_item = expected a literal (`1u8`, `1.0f32`, `"string"`, etc.) here, found {$descr}
.remove_neg_sugg = negative numbers are not literals, try removing the `-` sign
.quote_ident_sugg = surround the identifier with quotation marks to make it into a string literal
.label = {$descr}s are not allowed here
attr_parsing_invalid_predicate =
invalid predicate `{$predicate}`
attr_parsing_invalid_repr_align_need_arg =
invalid `repr(align)` attribute: `align` needs an argument
.suggestion = supply an argument here
attr_parsing_invalid_repr_generic =
invalid `repr({$repr_arg})` attribute: {$error_part}
attr_parsing_invalid_repr_hint_no_paren =
invalid representation hint: `{$name}` does not take a parenthesized argument list
attr_parsing_invalid_repr_hint_no_value =
invalid representation hint: `{$name}` does not take a value
attr_parsing_invalid_since =
'since' must be a Rust version number, such as "1.31.0"
attr_parsing_invalid_target = `#[{$name}]` attribute cannot be used on {$target}
.help = `#[{$name}]` can {$only}be applied to {$applied}
.suggestion = remove the attribute
attr_parsing_limit_invalid =
`limit` must be a non-negative integer
.label = {$error_str}
attr_parsing_link_arg_unstable =
link kind `link-arg` is unstable
attr_parsing_link_cfg_unstable =
link cfg is unstable
attr_parsing_link_framework_apple =
link kind `framework` is only supported on Apple targets
attr_parsing_link_ordinal_out_of_range = ordinal value in `link_ordinal` is too large: `{$ordinal}`
.note = the value may not exceed `u16::MAX`
attr_parsing_link_requires_name =
`#[link]` attribute requires a `name = "string"` argument
.label = missing `name` argument
attr_parsing_meta_bad_delim = wrong meta list delimiters
attr_parsing_meta_bad_delim_suggestion = the delimiters should be `(` and `)`
attr_parsing_missing_feature =
missing 'feature'
attr_parsing_missing_issue =
missing 'issue'
attr_parsing_missing_note =
missing 'note'
attr_parsing_missing_since =
missing 'since'
attr_parsing_multiple_modifiers =
multiple `{$modifier}` modifiers in a single `modifiers` argument
attr_parsing_multiple_stability_levels =
multiple stability levels
attr_parsing_naked_functions_incompatible_attribute =
attribute incompatible with `#[unsafe(naked)]`
.label = the `{$attr}` attribute is incompatible with `#[unsafe(naked)]`
.naked_attribute = function marked with `#[unsafe(naked)]` here
attr_parsing_non_ident_feature =
'feature' is not an identifier
attr_parsing_null_on_export = `export_name` may not contain null characters
attr_parsing_null_on_link_section = `link_section` may not contain null characters
attr_parsing_null_on_objc_class = `objc::class!` may not contain null characters
attr_parsing_null_on_objc_selector = `objc::selector!` may not contain null characters
attr_parsing_objc_class_expected_string_literal = `objc::class!` expected a string literal
attr_parsing_objc_selector_expected_string_literal = `objc::selector!` expected a string literal
attr_parsing_raw_dylib_elf_unstable =
link kind `raw-dylib` is unstable on ELF platforms
attr_parsing_raw_dylib_no_nul =
link name must not contain NUL characters if link kind is `raw-dylib`
attr_parsing_raw_dylib_only_windows =
link kind `raw-dylib` is only supported on Windows targets
attr_parsing_repr_ident =
meta item in `repr` must be an identifier
attr_parsing_rustc_allowed_unstable_pairing =
`rustc_allowed_through_unstable_modules` attribute must be paired with a `stable` attribute
attr_parsing_rustc_promotable_pairing =
`rustc_promotable` attribute must be paired with either a `rustc_const_unstable` or a `rustc_const_stable` attribute
attr_parsing_rustc_scalable_vector_count_out_of_range = element count in `rustc_scalable_vector` is too large: `{$n}`
.note = the value may not exceed `u16::MAX`
attr_parsing_soft_no_args =
`soft` should not have any arguments
attr_parsing_stability_outside_std = stability attributes may not be used outside of the standard library
attr_parsing_suffixed_literal_in_attribute = suffixed literals are not allowed in attributes
.help = instead of using a suffixed literal (`1u8`, `1.0f32`, etc.), use an unsuffixed version (`1`, `1.0`, etc.)
attr_parsing_unknown_version_literal =
unknown version literal format, assuming it refers to a future version
attr_parsing_unrecognized_repr_hint =
unrecognized representation hint
.help = valid reprs are `Rust` (default), `C`, `align`, `packed`, `transparent`, `simd`, `i8`, `u8`, `i16`, `u16`, `i32`, `u32`, `i64`, `u64`, `i128`, `u128`, `isize`, `usize`
.note = for more information, visit <https://doc.rust-lang.org/reference/type-layout.html?highlight=repr#representations>
attr_parsing_unsafe_attr_outside_unsafe = unsafe attribute used without unsafe
.label = usage of unsafe attribute
attr_parsing_unsafe_attr_outside_unsafe_suggestion = wrap the attribute in `unsafe(...)`
attr_parsing_unstable_cfg_target_compact =
compact `cfg(target(..))` is experimental and subject to change
attr_parsing_unstable_feature_bound_incompatible_stability = item annotated with `#[unstable_feature_bound]` should not be stable
.help = if this item is meant to be stable, do not use any functions annotated with `#[unstable_feature_bound]`. Otherwise, mark this item as unstable with `#[unstable]`
attr_parsing_unsupported_instruction_set = target `{$current_target}` does not support `#[instruction_set({$instruction_set}::*)]`
attr_parsing_unsupported_literal_suggestion =
consider removing the prefix
attr_parsing_unused_multiple =
multiple `{$name}` attributes
.suggestion = remove this attribute
.note = attribute also specified here
attr_parsing_whole_archive_needs_static =
linking modifier `whole-archive` is only compatible with `static` linking kind

View file

@ -3,14 +3,14 @@ use std::convert::identity;
use rustc_ast::token::Delimiter;
use rustc_ast::tokenstream::DelimSpan;
use rustc_ast::{AttrItem, Attribute, CRATE_NODE_ID, LitKind, ast, token};
use rustc_errors::{Applicability, PResult};
use rustc_errors::{Applicability, PResult, inline_fluent};
use rustc_feature::{
AttrSuggestionStyle, AttributeTemplate, Features, GatedCfg, find_gated_cfg, template,
};
use rustc_hir::attrs::CfgEntry;
use rustc_hir::lints::AttributeLintKind;
use rustc_hir::{AttrPath, RustcVersion, Target};
use rustc_parse::parser::{ForceCollect, Parser};
use rustc_parse::parser::{ForceCollect, Parser, Recovery};
use rustc_parse::{exp, parse_in};
use rustc_session::Session;
use rustc_session::config::ExpectedValues;
@ -25,7 +25,7 @@ use crate::session_diagnostics::{
AttributeParseError, AttributeParseErrorReason, CfgAttrBadDelim, MetaBadDelimSugg,
ParsedDescription,
};
use crate::{AttributeParser, fluent_generated, parse_version, session_diagnostics};
use crate::{AttributeParser, parse_version, session_diagnostics};
pub const CFG_TEMPLATE: AttributeTemplate = template!(
List: &["predicate"],
@ -141,7 +141,7 @@ fn parse_cfg_entry_target<S: Stage>(
cx.sess(),
sym::cfg_target_compact,
meta_span,
fluent_generated::attr_parsing_unstable_cfg_target_compact,
inline_fluent!("compact `cfg(target(..))` is experimental and subject to change"),
)
.emit();
}
@ -360,8 +360,10 @@ fn parse_cfg_attr_internal<'a>(
) -> PResult<'a, (CfgEntry, Vec<(ast::AttrItem, Span)>)> {
// Parse cfg predicate
let pred_start = parser.token.span;
let meta =
MetaItemOrLitParser::parse_single(parser, ShouldEmit::ErrorsAndLints { recover: true })?;
let meta = MetaItemOrLitParser::parse_single(
parser,
ShouldEmit::ErrorsAndLints { recovery: Recovery::Allowed },
)?;
let pred_span = pred_start.with_hi(parser.token.span.hi());
let cfg_predicate = AttributeParser::parse_single_args(
@ -376,7 +378,7 @@ fn parse_cfg_attr_internal<'a>(
CRATE_NODE_ID,
Target::Crate,
features,
ShouldEmit::ErrorsAndLints { recover: true },
ShouldEmit::ErrorsAndLints { recovery: Recovery::Allowed },
&meta,
parse_cfg_entry,
&CFG_ATTR_TEMPLATE,

View file

@ -5,7 +5,7 @@ use rustc_feature::{AttributeTemplate, Features};
use rustc_hir::attrs::CfgEntry;
use rustc_hir::{AttrPath, Target};
use rustc_parse::exp;
use rustc_parse::parser::Parser;
use rustc_parse::parser::{Parser, Recovery};
use rustc_session::Session;
use rustc_span::{ErrorGuaranteed, Span, sym};
@ -78,9 +78,11 @@ pub fn parse_cfg_select(
}
}
} else {
let meta =
MetaItemOrLitParser::parse_single(p, ShouldEmit::ErrorsAndLints { recover: true })
.map_err(|diag| diag.emit())?;
let meta = MetaItemOrLitParser::parse_single(
p,
ShouldEmit::ErrorsAndLints { recovery: Recovery::Allowed },
)
.map_err(|diag| diag.emit())?;
let cfg_span = meta.span();
let cfg = AttributeParser::parse_single_args(
sess,
@ -95,7 +97,7 @@ pub fn parse_cfg_select(
// Doesn't matter what the target actually is here.
Target::Crate,
features,
ShouldEmit::ErrorsAndLints { recover: true },
ShouldEmit::ErrorsAndLints { recovery: Recovery::Allowed },
&meta,
parse_cfg_entry,
&AttributeTemplate::default(),

View file

@ -274,3 +274,12 @@ impl<S: Stage> NoArgsAttributeParser<S> for NoBuiltinsParser {
const ALLOWED_TARGETS: AllowedTargets = AllowedTargets::AllowList(&[Allow(Target::Crate)]);
const CREATE: fn(Span) -> AttributeKind = |_| AttributeKind::NoBuiltins;
}
/// Parser for the internal `#[rustc_preserve_ub_checks]` attribute, which takes no arguments.
pub(crate) struct RustcPreserveUbChecksParser;
impl<S: Stage> NoArgsAttributeParser<S> for RustcPreserveUbChecksParser {
    const PATH: &[Symbol] = &[sym::rustc_preserve_ub_checks];
    // Specifying the attribute more than once is an error.
    const ON_DUPLICATE: OnDuplicate<S> = OnDuplicate::Error;
    // Only allowed as a crate-level attribute.
    const ALLOWED_TARGETS: AllowedTargets = AllowedTargets::AllowList(&[Allow(Target::Crate)]);
    const CREATE: fn(Span) -> AttributeKind = |_| AttributeKind::RustcPreserveUbChecks;
}

View file

@ -1,3 +1,4 @@
use rustc_errors::inline_fluent;
use rustc_feature::Features;
use rustc_hir::attrs::AttributeKind::{LinkName, LinkOrdinal, LinkSection};
use rustc_hir::attrs::*;
@ -10,7 +11,6 @@ use rustc_target::spec::{Arch, BinaryFormat};
use super::prelude::*;
use super::util::parse_single_integer;
use crate::attributes::cfg::parse_cfg_entry;
use crate::fluent_generated;
use crate::session_diagnostics::{
AsNeededCompatibility, BundleNeedsStatic, EmptyLinkName, ImportNameTypeRaw, ImportNameTypeX86,
IncompatibleWasmLink, InvalidLinkModifier, LinkFrameworkApple, LinkOrdinalOutOfRange,
@ -305,7 +305,7 @@ impl LinkParser {
sess,
sym::raw_dylib_elf,
nv.value_span,
fluent_generated::attr_parsing_raw_dylib_elf_unstable,
inline_fluent!("link kind `raw-dylib` is unstable on ELF platforms"),
)
.emit();
} else {
@ -320,7 +320,7 @@ impl LinkParser {
sess,
sym::link_arg_attribute,
nv.value_span,
fluent_generated::attr_parsing_link_arg_unstable,
inline_fluent!("link kind `link-arg` is unstable"),
)
.emit();
}
@ -385,13 +385,8 @@ impl LinkParser {
return true;
};
if !features.link_cfg() {
feature_err(
sess,
sym::link_cfg,
item.span(),
fluent_generated::attr_parsing_link_cfg_unstable,
)
.emit();
feature_err(sess, sym::link_cfg, item.span(), inline_fluent!("link cfg is unstable"))
.emit();
}
*cfg = parse_cfg_entry(cx, link_cfg).ok();
true

View file

@ -7,36 +7,36 @@ use crate::attributes::{NoArgsAttributeParser, OnDuplicate};
use crate::context::Stage;
use crate::target_checking::AllowedTargets;
pub(crate) struct RustcDumpUserArgs;
pub(crate) struct RustcDumpUserArgsParser;
impl<S: Stage> NoArgsAttributeParser<S> for RustcDumpUserArgs {
impl<S: Stage> NoArgsAttributeParser<S> for RustcDumpUserArgsParser {
const PATH: &[Symbol] = &[sym::rustc_dump_user_args];
const ON_DUPLICATE: OnDuplicate<S> = OnDuplicate::Error;
const ALLOWED_TARGETS: AllowedTargets = AllowedTargets::AllowList(&[Allow(Target::Fn)]);
const CREATE: fn(Span) -> AttributeKind = |_| AttributeKind::RustcDumpUserArgs;
}
pub(crate) struct RustcDumpDefParents;
pub(crate) struct RustcDumpDefParentsParser;
impl<S: Stage> NoArgsAttributeParser<S> for RustcDumpDefParents {
impl<S: Stage> NoArgsAttributeParser<S> for RustcDumpDefParentsParser {
const PATH: &[Symbol] = &[sym::rustc_dump_def_parents];
const ON_DUPLICATE: OnDuplicate<S> = OnDuplicate::Error;
const ALLOWED_TARGETS: AllowedTargets = AllowedTargets::AllowList(&[Allow(Target::Fn)]);
const CREATE: fn(Span) -> AttributeKind = |_| AttributeKind::RustcDumpDefParents;
}
pub(crate) struct RustcDumpItemBounds;
pub(crate) struct RustcDumpItemBoundsParser;
impl<S: Stage> NoArgsAttributeParser<S> for RustcDumpItemBounds {
impl<S: Stage> NoArgsAttributeParser<S> for RustcDumpItemBoundsParser {
const PATH: &[Symbol] = &[sym::rustc_dump_item_bounds];
const ON_DUPLICATE: OnDuplicate<S> = OnDuplicate::Error;
const ALLOWED_TARGETS: AllowedTargets = AllowedTargets::AllowList(&[Allow(Target::AssocTy)]);
const CREATE: fn(Span) -> AttributeKind = |_| AttributeKind::RustcDumpItemBounds;
}
pub(crate) struct RustcDumpPredicates;
pub(crate) struct RustcDumpPredicatesParser;
impl<S: Stage> NoArgsAttributeParser<S> for RustcDumpPredicates {
impl<S: Stage> NoArgsAttributeParser<S> for RustcDumpPredicatesParser {
const PATH: &[Symbol] = &[sym::rustc_dump_predicates];
const ON_DUPLICATE: OnDuplicate<S> = OnDuplicate::Error;
const ALLOWED_TARGETS: AllowedTargets = AllowedTargets::AllowList(&[
@ -49,9 +49,9 @@ impl<S: Stage> NoArgsAttributeParser<S> for RustcDumpPredicates {
const CREATE: fn(Span) -> AttributeKind = |_| AttributeKind::RustcDumpPredicates;
}
pub(crate) struct RustcDumpVtable;
pub(crate) struct RustcDumpVtableParser;
impl<S: Stage> NoArgsAttributeParser<S> for RustcDumpVtable {
impl<S: Stage> NoArgsAttributeParser<S> for RustcDumpVtableParser {
const PATH: &[Symbol] = &[sym::rustc_dump_vtable];
const ON_DUPLICATE: OnDuplicate<S> = OnDuplicate::Error;
const ALLOWED_TARGETS: AllowedTargets = AllowedTargets::AllowList(&[

View file

@ -1,5 +1,7 @@
use std::path::PathBuf;
use rustc_ast::{LitIntType, LitKind, MetaItemLit};
use rustc_hir::attrs::RustcLayoutType;
use rustc_hir::attrs::{BorrowckGraphvizFormatKind, RustcLayoutType, RustcMirKind};
use rustc_session::errors;
use super::prelude::*;
@ -307,6 +309,14 @@ impl<S: Stage> NoArgsAttributeParser<S> for RustcHasIncoherentInherentImplsParse
const CREATE: fn(Span) -> AttributeKind = |_| AttributeKind::RustcHasIncoherentInherentImpls;
}
/// Parser for the internal `#[rustc_hidden_type_of_opaques]` attribute, which takes no arguments.
pub(crate) struct RustcHiddenTypeOfOpaquesParser;
impl<S: Stage> NoArgsAttributeParser<S> for RustcHiddenTypeOfOpaquesParser {
    const PATH: &[Symbol] = &[sym::rustc_hidden_type_of_opaques];
    // Specifying the attribute more than once is an error.
    const ON_DUPLICATE: OnDuplicate<S> = OnDuplicate::Error;
    // Only allowed as a crate-level attribute.
    const ALLOWED_TARGETS: AllowedTargets = AllowedTargets::AllowList(&[Allow(Target::Crate)]);
    const CREATE: fn(Span) -> AttributeKind = |_| AttributeKind::RustcHiddenTypeOfOpaques;
}
pub(crate) struct RustcNounwindParser;
impl<S: Stage> NoArgsAttributeParser<S> for RustcNounwindParser {
@ -349,7 +359,6 @@ impl<S: Stage> CombineAttributeParser<S> for RustcLayoutParser {
const TEMPLATE: AttributeTemplate =
template!(List: &["abi", "align", "size", "homogenous_aggregate", "debug"]);
fn extend(
cx: &mut AcceptContext<'_, '_, S>,
args: &ArgParser,
@ -389,6 +398,94 @@ impl<S: Stage> CombineAttributeParser<S> for RustcLayoutParser {
}
}
/// Parser for the internal `#[rustc_mir(...)]` attribute. Recognized arguments are the
/// `rustc_peek_*` / `stop_after_dataflow` flags and the two `borrowck_graphviz_*`
/// name-value settings; everything else in the list is ignored.
pub(crate) struct RustcMirParser;
impl<S: Stage> CombineAttributeParser<S> for RustcMirParser {
    const PATH: &[rustc_span::Symbol] = &[sym::rustc_mir];
    type Item = RustcMirKind;
    const CONVERT: ConvertFn<Self::Item> = |items, _| AttributeKind::RustcMir(items);
    // The attribute is only accepted on function-like items (free fns and all method kinds).
    const ALLOWED_TARGETS: AllowedTargets = AllowedTargets::AllowList(&[
        Allow(Target::Fn),
        Allow(Target::Method(MethodKind::Inherent)),
        Allow(Target::Method(MethodKind::TraitImpl)),
        Allow(Target::Method(MethodKind::Trait { body: false })),
        Allow(Target::Method(MethodKind::Trait { body: true })),
    ]);
    const TEMPLATE: AttributeTemplate = template!(List: &["arg1, arg2, ..."]);

    /// Parses the `#[rustc_mir(...)]` argument list into zero or more [`RustcMirKind`]s.
    ///
    /// Unknown identifiers and literal (non-meta-item) arguments are silently skipped;
    /// malformed known arguments emit a parse error through `cx` and are dropped from
    /// the result.
    fn extend(
        cx: &mut AcceptContext<'_, '_, S>,
        args: &ArgParser,
    ) -> impl IntoIterator<Item = Self::Item> {
        // The attribute requires a parenthesized list, e.g. `#[rustc_mir(...)]`.
        let Some(list) = args.list() else {
            cx.expected_list(cx.attr_span, args);
            return ThinVec::new();
        };
        list.mixed()
            // Only meta-item arguments are recognized; bare literals are ignored.
            .filter_map(|arg| arg.meta_item())
            .filter_map(|mi| {
                if let Some(ident) = mi.ident() {
                    match ident.name {
                        sym::rustc_peek_maybe_init => Some(RustcMirKind::PeekMaybeInit),
                        sym::rustc_peek_maybe_uninit => Some(RustcMirKind::PeekMaybeUninit),
                        sym::rustc_peek_liveness => Some(RustcMirKind::PeekLiveness),
                        sym::stop_after_dataflow => Some(RustcMirKind::StopAfterDataflow),
                        // `borrowck_graphviz_postflow = "path/to/file.dot"`
                        sym::borrowck_graphviz_postflow => {
                            let Some(nv) = mi.args().name_value() else {
                                cx.expected_name_value(
                                    mi.span(),
                                    Some(sym::borrowck_graphviz_postflow),
                                );
                                return None;
                            };
                            let Some(path) = nv.value_as_str() else {
                                cx.expected_string_literal(nv.value_span, None);
                                return None;
                            };
                            let path = PathBuf::from(path.to_string());
                            // The value must end in a file name, not just a directory.
                            if path.file_name().is_some() {
                                Some(RustcMirKind::BorrowckGraphvizPostflow { path })
                            } else {
                                cx.expected_filename_literal(nv.value_span);
                                None
                            }
                        }
                        // `borrowck_graphviz_format = two_phase`
                        sym::borrowck_graphviz_format => {
                            let Some(nv) = mi.args().name_value() else {
                                cx.expected_name_value(
                                    mi.span(),
                                    Some(sym::borrowck_graphviz_format),
                                );
                                return None;
                            };
                            let Some(format) = nv.value_as_ident() else {
                                cx.expected_identifier(nv.value_span);
                                return None;
                            };
                            match format.name {
                                sym::two_phase => Some(RustcMirKind::BorrowckGraphvizFormat {
                                    format: BorrowckGraphvizFormatKind::TwoPhase,
                                }),
                                _ => {
                                    // `two_phase` is the only supported format value here.
                                    cx.expected_specific_argument(format.span, &[sym::two_phase]);
                                    None
                                }
                            }
                        }
                        // Unrecognized argument names are ignored without error.
                        _ => None,
                    }
                } else {
                    None
                }
            })
            .collect()
    }
}
pub(crate) struct RustcNonConstTraitMethodParser;
impl<S: Stage> NoArgsAttributeParser<S> for RustcNonConstTraitMethodParser {

View file

@ -10,6 +10,7 @@ use rustc_feature::{AttrSuggestionStyle, AttributeTemplate};
use rustc_hir::attrs::AttributeKind;
use rustc_hir::lints::AttributeLintKind;
use rustc_hir::{AttrPath, HirId};
use rustc_parse::parser::Recovery;
use rustc_session::Session;
use rustc_session::lint::{Lint, LintId};
use rustc_span::{ErrorGuaranteed, Span, Symbol};
@ -31,7 +32,8 @@ use crate::attributes::crate_level::{
CrateNameParser, CrateTypeParser, MoveSizeLimitParser, NeedsPanicRuntimeParser,
NoBuiltinsParser, NoCoreParser, NoMainParser, NoStdParser, PanicRuntimeParser,
PatternComplexityLimitParser, ProfilerRuntimeParser, RecursionLimitParser,
RustcCoherenceIsCoreParser, TypeLengthLimitParser, WindowsSubsystemParser,
RustcCoherenceIsCoreParser, RustcPreserveUbChecksParser, TypeLengthLimitParser,
WindowsSubsystemParser,
};
use crate::attributes::debugger::DebuggerViualizerParser;
use crate::attributes::deprecation::DeprecationParser;
@ -71,17 +73,18 @@ use crate::attributes::rustc_allocator::{
RustcDeallocatorParser, RustcReallocatorParser,
};
use crate::attributes::rustc_dump::{
RustcDumpDefParents, RustcDumpItemBounds, RustcDumpPredicates, RustcDumpUserArgs,
RustcDumpVtable,
RustcDumpDefParentsParser, RustcDumpItemBoundsParser, RustcDumpPredicatesParser,
RustcDumpUserArgsParser, RustcDumpVtableParser,
};
use crate::attributes::rustc_internal::{
RustcHasIncoherentInherentImplsParser, RustcLayoutParser, RustcLayoutScalarValidRangeEndParser,
RustcLayoutScalarValidRangeStartParser, RustcLegacyConstGenericsParser,
RustcLintOptDenyFieldAccessParser, RustcLintOptTyParser, RustcLintQueryInstabilityParser,
RustcLintUntrackedQueryInformationParser, RustcMainParser, RustcMustImplementOneOfParser,
RustcNeverReturnsNullPointerParser, RustcNoImplicitAutorefsParser,
RustcNonConstTraitMethodParser, RustcNounwindParser, RustcObjectLifetimeDefaultParser,
RustcOffloadKernelParser, RustcScalableVectorParser, RustcSimdMonomorphizeLaneLimitParser,
RustcHasIncoherentInherentImplsParser, RustcHiddenTypeOfOpaquesParser, RustcLayoutParser,
RustcLayoutScalarValidRangeEndParser, RustcLayoutScalarValidRangeStartParser,
RustcLegacyConstGenericsParser, RustcLintOptDenyFieldAccessParser, RustcLintOptTyParser,
RustcLintQueryInstabilityParser, RustcLintUntrackedQueryInformationParser, RustcMainParser,
RustcMirParser, RustcMustImplementOneOfParser, RustcNeverReturnsNullPointerParser,
RustcNoImplicitAutorefsParser, RustcNonConstTraitMethodParser, RustcNounwindParser,
RustcObjectLifetimeDefaultParser, RustcOffloadKernelParser, RustcScalableVectorParser,
RustcSimdMonomorphizeLaneLimitParser,
};
use crate::attributes::semantics::MayDangleParser;
use crate::attributes::stability::{
@ -199,6 +202,7 @@ attribute_parsers!(
Combine<LinkParser>,
Combine<ReprParser>,
Combine<RustcLayoutParser>,
Combine<RustcMirParser>,
Combine<TargetFeatureParser>,
Combine<UnstableFeatureBoundParser>,
// tidy-alphabetical-end
@ -293,12 +297,13 @@ attribute_parsers!(
Single<WithoutArgs<RustcAllocatorZeroedParser>>,
Single<WithoutArgs<RustcCoherenceIsCoreParser>>,
Single<WithoutArgs<RustcDeallocatorParser>>,
Single<WithoutArgs<RustcDumpDefParents>>,
Single<WithoutArgs<RustcDumpItemBounds>>,
Single<WithoutArgs<RustcDumpPredicates>>,
Single<WithoutArgs<RustcDumpUserArgs>>,
Single<WithoutArgs<RustcDumpVtable>>,
Single<WithoutArgs<RustcDumpDefParentsParser>>,
Single<WithoutArgs<RustcDumpItemBoundsParser>>,
Single<WithoutArgs<RustcDumpPredicatesParser>>,
Single<WithoutArgs<RustcDumpUserArgsParser>>,
Single<WithoutArgs<RustcDumpVtableParser>>,
Single<WithoutArgs<RustcHasIncoherentInherentImplsParser>>,
Single<WithoutArgs<RustcHiddenTypeOfOpaquesParser>>,
Single<WithoutArgs<RustcLintOptTyParser>>,
Single<WithoutArgs<RustcLintQueryInstabilityParser>>,
Single<WithoutArgs<RustcLintUntrackedQueryInformationParser>>,
@ -309,6 +314,7 @@ attribute_parsers!(
Single<WithoutArgs<RustcNounwindParser>>,
Single<WithoutArgs<RustcOffloadKernelParser>>,
Single<WithoutArgs<RustcPassIndirectlyInNonRusticAbisParser>>,
Single<WithoutArgs<RustcPreserveUbChecksParser>>,
Single<WithoutArgs<RustcReallocatorParser>>,
Single<WithoutArgs<RustcShouldNotBeCalledOnConstItems>>,
Single<WithoutArgs<RustcVarianceOfOpaquesParser>>,
@ -383,7 +389,7 @@ impl Stage for Late {
}
fn should_emit(&self) -> ShouldEmit {
ShouldEmit::ErrorsAndLints { recover: true }
ShouldEmit::ErrorsAndLints { recovery: Recovery::Allowed }
}
}
@ -512,6 +518,11 @@ impl<'f, 'sess: 'f, S: Stage> AcceptContext<'f, 'sess, S> {
)
}
/// Emits a parse error at `span` indicating that a filename string literal was expected.
pub(crate) fn expected_filename_literal(&self, span: Span) {
    self.emit_parse_error(span, AttributeParseErrorReason::ExpectedFilenameLiteral);
}
pub(crate) fn expected_integer_literal(&self, span: Span) -> ErrorGuaranteed {
self.emit_parse_error(span, AttributeParseErrorReason::ExpectedIntegerLiteral)
}
@ -770,10 +781,10 @@ pub enum ShouldEmit {
ErrorsAndLints {
/// Whether [`ArgParser`] will attempt to recover from errors.
///
/// If true, it will attempt to recover from bad input (like an invalid literal). Setting
/// this to false will instead return early, and not raise errors except at the top level
/// (in [`ArgParser::from_attr_args`]).
recover: bool,
/// Whether it is allowed to recover from bad input (like an invalid literal). Setting
/// this to `Forbidden` will instead return early, and not raise errors except at the top
/// level (in [`ArgParser::from_attr_args`]).
recovery: Recovery,
},
/// The operation will *not* emit errors and lints.
///

View file

@ -113,5 +113,3 @@ pub use attributes::util::{is_builtin_attr, parse_version};
pub use context::{Early, Late, OmitDoc, ShouldEmit};
pub use interface::AttributeParser;
pub use session_diagnostics::ParsedDescription;
rustc_fluent_macro::fluent_messages! { "../messages.ftl" }

View file

@ -15,7 +15,7 @@ use rustc_ast_pretty::pprust;
use rustc_errors::{Diag, PResult};
use rustc_hir::{self as hir, AttrPath};
use rustc_parse::exp;
use rustc_parse::parser::{ForceCollect, Parser, PathStyle, token_descr};
use rustc_parse::parser::{ForceCollect, Parser, PathStyle, Recovery, token_descr};
use rustc_session::errors::create_lit_error;
use rustc_session::parse::ParseSess;
use rustc_span::{Ident, Span, Symbol, sym};
@ -121,7 +121,7 @@ impl ArgParser {
&args.tokens,
args.dspan.entire(),
psess,
ShouldEmit::ErrorsAndLints { recover: false },
ShouldEmit::ErrorsAndLints { recovery: Recovery::Forbidden },
) {
Ok(p) => return Some(ArgParser::List(p)),
Err(e) => {
@ -373,7 +373,10 @@ fn expr_to_lit<'sess>(
}
Err(err) => {
let err = create_lit_error(psess, err, token_lit, expr.span);
if matches!(should_emit, ShouldEmit::ErrorsAndLints { recover: false }) {
if matches!(
should_emit,
ShouldEmit::ErrorsAndLints { recovery: Recovery::Forbidden }
) {
Err(err)
} else {
let lit = MetaItemLit {
@ -431,7 +434,10 @@ impl<'a, 'sess> MetaItemListParserContext<'a, 'sess> {
if !lit.kind.is_unsuffixed() {
// Emit error and continue, we can still parse the attribute as if the suffix isn't there
let err = self.parser.dcx().create_err(SuffixedLiteralInAttribute { span: lit.span });
if matches!(self.should_emit, ShouldEmit::ErrorsAndLints { recover: false }) {
if matches!(
self.should_emit,
ShouldEmit::ErrorsAndLints { recovery: Recovery::Forbidden }
) {
return Err(err);
} else {
self.should_emit.emit_err(err)
@ -569,6 +575,10 @@ impl<'a, 'sess> MetaItemListParserContext<'a, 'sess> {
should_emit: ShouldEmit,
) -> PResult<'sess, MetaItemListParser> {
let mut parser = Parser::new(psess, tokens, None);
if let ShouldEmit::ErrorsAndLints { recovery } = should_emit {
parser = parser.recovery(recovery);
}
let mut this = MetaItemListParserContext { parser: &mut parser, should_emit };
// Presumably, the majority of the time there will only be one attr.

View file

@ -11,10 +11,8 @@ use rustc_macros::{Diagnostic, Subdiagnostic};
use rustc_span::{Span, Symbol};
use rustc_target::spec::TargetTuple;
use crate::fluent_generated as fluent;
#[derive(Diagnostic)]
#[diag(attr_parsing_invalid_predicate, code = E0537)]
#[diag("invalid predicate `{$predicate}`", code = E0537)]
pub(crate) struct InvalidPredicate {
#[primary_span]
pub span: Span,
@ -23,7 +21,7 @@ pub(crate) struct InvalidPredicate {
}
#[derive(Diagnostic)]
#[diag(attr_parsing_doc_alias_empty)]
#[diag("{$attr_str} attribute cannot have empty value")]
pub(crate) struct DocAliasEmpty<'a> {
#[primary_span]
pub span: Span,
@ -31,7 +29,7 @@ pub(crate) struct DocAliasEmpty<'a> {
}
#[derive(Diagnostic)]
#[diag(attr_parsing_doc_alias_bad_char)]
#[diag("{$char_} character isn't allowed in {$attr_str}")]
pub(crate) struct DocAliasBadChar<'a> {
#[primary_span]
pub span: Span,
@ -40,7 +38,7 @@ pub(crate) struct DocAliasBadChar<'a> {
}
#[derive(Diagnostic)]
#[diag(attr_parsing_doc_alias_start_end)]
#[diag("{$attr_str} cannot start or end with ' '")]
pub(crate) struct DocAliasStartEnd<'a> {
#[primary_span]
pub span: Span,
@ -48,7 +46,7 @@ pub(crate) struct DocAliasStartEnd<'a> {
}
#[derive(Diagnostic)]
#[diag(attr_parsing_doc_attr_not_crate_level)]
#[diag("`#![doc({$attr_name} = \"...\")]` isn't allowed as a crate-level attribute")]
pub(crate) struct DocAttrNotCrateLevel {
#[primary_span]
pub span: Span,
@ -56,8 +54,8 @@ pub(crate) struct DocAttrNotCrateLevel {
}
#[derive(Diagnostic)]
#[diag(attr_parsing_doc_keyword_not_keyword)]
#[help]
#[diag("nonexistent keyword `{$keyword}` used in `#[doc(keyword = \"...\")]`")]
#[help("only existing keywords are allowed in core/std")]
pub(crate) struct DocKeywordNotKeyword {
#[primary_span]
pub span: Span,
@ -65,8 +63,8 @@ pub(crate) struct DocKeywordNotKeyword {
}
#[derive(Diagnostic)]
#[diag(attr_parsing_doc_attribute_not_attribute)]
#[help]
#[diag("nonexistent builtin attribute `{$attribute}` used in `#[doc(attribute = \"...\")]`")]
#[help("only existing builtin attributes are allowed in core/std")]
pub(crate) struct DocAttributeNotAttribute {
#[primary_span]
pub span: Span,
@ -74,28 +72,28 @@ pub(crate) struct DocAttributeNotAttribute {
}
#[derive(Diagnostic)]
#[diag(attr_parsing_missing_since, code = E0542)]
#[diag("missing 'since'", code = E0542)]
pub(crate) struct MissingSince {
#[primary_span]
pub span: Span,
}
#[derive(Diagnostic)]
#[diag(attr_parsing_missing_note, code = E0543)]
#[diag("missing 'note'", code = E0543)]
pub(crate) struct MissingNote {
#[primary_span]
pub span: Span,
}
#[derive(Diagnostic)]
#[diag(attr_parsing_multiple_stability_levels, code = E0544)]
#[diag("multiple stability levels", code = E0544)]
pub(crate) struct MultipleStabilityLevels {
#[primary_span]
pub span: Span,
}
#[derive(Diagnostic)]
#[diag(attr_parsing_invalid_issue_string, code = E0545)]
#[diag("`issue` must be a non-zero numeric string or \"none\"", code = E0545)]
pub(crate) struct InvalidIssueString {
#[primary_span]
pub span: Span,
@ -108,31 +106,31 @@ pub(crate) struct InvalidIssueString {
// translatable.
#[derive(Subdiagnostic)]
pub(crate) enum InvalidIssueStringCause {
#[label(attr_parsing_must_not_be_zero)]
#[label("`issue` must not be \"0\", use \"none\" instead")]
MustNotBeZero {
#[primary_span]
span: Span,
},
#[label(attr_parsing_empty)]
#[label("cannot parse integer from empty string")]
Empty {
#[primary_span]
span: Span,
},
#[label(attr_parsing_invalid_digit)]
#[label("invalid digit found in string")]
InvalidDigit {
#[primary_span]
span: Span,
},
#[label(attr_parsing_pos_overflow)]
#[label("number too large to fit in target type")]
PosOverflow {
#[primary_span]
span: Span,
},
#[label(attr_parsing_neg_overflow)]
#[label("number too small to fit in target type")]
NegOverflow {
#[primary_span]
span: Span,
@ -153,21 +151,21 @@ impl InvalidIssueStringCause {
}
#[derive(Diagnostic)]
#[diag(attr_parsing_missing_feature, code = E0546)]
#[diag("missing 'feature'", code = E0546)]
pub(crate) struct MissingFeature {
#[primary_span]
pub span: Span,
}
#[derive(Diagnostic)]
#[diag(attr_parsing_non_ident_feature, code = E0546)]
#[diag("'feature' is not an identifier", code = E0546)]
pub(crate) struct NonIdentFeature {
#[primary_span]
pub span: Span,
}
#[derive(Diagnostic)]
#[diag(attr_parsing_missing_issue, code = E0547)]
#[diag("missing 'issue'", code = E0547)]
pub(crate) struct MissingIssue {
#[primary_span]
pub span: Span,
@ -176,20 +174,20 @@ pub(crate) struct MissingIssue {
// FIXME: Why is this the same error code as `InvalidReprHintNoParen` and `InvalidReprHintNoValue`?
// It is more similar to `IncorrectReprFormatGeneric`.
#[derive(Diagnostic)]
#[diag(attr_parsing_incorrect_repr_format_packed_one_or_zero_arg, code = E0552)]
#[diag("incorrect `repr(packed)` attribute format: `packed` takes exactly one parenthesized argument, or no parentheses at all", code = E0552)]
pub(crate) struct IncorrectReprFormatPackedOneOrZeroArg {
#[primary_span]
pub span: Span,
}
#[derive(Diagnostic)]
#[diag(attr_parsing_incorrect_repr_format_packed_expect_integer, code = E0552)]
#[diag("incorrect `repr(packed)` attribute format: `packed` expects a literal integer as argument", code = E0552)]
pub(crate) struct IncorrectReprFormatPackedExpectInteger {
#[primary_span]
pub span: Span,
}
#[derive(Diagnostic)]
#[diag(attr_parsing_invalid_repr_hint_no_paren, code = E0552)]
#[diag("invalid representation hint: `{$name}` does not take a parenthesized argument list", code = E0552)]
pub(crate) struct InvalidReprHintNoParen {
#[primary_span]
pub span: Span,
@ -198,7 +196,7 @@ pub(crate) struct InvalidReprHintNoParen {
}
#[derive(Diagnostic)]
#[diag(attr_parsing_invalid_repr_hint_no_value, code = E0552)]
#[diag("invalid representation hint: `{$name}` does not take a value", code = E0552)]
pub(crate) struct InvalidReprHintNoValue {
#[primary_span]
pub span: Span,
@ -207,15 +205,19 @@ pub(crate) struct InvalidReprHintNoValue {
}
#[derive(Diagnostic)]
#[diag(attr_parsing_invalid_repr_align_need_arg, code = E0589)]
#[diag("invalid `repr(align)` attribute: `align` needs an argument", code = E0589)]
pub(crate) struct InvalidReprAlignNeedArg {
#[primary_span]
#[suggestion(code = "align(...)", applicability = "has-placeholders")]
#[suggestion(
"supply an argument here",
code = "align(...)",
applicability = "has-placeholders"
)]
pub span: Span,
}
#[derive(Diagnostic)]
#[diag(attr_parsing_invalid_repr_generic, code = E0589)]
#[diag("invalid `repr({$repr_arg})` attribute: {$error_part}", code = E0589)]
pub(crate) struct InvalidReprGeneric<'a> {
#[primary_span]
pub span: Span,
@ -225,21 +227,21 @@ pub(crate) struct InvalidReprGeneric<'a> {
}
#[derive(Diagnostic)]
#[diag(attr_parsing_incorrect_repr_format_align_one_arg, code = E0693)]
#[diag("incorrect `repr(align)` attribute format: `align` takes exactly one argument in parentheses", code = E0693)]
pub(crate) struct IncorrectReprFormatAlignOneArg {
#[primary_span]
pub span: Span,
}
#[derive(Diagnostic)]
#[diag(attr_parsing_incorrect_repr_format_expect_literal_integer, code = E0693)]
#[diag("incorrect `repr(align)` attribute format: `align` expects a literal integer as argument", code = E0693)]
pub(crate) struct IncorrectReprFormatExpectInteger {
#[primary_span]
pub span: Span,
}
#[derive(Diagnostic)]
#[diag(attr_parsing_incorrect_repr_format_generic, code = E0693)]
#[diag("incorrect `repr({$repr_arg})` attribute format", code = E0693)]
pub(crate) struct IncorrectReprFormatGeneric {
#[primary_span]
pub span: Span,
@ -253,7 +255,7 @@ pub(crate) struct IncorrectReprFormatGeneric {
#[derive(Subdiagnostic)]
pub(crate) enum IncorrectReprFormatGenericCause {
#[suggestion(
attr_parsing_suggestion,
"use parentheses instead",
code = "{name}({value})",
applicability = "machine-applicable"
)]
@ -269,7 +271,7 @@ pub(crate) enum IncorrectReprFormatGenericCause {
},
#[suggestion(
attr_parsing_suggestion,
"use parentheses instead",
code = "{name}({value})",
applicability = "machine-applicable"
)]
@ -298,48 +300,48 @@ impl IncorrectReprFormatGenericCause {
}
#[derive(Diagnostic)]
#[diag(attr_parsing_rustc_promotable_pairing, code = E0717)]
#[diag("`rustc_promotable` attribute must be paired with either a `rustc_const_unstable` or a `rustc_const_stable` attribute", code = E0717)]
pub(crate) struct RustcPromotablePairing {
#[primary_span]
pub span: Span,
}
#[derive(Diagnostic)]
#[diag(attr_parsing_rustc_allowed_unstable_pairing, code = E0789)]
#[diag("`rustc_allowed_through_unstable_modules` attribute must be paired with a `stable` attribute", code = E0789)]
pub(crate) struct RustcAllowedUnstablePairing {
#[primary_span]
pub span: Span,
}
#[derive(Diagnostic)]
#[diag(attr_parsing_deprecated_item_suggestion)]
#[diag("suggestions on deprecated items are unstable")]
pub(crate) struct DeprecatedItemSuggestion {
#[primary_span]
pub span: Span,
#[help]
#[help("add `#![feature(deprecated_suggestion)]` to the crate root")]
pub is_nightly: bool,
#[note]
#[note("see #94785 for more details")]
pub details: (),
}
#[derive(Diagnostic)]
#[diag(attr_parsing_expected_single_version_literal)]
#[diag("expected single version literal")]
pub(crate) struct ExpectedSingleVersionLiteral {
#[primary_span]
pub span: Span,
}
#[derive(Diagnostic)]
#[diag(attr_parsing_expected_version_literal)]
#[diag("expected a version literal")]
pub(crate) struct ExpectedVersionLiteral {
#[primary_span]
pub span: Span,
}
#[derive(Diagnostic)]
#[diag(attr_parsing_expects_feature_list)]
#[diag("`{$name}` expects a list of feature names")]
pub(crate) struct ExpectsFeatureList {
#[primary_span]
pub span: Span,
@ -348,7 +350,7 @@ pub(crate) struct ExpectsFeatureList {
}
#[derive(Diagnostic)]
#[diag(attr_parsing_expects_features)]
#[diag("`{$name}` expects feature names")]
pub(crate) struct ExpectsFeatures {
#[primary_span]
pub span: Span,
@ -357,21 +359,21 @@ pub(crate) struct ExpectsFeatures {
}
#[derive(Diagnostic)]
#[diag(attr_parsing_invalid_since)]
#[diag("'since' must be a Rust version number, such as \"1.31.0\"")]
pub(crate) struct InvalidSince {
#[primary_span]
pub span: Span,
}
#[derive(Diagnostic)]
#[diag(attr_parsing_soft_no_args)]
#[diag("`soft` should not have any arguments")]
pub(crate) struct SoftNoArgs {
#[primary_span]
pub span: Span,
}
#[derive(Diagnostic)]
#[diag(attr_parsing_unknown_version_literal)]
#[diag("unknown version literal format, assuming it refers to a future version")]
pub(crate) struct UnknownVersionLiteral {
#[primary_span]
pub span: Span,
@ -379,78 +381,83 @@ pub(crate) struct UnknownVersionLiteral {
// FIXME(jdonszelmann) duplicated from `rustc_passes`, remove once `check_attr` is integrated.
#[derive(Diagnostic)]
#[diag(attr_parsing_unused_multiple)]
#[diag("multiple `{$name}` attributes")]
pub(crate) struct UnusedMultiple {
#[primary_span]
#[suggestion(code = "", applicability = "machine-applicable")]
#[suggestion("remove this attribute", code = "", applicability = "machine-applicable")]
pub this: Span,
#[note]
#[note("attribute also specified here")]
pub other: Span,
pub name: Symbol,
}
#[derive(Diagnostic)]
#[diag(attr_parsing_null_on_export, code = E0648)]
#[diag("`export_name` may not contain null characters", code = E0648)]
pub(crate) struct NullOnExport {
#[primary_span]
pub span: Span,
}
#[derive(Diagnostic)]
#[diag(attr_parsing_null_on_link_section, code = E0648)]
#[diag("`link_section` may not contain null characters", code = E0648)]
pub(crate) struct NullOnLinkSection {
#[primary_span]
pub span: Span,
}
#[derive(Diagnostic)]
#[diag(attr_parsing_null_on_objc_class)]
#[diag("`objc::class!` may not contain null characters")]
pub(crate) struct NullOnObjcClass {
#[primary_span]
pub span: Span,
}
#[derive(Diagnostic)]
#[diag(attr_parsing_null_on_objc_selector)]
#[diag("`objc::selector!` may not contain null characters")]
pub(crate) struct NullOnObjcSelector {
#[primary_span]
pub span: Span,
}
#[derive(Diagnostic)]
#[diag(attr_parsing_objc_class_expected_string_literal)]
#[diag("`objc::class!` expected a string literal")]
pub(crate) struct ObjcClassExpectedStringLiteral {
#[primary_span]
pub span: Span,
}
#[derive(Diagnostic)]
#[diag(attr_parsing_objc_selector_expected_string_literal)]
#[diag("`objc::selector!` expected a string literal")]
pub(crate) struct ObjcSelectorExpectedStringLiteral {
#[primary_span]
pub span: Span,
}
#[derive(Diagnostic)]
#[diag(attr_parsing_stability_outside_std, code = E0734)]
#[diag("stability attributes may not be used outside of the standard library", code = E0734)]
pub(crate) struct StabilityOutsideStd {
#[primary_span]
pub span: Span,
}
#[derive(Diagnostic)]
#[diag(attr_parsing_empty_confusables)]
#[diag("expected at least one confusable name")]
pub(crate) struct EmptyConfusables {
#[primary_span]
pub span: Span,
}
#[derive(Diagnostic)]
#[help]
#[diag(attr_parsing_invalid_target)]
#[help("`#[{$name}]` can {$only}be applied to {$applied}")]
#[diag("`#[{$name}]` attribute cannot be used on {$target}")]
pub(crate) struct InvalidTarget {
#[primary_span]
#[suggestion(code = "", applicability = "machine-applicable", style = "tool-only")]
#[suggestion(
"remove the attribute",
code = "",
applicability = "machine-applicable",
style = "tool-only"
)]
pub span: Span,
pub name: AttrPath,
pub target: &'static str,
@ -459,7 +466,7 @@ pub(crate) struct InvalidTarget {
}
#[derive(Diagnostic)]
#[diag(attr_parsing_invalid_alignment_value, code = E0589)]
#[diag("invalid alignment value: {$error_part}", code = E0589)]
pub(crate) struct InvalidAlignmentValue {
#[primary_span]
pub span: Span,
@ -467,43 +474,49 @@ pub(crate) struct InvalidAlignmentValue {
}
#[derive(Diagnostic)]
#[diag(attr_parsing_repr_ident, code = E0565)]
#[diag("meta item in `repr` must be an identifier", code = E0565)]
pub(crate) struct ReprIdent {
#[primary_span]
pub span: Span,
}
#[derive(Diagnostic)]
#[diag(attr_parsing_unrecognized_repr_hint, code = E0552)]
#[help]
#[note]
#[diag("unrecognized representation hint", code = E0552)]
#[help(
"valid reprs are `Rust` (default), `C`, `align`, `packed`, `transparent`, `simd`, `i8`, `u8`, `i16`, `u16`, `i32`, `u32`, `i64`, `u64`, `i128`, `u128`, `isize`, `usize`"
)]
#[note(
"for more information, visit <https://doc.rust-lang.org/reference/type-layout.html?highlight=repr#representations>"
)]
pub(crate) struct UnrecognizedReprHint {
#[primary_span]
pub span: Span,
}
#[derive(Diagnostic)]
#[diag(attr_parsing_unstable_feature_bound_incompatible_stability)]
#[help]
#[diag("item annotated with `#[unstable_feature_bound]` should not be stable")]
#[help(
"if this item is meant to be stable, do not use any functions annotated with `#[unstable_feature_bound]`. Otherwise, mark this item as unstable with `#[unstable]`"
)]
pub(crate) struct UnstableFeatureBoundIncompatibleStability {
#[primary_span]
pub span: Span,
}
#[derive(Diagnostic)]
#[diag(attr_parsing_naked_functions_incompatible_attribute, code = E0736)]
#[diag("attribute incompatible with `#[unsafe(naked)]`", code = E0736)]
pub(crate) struct NakedFunctionIncompatibleAttribute {
#[primary_span]
#[label]
#[label("the `{$attr}` attribute is incompatible with `#[unsafe(naked)]`")]
pub span: Span,
#[label(attr_parsing_naked_attribute)]
#[label("function marked with `#[unsafe(naked)]` here")]
pub naked_span: Span,
pub attr: String,
}
#[derive(Diagnostic)]
#[diag(attr_parsing_link_ordinal_out_of_range)]
#[note]
#[diag("ordinal value in `link_ordinal` is too large: `{$ordinal}`")]
#[note("the value may not exceed `u16::MAX`")]
pub(crate) struct LinkOrdinalOutOfRange {
#[primary_span]
pub span: Span,
@ -511,8 +524,8 @@ pub(crate) struct LinkOrdinalOutOfRange {
}
#[derive(Diagnostic)]
#[diag(attr_parsing_rustc_scalable_vector_count_out_of_range)]
#[note]
#[diag("element count in `rustc_scalable_vector` is too large: `{$n}`")]
#[note("the value may not exceed `u16::MAX`")]
pub(crate) struct RustcScalableVectorCountOutOfRange {
#[primary_span]
pub span: Span,
@ -524,6 +537,7 @@ pub(crate) enum AttributeParseErrorReason<'a> {
ExpectedStringLiteral {
byte_string: Option<Span>,
},
ExpectedFilenameLiteral,
ExpectedIntegerLiteral,
ExpectedIntegerLiteralInRange {
lower_bound: isize,
@ -586,7 +600,7 @@ impl<'a, G: EmissionGuarantee> Diagnostic<'a, G> for AttributeParseError<'_> {
if let Some(start_point_span) = byte_string {
diag.span_suggestion(
start_point_span,
fluent::attr_parsing_unsupported_literal_suggestion,
"consider removing the prefix",
"",
Applicability::MaybeIncorrect,
);
@ -597,6 +611,9 @@ impl<'a, G: EmissionGuarantee> Diagnostic<'a, G> for AttributeParseError<'_> {
diag.span_label(self.span, "expected a string literal here");
}
}
AttributeParseErrorReason::ExpectedFilenameLiteral => {
diag.span_label(self.span, "expected a filename string literal here");
}
AttributeParseErrorReason::ExpectedIntegerLiteral => {
diag.span_label(self.span, "expected an integer literal here");
}
@ -751,30 +768,27 @@ impl<'a, G: EmissionGuarantee> Diagnostic<'a, G> for AttributeParseError<'_> {
}
#[derive(Diagnostic)]
#[diag(attr_parsing_invalid_attr_unsafe)]
#[note]
#[diag("`{$name}` is not an unsafe attribute")]
#[note("extraneous unsafe is not allowed in attributes")]
pub(crate) struct InvalidAttrUnsafe {
#[primary_span]
#[label]
#[label("this is not an unsafe attribute")]
pub span: Span,
pub name: AttrPath,
}
#[derive(Diagnostic)]
#[diag(attr_parsing_unsafe_attr_outside_unsafe)]
#[diag("unsafe attribute used without unsafe")]
pub(crate) struct UnsafeAttrOutsideUnsafe {
#[primary_span]
#[label]
#[label("usage of unsafe attribute")]
pub span: Span,
#[subdiagnostic]
pub suggestion: Option<UnsafeAttrOutsideUnsafeSuggestion>,
}
#[derive(Subdiagnostic)]
#[multipart_suggestion(
attr_parsing_unsafe_attr_outside_unsafe_suggestion,
applicability = "machine-applicable"
)]
#[multipart_suggestion("wrap the attribute in `unsafe(...)`", applicability = "machine-applicable")]
pub(crate) struct UnsafeAttrOutsideUnsafeSuggestion {
#[suggestion_part(code = "unsafe(")]
pub left: Span,
@ -783,7 +797,7 @@ pub(crate) struct UnsafeAttrOutsideUnsafeSuggestion {
}
#[derive(Diagnostic)]
#[diag(attr_parsing_meta_bad_delim)]
#[diag("wrong meta list delimiters")]
pub(crate) struct MetaBadDelim {
#[primary_span]
pub span: Span,
@ -793,7 +807,7 @@ pub(crate) struct MetaBadDelim {
#[derive(Subdiagnostic)]
#[multipart_suggestion(
attr_parsing_meta_bad_delim_suggestion,
"the delimiters should be `(` and `)`",
applicability = "machine-applicable"
)]
pub(crate) struct MetaBadDelimSugg {
@ -804,7 +818,7 @@ pub(crate) struct MetaBadDelimSugg {
}
#[derive(Diagnostic)]
#[diag(attr_parsing_invalid_meta_item)]
#[diag("expected a literal (`1u8`, `1.0f32`, `\"string\"`, etc.) here, found {$descr}")]
pub(crate) struct InvalidMetaItem {
#[primary_span]
pub span: Span,
@ -813,12 +827,15 @@ pub(crate) struct InvalidMetaItem {
pub quote_ident_sugg: Option<InvalidMetaItemQuoteIdentSugg>,
#[subdiagnostic]
pub remove_neg_sugg: Option<InvalidMetaItemRemoveNegSugg>,
#[label]
#[label("{$descr}s are not allowed here")]
pub label: Option<Span>,
}
#[derive(Subdiagnostic)]
#[multipart_suggestion(attr_parsing_quote_ident_sugg, applicability = "machine-applicable")]
#[multipart_suggestion(
"surround the identifier with quotation marks to make it into a string literal",
applicability = "machine-applicable"
)]
pub(crate) struct InvalidMetaItemQuoteIdentSugg {
#[suggestion_part(code = "\"")]
pub before: Span,
@ -827,73 +844,80 @@ pub(crate) struct InvalidMetaItemQuoteIdentSugg {
}
#[derive(Subdiagnostic)]
#[multipart_suggestion(attr_parsing_remove_neg_sugg, applicability = "machine-applicable")]
#[multipart_suggestion(
"negative numbers are not literals, try removing the `-` sign",
applicability = "machine-applicable"
)]
pub(crate) struct InvalidMetaItemRemoveNegSugg {
#[suggestion_part(code = "")]
pub negative_sign: Span,
}
#[derive(Diagnostic)]
#[diag(attr_parsing_suffixed_literal_in_attribute)]
#[help]
#[diag("suffixed literals are not allowed in attributes")]
#[help(
"instead of using a suffixed literal (`1u8`, `1.0f32`, etc.), use an unsuffixed version (`1`, `1.0`, etc.)"
)]
pub(crate) struct SuffixedLiteralInAttribute {
#[primary_span]
pub span: Span,
}
#[derive(Diagnostic)]
#[diag(attr_parsing_empty_link_name, code = E0454)]
#[diag("link name must not be empty", code = E0454)]
pub(crate) struct EmptyLinkName {
#[primary_span]
#[label]
#[label("empty link name")]
pub span: Span,
}
#[derive(Diagnostic)]
#[diag(attr_parsing_link_framework_apple, code = E0455)]
#[diag("link kind `framework` is only supported on Apple targets", code = E0455)]
pub(crate) struct LinkFrameworkApple {
#[primary_span]
pub span: Span,
}
#[derive(Diagnostic)]
#[diag(attr_parsing_incompatible_wasm_link)]
#[diag("`wasm_import_module` is incompatible with other arguments in `#[link]` attributes")]
pub(crate) struct IncompatibleWasmLink {
#[primary_span]
pub span: Span,
}
#[derive(Diagnostic)]
#[diag(attr_parsing_link_requires_name, code = E0459)]
#[diag("`#[link]` attribute requires a `name = \"string\"` argument", code = E0459)]
pub(crate) struct LinkRequiresName {
#[primary_span]
#[label]
#[label("missing `name` argument")]
pub span: Span,
}
#[derive(Diagnostic)]
#[diag(attr_parsing_raw_dylib_no_nul)]
#[diag("link name must not contain NUL characters if link kind is `raw-dylib`")]
pub(crate) struct RawDylibNoNul {
#[primary_span]
pub span: Span,
}
#[derive(Diagnostic)]
#[diag(attr_parsing_raw_dylib_only_windows, code = E0455)]
#[diag("link kind `raw-dylib` is only supported on Windows targets", code = E0455)]
pub(crate) struct RawDylibOnlyWindows {
#[primary_span]
pub span: Span,
}
#[derive(Diagnostic)]
#[diag(attr_parsing_invalid_link_modifier)]
#[diag(
"invalid linking modifier syntax, expected '+' or '-' prefix before one of: bundle, verbatim, whole-archive, as-needed"
)]
pub(crate) struct InvalidLinkModifier {
#[primary_span]
pub span: Span,
}
#[derive(Diagnostic)]
#[diag(attr_parsing_multiple_modifiers)]
#[diag("multiple `{$modifier}` modifiers in a single `modifiers` argument")]
pub(crate) struct MultipleModifiers {
#[primary_span]
pub span: Span,
@ -901,52 +925,54 @@ pub(crate) struct MultipleModifiers {
}
#[derive(Diagnostic)]
#[diag(attr_parsing_import_name_type_x86)]
#[diag("import name type is only supported on x86")]
pub(crate) struct ImportNameTypeX86 {
#[primary_span]
pub span: Span,
}
#[derive(Diagnostic)]
#[diag(attr_parsing_bundle_needs_static)]
#[diag("linking modifier `bundle` is only compatible with `static` linking kind")]
pub(crate) struct BundleNeedsStatic {
#[primary_span]
pub span: Span,
}
#[derive(Diagnostic)]
#[diag(attr_parsing_whole_archive_needs_static)]
#[diag("linking modifier `whole-archive` is only compatible with `static` linking kind")]
pub(crate) struct WholeArchiveNeedsStatic {
#[primary_span]
pub span: Span,
}
#[derive(Diagnostic)]
#[diag(attr_parsing_as_needed_compatibility)]
#[diag(
"linking modifier `as-needed` is only compatible with `dylib`, `framework` and `raw-dylib` linking kinds"
)]
pub(crate) struct AsNeededCompatibility {
#[primary_span]
pub span: Span,
}
#[derive(Diagnostic)]
#[diag(attr_parsing_import_name_type_raw)]
#[diag("import name type can only be used with link kind `raw-dylib`")]
pub(crate) struct ImportNameTypeRaw {
#[primary_span]
pub span: Span,
}
#[derive(Diagnostic)]
#[diag(attr_parsing_limit_invalid)]
#[diag("`limit` must be a non-negative integer")]
pub(crate) struct LimitInvalid<'a> {
#[primary_span]
pub span: Span,
#[label]
#[label("{$error_str}")]
pub value_span: Span,
pub error_str: &'a str,
}
#[derive(Diagnostic)]
#[diag(attr_parsing_cfg_attr_bad_delim)]
#[diag("wrong `cfg_attr` delimiters")]
pub(crate) struct CfgAttrBadDelim {
#[primary_span]
pub span: Span,
@ -955,14 +981,16 @@ pub(crate) struct CfgAttrBadDelim {
}
#[derive(Diagnostic)]
#[diag(attr_parsing_doc_alias_malformed)]
#[diag(
"doc alias attribute expects a string `#[doc(alias = \"a\")]` or a list of strings `#[doc(alias(\"a\", \"b\"))]`"
)]
pub(crate) struct DocAliasMalformed {
#[primary_span]
pub span: Span,
}
#[derive(Diagnostic)]
#[diag(attr_parsing_unsupported_instruction_set)]
#[diag("target `{$current_target}` does not support `#[instruction_set({$instruction_set}::*)]`")]
pub(crate) struct UnsupportedInstructionSet<'a> {
#[primary_span]
pub span: Span,

View file

@ -2309,12 +2309,12 @@ impl<'infcx, 'tcx> MirBorrowckCtxt<'_, 'infcx, 'tcx> {
tcx: TyCtxt<'hir>,
issue_span: Span,
expr_span: Span,
body_expr: Option<&'hir hir::Expr<'hir>>,
loop_bind: Option<&'hir Ident>,
loop_span: Option<Span>,
head_span: Option<Span>,
pat_span: Option<Span>,
head: Option<&'hir hir::Expr<'hir>>,
body_expr: Option<&'hir hir::Expr<'hir>> = None,
loop_bind: Option<&'hir Ident> = None,
loop_span: Option<Span> = None,
head_span: Option<Span> = None,
pat_span: Option<Span> = None,
head: Option<&'hir hir::Expr<'hir>> = None,
}
impl<'hir> Visitor<'hir> for ExprFinder<'hir> {
fn visit_expr(&mut self, ex: &'hir hir::Expr<'hir>) {
@ -2380,17 +2380,7 @@ impl<'infcx, 'tcx> MirBorrowckCtxt<'_, 'infcx, 'tcx> {
hir::intravisit::walk_expr(self, ex);
}
}
let mut finder = ExprFinder {
tcx,
expr_span: span,
issue_span,
loop_bind: None,
body_expr: None,
head_span: None,
loop_span: None,
pat_span: None,
head: None,
};
let mut finder = ExprFinder { tcx, expr_span: span, issue_span, .. };
finder.visit_expr(tcx.hir_body(body_id).value);
if let Some(body_expr) = finder.body_expr
@ -2625,13 +2615,13 @@ impl<'infcx, 'tcx> MirBorrowckCtxt<'_, 'infcx, 'tcx> {
struct ExpressionFinder<'tcx> {
capture_span: Span,
closure_change_spans: Vec<Span>,
closure_arg_span: Option<Span>,
in_closure: bool,
suggest_arg: String,
closure_change_spans: Vec<Span> = vec![],
closure_arg_span: Option<Span> = None,
in_closure: bool = false,
suggest_arg: String = String::new(),
tcx: TyCtxt<'tcx>,
closure_local_id: Option<hir::HirId>,
closure_call_changes: Vec<(Span, String)>,
closure_local_id: Option<hir::HirId> = None,
closure_call_changes: Vec<(Span, String)> = vec![],
}
impl<'hir> Visitor<'hir> for ExpressionFinder<'hir> {
fn visit_expr(&mut self, e: &'hir hir::Expr<'hir>) {
@ -2712,16 +2702,8 @@ impl<'infcx, 'tcx> MirBorrowckCtxt<'_, 'infcx, 'tcx> {
}) = self.infcx.tcx.hir_node(self.mir_hir_id())
&& let hir::Node::Expr(expr) = self.infcx.tcx.hir_node(body_id.hir_id)
{
let mut finder = ExpressionFinder {
capture_span: *capture_kind_span,
closure_change_spans: vec![],
closure_arg_span: None,
in_closure: false,
suggest_arg: String::new(),
closure_local_id: None,
closure_call_changes: vec![],
tcx: self.infcx.tcx,
};
let mut finder =
ExpressionFinder { capture_span: *capture_kind_span, tcx: self.infcx.tcx, .. };
finder.visit_expr(expr);
if finder.closure_change_spans.is_empty() || finder.closure_call_changes.is_empty() {

View file

@ -4,6 +4,7 @@
#![allow(internal_features)]
#![feature(assert_matches)]
#![feature(box_patterns)]
#![feature(default_field_values)]
#![feature(file_buffered)]
#![feature(if_let_guard)]
#![feature(negative_impls)]

View file

@ -13,6 +13,7 @@ use rustc_expand::base::{DummyResult, ExpandResult, ExtCtxt, MacEager, MacroExpa
use rustc_hir::attrs::CfgEntry;
use rustc_hir::{AttrPath, Target};
use rustc_parse::exp;
use rustc_parse::parser::Recovery;
use rustc_span::{ErrorGuaranteed, Span, sym};
use crate::errors;
@ -42,7 +43,7 @@ fn parse_cfg(cx: &ExtCtxt<'_>, span: Span, tts: TokenStream) -> Result<CfgEntry,
let meta = MetaItemOrLitParser::parse_single(
&mut parser,
ShouldEmit::ErrorsAndLints { recover: true },
ShouldEmit::ErrorsAndLints { recovery: Recovery::Allowed },
)
.map_err(|diag| diag.emit())?;
let cfg = AttributeParser::parse_single_args(
@ -58,7 +59,7 @@ fn parse_cfg(cx: &ExtCtxt<'_>, span: Span, tts: TokenStream) -> Result<CfgEntry,
// Doesn't matter what the target actually is here.
Target::Crate,
Some(cx.ecfg.features),
ShouldEmit::ErrorsAndLints { recover: true },
ShouldEmit::ErrorsAndLints { recovery: Recovery::Allowed },
&meta,
parse_cfg_entry,
&CFG_TEMPLATE,

View file

@ -28,7 +28,6 @@ rustc_lint_defs = { path = "../rustc_lint_defs" }
rustc_macros = { path = "../rustc_macros" }
rustc_metadata = { path = "../rustc_metadata" }
rustc_middle = { path = "../rustc_middle" }
rustc_query_system = { path = "../rustc_query_system" }
rustc_serialize = { path = "../rustc_serialize" }
rustc_session = { path = "../rustc_session" }
rustc_span = { path = "../rustc_span" }

View file

@ -24,7 +24,7 @@ use rustc_data_structures::unord::UnordMap;
use rustc_hir::CRATE_HIR_ID;
use rustc_hir::attrs::{CfgEntry, NativeLibKind, WindowsSubsystemKind};
use rustc_hir::def_id::CrateNum;
use rustc_macros::{Decodable, Encodable, HashStable};
use rustc_macros::{Decodable, Encodable};
use rustc_metadata::EncodedMetadata;
use rustc_middle::dep_graph::WorkProduct;
use rustc_middle::lint::LevelAndSource;
@ -175,7 +175,12 @@ bitflags::bitflags! {
}
}
#[derive(Clone, Debug, Encodable, Decodable, HashStable)]
// This is the same as `rustc_session::cstore::NativeLib`, except:
// - (important) the `foreign_module` field is missing, because it contains a `DefId`, which can't
// be encoded with `FileEncoder`.
// - (less important) the `verbatim` field is a `bool` rather than an `Option<bool>`, because here
// we can treat `false` and `absent` the same.
#[derive(Clone, Debug, Encodable, Decodable)]
pub struct NativeLib {
pub kind: NativeLibKind,
pub name: Symbol,

View file

@ -29,8 +29,6 @@ struct Slot<V> {
struct SlotIndex {
// the index of the bucket in VecCache (0 to 20)
bucket_idx: usize,
// number of entries in that bucket
entries: usize,
// the index of the slot within the bucket
index_in_bucket: usize,
}
@ -39,12 +37,12 @@ struct SlotIndex {
// compile-time. Visiting all powers of two is enough to hit all the buckets.
//
// We confirm counts are accurate in the slot_index_exhaustive test.
const ENTRIES_BY_BUCKET: [usize; 21] = {
let mut entries = [0; 21];
const ENTRIES_BY_BUCKET: [usize; BUCKETS] = {
let mut entries = [0; BUCKETS];
let mut key = 0;
loop {
let si = SlotIndex::from_index(key);
entries[si.bucket_idx] = si.entries;
entries[si.bucket_idx] = si.entries();
if key == 0 {
key = 1;
} else if key == (1 << 31) {
@ -56,7 +54,14 @@ const ENTRIES_BY_BUCKET: [usize; 21] = {
entries
};
const BUCKETS: usize = 21;
impl SlotIndex {
/// The total possible number of entries in the bucket
const fn entries(&self) -> usize {
if self.bucket_idx == 0 { 1 << 12 } else { 1 << (self.bucket_idx + 11) }
}
// This unpacks a flat u32 index into identifying which bucket it belongs to and the offset
// within that bucket. As noted in the VecCache docs, buckets double in size with each index.
// Typically that would mean 31 buckets (2^0 + 2^1 ... + 2^31 = u32::MAX - 1), but to reduce
@ -70,18 +75,13 @@ impl SlotIndex {
const fn from_index(idx: u32) -> Self {
const FIRST_BUCKET_SHIFT: usize = 12;
if idx < (1 << FIRST_BUCKET_SHIFT) {
return SlotIndex {
bucket_idx: 0,
entries: 1 << FIRST_BUCKET_SHIFT,
index_in_bucket: idx as usize,
};
return SlotIndex { bucket_idx: 0, index_in_bucket: idx as usize };
}
// We already ruled out idx 0, so this `ilog2` never panics (and the check optimizes away)
let bucket = idx.ilog2() as usize;
let entries = 1 << bucket;
SlotIndex {
bucket_idx: bucket - FIRST_BUCKET_SHIFT + 1,
entries,
index_in_bucket: idx as usize - entries,
}
}
@ -98,7 +98,7 @@ impl SlotIndex {
if ptr.is_null() {
return None;
}
assert!(self.index_in_bucket < self.entries);
debug_assert!(self.index_in_bucket < self.entries());
// SAFETY: `bucket` was allocated (so <= isize in total bytes) to hold `entries`, so this
// must be inbounds.
let slot = unsafe { ptr.add(self.index_in_bucket) };
@ -126,11 +126,12 @@ impl SlotIndex {
fn bucket_ptr<V>(&self, bucket: &AtomicPtr<Slot<V>>) -> *mut Slot<V> {
let ptr = bucket.load(Ordering::Acquire);
if ptr.is_null() { self.initialize_bucket(bucket) } else { ptr }
if ptr.is_null() { Self::initialize_bucket(bucket, self.bucket_idx) } else { ptr }
}
#[cold]
fn initialize_bucket<V>(&self, bucket: &AtomicPtr<Slot<V>>) -> *mut Slot<V> {
#[inline(never)]
fn initialize_bucket<V>(bucket: &AtomicPtr<Slot<V>>, bucket_idx: usize) -> *mut Slot<V> {
static LOCK: std::sync::Mutex<()> = std::sync::Mutex::new(());
// If we are initializing the bucket, then acquire a global lock.
@ -144,8 +145,8 @@ impl SlotIndex {
// OK, now under the allocator lock, if we're still null then it's definitely us that will
// initialize this bucket.
if ptr.is_null() {
let bucket_layout =
std::alloc::Layout::array::<Slot<V>>(self.entries as usize).unwrap();
let bucket_len = SlotIndex { bucket_idx, index_in_bucket: 0 }.entries();
let bucket_layout = std::alloc::Layout::array::<Slot<V>>(bucket_len).unwrap();
// This is more of a sanity check -- this code is very cold, so it's safe to pay a
// little extra cost here.
assert!(bucket_layout.size() > 0);
@ -171,7 +172,7 @@ impl SlotIndex {
let bucket = unsafe { buckets.get_unchecked(self.bucket_idx) };
let ptr = self.bucket_ptr(bucket);
assert!(self.index_in_bucket < self.entries);
debug_assert!(self.index_in_bucket < self.entries());
// SAFETY: `bucket` was allocated (so <= isize in total bytes) to hold `entries`, so this
// must be inbounds.
let slot = unsafe { ptr.add(self.index_in_bucket) };
@ -204,6 +205,31 @@ impl SlotIndex {
Err(_) => false,
}
}
/// Inserts into the map, given that the slot is unique, so it won't race with other threads.
#[inline]
unsafe fn put_unique<V>(&self, buckets: &[AtomicPtr<Slot<V>>; 21], value: V, extra: u32) {
// SAFETY: `bucket_idx` is ilog2(u32).saturating_sub(11), which is at most 21, i.e.,
// in-bounds of buckets.
let bucket = unsafe { buckets.get_unchecked(self.bucket_idx) };
let ptr = self.bucket_ptr(bucket);
debug_assert!(self.index_in_bucket < self.entries());
// SAFETY: `bucket` was allocated (so <= isize in total bytes) to hold `entries`, so this
// must be inbounds.
let slot = unsafe { ptr.add(self.index_in_bucket) };
// SAFETY: We known our slot is unique as a precondition of this function, so this can't race.
unsafe {
(&raw mut (*slot).value).write(value);
}
// SAFETY: initialized bucket has zeroed all memory within the bucket, so we are valid for
// AtomicU32 access.
let index_and_lock = unsafe { &(*slot).index_and_lock };
index_and_lock.store(extra.checked_add(2).unwrap(), Ordering::Release);
}
}
/// In-memory cache for queries whose keys are densely-numbered IDs
@ -229,11 +255,11 @@ pub struct VecCache<K: Idx, V, I> {
// Bucket 19: 1073741824
// Bucket 20: 2147483648
// The total number of entries if all buckets are initialized is u32::MAX-1.
buckets: [AtomicPtr<Slot<V>>; 21],
buckets: [AtomicPtr<Slot<V>>; BUCKETS],
// In the compiler's current usage these are only *read* during incremental and self-profiling.
// They are an optimization over iterating the full buckets array.
present: [AtomicPtr<Slot<()>>; 21],
present: [AtomicPtr<Slot<()>>; BUCKETS],
len: AtomicUsize,
key: PhantomData<(K, I)>,
@ -307,9 +333,11 @@ where
let slot_idx = SlotIndex::from_index(key);
if slot_idx.put(&self.buckets, value, index.index() as u32) {
let present_idx = self.len.fetch_add(1, Ordering::Relaxed);
let slot = SlotIndex::from_index(present_idx as u32);
// We should always be uniquely putting due to `len` fetch_add returning unique values.
assert!(slot.put(&self.present, (), key));
let slot = SlotIndex::from_index(u32::try_from(present_idx).unwrap());
// SAFETY: We should always be uniquely putting due to `len` fetch_add returning unique values.
// We can't get here if `len` overflows because `put` will not succeed u32::MAX + 1 times
// as it will run out of slots.
unsafe { slot.put_unique(&self.present, (), key) };
}
}
@ -331,6 +359,10 @@ where
}
}
}
pub fn len(&self) -> usize {
self.len.load(Ordering::Acquire)
}
}
#[cfg(test)]

View file

@ -68,6 +68,13 @@ fn slot_entries_table() {
);
}
#[test]
fn bucket_entries_matches() {
for i in 0..BUCKETS {
assert_eq!(SlotIndex { bucket_idx: i, index_in_bucket: 0 }.entries(), ENTRIES_BY_BUCKET[i]);
}
}
#[test]
#[cfg(not(miri))]
fn slot_index_exhaustive() {
@ -81,14 +88,18 @@ fn slot_index_exhaustive() {
let mut prev = slot_idx;
for idx in 1..=u32::MAX {
let slot_idx = SlotIndex::from_index(idx);
// SAFETY: Ensure indices don't go out of bounds of buckets.
assert!(slot_idx.index_in_bucket < slot_idx.entries());
if prev.bucket_idx == slot_idx.bucket_idx {
assert_eq!(prev.index_in_bucket + 1, slot_idx.index_in_bucket);
} else {
assert_eq!(slot_idx.index_in_bucket, 0);
}
assert_eq!(buckets[slot_idx.bucket_idx], slot_idx.entries as u32);
assert_eq!(ENTRIES_BY_BUCKET[slot_idx.bucket_idx], slot_idx.entries, "{}", idx);
assert_eq!(buckets[slot_idx.bucket_idx], slot_idx.entries() as u32);
assert_eq!(ENTRIES_BY_BUCKET[slot_idx.bucket_idx], slot_idx.entries(), "{}", idx);
prev = slot_idx;
}

View file

@ -12,7 +12,6 @@ rustc_ast = { path = "../rustc_ast" }
rustc_ast_lowering = { path = "../rustc_ast_lowering" }
rustc_ast_passes = { path = "../rustc_ast_passes" }
rustc_ast_pretty = { path = "../rustc_ast_pretty" }
rustc_attr_parsing = { path = "../rustc_attr_parsing" }
rustc_borrowck = { path = "../rustc_borrowck" }
rustc_builtin_macros = { path = "../rustc_builtin_macros" }
rustc_codegen_ssa = { path = "../rustc_codegen_ssa" }
@ -21,13 +20,11 @@ rustc_data_structures = { path = "../rustc_data_structures" }
rustc_errors = { path = "../rustc_errors" }
rustc_expand = { path = "../rustc_expand" }
rustc_feature = { path = "../rustc_feature" }
rustc_fluent_macro = { path = "../rustc_fluent_macro" }
rustc_hir_analysis = { path = "../rustc_hir_analysis" }
rustc_hir_pretty = { path = "../rustc_hir_pretty" }
rustc_hir_typeck = { path = "../rustc_hir_typeck" }
rustc_incremental = { path = "../rustc_incremental" }
rustc_index = { path = "../rustc_index" }
rustc_infer = { path = "../rustc_infer" }
rustc_interface = { path = "../rustc_interface" }
rustc_lexer = { path = "../rustc_lexer" }
rustc_lint = { path = "../rustc_lint" }
@ -44,7 +41,6 @@ rustc_passes = { path = "../rustc_passes" }
rustc_pattern_analysis = { path = "../rustc_pattern_analysis" }
rustc_privacy = { path = "../rustc_privacy" }
rustc_public = { path = "../rustc_public", features = ["rustc_internal"] }
rustc_query_system = { path = "../rustc_query_system" }
rustc_resolve = { path = "../rustc_resolve" }
rustc_session = { path = "../rustc_session" }
rustc_span = { path = "../rustc_span" }

View file

@ -1,29 +0,0 @@
driver_impl_cant_emit_mir = could not emit MIR: {$error}
driver_impl_ice = the compiler unexpectedly panicked. this is a bug.
driver_impl_ice_bug_report = we would appreciate a bug report: {$bug_report_url}
driver_impl_ice_bug_report_internal_feature = using internal features is not supported and expected to cause internal compiler errors when used incorrectly
driver_impl_ice_bug_report_update_note = please make sure that you have updated to the latest nightly
driver_impl_ice_exclude_cargo_defaults = some of the compiler flags provided by cargo are hidden
driver_impl_ice_flags = compiler flags: {$flags}
driver_impl_ice_path = please attach the file at `{$path}` to your bug report
driver_impl_ice_path_error = the ICE couldn't be written to `{$path}`: {$error}
driver_impl_ice_path_error_env = the environment variable `RUSTC_ICE` is set to `{$env_var}`
driver_impl_ice_version = rustc {$version} running on {$triple}
driver_impl_rlink_corrupt_file = corrupt metadata encountered in `{$file}`
driver_impl_rlink_empty_version_number = the input does not contain version number
driver_impl_rlink_encoding_version_mismatch = .rlink file was produced with encoding version `{$version_array}`, but the current version is `{$rlink_version}`
driver_impl_rlink_no_a_file = rlink must be a file
driver_impl_rlink_rustc_version_mismatch = .rlink file was produced by rustc version `{$rustc_version}`, but the current version is `{$current_version}`
driver_impl_rlink_unable_to_read = failed to read rlink file: `{$err}`
driver_impl_rlink_wrong_file_type = the input does not look like a .rlink file
driver_impl_unstable_feature_usage = cannot dump feature usage metrics: {$error}

View file

@ -108,18 +108,14 @@ use crate::session_diagnostics::{
RLinkWrongFileType, RlinkCorruptFile, RlinkNotAFile, RlinkUnableToRead, UnstableFeatureUsage,
};
rustc_fluent_macro::fluent_messages! { "../messages.ftl" }
pub fn default_translator() -> Translator {
Translator::with_fallback_bundle(DEFAULT_LOCALE_RESOURCES.to_vec(), false)
}
pub static DEFAULT_LOCALE_RESOURCES: &[&str] = &[
// tidy-alphabetical-start
crate::DEFAULT_LOCALE_RESOURCE,
rustc_ast_lowering::DEFAULT_LOCALE_RESOURCE,
rustc_ast_passes::DEFAULT_LOCALE_RESOURCE,
rustc_attr_parsing::DEFAULT_LOCALE_RESOURCE,
rustc_borrowck::DEFAULT_LOCALE_RESOURCE,
rustc_builtin_macros::DEFAULT_LOCALE_RESOURCE,
rustc_codegen_ssa::DEFAULT_LOCALE_RESOURCE,
@ -129,7 +125,6 @@ pub static DEFAULT_LOCALE_RESOURCES: &[&str] = &[
rustc_hir_analysis::DEFAULT_LOCALE_RESOURCE,
rustc_hir_typeck::DEFAULT_LOCALE_RESOURCE,
rustc_incremental::DEFAULT_LOCALE_RESOURCE,
rustc_infer::DEFAULT_LOCALE_RESOURCE,
rustc_interface::DEFAULT_LOCALE_RESOURCE,
rustc_lint::DEFAULT_LOCALE_RESOURCE,
rustc_metadata::DEFAULT_LOCALE_RESOURCE,
@ -142,7 +137,6 @@ pub static DEFAULT_LOCALE_RESOURCES: &[&str] = &[
rustc_passes::DEFAULT_LOCALE_RESOURCE,
rustc_pattern_analysis::DEFAULT_LOCALE_RESOURCE,
rustc_privacy::DEFAULT_LOCALE_RESOURCE,
rustc_query_system::DEFAULT_LOCALE_RESOURCE,
rustc_resolve::DEFAULT_LOCALE_RESOURCE,
rustc_session::DEFAULT_LOCALE_RESOURCE,
rustc_trait_selection::DEFAULT_LOCALE_RESOURCE,
@ -491,10 +485,18 @@ fn handle_explain(early_dcx: &EarlyDiagCtxt, registry: Registry, code: &str, col
}
text.push('\n');
}
// If output is a terminal, use a pager to display the content.
if io::stdout().is_terminal() {
show_md_content_with_pager(&text, color);
} else {
safe_print!("{text}");
// Otherwise, if the user has requested colored output
// print the content in color, else print the md content.
if color == ColorConfig::Always {
show_colored_md_content(&text);
} else {
safe_print!("{text}");
}
}
} else {
early_dcx.early_fatal(format!("{code} is not a valid error code"));
@ -564,6 +566,33 @@ fn show_md_content_with_pager(content: &str, color: ColorConfig) {
safe_print!("{content}");
}
/// Prints the markdown content with colored output.
///
/// This function is used when the output is not a terminal,
/// but the user has requested colored output with `--color=always`.
fn show_colored_md_content(content: &str) {
// Try to prettify the raw markdown text.
let mut pretty_data = {
let mdstream = markdown::MdStream::parse_str(content);
let bufwtr = markdown::create_stdout_bufwtr();
let mut mdbuf = Vec::new();
if mdstream.write_anstream_buf(&mut mdbuf, Some(&highlighter::highlight)).is_ok() {
Some((bufwtr, mdbuf))
} else {
None
}
};
if let Some((bufwtr, mdbuf)) = &mut pretty_data
&& bufwtr.write_all(&mdbuf).is_ok()
{
return;
}
// Everything failed. Print the raw markdown text.
safe_print!("{content}");
}
fn process_rlink(sess: &Session, compiler: &interface::Compiler) {
assert!(sess.opts.unstable_opts.link_only);
let dcx = sess.dcx();

View file

@ -3,82 +3,88 @@ use std::error::Error;
use rustc_macros::{Diagnostic, Subdiagnostic};
#[derive(Diagnostic)]
#[diag(driver_impl_cant_emit_mir)]
#[diag("could not emit MIR: {$error}")]
pub struct CantEmitMIR {
pub error: std::io::Error,
}
#[derive(Diagnostic)]
#[diag(driver_impl_rlink_unable_to_read)]
#[diag("failed to read rlink file: `{$err}`")]
pub(crate) struct RlinkUnableToRead {
pub err: std::io::Error,
}
#[derive(Diagnostic)]
#[diag(driver_impl_rlink_wrong_file_type)]
#[diag("the input does not look like a .rlink file")]
pub(crate) struct RLinkWrongFileType;
#[derive(Diagnostic)]
#[diag(driver_impl_rlink_empty_version_number)]
#[diag("the input does not contain version number")]
pub(crate) struct RLinkEmptyVersionNumber;
#[derive(Diagnostic)]
#[diag(driver_impl_rlink_encoding_version_mismatch)]
#[diag(
".rlink file was produced with encoding version `{$version_array}`, but the current version is `{$rlink_version}`"
)]
pub(crate) struct RLinkEncodingVersionMismatch {
pub version_array: String,
pub rlink_version: u32,
}
#[derive(Diagnostic)]
#[diag(driver_impl_rlink_rustc_version_mismatch)]
#[diag(
".rlink file was produced by rustc version `{$rustc_version}`, but the current version is `{$current_version}`"
)]
pub(crate) struct RLinkRustcVersionMismatch<'a> {
pub rustc_version: String,
pub current_version: &'a str,
}
#[derive(Diagnostic)]
#[diag(driver_impl_rlink_no_a_file)]
#[diag("rlink must be a file")]
pub(crate) struct RlinkNotAFile;
#[derive(Diagnostic)]
#[diag(driver_impl_rlink_corrupt_file)]
#[diag("corrupt metadata encountered in `{$file}`")]
pub(crate) struct RlinkCorruptFile<'a> {
pub file: &'a std::path::Path,
}
#[derive(Diagnostic)]
#[diag(driver_impl_ice)]
#[diag("the compiler unexpectedly panicked. this is a bug.")]
pub(crate) struct Ice;
#[derive(Diagnostic)]
#[diag(driver_impl_ice_bug_report)]
#[diag("we would appreciate a bug report: {$bug_report_url}")]
pub(crate) struct IceBugReport<'a> {
pub bug_report_url: &'a str,
}
#[derive(Diagnostic)]
#[diag(driver_impl_ice_bug_report_update_note)]
#[diag("please make sure that you have updated to the latest nightly")]
pub(crate) struct UpdateNightlyNote;
#[derive(Diagnostic)]
#[diag(driver_impl_ice_bug_report_internal_feature)]
#[diag(
"using internal features is not supported and expected to cause internal compiler errors when used incorrectly"
)]
pub(crate) struct IceBugReportInternalFeature;
#[derive(Diagnostic)]
#[diag(driver_impl_ice_version)]
#[diag("rustc {$version} running on {$triple}")]
pub(crate) struct IceVersion<'a> {
pub version: &'a str,
pub triple: &'a str,
}
#[derive(Diagnostic)]
#[diag(driver_impl_ice_path)]
#[diag("please attach the file at `{$path}` to your bug report")]
pub(crate) struct IcePath {
pub path: std::path::PathBuf,
}
#[derive(Diagnostic)]
#[diag(driver_impl_ice_path_error)]
#[diag("the ICE couldn't be written to `{$path}`: {$error}")]
pub(crate) struct IcePathError {
pub path: std::path::PathBuf,
pub error: String,
@ -87,23 +93,23 @@ pub(crate) struct IcePathError {
}
#[derive(Subdiagnostic)]
#[note(driver_impl_ice_path_error_env)]
#[note("the environment variable `RUSTC_ICE` is set to `{$env_var}`")]
pub(crate) struct IcePathErrorEnv {
pub env_var: std::path::PathBuf,
}
#[derive(Diagnostic)]
#[diag(driver_impl_ice_flags)]
#[diag("compiler flags: {$flags}")]
pub(crate) struct IceFlags {
pub flags: String,
}
#[derive(Diagnostic)]
#[diag(driver_impl_ice_exclude_cargo_defaults)]
#[diag("some of the compiler flags provided by cargo are hidden")]
pub(crate) struct IceExcludeCargoDefaults;
#[derive(Diagnostic)]
#[diag(driver_impl_unstable_feature_usage)]
#[diag("cannot dump feature usage metrics: {$error}")]
pub(crate) struct UnstableFeatureUsage {
pub error: Box<dyn Error>,
}

View file

@ -247,6 +247,9 @@ pub enum SubdiagMessage {
/// Identifier of a Fluent message. Instances of this variant are generated by the
/// `Subdiagnostic` derive.
FluentIdentifier(FluentId),
/// An inline Fluent message. Instances of this variant are generated by the
/// `Subdiagnostic` derive.
Inline(Cow<'static, str>),
/// Attribute of a Fluent message. Needs to be combined with a Fluent identifier to produce an
/// actual translated message. Instances of this variant are generated by the `fluent_messages`
/// macro.
@ -291,6 +294,8 @@ pub enum DiagMessage {
/// <https://projectfluent.org/fluent/guide/hello.html>
/// <https://projectfluent.org/fluent/guide/attributes.html>
FluentIdentifier(FluentId, Option<FluentId>),
/// An inline Fluent message, containing the to be translated diagnostic message.
Inline(Cow<'static, str>),
}
impl DiagMessage {
@ -305,21 +310,22 @@ impl DiagMessage {
SubdiagMessage::FluentIdentifier(id) => {
return DiagMessage::FluentIdentifier(id, None);
}
SubdiagMessage::Inline(s) => return DiagMessage::Inline(s),
SubdiagMessage::FluentAttr(attr) => attr,
};
match self {
DiagMessage::Str(s) => DiagMessage::Str(s.clone()),
DiagMessage::FluentIdentifier(id, _) => {
DiagMessage::FluentIdentifier(id.clone(), Some(attr))
}
_ => panic!("Tried to add a subdiagnostic to a message without a fluent identifier"),
}
}
pub fn as_str(&self) -> Option<&str> {
match self {
DiagMessage::Str(s) => Some(s),
DiagMessage::FluentIdentifier(_, _) => None,
DiagMessage::FluentIdentifier(_, _) | DiagMessage::Inline(_) => None,
}
}
}
@ -353,6 +359,7 @@ impl From<DiagMessage> for SubdiagMessage {
// There isn't really a sensible behaviour for this because it loses information but
// this is the most sensible of the behaviours.
DiagMessage::FluentIdentifier(_, Some(attr)) => SubdiagMessage::FluentAttr(attr),
DiagMessage::Inline(s) => SubdiagMessage::Inline(s),
}
}
}

View file

@ -3,11 +3,13 @@ use std::env;
use std::error::Report;
use std::sync::Arc;
use rustc_error_messages::langid;
pub use rustc_error_messages::{FluentArgs, LazyFallbackBundle};
use tracing::{debug, trace};
use crate::error::{TranslateError, TranslateErrorKind};
use crate::{DiagArg, DiagMessage, FluentBundle, Style};
use crate::fluent_bundle::FluentResource;
use crate::{DiagArg, DiagMessage, FluentBundle, Style, fluent_bundle};
/// Convert diagnostic arguments (a rustc internal type that exists to implement
/// `Encodable`/`Decodable`) into `FluentArgs` which is necessary to perform translation.
@ -79,6 +81,28 @@ impl Translator {
return Ok(Cow::Borrowed(msg));
}
DiagMessage::FluentIdentifier(identifier, attr) => (identifier, attr),
// This translates an inline fluent diagnostic message
// It does this by creating a new `FluentBundle` with only one message,
// and then translating using this bundle.
DiagMessage::Inline(msg) => {
const GENERATED_MSG_ID: &str = "generated_msg";
let resource =
FluentResource::try_new(format!("{GENERATED_MSG_ID} = {msg}\n")).unwrap();
let mut bundle = fluent_bundle::FluentBundle::new(vec![langid!("en-US")]);
bundle.set_use_isolating(false);
bundle.add_resource(resource).unwrap();
let message = bundle.get_message(GENERATED_MSG_ID).unwrap();
let value = message.value().unwrap();
let mut errs = vec![];
let translated = bundle.format_pattern(value, Some(args), &mut errs).to_string();
debug!(?translated, ?errs);
return if errs.is_empty() {
Ok(Cow::Owned(translated))
} else {
Err(TranslateError::fluent(&Cow::Borrowed(GENERATED_MSG_ID), args, errs))
};
}
};
let translate_with_bundle =
|bundle: &'a FluentBundle| -> Result<Cow<'_, str>, TranslateError<'_>> {
@ -142,3 +166,14 @@ impl Translator {
}
}
}
/// This macro creates a translatable `DiagMessage` from a literal string.
/// It should be used in places where a translatable message is needed, but struct diagnostics are undesired.
///
/// This is a macro because in the future we may want to globally register these messages.
#[macro_export]
macro_rules! inline_fluent {
($inline: literal) => {
rustc_errors::DiagMessage::Inline(std::borrow::Cow::Borrowed($inline))
};
}

View file

@ -20,6 +20,7 @@ use rustc_feature::{
UNSTABLE_LANG_FEATURES,
};
use rustc_hir::Target;
use rustc_parse::parser::Recovery;
use rustc_session::Session;
use rustc_session::parse::feature_err;
use rustc_span::{STDLIB_STABLE_CRATES, Span, Symbol, sym};
@ -395,7 +396,9 @@ impl<'a> StripUnconfigured<'a> {
fn in_cfg(&self, attrs: &[Attribute]) -> bool {
attrs.iter().all(|attr| {
!is_cfg(attr)
|| self.cfg_true(attr, ShouldEmit::ErrorsAndLints { recover: true }).as_bool()
|| self
.cfg_true(attr, ShouldEmit::ErrorsAndLints { recovery: Recovery::Allowed })
.as_bool()
})
}

View file

@ -26,7 +26,7 @@ use rustc_hir::def::MacroKinds;
use rustc_hir::limit::Limit;
use rustc_parse::parser::{
AllowConstBlockItems, AttemptLocalParseRecovery, CommaRecoveryMode, ForceCollect, Parser,
RecoverColon, RecoverComma, token_descr,
RecoverColon, RecoverComma, Recovery, token_descr,
};
use rustc_session::Session;
use rustc_session::lint::builtin::{UNUSED_ATTRIBUTES, UNUSED_DOC_COMMENTS};
@ -508,6 +508,7 @@ impl<'a, 'b> MacroExpander<'a, 'b> {
// Unresolved macros produce dummy outputs as a recovery measure.
invocations.reverse();
let mut expanded_fragments = Vec::new();
let mut expanded_fragments_len = 0;
let mut undetermined_invocations = Vec::new();
let (mut progress, mut force) = (false, !self.monotonic);
loop {
@ -602,6 +603,7 @@ impl<'a, 'b> MacroExpander<'a, 'b> {
expanded_fragments.push(Vec::new());
}
expanded_fragments[depth - 1].push((expn_id, expanded_fragment));
expanded_fragments_len += 1;
invocations.extend(derive_invocations.into_iter().rev());
}
ExpandResult::Retry(invoc) => {
@ -622,7 +624,7 @@ impl<'a, 'b> MacroExpander<'a, 'b> {
self.cx.force_mode = orig_force_mode;
// Finally incorporate all the expanded macros into the input AST fragment.
let mut placeholder_expander = PlaceholderExpander::default();
let mut placeholder_expander = PlaceholderExpander::with_capacity(expanded_fragments_len);
while let Some(expanded_fragments) = expanded_fragments.pop() {
for (expn_id, expanded_fragment) in expanded_fragments.into_iter().rev() {
placeholder_expander
@ -2170,7 +2172,7 @@ impl<'a, 'b> InvocationCollector<'a, 'b> {
call.span(),
self.cx.current_expansion.lint_node_id,
Some(self.cx.ecfg.features),
ShouldEmit::ErrorsAndLints { recover: true },
ShouldEmit::ErrorsAndLints { recovery: Recovery::Allowed },
);
let current_span = if let Some(sp) = span { sp.to(attr.span) } else { attr.span };
@ -2220,7 +2222,7 @@ impl<'a, 'b> InvocationCollector<'a, 'b> {
// Target doesn't matter for `cfg` parsing.
Target::Crate,
self.cfg().features,
ShouldEmit::ErrorsAndLints { recover: true },
ShouldEmit::ErrorsAndLints { recovery: Recovery::Allowed },
parse_cfg,
&CFG_TEMPLATE,
) else {

View file

@ -218,12 +218,17 @@ pub(crate) fn placeholder(
}
}
#[derive(Default)]
pub(crate) struct PlaceholderExpander {
expanded_fragments: FxHashMap<ast::NodeId, AstFragment>,
}
impl PlaceholderExpander {
pub(crate) fn with_capacity(capacity: usize) -> Self {
PlaceholderExpander {
expanded_fragments: FxHashMap::with_capacity_and_hasher(capacity, Default::default()),
}
}
pub(crate) fn add(&mut self, id: ast::NodeId, mut fragment: AstFragment) {
fragment.mut_visit_with(self);
self.expanded_fragments.insert(id, fragment);

View file

@ -699,6 +699,21 @@ pub enum RustcLayoutType {
Debug,
}
#[derive(Clone, Debug, HashStable_Generic, Encodable, Decodable, PrintAttribute, PartialEq, Eq)]
pub enum RustcMirKind {
PeekMaybeInit,
PeekMaybeUninit,
PeekLiveness,
StopAfterDataflow,
BorrowckGraphvizPostflow { path: PathBuf },
BorrowckGraphvizFormat { format: BorrowckGraphvizFormatKind },
}
#[derive(Clone, Debug, HashStable_Generic, Encodable, Decodable, PrintAttribute, PartialEq, Eq)]
pub enum BorrowckGraphvizFormatKind {
TwoPhase,
}
/// Represents parsed *built-in* inert attributes.
///
/// ## Overview
@ -1057,6 +1072,9 @@ pub enum AttributeKind {
/// Represents `#[rustc_has_incoherent_inherent_impls]`
RustcHasIncoherentInherentImpls,
/// Represents `#[rustc_hidden_type_of_opaques]`
RustcHiddenTypeOfOpaques,
/// Represents `#[rustc_layout]`
RustcLayout(ThinVec<RustcLayoutType>),
@ -1087,6 +1105,9 @@ pub enum AttributeKind {
/// Represents `#[rustc_main]`.
RustcMain,
/// Represents `#[rustc_mir]`.
RustcMir(ThinVec<RustcMirKind>),
/// Represents `#[rustc_must_implement_one_of]`
RustcMustImplementOneOf { attr_span: Span, fn_names: ThinVec<Ident> },
@ -1123,6 +1144,9 @@ pub enum AttributeKind {
/// Represents `#[rustc_pass_indirectly_in_non_rustic_abis]`
RustcPassIndirectlyInNonRusticAbis(Span),
/// Represents `#[rustc_preserve_ub_checks]`
RustcPreserveUbChecks,
/// Represents `#[rustc_pub_transparent]` (used by the `repr_transparent_external_private_fields` lint).
RustcPubTransparent(Span),

View file

@ -111,6 +111,7 @@ impl AttributeKind {
RustcDumpVtable(..) => No,
RustcDynIncompatibleTrait(..) => No,
RustcHasIncoherentInherentImpls => Yes,
RustcHiddenTypeOfOpaques => No,
RustcLayout(..) => No,
RustcLayoutScalarValidRangeEnd(..) => Yes,
RustcLayoutScalarValidRangeStart(..) => Yes,
@ -121,6 +122,7 @@ impl AttributeKind {
RustcLintUntrackedQueryInformation => Yes,
RustcMacroTransparency(..) => Yes,
RustcMain => No,
RustcMir(..) => Yes,
RustcMustImplementOneOf { .. } => No,
RustcNeverReturnsNullPointer => Yes,
RustcNoImplicitAutorefs => Yes,
@ -133,6 +135,7 @@ impl AttributeKind {
RustcParenSugar(..) => No,
RustcPassByValue(..) => Yes,
RustcPassIndirectlyInNonRusticAbis(..) => No,
RustcPreserveUbChecks => No,
RustcPubTransparent(..) => Yes,
RustcReallocator => No,
RustcScalableVector { .. } => Yes,

View file

@ -1,5 +1,6 @@
use std::num::NonZero;
use std::ops::Deref;
use std::path::PathBuf;
use rustc_abi::Align;
use rustc_ast::attr::data_structures::CfgEntry;
@ -96,7 +97,15 @@ impl<T: PrintAttribute> PrintAttribute for FxIndexMap<T, Span> {
p.word("]");
}
}
impl PrintAttribute for PathBuf {
fn should_render(&self) -> bool {
true
}
fn print_attribute(&self, p: &mut Printer) {
p.word(self.display().to_string());
}
}
macro_rules! print_skip {
($($t: ty),* $(,)?) => {$(
impl PrintAttribute for $t {

View file

@ -7,10 +7,9 @@ use rustc_middle::ty::{self, TyCtxt, TypeVisitableExt};
use rustc_span::sym;
pub(crate) fn opaque_hidden_types(tcx: TyCtxt<'_>) {
if !tcx.has_attr(CRATE_DEF_ID, sym::rustc_hidden_type_of_opaques) {
if !find_attr!(tcx.get_all_attrs(CRATE_DEF_ID), AttributeKind::RustcHiddenTypeOfOpaques) {
return;
}
for id in tcx.hir_crate_items(()).opaques() {
if let hir::OpaqueTyOrigin::FnReturn { parent: fn_def_id, .. }
| hir::OpaqueTyOrigin::AsyncFn { parent: fn_def_id, .. } =

View file

@ -51,12 +51,12 @@ pub(super) fn diagnostic_hir_wf_check<'tcx>(
struct HirWfCheck<'tcx> {
tcx: TyCtxt<'tcx>,
predicate: ty::Predicate<'tcx>,
cause: Option<ObligationCause<'tcx>>,
cause_depth: usize,
cause: Option<ObligationCause<'tcx>> = None,
cause_depth: usize = 0,
icx: ItemCtxt<'tcx>,
def_id: LocalDefId,
param_env: ty::ParamEnv<'tcx>,
depth: usize,
depth: usize = 0,
}
impl<'tcx> Visitor<'tcx> for HirWfCheck<'tcx> {
@ -124,16 +124,8 @@ pub(super) fn diagnostic_hir_wf_check<'tcx>(
}
}
let mut visitor = HirWfCheck {
tcx,
predicate,
cause: None,
cause_depth: 0,
icx,
def_id,
param_env: tcx.param_env(def_id.to_def_id()),
depth: 0,
};
let param_env = tcx.param_env(def_id.to_def_id());
let mut visitor = HirWfCheck { tcx, predicate, icx, def_id, param_env, .. };
// Get the starting `hir::Ty` using our `WellFormedLoc`.
// We will walk 'into' this type to try to find

View file

@ -57,6 +57,7 @@ This API is completely unstable and subject to change.
// tidy-alphabetical-start
#![feature(assert_matches)]
#![feature(default_field_values)]
#![feature(gen_blocks)]
#![feature(if_let_guard)]
#![feature(iter_intersperse)]

View file

@ -2647,7 +2647,15 @@ impl<'a, 'b, 'tcx> FnCallDiagCtxt<'a, 'b, 'tcx> {
// To suggest a multipart suggestion when encountering `foo(1, "")` where the def
// was `fn foo(())`.
let (_, expected_ty) = self.formal_and_expected_inputs[expected_idx];
suggestions.push((*arg_span, self.ty_to_snippet(expected_ty, expected_idx)));
// Check if the new suggestion would overlap with any existing suggestion.
// This can happen when we have both removal suggestions (which may include
// adjacent commas) and type replacement suggestions for the same span.
let dominated = suggestions
.iter()
.any(|(span, _)| span.contains(*arg_span) || arg_span.overlaps(*span));
if !dominated {
suggestions.push((*arg_span, self.ty_to_snippet(expected_ty, expected_idx)));
}
}
}
}

View file

@ -200,6 +200,14 @@ fn typeck_with_inspect<'tcx>(
let wf_code = ObligationCauseCode::WellFormed(Some(WellFormedLoc::Ty(def_id)));
fcx.register_wf_obligation(expected_type.into(), body.value.span, wf_code);
if let hir::Node::AnonConst(_) = node {
fcx.require_type_is_sized(
expected_type,
body.value.span,
ObligationCauseCode::SizedConstOrStatic,
);
}
fcx.check_expr_coercible_to_type(body.value, expected_type, None);
fcx.write_ty(id, expected_type);

View file

@ -10,7 +10,6 @@ doctest = false
# tidy-alphabetical-start
rustc_data_structures = { path = "../rustc_data_structures" }
rustc_errors = { path = "../rustc_errors" }
rustc_fluent_macro = { path = "../rustc_fluent_macro" }
rustc_hir = { path = "../rustc_hir" }
rustc_index = { path = "../rustc_index" }
rustc_macros = { path = "../rustc_macros" }

View file

@ -1,5 +0,0 @@
infer_opaque_hidden_type =
opaque type's hidden type cannot be another opaque type from the same scope
.label = one of the two opaque types used here has to be outside its defining scope
.opaque_type = opaque type whose hidden type is being assigned
.hidden_type = opaque type being used as hidden type

View file

@ -2,13 +2,13 @@ use rustc_macros::Diagnostic;
use rustc_span::Span;
#[derive(Diagnostic)]
#[diag(infer_opaque_hidden_type)]
#[diag("opaque type's hidden type cannot be another opaque type from the same scope")]
pub(crate) struct OpaqueHiddenTypeDiag {
#[primary_span]
#[label]
#[label("one of the two opaque types used here has to be outside its defining scope")]
pub span: Span,
#[note(infer_opaque_type)]
#[note("opaque type whose hidden type is being assigned")]
pub opaque_type: Span,
#[note(infer_hidden_type)]
#[note("opaque type being used as hidden type")]
pub hidden_type: Span,
}

View file

@ -22,5 +22,3 @@
mod errors;
pub mod infer;
pub mod traits;
rustc_fluent_macro::fluent_messages! { "../messages.ftl" }

View file

@ -231,7 +231,12 @@ pub(crate) fn run_in_thread_pool_with_globals<
.name("rustc query cycle handler".to_string())
.spawn(move || {
let on_panic = defer(|| {
eprintln!("internal compiler error: query cycle handler thread panicked, aborting process");
// Split this long string so that it doesn't cause rustfmt to
// give up on the entire builder expression.
// <https://github.com/rust-lang/rustfmt/issues/3863>
const MESSAGE: &str = "\
internal compiler error: query cycle handler thread panicked, aborting process";
eprintln!("{MESSAGE}");
// We need to abort here as we failed to resolve the deadlock,
// otherwise the compiler could just hang,
process::abort();
@ -244,11 +249,16 @@ pub(crate) fn run_in_thread_pool_with_globals<
tls::with(|tcx| {
// Accessing session globals is sound as they outlive `GlobalCtxt`.
// They are needed to hash query keys containing spans or symbols.
let query_map = rustc_span::set_session_globals_then(unsafe { &*(session_globals as *const SessionGlobals) }, || {
// Ensure there was no errors collecting all active jobs.
// We need the complete map to ensure we find a cycle to break.
QueryCtxt::new(tcx).collect_active_jobs(false).expect("failed to collect active queries in deadlock handler")
});
let query_map = rustc_span::set_session_globals_then(
unsafe { &*(session_globals as *const SessionGlobals) },
|| {
// Ensure there were no errors collecting all active jobs.
// We need the complete map to ensure we find a cycle to break.
QueryCtxt::new(tcx).collect_active_jobs_from_all_queries(false).expect(
"failed to collect active queries in deadlock handler",
)
},
);
break_query_cycles(query_map, &registry);
})
})

View file

@ -20,6 +20,7 @@ declare_lint_pass! {
AMBIGUOUS_GLOB_IMPORTED_TRAITS,
AMBIGUOUS_GLOB_IMPORTS,
AMBIGUOUS_GLOB_REEXPORTS,
AMBIGUOUS_IMPORT_VISIBILITIES,
AMBIGUOUS_PANIC_IMPORTS,
ARITHMETIC_OVERFLOW,
ASM_SUB_REGISTER,
@ -4564,6 +4565,55 @@ declare_lint! {
};
}
declare_lint! {
/// The `ambiguous_import_visibilities` lint detects imports that should report ambiguity
/// errors, but previously didn't do that due to rustc bugs.
///
/// ### Example
///
/// ```rust,compile_fail
/// #![deny(unknown_lints)]
/// #![deny(ambiguous_import_visibilities)]
/// mod reexport {
/// mod m {
/// pub struct S {}
/// }
///
/// macro_rules! mac {
/// () => { use m::S; }
/// }
///
/// pub use m::*;
/// mac!();
///
/// pub use S as Z; // ambiguous visibility
/// }
///
/// fn main() {
/// reexport::Z {};
/// }
/// ```
///
/// {{produces}}
///
/// ### Explanation
///
/// Previous versions of Rust compile it successfully because it
/// fetched the glob import's visibility for `pub use S as Z` import, and ignored the private
/// `use m::S` import that appeared later.
///
/// This is a [future-incompatible] lint to transition this to a
/// hard error in the future.
///
/// [future-incompatible]: ../index.md#future-incompatible-lints
pub AMBIGUOUS_IMPORT_VISIBILITIES,
Warn,
"detects certain glob imports that require reporting an ambiguity error",
@future_incompatible = FutureIncompatibleInfo {
reason: fcw!(FutureReleaseError #149145),
};
}
declare_lint! {
/// The `refining_impl_trait_reachable` lint detects `impl Trait` return
/// types in method signatures that are refined by a publicly reachable

View file

@ -8,6 +8,8 @@ proc-macro = true
[dependencies]
# tidy-alphabetical-start
fluent-bundle = "0.16"
fluent-syntax = "0.12"
proc-macro2 = "1"
quote = "1"
syn = { version = "2.0.9", features = ["full"] }

View file

@ -22,20 +22,22 @@ impl<'a> DiagnosticDerive<'a> {
pub(crate) fn into_tokens(self) -> TokenStream {
let DiagnosticDerive { mut structure } = self;
let kind = DiagnosticDeriveKind::Diagnostic;
let slugs = RefCell::new(Vec::new());
let messages = RefCell::new(Vec::new());
let implementation = kind.each_variant(&mut structure, |mut builder, variant| {
let preamble = builder.preamble(variant);
let body = builder.body(variant);
let Some(slug) = builder.primary_message() else {
let Some(message) = builder.primary_message() else {
return DiagnosticDeriveError::ErrorHandled.to_compile_error();
};
slugs.borrow_mut().push(slug.clone());
messages.borrow_mut().push(message.clone());
let message = message.diag_message(Some(variant));
let init = quote! {
let mut diag = rustc_errors::Diag::new(
dcx,
level,
crate::fluent_generated::#slug
#message
);
};
@ -66,7 +68,7 @@ impl<'a> DiagnosticDerive<'a> {
}
}
});
for test in slugs.borrow().iter().map(|s| generate_test(s, &structure)) {
for test in messages.borrow().iter().map(|s| s.generate_test(&structure)) {
imp.extend(test);
}
imp
@ -86,17 +88,18 @@ impl<'a> LintDiagnosticDerive<'a> {
pub(crate) fn into_tokens(self) -> TokenStream {
let LintDiagnosticDerive { mut structure } = self;
let kind = DiagnosticDeriveKind::LintDiagnostic;
let slugs = RefCell::new(Vec::new());
let messages = RefCell::new(Vec::new());
let implementation = kind.each_variant(&mut structure, |mut builder, variant| {
let preamble = builder.preamble(variant);
let body = builder.body(variant);
let Some(slug) = builder.primary_message() else {
let Some(message) = builder.primary_message() else {
return DiagnosticDeriveError::ErrorHandled.to_compile_error();
};
slugs.borrow_mut().push(slug.clone());
messages.borrow_mut().push(message.clone());
let message = message.diag_message(Some(variant));
let primary_message = quote! {
diag.primary_message(crate::fluent_generated::#slug);
diag.primary_message(#message);
};
let formatting_init = &builder.formatting_init;
@ -122,47 +125,10 @@ impl<'a> LintDiagnosticDerive<'a> {
}
}
});
for test in slugs.borrow().iter().map(|s| generate_test(s, &structure)) {
for test in messages.borrow().iter().map(|s| s.generate_test(&structure)) {
imp.extend(test);
}
imp
}
}
/// Generates a `#[test]` that verifies that all referenced variables
/// exist on this structure.
/// Generates a `#[test]` that verifies that all referenced variables
/// exist on this structure.
fn generate_test(slug: &syn::Path, structure: &Structure<'_>) -> TokenStream {
    // FIXME: We can't identify variables in a subdiagnostic
    let has_subdiagnostic_field = structure
        .variants()
        .iter()
        .flat_map(|variant| variant.ast().fields.iter())
        .flat_map(|field| field.attrs.iter())
        .filter_map(|attr| attr.path().get_ident())
        .any(|name| name == "subdiagnostic");
    if has_subdiagnostic_field {
        return quote!();
    }

    use std::sync::atomic::{AtomicUsize, Ordering};

    // We need to make sure that the same diagnostic slug can be used multiple times without
    // causing an error, so just have a global counter here.
    static COUNTER: AtomicUsize = AtomicUsize::new(0);

    let slug_ident = slug.get_ident().unwrap();
    let test_name =
        quote::format_ident!("verify_{slug_ident}_{}", COUNTER.fetch_add(1, Ordering::Relaxed));
    let refs_ident = quote::format_ident!("{slug_ident}_refs");
    let ty_name = &structure.ast().ident;

    // Names of every named field, across all variants of the structure.
    let field_names: Vec<String> = structure
        .variants()
        .iter()
        .flat_map(|variant| variant.ast().fields.iter())
        .filter_map(|field| field.ident.as_ref().map(|ident| ident.to_string()))
        .collect();

    // tidy errors on `#[test]` outside of test files, so we use `#[test ]` to work around this
    quote! {
        #[cfg(test)]
        #[test ]
        fn #test_name() {
            let variables = [#(#field_names),*];
            for vref in crate::fluent_generated::#refs_ident {
                assert!(variables.contains(vref), "{}: variable `{vref}` not found ({})", stringify!(#ty_name), stringify!(#slug_ident));
            }
        }
    }
}

View file

@ -4,13 +4,14 @@ use proc_macro2::{Ident, Span, TokenStream};
use quote::{format_ident, quote, quote_spanned};
use syn::parse::ParseStream;
use syn::spanned::Spanned;
use syn::{Attribute, Meta, Path, Token, Type, parse_quote};
use syn::{Attribute, LitStr, Meta, Path, Token, Type, parse_quote};
use synstructure::{BindingInfo, Structure, VariantInfo};
use super::utils::SubdiagnosticVariant;
use crate::diagnostics::error::{
DiagnosticDeriveError, span_err, throw_invalid_attr, throw_span_err,
};
use crate::diagnostics::message::Message;
use crate::diagnostics::utils::{
FieldInfo, FieldInnerTy, FieldMap, SetOnce, SpannedOption, SubdiagnosticKind,
build_field_mapping, is_doc_comment, report_error_if_not_applied_to_span, report_type_error,
@ -41,9 +42,9 @@ pub(crate) struct DiagnosticDeriveVariantBuilder {
/// derive builder.
pub field_map: FieldMap,
/// Slug is a mandatory part of the struct attribute as corresponds to the Fluent message that
/// Message is a mandatory part of the struct attribute as corresponds to the Fluent message that
/// has the actual diagnostic message.
pub slug: Option<Path>,
pub message: Option<Message>,
/// Error codes are a optional part of the struct attribute - this is only set to detect
/// multiple specifications.
@ -90,7 +91,7 @@ impl DiagnosticDeriveKind {
span,
field_map: build_field_mapping(variant),
formatting_init: TokenStream::new(),
slug: None,
message: None,
code: None,
};
f(builder, variant)
@ -105,8 +106,8 @@ impl DiagnosticDeriveKind {
}
impl DiagnosticDeriveVariantBuilder {
pub(crate) fn primary_message(&self) -> Option<&Path> {
match self.slug.as_ref() {
pub(crate) fn primary_message(&self) -> Option<&Message> {
match self.message.as_ref() {
None => {
span_err(self.span, "diagnostic slug not specified")
.help(
@ -116,7 +117,7 @@ impl DiagnosticDeriveVariantBuilder {
.emit();
None
}
Some(slug)
Some(Message::Slug(slug))
if let Some(Mismatch { slug_name, crate_name, slug_prefix }) =
Mismatch::check(slug) =>
{
@ -126,7 +127,7 @@ impl DiagnosticDeriveVariantBuilder {
.emit();
None
}
Some(slug) => Some(slug),
Some(msg) => Some(msg),
}
}
@ -136,7 +137,8 @@ impl DiagnosticDeriveVariantBuilder {
let ast = variant.ast();
let attrs = &ast.attrs;
let preamble = attrs.iter().map(|attr| {
self.generate_structure_code_for_attr(attr).unwrap_or_else(|v| v.to_compile_error())
self.generate_structure_code_for_attr(attr, variant)
.unwrap_or_else(|v| v.to_compile_error())
});
quote! {
@ -154,7 +156,7 @@ impl DiagnosticDeriveVariantBuilder {
}
// ..and then subdiagnostic additions.
for binding in variant.bindings().iter().filter(|bi| !should_generate_arg(bi.ast())) {
body.extend(self.generate_field_attrs_code(binding));
body.extend(self.generate_field_attrs_code(binding, variant));
}
body
}
@ -163,7 +165,7 @@ impl DiagnosticDeriveVariantBuilder {
fn parse_subdiag_attribute(
&self,
attr: &Attribute,
) -> Result<Option<(SubdiagnosticKind, Path, bool)>, DiagnosticDeriveError> {
) -> Result<Option<(SubdiagnosticKind, Message, bool)>, DiagnosticDeriveError> {
let Some(subdiag) = SubdiagnosticVariant::from_attr(attr, &self.field_map)? else {
// Some attributes aren't errors - like documentation comments - but also aren't
// subdiagnostics.
@ -175,15 +177,18 @@ impl DiagnosticDeriveVariantBuilder {
.help("consider creating a `Subdiagnostic` instead"));
}
let slug = subdiag.slug.unwrap_or_else(|| match subdiag.kind {
SubdiagnosticKind::Label => parse_quote! { _subdiag::label },
SubdiagnosticKind::Note => parse_quote! { _subdiag::note },
SubdiagnosticKind::NoteOnce => parse_quote! { _subdiag::note_once },
SubdiagnosticKind::Help => parse_quote! { _subdiag::help },
SubdiagnosticKind::HelpOnce => parse_quote! { _subdiag::help_once },
SubdiagnosticKind::Warn => parse_quote! { _subdiag::warn },
SubdiagnosticKind::Suggestion { .. } => parse_quote! { _subdiag::suggestion },
SubdiagnosticKind::MultipartSuggestion { .. } => unreachable!(),
// For subdiagnostics without a message specified, insert a placeholder slug
let slug = subdiag.slug.unwrap_or_else(|| {
Message::Slug(match subdiag.kind {
SubdiagnosticKind::Label => parse_quote! { _subdiag::label },
SubdiagnosticKind::Note => parse_quote! { _subdiag::note },
SubdiagnosticKind::NoteOnce => parse_quote! { _subdiag::note_once },
SubdiagnosticKind::Help => parse_quote! { _subdiag::help },
SubdiagnosticKind::HelpOnce => parse_quote! { _subdiag::help_once },
SubdiagnosticKind::Warn => parse_quote! { _subdiag::warn },
SubdiagnosticKind::Suggestion { .. } => parse_quote! { _subdiag::suggestion },
SubdiagnosticKind::MultipartSuggestion { .. } => unreachable!(),
})
});
Ok(Some((subdiag.kind, slug, false)))
@ -195,6 +200,7 @@ impl DiagnosticDeriveVariantBuilder {
fn generate_structure_code_for_attr(
&mut self,
attr: &Attribute,
variant: &VariantInfo<'_>,
) -> Result<TokenStream, DiagnosticDeriveError> {
// Always allow documentation comments.
if is_doc_comment(attr) {
@ -210,13 +216,28 @@ impl DiagnosticDeriveVariantBuilder {
let mut input = &*input;
let slug_recovery_point = input.fork();
let slug = input.parse::<Path>()?;
if input.is_empty() || input.peek(Token![,]) {
self.slug = Some(slug);
if input.peek(LitStr) {
// Parse an inline message
let message = input.parse::<LitStr>()?;
if !message.suffix().is_empty() {
span_err(
message.span().unwrap(),
"Inline message is not allowed to have a suffix",
)
.emit();
}
self.message = Some(Message::Inline(message.span(), message.value()));
} else {
input = &slug_recovery_point;
// Parse a slug
let slug = input.parse::<Path>()?;
if input.is_empty() || input.peek(Token![,]) {
self.message = Some(Message::Slug(slug));
} else {
input = &slug_recovery_point;
}
}
// Parse arguments
while !input.is_empty() {
input.parse::<Token![,]>()?;
// Allow trailing comma
@ -266,7 +287,7 @@ impl DiagnosticDeriveVariantBuilder {
| SubdiagnosticKind::NoteOnce
| SubdiagnosticKind::Help
| SubdiagnosticKind::HelpOnce
| SubdiagnosticKind::Warn => Ok(self.add_subdiagnostic(&fn_ident, slug)),
| SubdiagnosticKind::Warn => Ok(self.add_subdiagnostic(&fn_ident, slug, variant)),
SubdiagnosticKind::Label | SubdiagnosticKind::Suggestion { .. } => {
throw_invalid_attr!(attr, |diag| diag
.help("`#[label]` and `#[suggestion]` can only be applied to fields"));
@ -294,7 +315,11 @@ impl DiagnosticDeriveVariantBuilder {
}
}
fn generate_field_attrs_code(&mut self, binding_info: &BindingInfo<'_>) -> TokenStream {
fn generate_field_attrs_code(
&mut self,
binding_info: &BindingInfo<'_>,
variant: &VariantInfo<'_>,
) -> TokenStream {
let field = binding_info.ast();
let field_binding = &binding_info.binding;
@ -333,6 +358,7 @@ impl DiagnosticDeriveVariantBuilder {
attr,
FieldInfo { binding: binding_info, ty: inner_ty, span: &field.span() },
binding,
variant
)
.unwrap_or_else(|v| v.to_compile_error());
@ -350,6 +376,7 @@ impl DiagnosticDeriveVariantBuilder {
attr: &Attribute,
info: FieldInfo<'_>,
binding: TokenStream,
variant: &VariantInfo<'_>,
) -> Result<TokenStream, DiagnosticDeriveError> {
let ident = &attr.path().segments.last().unwrap().ident;
let name = ident.to_string();
@ -388,7 +415,7 @@ impl DiagnosticDeriveVariantBuilder {
match subdiag {
SubdiagnosticKind::Label => {
report_error_if_not_applied_to_span(attr, &info)?;
Ok(self.add_spanned_subdiagnostic(binding, &fn_ident, slug))
Ok(self.add_spanned_subdiagnostic(binding, &fn_ident, slug, variant))
}
SubdiagnosticKind::Note
| SubdiagnosticKind::NoteOnce
@ -399,11 +426,11 @@ impl DiagnosticDeriveVariantBuilder {
if type_matches_path(inner, &["rustc_span", "Span"])
|| type_matches_path(inner, &["rustc_span", "MultiSpan"])
{
Ok(self.add_spanned_subdiagnostic(binding, &fn_ident, slug))
Ok(self.add_spanned_subdiagnostic(binding, &fn_ident, slug, variant))
} else if type_is_unit(inner)
|| (matches!(info.ty, FieldInnerTy::Plain(_)) && type_is_bool(inner))
{
Ok(self.add_subdiagnostic(&fn_ident, slug))
Ok(self.add_subdiagnostic(&fn_ident, slug, variant))
} else {
report_type_error(attr, "`Span`, `MultiSpan`, `bool` or `()`")?
}
@ -429,6 +456,7 @@ impl DiagnosticDeriveVariantBuilder {
applicability.set_once(quote! { #static_applicability }, span);
}
let message = slug.diag_message(Some(variant));
let applicability = applicability
.value()
.unwrap_or_else(|| quote! { rustc_errors::Applicability::Unspecified });
@ -438,7 +466,7 @@ impl DiagnosticDeriveVariantBuilder {
Ok(quote! {
diag.span_suggestions_with_style(
#span_field,
crate::fluent_generated::#slug,
#message,
#code_field,
#applicability,
#style
@ -455,22 +483,30 @@ impl DiagnosticDeriveVariantBuilder {
&self,
field_binding: TokenStream,
kind: &Ident,
fluent_attr_identifier: Path,
message: Message,
variant: &VariantInfo<'_>,
) -> TokenStream {
let fn_name = format_ident!("span_{}", kind);
let message = message.diag_message(Some(variant));
quote! {
diag.#fn_name(
#field_binding,
crate::fluent_generated::#fluent_attr_identifier
#message
);
}
}
/// Adds a subdiagnostic by generating a `diag.span_$kind` call with the current slug
/// and `fluent_attr_identifier`.
fn add_subdiagnostic(&self, kind: &Ident, fluent_attr_identifier: Path) -> TokenStream {
fn add_subdiagnostic(
&self,
kind: &Ident,
message: Message,
variant: &VariantInfo<'_>,
) -> TokenStream {
let message = message.diag_message(Some(variant));
quote! {
diag.#kind(crate::fluent_generated::#fluent_attr_identifier);
diag.#kind(#message);
}
}

View file

@ -0,0 +1,138 @@
use fluent_bundle::FluentResource;
use fluent_syntax::ast::{Expression, InlineExpression, Pattern, PatternElement};
use proc_macro2::{Span, TokenStream};
use quote::quote;
use syn::Path;
use synstructure::{Structure, VariantInfo};
use crate::diagnostics::error::span_err;
/// Where a diagnostic (or subdiagnostic) message comes from: either a Fluent
/// slug resolved through `crate::fluent_generated`, or a string literal written
/// inline in the derive attribute (stored with its span for error reporting).
#[derive(Clone)]
pub(crate) enum Message {
    Slug(Path),
    Inline(Span, String),
}
impl Message {
    /// Get the diagnostic message for this diagnostic
    /// The passed `variant` is used to check whether all variables in the message are used.
    /// For subdiagnostics, we cannot check this.
    pub(crate) fn diag_message(&self, variant: Option<&VariantInfo<'_>>) -> TokenStream {
        match self {
            Message::Slug(slug) => {
                // Fluent slug: refer to the constant generated from the `.ftl` files.
                quote! { crate::fluent_generated::#slug }
            }
            Message::Inline(message_span, message) => {
                // Inline messages can be checked right now, at macro-expansion time,
                // so no separate `#[test]` is needed for them (see `generate_test`).
                if let Some(variant) = variant {
                    verify_fluent_message(*message_span, &message, variant);
                }
                quote! { rustc_errors::DiagMessage::Inline(std::borrow::Cow::Borrowed(#message)) }
            }
        }
    }

    /// Generates a `#[test]` that verifies that all referenced variables
    /// exist on this structure.
    pub(crate) fn generate_test(&self, structure: &Structure<'_>) -> TokenStream {
        match self {
            Message::Slug(slug) => {
                // FIXME: We can't identify variables in a subdiagnostic
                for field in structure.variants().iter().flat_map(|v| v.ast().fields.iter()) {
                    for attr_name in field.attrs.iter().filter_map(|at| at.path().get_ident()) {
                        if attr_name == "subdiagnostic" {
                            return quote!();
                        }
                    }
                }

                use std::sync::atomic::{AtomicUsize, Ordering};

                // We need to make sure that the same diagnostic slug can be used multiple times without
                // causing an error, so just have a global counter here.
                static COUNTER: AtomicUsize = AtomicUsize::new(0);
                let slug = slug.get_ident().unwrap();
                let ident = quote::format_ident!(
                    "verify_{slug}_{}",
                    COUNTER.fetch_add(1, Ordering::Relaxed)
                );
                // `<slug>_refs` is the generated list of variables referenced by the
                // Fluent message; each one is checked against the struct's field names.
                let ref_slug = quote::format_ident!("{slug}_refs");
                let struct_name = &structure.ast().ident;
                let variables: Vec<_> = structure
                    .variants()
                    .iter()
                    .flat_map(|v| {
                        v.ast()
                            .fields
                            .iter()
                            .filter_map(|f| f.ident.as_ref().map(|i| i.to_string()))
                    })
                    .collect();
                // tidy errors on `#[test]` outside of test files, so we use `#[test ]` to work around this
                quote! {
                    #[cfg(test)]
                    #[test ]
                    fn #ident() {
                        let variables = [#(#variables),*];
                        for vref in crate::fluent_generated::#ref_slug {
                            assert!(variables.contains(vref), "{}: variable `{vref}` not found ({})", stringify!(#struct_name), stringify!(#slug));
                        }
                    }
                }
            }
            Message::Inline(..) => {
                // We don't generate a test for inline diagnostics, we can verify these at compile-time!
                // This verification is done in the `diag_message` function above
                quote! {}
            }
        }
    }
}
/// Checks an inline Fluent message at macro-expansion time: parses `message` as
/// a Fluent pattern and emits an error (at `msg_span`) for every `$variable`
/// reference that has no matching field on the diagnostic `variant`.
fn verify_fluent_message(msg_span: Span, message: &str, variant: &VariantInfo<'_>) {
    // Wrap the message body in a synthetic Fluent entry so the parser accepts it.
    const GENERATED_MSG_ID: &str = "generated_msg";
    let resource = match FluentResource::try_new(format!("{GENERATED_MSG_ID} = {message}\n")) {
        Ok(resource) => resource,
        Err((_, errors)) => {
            // A malformed inline message is the user's mistake; report it as a
            // derive error instead of panicking inside the proc macro.
            span_err(
                msg_span.unwrap(),
                format!("failed to parse inline fluent message: {errors:?}"),
            )
            .emit();
            return;
        }
    };
    assert_eq!(resource.entries().count(), 1);
    let Some(fluent_syntax::ast::Entry::Message(message)) = resource.get_entry(0) else {
        panic!("Did not parse into a message")
    };

    // Every `$var` the message references must correspond to a named field binding.
    let fields: Vec<String> = variant
        .bindings()
        .iter()
        .flat_map(|b| b.ast().ident.as_ref())
        .map(|id| id.to_string())
        .collect();
    for variable in variable_references(message) {
        if !fields.iter().any(|f| f == variable) {
            span_err(msg_span.unwrap(), format!("variable `{variable}` not found in diagnostic"))
                .help(format!("available fields: {}", fields.join(", ")))
                .emit();
        }
    }
}
fn variable_references<'a>(msg: &fluent_syntax::ast::Message<&'a str>) -> Vec<&'a str> {
let mut refs = vec![];
if let Some(Pattern { elements }) = &msg.value {
for elt in elements {
if let PatternElement::Placeable {
expression: Expression::Inline(InlineExpression::VariableReference { id }),
} = elt
{
refs.push(id.name);
}
}
}
for attr in &msg.attributes {
for elt in &attr.value.elements {
if let PatternElement::Placeable {
expression: Expression::Inline(InlineExpression::VariableReference { id }),
} = elt
{
refs.push(id.name);
}
}
}
refs
}

View file

@ -1,6 +1,7 @@
mod diagnostic;
mod diagnostic_builder;
mod error;
mod message;
mod subdiagnostic;
mod utils;

View file

@ -11,6 +11,7 @@ use super::utils::SubdiagnosticVariant;
use crate::diagnostics::error::{
DiagnosticDeriveError, invalid_attr, span_err, throw_invalid_attr, throw_span_err,
};
use crate::diagnostics::message::Message;
use crate::diagnostics::utils::{
AllowMultipleAlternatives, FieldInfo, FieldInnerTy, FieldMap, SetOnce, SpannedOption,
SubdiagnosticKind, build_field_mapping, build_suggestion_code, is_doc_comment, new_code_ident,
@ -182,7 +183,9 @@ impl<'a> FromIterator<&'a SubdiagnosticKind> for KindsStatistics {
}
impl<'parent, 'a> SubdiagnosticDeriveVariantBuilder<'parent, 'a> {
fn identify_kind(&mut self) -> Result<Vec<(SubdiagnosticKind, Path)>, DiagnosticDeriveError> {
fn identify_kind(
&mut self,
) -> Result<Vec<(SubdiagnosticKind, Message)>, DiagnosticDeriveError> {
let mut kind_slugs = vec![];
for attr in self.variant.ast().attrs {
@ -532,9 +535,8 @@ impl<'parent, 'a> SubdiagnosticDeriveVariantBuilder<'parent, 'a> {
let mut calls = TokenStream::new();
for (kind, slug) in kind_slugs {
let message = format_ident!("__message");
calls.extend(
quote! { let #message = #diag.eagerly_translate(crate::fluent_generated::#slug); },
);
let message_stream = slug.diag_message(None);
calls.extend(quote! { let #message = #diag.eagerly_translate(#message_stream); });
let name = format_ident!("{}{}", if span_field.is_some() { "span_" } else { "" }, kind);
let call = match kind {

View file

@ -16,6 +16,7 @@ use super::error::invalid_attr;
use crate::diagnostics::error::{
DiagnosticDeriveError, span_err, throw_invalid_attr, throw_span_err,
};
use crate::diagnostics::message::Message;
thread_local! {
pub(crate) static CODE_IDENT_COUNT: RefCell<u32> = RefCell::new(0);
@ -587,7 +588,7 @@ pub(super) enum SubdiagnosticKind {
pub(super) struct SubdiagnosticVariant {
pub(super) kind: SubdiagnosticKind,
pub(super) slug: Option<Path>,
pub(super) slug: Option<Message>,
}
impl SubdiagnosticVariant {
@ -696,11 +697,31 @@ impl SubdiagnosticVariant {
list.parse_args_with(|input: ParseStream<'_>| {
let mut is_first = true;
while !input.is_empty() {
// Try to parse an inline diagnostic message
if input.peek(LitStr) {
let message = input.parse::<LitStr>()?;
if !message.suffix().is_empty() {
span_err(
message.span().unwrap(),
"Inline message is not allowed to have a suffix",
).emit();
}
if !input.is_empty() { input.parse::<Token![,]>()?; }
if is_first {
slug = Some(Message::Inline(message.span(), message.value()));
is_first = false;
} else {
span_err(message.span().unwrap(), "a diagnostic message must be the first argument to the attribute").emit();
}
continue
}
// Try to parse a slug instead
let arg_name: Path = input.parse::<Path>()?;
let arg_name_span = arg_name.span().unwrap();
if input.is_empty() || input.parse::<Token![,]>().is_ok() {
if is_first {
slug = Some(arg_name);
slug = Some(Message::Slug(arg_name));
is_first = false;
} else {
span_err(arg_name_span, "a diagnostic slug must be the first argument to the attribute").emit();
@ -709,6 +730,7 @@ impl SubdiagnosticVariant {
}
is_first = false;
// Try to parse an argument
match (arg_name.require_ident()?.to_string().as_str(), &mut kind) {
("code", SubdiagnosticKind::Suggestion { code_field, .. }) => {
let code_init = build_suggestion_code(

View file

@ -303,9 +303,7 @@ fn add_query_desc_cached_impl(
#[allow(unused_variables)]
pub fn #name<'tcx>(tcx: TyCtxt<'tcx>, key: crate::query::queries::#name::Key<'tcx>) -> String {
let (#tcx, #key) = (tcx, key);
::rustc_middle::ty::print::with_no_trimmed_paths!(
format!(#desc)
)
format!(#desc)
}
};

View file

@ -441,6 +441,8 @@ impl<'tcx> Place<'tcx> {
where
D: ?Sized + HasLocalDecls<'tcx>,
{
// If there's a field projection element in `projection`, we *could* skip everything
// before that, but on 2026-01-31 a perf experiment showed no benefit from doing so.
PlaceTy::from_ty(local_decls.local_decls()[local].ty).multi_projection_ty(tcx, projection)
}

View file

@ -1,16 +1,13 @@
//! Helper functions that serve as the immediate implementation of
//! `tcx.$query(..)` and its variations.
use std::fmt::Debug;
use rustc_data_structures::fingerprint::Fingerprint;
use rustc_query_system::dep_graph::{DepKind, DepNodeParams};
use rustc_query_system::ich::StableHashingContext;
use rustc_query_system::query::{QueryCache, QueryMode, try_get_cached};
use rustc_span::{DUMMY_SP, ErrorGuaranteed, Span};
use crate::dep_graph;
use crate::query::erase::{self, Erasable, Erased};
use crate::query::plumbing::QueryVTable;
use crate::ty::TyCtxt;
/// Shared implementation of `tcx.$query(..)` and `tcx.at(span).$query(..)`
@ -80,35 +77,38 @@ where
}
/// Common implementation of query feeding, used by `define_feedable!`.
pub(crate) fn query_feed<'tcx, Cache, Value>(
pub(crate) fn query_feed<'tcx, Cache>(
tcx: TyCtxt<'tcx>,
dep_kind: DepKind,
hasher: Option<fn(&mut StableHashingContext<'_>, &Value) -> Fingerprint>,
query_vtable: &QueryVTable<'tcx, Cache>,
cache: &Cache,
key: Cache::Key,
erased: Erased<Value>,
value: Cache::Value,
) where
Cache: QueryCache<Value = Erased<Value>>,
Cache: QueryCache,
Cache::Key: DepNodeParams<TyCtxt<'tcx>>,
Value: Erasable + Debug,
{
let value = erase::restore_val::<Value>(erased);
let format_value = query_vtable.format_value;
// Check whether the in-memory cache already has a value for this key.
match try_get_cached(tcx, cache, &key) {
Some(old) => {
let old = erase::restore_val::<Value>(old);
if let Some(hasher) = hasher {
let (value_hash, old_hash): (Fingerprint, Fingerprint) = tcx
.with_stable_hashing_context(|mut hcx| {
(hasher(&mut hcx, &value), hasher(&mut hcx, &old))
});
// The query already has a cached value for this key.
// That's OK if both values are the same, i.e. they have the same hash,
// so now we check their hashes.
if let Some(hasher_fn) = query_vtable.hash_result {
let (old_hash, value_hash) = tcx.with_stable_hashing_context(|ref mut hcx| {
(hasher_fn(hcx, &old), hasher_fn(hcx, &value))
});
if old_hash != value_hash {
// We have an inconsistency. This can happen if one of the two
// results is tainted by errors. In this case, delay a bug to
// ensure compilation is doomed, and keep the `old` value.
tcx.dcx().delayed_bug(format!(
"Trying to feed an already recorded value for query {dep_kind:?} key={key:?}:\n\
old value: {old:?}\nnew value: {value:?}",
old value: {old}\nnew value: {value}",
old = format_value(&old),
value = format_value(&value),
));
}
} else {
@ -117,14 +117,24 @@ pub(crate) fn query_feed<'tcx, Cache, Value>(
// the query should not be marked `no_hash`.
bug!(
"Trying to feed an already recorded value for query {dep_kind:?} key={key:?}:\n\
old value: {old:?}\nnew value: {value:?}",
old value: {old}\nnew value: {value}",
old = format_value(&old),
value = format_value(&value),
)
}
}
None => {
// There is no cached value for this key, so feed the query by
// adding the provided value to the cache.
let dep_node = dep_graph::DepNode::construct(tcx, dep_kind, &key);
let dep_node_index = tcx.dep_graph.with_feed_task(dep_node, tcx, &value, hasher);
cache.complete(key, erased, dep_node_index);
let dep_node_index = tcx.dep_graph.with_feed_task(
dep_node,
tcx,
&value,
query_vtable.hash_result,
query_vtable.format_value,
);
cache.complete(key, value, dep_node_index);
}
}
}

View file

@ -87,7 +87,6 @@ use rustc_hir::{Crate, ItemLocalId, ItemLocalMap, PreciseCapturingArgKind, Trait
use rustc_index::IndexVec;
use rustc_lint_defs::LintId;
use rustc_macros::rustc_queries;
use rustc_query_system::ich::StableHashingContext;
use rustc_query_system::query::{QueryMode, QueryState};
use rustc_session::Limits;
use rustc_session::config::{EntryFnType, OptLevel, OutputFilenames, SymbolManglingVersion};

View file

@ -497,18 +497,6 @@ macro_rules! define_callbacks {
};
}
macro_rules! hash_result {
([]) => {{
Some(dep_graph::hash_result)
}};
([(no_hash) $($rest:tt)*]) => {{
None
}};
([$other:tt $($modifiers:tt)*]) => {
hash_result!([$($modifiers)*])
};
}
macro_rules! define_feedable {
($($(#[$attr:meta])* [$($modifiers:tt)*] fn $name:ident($($K:tt)*) -> $V:ty,)*) => {
$(impl<'tcx, K: IntoQueryParam<$($K)*> + Copy> TyCtxtFeed<'tcx, K> {
@ -518,19 +506,17 @@ macro_rules! define_feedable {
let key = self.key().into_query_param();
let tcx = self.tcx;
let erased = queries::$name::provided_to_erased(tcx, value);
let cache = &tcx.query_system.caches.$name;
let erased_value = queries::$name::provided_to_erased(tcx, value);
let dep_kind: dep_graph::DepKind = dep_graph::dep_kinds::$name;
let hasher: Option<fn(&mut StableHashingContext<'_>, &_) -> _> = hash_result!([$($modifiers)*]);
$crate::query::inner::query_feed(
tcx,
dep_kind,
hasher,
cache,
&tcx.query_system.query_vtables.$name,
&tcx.query_system.caches.$name,
key,
erased,
erased_value,
);
}
})*

View file

@ -408,6 +408,12 @@ impl<Id: Into<DefId>> Visibility<Id> {
}
}
impl<Id: Into<DefId> + Copy> Visibility<Id> {
    /// Returns the more restrictive of `self` and `vis`: if `self` is at least
    /// as visible as `vis`, the result is `vis`; otherwise `self` (which is
    /// also the fallback when the two visibilities are incomparable).
    pub fn min(self, vis: Visibility<Id>, tcx: TyCtxt<'_>) -> Visibility<Id> {
        if self.is_at_least(vis, tcx) { vis } else { self }
    }
}
impl Visibility<DefId> {
pub fn expect_local(self) -> Visibility {
self.map_id(|id| id.expect_local())

View file

@ -50,9 +50,9 @@ impl<'tcx> Value<TyCtxt<'tcx>> for ty::Binder<'_, ty::FnSig<'_>> {
) -> Self {
let err = Ty::new_error(tcx, guar);
let arity = if let Some(frame) = cycle_error.cycle.get(0)
&& frame.query.dep_kind == dep_kinds::fn_sig
&& let Some(def_id) = frame.query.def_id
let arity = if let Some(info) = cycle_error.cycle.get(0)
&& info.frame.dep_kind == dep_kinds::fn_sig
&& let Some(def_id) = info.frame.def_id
&& let Some(node) = tcx.hir_get_if_local(def_id)
&& let Some(sig) = node.fn_sig()
{
@ -85,10 +85,10 @@ impl<'tcx> Value<TyCtxt<'tcx>> for Representability {
let mut item_and_field_ids = Vec::new();
let mut representable_ids = FxHashSet::default();
for info in &cycle_error.cycle {
if info.query.dep_kind == dep_kinds::representability
&& let Some(field_id) = info.query.def_id
if info.frame.dep_kind == dep_kinds::representability
&& let Some(field_id) = info.frame.def_id
&& let Some(field_id) = field_id.as_local()
&& let Some(DefKind::Field) = info.query.info.def_kind
&& let Some(DefKind::Field) = info.frame.info.def_kind
{
let parent_id = tcx.parent(field_id.to_def_id());
let item_id = match tcx.def_kind(parent_id) {
@ -99,8 +99,8 @@ impl<'tcx> Value<TyCtxt<'tcx>> for Representability {
}
}
for info in &cycle_error.cycle {
if info.query.dep_kind == dep_kinds::representability_adt_ty
&& let Some(def_id) = info.query.def_id_for_ty_in_cycle
if info.frame.dep_kind == dep_kinds::representability_adt_ty
&& let Some(def_id) = info.frame.def_id_for_ty_in_cycle
&& let Some(def_id) = def_id.as_local()
&& !item_and_field_ids.iter().any(|&(id, _)| id == def_id)
{
@ -141,9 +141,9 @@ impl<'tcx> Value<TyCtxt<'tcx>> for &[ty::Variance] {
search_for_cycle_permutation(
&cycle_error.cycle,
|cycle| {
if let Some(frame) = cycle.get(0)
&& frame.query.dep_kind == dep_kinds::variances_of
&& let Some(def_id) = frame.query.def_id
if let Some(info) = cycle.get(0)
&& info.frame.dep_kind == dep_kinds::variances_of
&& let Some(def_id) = info.frame.def_id
{
let n = tcx.generics_of(def_id).own_params.len();
ControlFlow::Break(vec![ty::Bivariant; n].leak())
@ -189,8 +189,8 @@ impl<'tcx, T> Value<TyCtxt<'tcx>> for Result<T, &'_ ty::layout::LayoutError<'_>>
let diag = search_for_cycle_permutation(
&cycle_error.cycle,
|cycle| {
if cycle[0].query.dep_kind == dep_kinds::layout_of
&& let Some(def_id) = cycle[0].query.def_id_for_ty_in_cycle
if cycle[0].frame.dep_kind == dep_kinds::layout_of
&& let Some(def_id) = cycle[0].frame.def_id_for_ty_in_cycle
&& let Some(def_id) = def_id.as_local()
&& let def_kind = tcx.def_kind(def_id)
&& matches!(def_kind, DefKind::Closure)
@ -213,18 +213,18 @@ impl<'tcx, T> Value<TyCtxt<'tcx>> for Result<T, &'_ ty::layout::LayoutError<'_>>
tcx.def_kind_descr_article(def_kind, def_id.to_def_id()),
tcx.def_kind_descr(def_kind, def_id.to_def_id()),
);
for (i, frame) in cycle.iter().enumerate() {
if frame.query.dep_kind != dep_kinds::layout_of {
for (i, info) in cycle.iter().enumerate() {
if info.frame.dep_kind != dep_kinds::layout_of {
continue;
}
let Some(frame_def_id) = frame.query.def_id_for_ty_in_cycle else {
let Some(frame_def_id) = info.frame.def_id_for_ty_in_cycle else {
continue;
};
let Some(frame_coroutine_kind) = tcx.coroutine_kind(frame_def_id) else {
continue;
};
let frame_span =
frame.query.info.default_span(cycle[(i + 1) % cycle.len()].span);
info.frame.info.default_span(cycle[(i + 1) % cycle.len()].span);
if frame_span.is_dummy() {
continue;
}

View file

@ -8,11 +8,11 @@ edition = "2024"
polonius-engine = "0.13.0"
regex = "1"
rustc_abi = { path = "../rustc_abi" }
rustc_ast = { path = "../rustc_ast" }
rustc_data_structures = { path = "../rustc_data_structures" }
rustc_errors = { path = "../rustc_errors" }
rustc_fluent_macro = { path = "../rustc_fluent_macro" }
rustc_graphviz = { path = "../rustc_graphviz" }
rustc_hir = { path = "../rustc_hir" }
rustc_index = { path = "../rustc_index" }
rustc_macros = { path = "../rustc_macros" }
rustc_middle = { path = "../rustc_middle" }

View file

@ -1,9 +1,3 @@
mir_dataflow_duplicate_values_for =
duplicate values for `{$name}`
mir_dataflow_path_must_end_in_filename =
path must end in a filename
mir_dataflow_peek_argument_not_a_local =
rustc_peek: argument was not a local
@ -19,11 +13,5 @@ mir_dataflow_peek_must_be_not_temporary =
mir_dataflow_peek_must_be_place_or_ref_place =
rustc_peek: argument expression must be either `place` or `&place`
mir_dataflow_requires_an_argument =
`{$name}` requires an argument
mir_dataflow_stop_after_dataflow_ended_compilation =
stop_after_dataflow ended compilation
mir_dataflow_unknown_formatter =
unknown formatter

View file

@ -1,35 +1,5 @@
use rustc_macros::Diagnostic;
use rustc_span::{Span, Symbol};
#[derive(Diagnostic)]
#[diag(mir_dataflow_path_must_end_in_filename)]
pub(crate) struct PathMustEndInFilename {
#[primary_span]
pub span: Span,
}
#[derive(Diagnostic)]
#[diag(mir_dataflow_unknown_formatter)]
pub(crate) struct UnknownFormatter {
#[primary_span]
pub span: Span,
}
#[derive(Diagnostic)]
#[diag(mir_dataflow_duplicate_values_for)]
pub(crate) struct DuplicateValuesFor {
#[primary_span]
pub span: Span,
pub name: Symbol,
}
#[derive(Diagnostic)]
#[diag(mir_dataflow_requires_an_argument)]
pub(crate) struct RequiresAnArgument {
#[primary_span]
pub span: Span,
pub name: Symbol,
}
use rustc_span::Span;
#[derive(Diagnostic)]
#[diag(mir_dataflow_stop_after_dataflow_ended_compilation)]

View file

@ -7,6 +7,9 @@ use std::sync::OnceLock;
use std::{io, ops, str};
use regex::Regex;
use rustc_graphviz as dot;
use rustc_hir::attrs::{AttributeKind, BorrowckGraphvizFormatKind, RustcMirKind};
use rustc_hir::find_attr;
use rustc_index::bit_set::DenseBitSet;
use rustc_middle::mir::{
self, BasicBlock, Body, Location, MirDumper, graphviz_safe_def_name, traversal,
@ -14,17 +17,12 @@ use rustc_middle::mir::{
use rustc_middle::ty::TyCtxt;
use rustc_middle::ty::print::with_no_trimmed_paths;
use rustc_span::def_id::DefId;
use rustc_span::{Symbol, sym};
use tracing::debug;
use {rustc_ast as ast, rustc_graphviz as dot};
use super::fmt::{DebugDiffWithAdapter, DebugWithAdapter, DebugWithContext};
use super::{
Analysis, CallReturnPlaces, Direction, Results, ResultsCursor, ResultsVisitor, visit_results,
};
use crate::errors::{
DuplicateValuesFor, PathMustEndInFilename, RequiresAnArgument, UnknownFormatter,
};
/// Writes a DOT file containing the results of a dataflow analysis if the user requested it via
/// `rustc_mir` attributes and `-Z dump-mir-dataflow`. The `Result` in and the `Results` out are
@ -43,10 +41,7 @@ where
use std::io::Write;
let def_id = body.source.def_id();
let Ok(attrs) = RustcMirAttrs::parse(tcx, def_id) else {
// Invalid `rustc_mir` attrs are reported in `RustcMirAttrs::parse`
return Ok(());
};
let attrs = RustcMirAttrs::parse(tcx, def_id);
let file = try {
match attrs.output_path(A::NAME) {
@ -72,10 +67,7 @@ where
Err(e) => return Err(e),
};
let style = match attrs.formatter {
Some(sym::two_phase) => OutputStyle::BeforeAndAfter,
_ => OutputStyle::AfterOnly,
};
let style = attrs.formatter.unwrap_or(OutputStyle::AfterOnly);
let mut buf = Vec::new();
@ -98,71 +90,33 @@ where
#[derive(Default)]
struct RustcMirAttrs {
basename_and_suffix: Option<PathBuf>,
formatter: Option<Symbol>,
formatter: Option<OutputStyle>,
}
impl RustcMirAttrs {
fn parse(tcx: TyCtxt<'_>, def_id: DefId) -> Result<Self, ()> {
let mut result = Ok(());
fn parse(tcx: TyCtxt<'_>, def_id: DefId) -> Self {
let mut ret = RustcMirAttrs::default();
let rustc_mir_attrs = tcx
.get_attrs(def_id, sym::rustc_mir)
.flat_map(|attr| attr.meta_item_list().into_iter().flat_map(|v| v.into_iter()));
for attr in rustc_mir_attrs {
let attr_result = match attr.name() {
Some(name @ sym::borrowck_graphviz_postflow) => {
Self::set_field(&mut ret.basename_and_suffix, tcx, name, &attr, |s| {
let path = PathBuf::from(s.to_string());
match path.file_name() {
Some(_) => Ok(path),
None => {
tcx.dcx().emit_err(PathMustEndInFilename { span: attr.span() });
Err(())
let attrs = tcx.get_all_attrs(def_id);
if let Some(rustc_mir_attrs) = find_attr!(attrs, AttributeKind::RustcMir(kind) => kind) {
for attr in rustc_mir_attrs {
match attr {
RustcMirKind::BorrowckGraphvizPostflow { path } => {
ret.basename_and_suffix = Some(path.clone());
}
RustcMirKind::BorrowckGraphvizFormat { format } => {
ret.formatter = match format {
BorrowckGraphvizFormatKind::TwoPhase => {
Some(OutputStyle::BeforeAndAfter)
}
}
})
}
Some(name @ sym::borrowck_graphviz_format) => {
Self::set_field(&mut ret.formatter, tcx, name, &attr, |s| match s {
sym::two_phase => Ok(s),
_ => {
tcx.dcx().emit_err(UnknownFormatter { span: attr.span() });
Err(())
}
})
}
_ => Ok(()),
};
result = result.and(attr_result);
};
}
_ => (),
};
}
}
result.map(|()| ret)
}
fn set_field<T>(
field: &mut Option<T>,
tcx: TyCtxt<'_>,
name: Symbol,
attr: &ast::MetaItemInner,
mapper: impl FnOnce(Symbol) -> Result<T, ()>,
) -> Result<(), ()> {
if field.is_some() {
tcx.dcx().emit_err(DuplicateValuesFor { span: attr.span(), name });
return Err(());
}
if let Some(s) = attr.value_str() {
*field = Some(mapper(s)?);
Ok(())
} else {
tcx.dcx()
.emit_err(RequiresAnArgument { span: attr.span(), name: attr.name().unwrap() });
Err(())
}
ret
}
/// Returns the path where dataflow results should be written, or `None`

View file

@ -1,8 +1,8 @@
use rustc_ast::MetaItem;
use rustc_hir::attrs::{AttributeKind, RustcMirKind};
use rustc_hir::find_attr;
use rustc_middle::mir::{self, Body, Local, Location};
use rustc_middle::ty::{self, Ty, TyCtxt};
use rustc_span::def_id::DefId;
use rustc_span::{Span, Symbol, sym};
use rustc_span::{Span, sym};
use tracing::{debug, info};
use crate::errors::{
@ -14,52 +14,37 @@ use crate::impls::{MaybeInitializedPlaces, MaybeLiveLocals, MaybeUninitializedPl
use crate::move_paths::{HasMoveData, LookupResult, MoveData, MovePathIndex};
use crate::{Analysis, JoinSemiLattice, ResultsCursor};
fn has_rustc_mir_with(tcx: TyCtxt<'_>, def_id: DefId, name: Symbol) -> Option<MetaItem> {
for attr in tcx.get_attrs(def_id, sym::rustc_mir) {
let items = attr.meta_item_list();
for item in items.iter().flat_map(|l| l.iter()) {
match item.meta_item() {
Some(mi) if mi.has_name(name) => return Some(mi.clone()),
_ => continue,
}
}
}
None
}
pub fn sanity_check<'tcx>(tcx: TyCtxt<'tcx>, body: &Body<'tcx>) {
let def_id = body.source.def_id();
if !tcx.has_attr(def_id, sym::rustc_mir) {
debug!("skipping rustc_peek::SanityCheck on {}", tcx.def_path_str(def_id));
return;
} else {
let attrs = tcx.get_all_attrs(def_id);
if let Some(kind) = find_attr!(attrs, AttributeKind::RustcMir(kind) => kind) {
let move_data = MoveData::gather_moves(body, tcx, |_| true);
debug!("running rustc_peek::SanityCheck on {}", tcx.def_path_str(def_id));
}
if kind.contains(&RustcMirKind::PeekMaybeInit) {
let flow_inits = MaybeInitializedPlaces::new(tcx, body, &move_data)
.iterate_to_fixpoint(tcx, body, None)
.into_results_cursor(body);
sanity_check_via_rustc_peek(tcx, flow_inits);
}
let move_data = MoveData::gather_moves(body, tcx, |_| true);
if kind.contains(&RustcMirKind::PeekMaybeUninit) {
let flow_uninits = MaybeUninitializedPlaces::new(tcx, body, &move_data)
.iterate_to_fixpoint(tcx, body, None)
.into_results_cursor(body);
sanity_check_via_rustc_peek(tcx, flow_uninits);
}
if has_rustc_mir_with(tcx, def_id, sym::rustc_peek_maybe_init).is_some() {
let flow_inits = MaybeInitializedPlaces::new(tcx, body, &move_data)
.iterate_to_fixpoint(tcx, body, None)
.into_results_cursor(body);
sanity_check_via_rustc_peek(tcx, flow_inits);
}
if kind.contains(&RustcMirKind::PeekLiveness) {
let flow_liveness =
MaybeLiveLocals.iterate_to_fixpoint(tcx, body, None).into_results_cursor(body);
sanity_check_via_rustc_peek(tcx, flow_liveness);
}
if has_rustc_mir_with(tcx, def_id, sym::rustc_peek_maybe_uninit).is_some() {
let flow_uninits = MaybeUninitializedPlaces::new(tcx, body, &move_data)
.iterate_to_fixpoint(tcx, body, None)
.into_results_cursor(body);
sanity_check_via_rustc_peek(tcx, flow_uninits);
}
if has_rustc_mir_with(tcx, def_id, sym::rustc_peek_liveness).is_some() {
let flow_liveness =
MaybeLiveLocals.iterate_to_fixpoint(tcx, body, None).into_results_cursor(body);
sanity_check_via_rustc_peek(tcx, flow_liveness);
}
if has_rustc_mir_with(tcx, def_id, sym::stop_after_dataflow).is_some() {
tcx.dcx().emit_fatal(StopAfterDataFlowEndedCompilation);
if kind.contains(&RustcMirKind::StopAfterDataflow) {
tcx.dcx().emit_fatal(StopAfterDataFlowEndedCompilation);
}
} else {
debug!("skipping rustc_peek::SanityCheck on {}", tcx.def_path_str(def_id));
}
}

View file

@ -55,12 +55,10 @@ pub(super) fn extract_refined_covspans<'tcx>(
}
// Each pushed covspan should have the same context as the body span.
// If it somehow doesn't, discard the covspan, or panic in debug builds.
// If it somehow doesn't, discard the covspan.
if !body_span.eq_ctxt(covspan_span) {
debug_assert!(
false,
"span context mismatch: body_span={body_span:?}, covspan.span={covspan_span:?}"
);
// FIXME(Zalathar): Investigate how and why this is triggered
// by `tests/coverage/macros/context-mismatch-issue-147339.rs`.
return false;
}

View file

@ -1,8 +1,8 @@
//! Performs various peephole optimizations.
use rustc_abi::ExternAbi;
use rustc_ast::attr;
use rustc_hir::LangItem;
use rustc_hir::attrs::AttributeKind;
use rustc_hir::{LangItem, find_attr};
use rustc_middle::bug;
use rustc_middle::mir::visit::MutVisitor;
use rustc_middle::mir::*;
@ -31,7 +31,7 @@ impl<'tcx> crate::MirPass<'tcx> for InstSimplify {
fn run_pass(&self, tcx: TyCtxt<'tcx>, body: &mut Body<'tcx>) {
let preserve_ub_checks =
attr::contains_name(tcx.hir_krate_attrs(), sym::rustc_preserve_ub_checks);
find_attr!(tcx.hir_krate_attrs(), AttributeKind::RustcPreserveUbChecks);
if !preserve_ub_checks {
SimplifyUbCheck { tcx }.visit_body(body);
}

View file

@ -1243,9 +1243,12 @@ struct TransferFunction<'a, 'tcx> {
impl<'tcx> Visitor<'tcx> for TransferFunction<'_, 'tcx> {
fn visit_statement(&mut self, statement: &Statement<'tcx>, location: Location) {
match statement.kind {
// `ForLet(None)` fake read erroneously marks the just-assigned local as live.
// This defeats the purpose of the analysis for `let` bindings.
StatementKind::FakeRead(box (FakeReadCause::ForLet(None), _)) => return,
// `ForLet(None)` and `ForGuardBinding` fake reads erroneously mark the just-assigned
// locals as live. This defeats the purpose of the analysis for such bindings.
StatementKind::FakeRead(box (
FakeReadCause::ForLet(None) | FakeReadCause::ForGuardBinding,
_,
)) => return,
// Handle self-assignment by restricting the read/write they do.
StatementKind::Assign(box (ref dest, ref rvalue))
if self.self_assignment.contains(&location) =>

View file

@ -299,6 +299,7 @@ impl<'tcx> CheckAttrVisitor<'tcx> {
| AttributeKind::RustcDumpVtable(..)
| AttributeKind::RustcDynIncompatibleTrait(..)
| AttributeKind::RustcHasIncoherentInherentImpls
| AttributeKind::RustcHiddenTypeOfOpaques
| AttributeKind::RustcLayout(..)
| AttributeKind::RustcLayoutScalarValidRangeEnd(..)
| AttributeKind::RustcLayoutScalarValidRangeStart(..)
@ -308,6 +309,7 @@ impl<'tcx> CheckAttrVisitor<'tcx> {
| AttributeKind::RustcLintUntrackedQueryInformation
| AttributeKind::RustcMacroTransparency(_)
| AttributeKind::RustcMain
| AttributeKind::RustcMir(_)
| AttributeKind::RustcNeverReturnsNullPointer
| AttributeKind::RustcNoImplicitAutorefs
| AttributeKind::RustcNonConstTraitMethod
@ -318,6 +320,7 @@ impl<'tcx> CheckAttrVisitor<'tcx> {
| AttributeKind::RustcParenSugar(..)
| AttributeKind::RustcPassByValue (..)
| AttributeKind::RustcPassIndirectlyInNonRusticAbis(..)
| AttributeKind::RustcPreserveUbChecks
| AttributeKind::RustcReallocator
| AttributeKind::RustcScalableVector { .. }
| AttributeKind::RustcShouldNotBeCalledOnConstItems(..)
@ -390,7 +393,6 @@ impl<'tcx> CheckAttrVisitor<'tcx> {
| sym::rustc_capture_analysis
| sym::rustc_regions
| sym::rustc_strict_coherence
| sym::rustc_hidden_type_of_opaques
| sym::rustc_mir
| sym::rustc_effective_visibility
| sym::rustc_outlives
@ -406,8 +408,7 @@ impl<'tcx> CheckAttrVisitor<'tcx> {
| sym::register_tool
| sym::rustc_no_implicit_bounds
| sym::test_runner
| sym::reexport_test_harness_main
| sym::rustc_preserve_ub_checks,
| sym::reexport_test_harness_main,
..
] => {}
[name, rest@..] => {

View file

@ -2,10 +2,13 @@
// tidy-alphabetical-start
#![allow(internal_features)]
#![feature(adt_const_params)]
#![feature(min_specialization)]
#![feature(rustc_attrs)]
// tidy-alphabetical-end
use std::marker::ConstParamTy;
use rustc_data_structures::stable_hasher::HashStable;
use rustc_data_structures::sync::AtomicU64;
use rustc_middle::arena::Arena;
@ -35,29 +38,34 @@ pub use crate::plumbing::{QueryCtxt, query_key_hash_verify_all};
mod profiling_support;
pub use self::profiling_support::alloc_self_profile_query_strings;
#[derive(ConstParamTy)] // Allow this struct to be used for const-generic values.
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
struct QueryFlags {
/// True if this query has the `anon` modifier.
is_anon: bool,
/// True if this query has the `depth_limit` modifier.
is_depth_limit: bool,
/// True if this query has the `feedable` modifier.
is_feedable: bool,
}
/// Combines a [`QueryVTable`] with some additional compile-time booleans
/// to implement [`QueryDispatcher`], for use by code in [`rustc_query_system`].
///
/// Baking these boolean flags into the type gives a modest but measurable
/// improvement to compiler perf and compiler code size; see
/// <https://github.com/rust-lang/rust/pull/151633>.
struct SemiDynamicQueryDispatcher<
'tcx,
C: QueryCache,
const ANON: bool,
const DEPTH_LIMIT: bool,
const FEEDABLE: bool,
> {
struct SemiDynamicQueryDispatcher<'tcx, C: QueryCache, const FLAGS: QueryFlags> {
vtable: &'tcx QueryVTable<'tcx, C>,
}
// Manually implement Copy/Clone, because deriving would put trait bounds on the cache type.
impl<'tcx, C: QueryCache, const ANON: bool, const DEPTH_LIMIT: bool, const FEEDABLE: bool> Copy
for SemiDynamicQueryDispatcher<'tcx, C, ANON, DEPTH_LIMIT, FEEDABLE>
impl<'tcx, C: QueryCache, const FLAGS: QueryFlags> Copy
for SemiDynamicQueryDispatcher<'tcx, C, FLAGS>
{
}
impl<'tcx, C: QueryCache, const ANON: bool, const DEPTH_LIMIT: bool, const FEEDABLE: bool> Clone
for SemiDynamicQueryDispatcher<'tcx, C, ANON, DEPTH_LIMIT, FEEDABLE>
impl<'tcx, C: QueryCache, const FLAGS: QueryFlags> Clone
for SemiDynamicQueryDispatcher<'tcx, C, FLAGS>
{
fn clone(&self) -> Self {
*self
@ -65,8 +73,8 @@ impl<'tcx, C: QueryCache, const ANON: bool, const DEPTH_LIMIT: bool, const FEEDA
}
// This is `impl QueryDispatcher for SemiDynamicQueryDispatcher`.
impl<'tcx, C: QueryCache, const ANON: bool, const DEPTH_LIMIT: bool, const FEEDABLE: bool>
QueryDispatcher<'tcx> for SemiDynamicQueryDispatcher<'tcx, C, ANON, DEPTH_LIMIT, FEEDABLE>
impl<'tcx, C: QueryCache, const FLAGS: QueryFlags> QueryDispatcher<'tcx>
for SemiDynamicQueryDispatcher<'tcx, C, FLAGS>
where
for<'a> C::Key: HashStable<StableHashingContext<'a>>,
{
@ -86,10 +94,7 @@ where
}
#[inline(always)]
fn query_state<'a>(self, qcx: QueryCtxt<'tcx>) -> &'a QueryState<'tcx, Self::Key>
where
QueryCtxt<'tcx>: 'a,
{
fn query_state(self, qcx: QueryCtxt<'tcx>) -> &'tcx QueryState<'tcx, Self::Key> {
// Safety:
// This is just manually doing the subfield referencing through pointer math.
unsafe {
@ -100,7 +105,7 @@ where
}
#[inline(always)]
fn query_cache<'a>(self, qcx: QueryCtxt<'tcx>) -> &'a Self::Cache {
fn query_cache(self, qcx: QueryCtxt<'tcx>) -> &'tcx Self::Cache {
// Safety:
// This is just manually doing the subfield referencing through pointer math.
unsafe {
@ -158,7 +163,7 @@ where
#[inline(always)]
fn anon(self) -> bool {
ANON
FLAGS.is_anon
}
#[inline(always)]
@ -168,12 +173,12 @@ where
#[inline(always)]
fn depth_limit(self) -> bool {
DEPTH_LIMIT
FLAGS.is_depth_limit
}
#[inline(always)]
fn feedable(self) -> bool {
FEEDABLE
FLAGS.is_feedable
}
#[inline(always)]
@ -216,12 +221,12 @@ trait QueryDispatcherUnerased<'tcx> {
) -> Self::UnerasedValue;
}
pub fn query_system<'a>(
pub fn query_system<'tcx>(
local_providers: Providers,
extern_providers: ExternProviders,
on_disk_cache: Option<OnDiskCache>,
incremental: bool,
) -> QuerySystem<'a> {
) -> QuerySystem<'tcx> {
QuerySystem {
states: Default::default(),
arenas: Default::default(),

View file

@ -24,7 +24,7 @@ use rustc_middle::ty::codec::TyEncoder;
use rustc_middle::ty::print::with_reduced_queries;
use rustc_middle::ty::tls::{self, ImplicitCtxt};
use rustc_middle::ty::{self, TyCtxt};
use rustc_query_system::dep_graph::{DepNodeParams, HasDepContext};
use rustc_query_system::dep_graph::{DepNodeParams, FingerprintStyle, HasDepContext};
use rustc_query_system::ich::StableHashingContext;
use rustc_query_system::query::{
QueryCache, QueryContext, QueryDispatcher, QueryJobId, QueryMap, QuerySideEffect,
@ -48,6 +48,25 @@ impl<'tcx> QueryCtxt<'tcx> {
pub fn new(tcx: TyCtxt<'tcx>) -> Self {
QueryCtxt { tcx }
}
fn depth_limit_error(self, job: QueryJobId) {
let query_map = self
.collect_active_jobs_from_all_queries(true)
.expect("failed to collect active queries");
let (info, depth) = job.find_dep_kind_root(query_map);
let suggested_limit = match self.tcx.recursion_limit() {
Limit(0) => Limit(2),
limit => limit * 2,
};
self.tcx.sess.dcx().emit_fatal(QueryOverflow {
span: info.job.span,
note: QueryOverflowNote { desc: info.frame.info.extract().description, depth },
suggested_limit,
crate_name: self.tcx.crate_name(LOCAL_CRATE),
});
}
}
impl<'tcx> HasDepContext for QueryCtxt<'tcx> {
@ -81,7 +100,7 @@ impl<'tcx> QueryContext<'tcx> for QueryCtxt<'tcx> {
tls::with_related_context(self.tcx, |icx| icx.query)
}
/// Returns a map of currently active query jobs.
/// Returns a map of currently active query jobs, collected from all queries.
///
/// If `require_complete` is `true`, this function locks all shards of the
/// query results to produce a complete map, which always returns `Ok`.
@ -91,12 +110,15 @@ impl<'tcx> QueryContext<'tcx> for QueryCtxt<'tcx> {
/// Prefer passing `false` to `require_complete` to avoid potential deadlocks,
/// especially when called from within a deadlock handler, unless a
/// complete map is needed and no deadlock is possible at this call site.
fn collect_active_jobs(self, require_complete: bool) -> Result<QueryMap<'tcx>, QueryMap<'tcx>> {
fn collect_active_jobs_from_all_queries(
self,
require_complete: bool,
) -> Result<QueryMap<'tcx>, QueryMap<'tcx>> {
let mut jobs = QueryMap::default();
let mut complete = true;
for collect in super::COLLECT_ACTIVE_JOBS.iter() {
if collect(self.tcx, &mut jobs, require_complete).is_none() {
for gather_fn in crate::PER_QUERY_GATHER_ACTIVE_JOBS_FNS.iter() {
if gather_fn(self.tcx, &mut jobs, require_complete).is_none() {
complete = false;
}
}
@ -104,13 +126,6 @@ impl<'tcx> QueryContext<'tcx> for QueryCtxt<'tcx> {
if complete { Ok(jobs) } else { Err(jobs) }
}
fn lift_query_info(
self,
info: &QueryStackDeferred<'tcx>,
) -> rustc_query_system::query::QueryStackFrameExtra {
info.extract()
}
// Interactions with on_disk_cache
fn load_side_effect(
self,
@ -162,26 +177,6 @@ impl<'tcx> QueryContext<'tcx> for QueryCtxt<'tcx> {
tls::enter_context(&new_icx, compute)
})
}
fn depth_limit_error(self, job: QueryJobId) {
let query_map = self.collect_active_jobs(true).expect("failed to collect active queries");
let (info, depth) = job.find_dep_kind_root(query_map);
let suggested_limit = match self.tcx.recursion_limit() {
Limit(0) => Limit(2),
limit => limit * 2,
};
self.tcx.sess.dcx().emit_fatal(QueryOverflow {
span: info.job.span,
note: QueryOverflowNote {
desc: self.lift_query_info(&info.query.info).description,
depth,
},
suggested_limit,
crate_name: self.tcx.crate_name(LOCAL_CRATE),
});
}
}
pub(super) fn try_mark_green<'tcx>(tcx: TyCtxt<'tcx>, dep_node: &dep_graph::DepNode) -> bool {
@ -415,9 +410,8 @@ pub(crate) fn query_key_hash_verify<'tcx>(
) {
let _timer = qcx.tcx.prof.generic_activity_with_arg("query_key_hash_verify_for", query.name());
let mut map = UnordMap::default();
let cache = query.query_cache(qcx);
let mut map = UnordMap::with_capacity(cache.len());
cache.iter(&mut |key, _, _| {
let node = DepNode::construct(qcx.tcx, query.dep_kind(), key);
if let Some(other_key) = map.insert(node, *key) {
@ -519,7 +513,11 @@ pub(crate) fn make_dep_kind_vtable_for_query<'tcx, Q>(
where
Q: QueryDispatcherUnerased<'tcx>,
{
let fingerprint_style = <Q::Dispatcher as QueryDispatcher>::Key::fingerprint_style();
let fingerprint_style = if is_anon {
FingerprintStyle::Opaque
} else {
<Q::Dispatcher as QueryDispatcher>::Key::fingerprint_style()
};
if is_anon || !fingerprint_style.reconstructible() {
return DepKindVTable {
@ -708,14 +706,18 @@ macro_rules! define_queries {
data: PhantomData<&'tcx ()>
}
const FLAGS: QueryFlags = QueryFlags {
is_anon: is_anon!([$($modifiers)*]),
is_depth_limit: depth_limit!([$($modifiers)*]),
is_feedable: feedable!([$($modifiers)*]),
};
impl<'tcx> QueryDispatcherUnerased<'tcx> for QueryType<'tcx> {
type UnerasedValue = queries::$name::Value<'tcx>;
type Dispatcher = SemiDynamicQueryDispatcher<
'tcx,
queries::$name::Storage<'tcx>,
{ is_anon!([$($modifiers)*]) },
{ depth_limit!([$($modifiers)*]) },
{ feedable!([$($modifiers)*]) },
FLAGS,
>;
const NAME: &'static &'static str = &stringify!($name);
@ -733,22 +735,28 @@ macro_rules! define_queries {
}
}
pub(crate) fn collect_active_jobs<'tcx>(
/// Internal per-query plumbing for collecting the set of active jobs for this query.
///
/// Should only be called through `PER_QUERY_GATHER_ACTIVE_JOBS_FNS`.
pub(crate) fn gather_active_jobs<'tcx>(
tcx: TyCtxt<'tcx>,
qmap: &mut QueryMap<'tcx>,
require_complete: bool,
) -> Option<()> {
let make_query = |tcx, key| {
let make_frame = |tcx, key| {
let kind = rustc_middle::dep_graph::dep_kinds::$name;
let name = stringify!($name);
$crate::plumbing::create_query_frame(tcx, rustc_middle::query::descs::$name, key, kind, name)
};
let res = tcx.query_system.states.$name.collect_active_jobs(
// Call `gather_active_jobs_inner` to do the actual work.
let res = tcx.query_system.states.$name.gather_active_jobs_inner(
tcx,
make_query,
make_frame,
qmap,
require_complete,
);
// this can be called during unwinding, and the function has a `try_`-prefix, so
// don't `unwrap()` here, just manually check for `None` and do best-effort error
// reporting.
@ -818,10 +826,17 @@ macro_rules! define_queries {
// These arrays are used for iteration and can't be indexed by `DepKind`.
const COLLECT_ACTIVE_JOBS: &[
for<'tcx> fn(TyCtxt<'tcx>, &mut QueryMap<'tcx>, bool) -> Option<()>
] =
&[$(query_impl::$name::collect_active_jobs),*];
/// Used by `collect_active_jobs_from_all_queries` to iterate over all
/// queries, and gather the active jobs for each query.
///
/// (We arbitrarily use the word "gather" when collecting the jobs for
/// each individual query, so that we have distinct function names to
/// grep for.)
const PER_QUERY_GATHER_ACTIVE_JOBS_FNS: &[
for<'tcx> fn(TyCtxt<'tcx>, &mut QueryMap<'tcx>, require_complete: bool) -> Option<()>
] = &[
$(query_impl::$name::gather_active_jobs),*
];
const ALLOC_SELF_PROFILE_QUERY_STRINGS: &[
for<'tcx> fn(TyCtxt<'tcx>, &mut QueryKeyStringCache)

View file

@ -11,7 +11,6 @@ rustc_ast = { path = "../rustc_ast" }
rustc_data_structures = { path = "../rustc_data_structures" }
rustc_errors = { path = "../rustc_errors" }
rustc_feature = { path = "../rustc_feature" }
rustc_fluent_macro = { path = "../rustc_fluent_macro" }
rustc_hashes = { path = "../rustc_hashes" }
rustc_hir = { path = "../rustc_hir" }
rustc_index = { path = "../rustc_index" }

View file

@ -1,30 +0,0 @@
query_system_cycle = cycle detected when {$stack_bottom}
.note = see https://rustc-dev-guide.rust-lang.org/overview.html#queries and https://rustc-dev-guide.rust-lang.org/query.html for more information
query_system_cycle_recursive_trait_alias = trait aliases cannot be recursive
query_system_cycle_recursive_ty_alias = type aliases cannot be recursive
query_system_cycle_recursive_ty_alias_help1 = consider using a struct, enum, or union instead to break the cycle
query_system_cycle_recursive_ty_alias_help2 = see <https://doc.rust-lang.org/reference/types.html#recursive-types> for more information
query_system_cycle_stack_middle = ...which requires {$desc}...
query_system_cycle_stack_multiple = ...which again requires {$stack_bottom}, completing the cycle
query_system_cycle_stack_single = ...which immediately requires {$stack_bottom} again
query_system_cycle_usage = cycle used when {$usage}
query_system_increment_compilation = internal compiler error: encountered incremental compilation error with {$dep_node}
query_system_increment_compilation_note1 = please follow the instructions below to create a bug report with the provided information
query_system_increment_compilation_note2 = for incremental compilation bugs, having a reproduction is vital
query_system_increment_compilation_note3 = an ideal reproduction consists of the code before and some patch that then triggers the bug when applied and compiled again
query_system_increment_compilation_note4 = as a workaround, you can run {$run_cmd} to allow your project to compile
query_system_overflow_note = query depth increased by {$depth} when {$desc}
query_system_query_overflow = queries overflow the depth limit!
.help = consider increasing the recursion limit by adding a `#![recursion_limit = "{$suggested_limit}"]` attribute to your crate (`{$crate_name}`)
query_system_reentrant = internal compiler error: reentrant incremental verify failure, suppressing message

View file

@ -237,8 +237,9 @@ pub struct DepKindVTable<Tcx: DepContext> {
/// cached within one compiler invocation.
pub is_eval_always: bool,
/// Whether the query key can be recovered from the hashed fingerprint.
/// See [DepNodeParams] trait for the behaviour of each key type.
/// Indicates whether and how the query key can be recovered from its hashed fingerprint.
///
/// The [`DepNodeParams`] trait determines the fingerprint style for each key type.
pub fingerprint_style: FingerprintStyle,
/// The red/green evaluation system will try to mark a specific DepNode in the

View file

@ -561,12 +561,13 @@ impl<D: Deps> DepGraph<D> {
/// FIXME: If the code is changed enough for this node to be marked before requiring the
/// caller's node, we suppose that those changes will be enough to mark this node red and
/// force a recomputation using the "normal" way.
pub fn with_feed_task<Ctxt: DepContext<Deps = D>, R: Debug>(
pub fn with_feed_task<Ctxt: DepContext<Deps = D>, R>(
&self,
node: DepNode,
cx: Ctxt,
result: &R,
hash_result: Option<fn(&mut StableHashingContext<'_>, &R) -> Fingerprint>,
format_value_fn: fn(&R) -> String,
) -> DepNodeIndex {
if let Some(data) = self.data.as_ref() {
// The caller query has more dependencies than the node we are creating. We may
@ -584,7 +585,7 @@ impl<D: Deps> DepGraph<D> {
result,
prev_index,
hash_result,
|value| format!("{value:?}"),
format_value_fn,
);
#[cfg(debug_assertions)]
@ -872,6 +873,8 @@ impl<D: Deps> DepGraphData<D> {
// Return None if the dep node didn't exist in the previous session
let prev_index = self.previous.node_to_index_opt(dep_node)?;
debug_assert_eq!(self.previous.index_to_node(prev_index), dep_node);
match self.colors.get(prev_index) {
DepNodeColor::Green(dep_node_index) => Some((prev_index, dep_node_index)),
DepNodeColor::Red => None,
@ -880,7 +883,7 @@ impl<D: Deps> DepGraphData<D> {
// in the previous compilation session too, so we can try to
// mark it as green by recursively marking all of its
// dependencies green.
self.try_mark_previous_green(qcx, prev_index, dep_node, None)
self.try_mark_previous_green(qcx, prev_index, None)
.map(|dep_node_index| (prev_index, dep_node_index))
}
}
@ -928,8 +931,7 @@ impl<D: Deps> DepGraphData<D> {
dep_dep_node, dep_dep_node.hash,
);
let node_index =
self.try_mark_previous_green(qcx, parent_dep_node_index, dep_dep_node, Some(frame));
let node_index = self.try_mark_previous_green(qcx, parent_dep_node_index, Some(frame));
if node_index.is_some() {
debug!("managed to MARK dependency {dep_dep_node:?} as green");
@ -981,15 +983,15 @@ impl<D: Deps> DepGraphData<D> {
&self,
qcx: Qcx,
prev_dep_node_index: SerializedDepNodeIndex,
dep_node: &DepNode,
frame: Option<&MarkFrame<'_>>,
) -> Option<DepNodeIndex> {
let frame = MarkFrame { index: prev_dep_node_index, parent: frame };
// We never try to mark eval_always nodes as green
debug_assert!(!qcx.dep_context().is_eval_always(dep_node.kind));
debug_assert_eq!(self.previous.index_to_node(prev_dep_node_index), dep_node);
debug_assert!(
!qcx.dep_context()
.is_eval_always(self.previous.index_to_node(prev_dep_node_index).kind)
);
let prev_deps = self.previous.edge_targets_from(prev_dep_node_index);
@ -1010,7 +1012,10 @@ impl<D: Deps> DepGraphData<D> {
// ... and finally storing a "Green" entry in the color map.
// Multiple threads can all write the same color here
debug!("successfully marked {dep_node:?} as green");
debug!(
"successfully marked {:?} as green",
self.previous.index_to_node(prev_dep_node_index)
);
Some(dep_node_index)
}
}

View file

@ -39,11 +39,7 @@ pub trait DepContext: Copy {
#[inline(always)]
fn fingerprint_style(self, kind: DepKind) -> FingerprintStyle {
let vtable = self.dep_kind_vtable(kind);
if vtable.is_anon {
return FingerprintStyle::Opaque;
}
vtable.fingerprint_style
self.dep_kind_vtable(kind).fingerprint_style
}
#[inline(always)]
@ -148,6 +144,9 @@ impl<T: HasDepContext, Q: Copy> HasDepContext for (T, Q) {
}
/// Describes the contents of the fingerprint generated by a given query.
///
/// This is mainly for determining whether and how we can reconstruct a key
/// from the fingerprint.
#[derive(Debug, PartialEq, Eq, Copy, Clone)]
pub enum FingerprintStyle {
/// The fingerprint is actually a DefPathHash.
@ -156,7 +155,7 @@ pub enum FingerprintStyle {
HirId,
/// Query key was `()` or equivalent, so fingerprint is just zero.
Unit,
/// Some opaque hash.
/// The fingerprint is an opaque hash, and a key cannot be reconstructed from it.
Opaque,
}

View file

@ -4,7 +4,7 @@ use rustc_macros::{Diagnostic, Subdiagnostic};
use rustc_span::{Span, Symbol};
#[derive(Subdiagnostic)]
#[note(query_system_cycle_stack_middle)]
#[note("...which requires {$desc}...")]
pub(crate) struct CycleStack {
#[primary_span]
pub span: Span,
@ -13,24 +13,26 @@ pub(crate) struct CycleStack {
#[derive(Subdiagnostic)]
pub(crate) enum StackCount {
#[note(query_system_cycle_stack_single)]
#[note("...which immediately requires {$stack_bottom} again")]
Single,
#[note(query_system_cycle_stack_multiple)]
#[note("...which again requires {$stack_bottom}, completing the cycle")]
Multiple,
}
#[derive(Subdiagnostic)]
pub(crate) enum Alias {
#[note(query_system_cycle_recursive_ty_alias)]
#[help(query_system_cycle_recursive_ty_alias_help1)]
#[help(query_system_cycle_recursive_ty_alias_help2)]
#[note("type aliases cannot be recursive")]
#[help("consider using a struct, enum, or union instead to break the cycle")]
#[help(
"see <https://doc.rust-lang.org/reference/types.html#recursive-types> for more information"
)]
Ty,
#[note(query_system_cycle_recursive_trait_alias)]
#[note("trait aliases cannot be recursive")]
Trait,
}
#[derive(Subdiagnostic)]
#[note(query_system_cycle_usage)]
#[note("cycle used when {$usage}")]
pub(crate) struct CycleUsage {
#[primary_span]
pub span: Span,
@ -38,7 +40,7 @@ pub(crate) struct CycleUsage {
}
#[derive(Diagnostic)]
#[diag(query_system_cycle, code = E0391)]
#[diag("cycle detected when {$stack_bottom}", code = E0391)]
pub(crate) struct Cycle {
#[primary_span]
pub span: Span,
@ -51,28 +53,34 @@ pub(crate) struct Cycle {
pub alias: Option<Alias>,
#[subdiagnostic]
pub cycle_usage: Option<CycleUsage>,
#[note]
#[note(
"see https://rustc-dev-guide.rust-lang.org/overview.html#queries and https://rustc-dev-guide.rust-lang.org/query.html for more information"
)]
pub note_span: (),
}
#[derive(Diagnostic)]
#[diag(query_system_reentrant)]
#[diag("internal compiler error: reentrant incremental verify failure, suppressing message")]
pub(crate) struct Reentrant;
#[derive(Diagnostic)]
#[diag(query_system_increment_compilation)]
#[note(query_system_increment_compilation_note1)]
#[note(query_system_increment_compilation_note2)]
#[note(query_system_increment_compilation_note3)]
#[note(query_system_increment_compilation_note4)]
#[diag("internal compiler error: encountered incremental compilation error with {$dep_node}")]
#[note("please follow the instructions below to create a bug report with the provided information")]
#[note("for incremental compilation bugs, having a reproduction is vital")]
#[note(
"an ideal reproduction consists of the code before and some patch that then triggers the bug when applied and compiled again"
)]
#[note("as a workaround, you can run {$run_cmd} to allow your project to compile")]
pub(crate) struct IncrementCompilation {
pub run_cmd: String,
pub dep_node: String,
}
#[derive(Diagnostic)]
#[help]
#[diag(query_system_query_overflow)]
#[help(
"consider increasing the recursion limit by adding a `#![recursion_limit = \"{$suggested_limit}\"]` attribute to your crate (`{$crate_name}`)"
)]
#[diag("queries overflow the depth limit!")]
pub struct QueryOverflow {
#[primary_span]
pub span: Span,
@ -83,7 +91,7 @@ pub struct QueryOverflow {
}
#[derive(Subdiagnostic)]
#[note(query_system_overflow_note)]
#[note("query depth increased by {$depth} when {$desc}")]
pub struct QueryOverflowNote {
pub desc: String,
pub depth: usize,

View file

@ -14,5 +14,3 @@ mod values;
pub use error::{QueryOverflow, QueryOverflowNote};
pub use values::Value;
rustc_fluent_macro::fluent_messages! { "../messages.ftl" }

View file

@ -30,6 +30,8 @@ pub trait QueryCache: Sized {
fn complete(&self, key: Self::Key, value: Self::Value, index: DepNodeIndex);
fn iter(&self, f: &mut dyn FnMut(&Self::Key, &Self::Value, DepNodeIndex));
fn len(&self) -> usize;
}
/// In-memory cache for queries whose keys aren't suitable for any of the
@ -71,6 +73,10 @@ where
}
}
}
fn len(&self) -> usize {
self.cache.len()
}
}
/// In-memory cache for queries whose key type only has one value (e.g. `()`).
@ -107,6 +113,10 @@ where
f(&(), &value.0, value.1)
}
}
fn len(&self) -> usize {
self.cache.get().is_some().into()
}
}
/// In-memory cache for queries whose key is a [`DefId`].
@ -157,6 +167,10 @@ where
});
self.foreign.iter(f);
}
fn len(&self) -> usize {
self.local.len() + self.foreign.len()
}
}
impl<K, V> QueryCache for VecCache<K, V, DepNodeIndex>
@ -180,4 +194,8 @@ where
fn iter(&self, f: &mut dyn FnMut(&Self::Key, &Self::Value, DepNodeIndex)) {
self.iter(f)
}
fn len(&self) -> usize {
self.len()
}
}

View file

@ -25,7 +25,7 @@ type DepContextOf<'tcx, This: QueryDispatcher<'tcx>> =
/// Those types are not visible from this `rustc_query_system` crate.
///
/// "Dispatcher" should be understood as a near-synonym of "vtable".
pub trait QueryDispatcher<'tcx>: Copy {
pub trait QueryDispatcher<'tcx>: Copy + 'tcx {
fn name(self) -> &'static str;
/// Query context used by this dispatcher, i.e. `rustc_query_impl::QueryCtxt`.
@ -41,10 +41,10 @@ pub trait QueryDispatcher<'tcx>: Copy {
fn format_value(self) -> fn(&Self::Value) -> String;
// Don't use this method to access query results, instead use the methods on TyCtxt
fn query_state<'a>(self, tcx: Self::Qcx) -> &'a QueryState<'tcx, Self::Key>;
fn query_state(self, tcx: Self::Qcx) -> &'tcx QueryState<'tcx, Self::Key>;
// Don't use this method to access query results, instead use the methods on TyCtxt
fn query_cache<'a>(self, tcx: Self::Qcx) -> &'a Self::Cache;
fn query_cache(self, tcx: Self::Qcx) -> &'tcx Self::Cache;
fn will_cache_on_disk_for_key(self, tcx: DepContextOf<'tcx, Self>, key: &Self::Key) -> bool;

View file

@ -23,18 +23,17 @@ use crate::query::{QueryContext, QueryStackFrame};
pub struct QueryInfo<I> {
/// The span corresponding to the reason for which this query was required.
pub span: Span,
pub query: QueryStackFrame<I>,
pub frame: QueryStackFrame<I>,
}
impl<'tcx> QueryInfo<QueryStackDeferred<'tcx>> {
pub(crate) fn lift<Qcx: QueryContext<'tcx>>(
&self,
qcx: Qcx,
) -> QueryInfo<QueryStackFrameExtra> {
QueryInfo { span: self.span, query: self.query.lift(qcx) }
pub(crate) fn lift(&self) -> QueryInfo<QueryStackFrameExtra> {
QueryInfo { span: self.span, frame: self.frame.lift() }
}
}
/// Map from query job IDs to job information collected by
/// [`QueryContext::collect_active_jobs_from_all_queries`].
pub type QueryMap<'tcx> = FxHashMap<QueryJobId, QueryJobInfo<'tcx>>;
/// A value uniquely identifying an active query job.
@ -42,8 +41,8 @@ pub type QueryMap<'tcx> = FxHashMap<QueryJobId, QueryJobInfo<'tcx>>;
pub struct QueryJobId(pub NonZero<u64>);
impl QueryJobId {
fn query<'a, 'tcx>(self, map: &'a QueryMap<'tcx>) -> QueryStackFrame<QueryStackDeferred<'tcx>> {
map.get(&self).unwrap().query.clone()
fn frame<'a, 'tcx>(self, map: &'a QueryMap<'tcx>) -> QueryStackFrame<QueryStackDeferred<'tcx>> {
map.get(&self).unwrap().frame.clone()
}
fn span<'a, 'tcx>(self, map: &'a QueryMap<'tcx>) -> Span {
@ -61,7 +60,7 @@ impl QueryJobId {
#[derive(Clone, Debug)]
pub struct QueryJobInfo<'tcx> {
pub query: QueryStackFrame<QueryStackDeferred<'tcx>>,
pub frame: QueryStackFrame<QueryStackDeferred<'tcx>>,
pub job: QueryJob<'tcx>,
}
@ -125,7 +124,7 @@ impl QueryJobId {
while let Some(job) = current_job {
let info = query_map.get(&job).unwrap();
cycle.push(QueryInfo { span: info.job.span, query: info.query.clone() });
cycle.push(QueryInfo { span: info.job.span, frame: info.frame.clone() });
if job == *self {
cycle.reverse();
@ -140,7 +139,7 @@ impl QueryJobId {
.job
.parent
.as_ref()
.map(|parent| (info.job.span, parent.query(&query_map)));
.map(|parent| (info.job.span, parent.frame(&query_map)));
return CycleError { usage, cycle };
}
@ -158,13 +157,13 @@ impl QueryJobId {
) -> (QueryJobInfo<'tcx>, usize) {
let mut depth = 1;
let info = query_map.get(&self).unwrap();
let dep_kind = info.query.dep_kind;
let dep_kind = info.frame.dep_kind;
let mut current_id = info.job.parent;
let mut last_layout = (info.clone(), depth);
while let Some(id) = current_id {
let info = query_map.get(&id).unwrap();
if info.query.dep_kind == dep_kind {
if info.frame.dep_kind == dep_kind {
depth += 1;
last_layout = (info.clone(), depth);
}
@ -389,7 +388,7 @@ where
.iter()
.min_by_key(|v| {
let (span, query) = f(v);
let hash = query.query(query_map).hash;
let hash = query.frame(query_map).hash;
// Prefer entry points which have valid spans for nicer error messages
// We add an integer to the tuple ensuring that entry points
// with valid spans are picked first
@ -473,14 +472,14 @@ fn remove_cycle<'tcx>(
stack.rotate_left(pos);
}
let usage = usage.as_ref().map(|(span, query)| (*span, query.query(query_map)));
let usage = usage.as_ref().map(|(span, query)| (*span, query.frame(query_map)));
// Create the cycle error
let error = CycleError {
usage,
cycle: stack
.iter()
.map(|&(s, ref q)| QueryInfo { span: s, query: q.query(query_map) })
.map(|&(s, ref q)| QueryInfo { span: s, frame: q.frame(query_map) })
.collect(),
};
@ -559,7 +558,7 @@ pub fn report_cycle<'a>(
) -> Diag<'a> {
assert!(!stack.is_empty());
let span = stack[0].query.info.default_span(stack[1 % stack.len()].span);
let span = stack[0].frame.info.default_span(stack[1 % stack.len()].span);
let mut cycle_stack = Vec::new();
@ -567,9 +566,9 @@ pub fn report_cycle<'a>(
let stack_count = if stack.len() == 1 { StackCount::Single } else { StackCount::Multiple };
for i in 1..stack.len() {
let query = &stack[i].query;
let span = query.info.default_span(stack[(i + 1) % stack.len()].span);
cycle_stack.push(CycleStack { span, desc: query.info.description.to_owned() });
let frame = &stack[i].frame;
let span = frame.info.default_span(stack[(i + 1) % stack.len()].span);
cycle_stack.push(CycleStack { span, desc: frame.info.description.to_owned() });
}
let mut cycle_usage = None;
@ -581,9 +580,9 @@ pub fn report_cycle<'a>(
}
let alias =
if stack.iter().all(|entry| matches!(entry.query.info.def_kind, Some(DefKind::TyAlias))) {
if stack.iter().all(|entry| matches!(entry.frame.info.def_kind, Some(DefKind::TyAlias))) {
Some(crate::error::Alias::Ty)
} else if stack.iter().all(|entry| entry.query.info.def_kind == Some(DefKind::TraitAlias)) {
} else if stack.iter().all(|entry| entry.frame.info.def_kind == Some(DefKind::TraitAlias)) {
Some(crate::error::Alias::Trait)
} else {
None
@ -592,7 +591,7 @@ pub fn report_cycle<'a>(
let cycle_diag = crate::error::Cycle {
span,
cycle_stack,
stack_bottom: stack[0].query.info.description.to_owned(),
stack_bottom: stack[0].frame.info.description.to_owned(),
alias,
cycle_usage,
stack_count,
@ -616,7 +615,7 @@ pub fn print_query_stack<'tcx, Qcx: QueryContext<'tcx>>(
let mut count_total = 0;
// Make use of a partial query map if we fail to take locks collecting active queries.
let query_map = match qcx.collect_active_jobs(false) {
let query_map = match qcx.collect_active_jobs_from_all_queries(false) {
Ok(query_map) => query_map,
Err(query_map) => query_map,
};
@ -628,12 +627,12 @@ pub fn print_query_stack<'tcx, Qcx: QueryContext<'tcx>>(
let Some(query_info) = query_map.get(&query) else {
break;
};
let query_extra = qcx.lift_query_info(&query_info.query.info);
let query_extra = query_info.frame.info.extract();
if Some(count_printed) < limit_frames || limit_frames.is_none() {
// Only print to stderr as many stack frames as `num_frames` when present.
dcx.struct_failure_note(format!(
"#{} [{:?}] {}",
count_printed, query_info.query.dep_kind, query_extra.description
count_printed, query_info.frame.dep_kind, query_extra.description
))
.with_span(query_info.job.span)
.emit();
@ -645,7 +644,7 @@ pub fn print_query_stack<'tcx, Qcx: QueryContext<'tcx>>(
file,
"#{} [{}] {}",
count_total,
qcx.dep_context().dep_kind_vtable(query_info.query.dep_kind).name,
qcx.dep_context().dep_kind_vtable(query_info.frame.dep_kind).name,
query_extra.description
);
}

View file

@ -70,9 +70,9 @@ impl<'tcx> QueryStackFrame<QueryStackDeferred<'tcx>> {
Self { info, def_id, dep_kind, hash, def_id_for_ty_in_cycle }
}
fn lift<Qcx: QueryContext<'tcx>>(&self, qcx: Qcx) -> QueryStackFrame<QueryStackFrameExtra> {
fn lift(&self) -> QueryStackFrame<QueryStackFrameExtra> {
QueryStackFrame {
info: qcx.lift_query_info(&self.info),
info: self.info.extract(),
dep_kind: self.dep_kind,
hash: self.hash,
def_id: self.def_id,
@ -166,9 +166,10 @@ pub trait QueryContext<'tcx>: HasDepContext {
/// Get the query information from the TLS context.
fn current_query_job(self) -> Option<QueryJobId>;
fn collect_active_jobs(self, require_complete: bool) -> Result<QueryMap<'tcx>, QueryMap<'tcx>>;
fn lift_query_info(self, info: &QueryStackDeferred<'tcx>) -> QueryStackFrameExtra;
fn collect_active_jobs_from_all_queries(
self,
require_complete: bool,
) -> Result<QueryMap<'tcx>, QueryMap<'tcx>>;
/// Load a side effect associated to the node in the previous session.
fn load_side_effect(
@ -183,6 +184,4 @@ pub trait QueryContext<'tcx>: HasDepContext {
/// new query job while it executes.
fn start_query<R>(self, token: QueryJobId, depth_limit: bool, compute: impl FnOnce() -> R)
-> R;
fn depth_limit_error(self, job: QueryJobId);
}

View file

@ -11,7 +11,6 @@ use rustc_data_structures::fingerprint::Fingerprint;
use rustc_data_structures::hash_table::{self, Entry, HashTable};
use rustc_data_structures::sharded::{self, Sharded};
use rustc_data_structures::stack::ensure_sufficient_stack;
use rustc_data_structures::sync::LockGuard;
use rustc_data_structures::{outline, sync};
use rustc_errors::{Diag, FatalError, StashKey};
use rustc_span::{DUMMY_SP, Span};
@ -79,40 +78,46 @@ where
self.active.lock_shards().all(|shard| shard.is_empty())
}
pub fn collect_active_jobs<Qcx: Copy>(
/// Internal plumbing for collecting the set of active jobs for this query.
///
/// Should only be called from `gather_active_jobs`.
pub fn gather_active_jobs_inner<Qcx: Copy>(
&self,
qcx: Qcx,
make_query: fn(Qcx, K) -> QueryStackFrame<QueryStackDeferred<'tcx>>,
make_frame: fn(Qcx, K) -> QueryStackFrame<QueryStackDeferred<'tcx>>,
jobs: &mut QueryMap<'tcx>,
require_complete: bool,
) -> Option<()> {
let mut active = Vec::new();
let mut collect = |iter: LockGuard<'_, HashTable<(K, ActiveKeyStatus<'tcx>)>>| {
for (k, v) in iter.iter() {
// Helper to gather active jobs from a single shard.
let mut gather_shard_jobs = |shard: &HashTable<(K, ActiveKeyStatus<'tcx>)>| {
for (k, v) in shard.iter() {
if let ActiveKeyStatus::Started(ref job) = *v {
active.push((*k, job.clone()));
}
}
};
// Lock shards and gather jobs from each shard.
if require_complete {
for shard in self.active.lock_shards() {
collect(shard);
gather_shard_jobs(&shard);
}
} else {
// We use try_lock_shards here since we are called from the
// deadlock handler, and this shouldn't be locked.
for shard in self.active.try_lock_shards() {
collect(shard?);
let shard = shard?;
gather_shard_jobs(&shard);
}
}
// Call `make_query` while we're not holding a `self.active` lock as `make_query` may call
// Call `make_frame` while we're not holding a `self.active` lock as `make_frame` may call
// queries leading to a deadlock.
for (key, job) in active {
let query = make_query(qcx, key);
jobs.insert(job.id, QueryJobInfo { query, job });
let frame = make_frame(qcx, key);
jobs.insert(job.id, QueryJobInfo { frame, job });
}
Some(())
@ -127,11 +132,11 @@ impl<'tcx, K> Default for QueryState<'tcx, K> {
/// A type representing the responsibility to execute the job in the `job` field.
/// This will poison the relevant query if dropped.
struct JobOwner<'a, 'tcx, K>
struct JobOwner<'tcx, K>
where
K: Eq + Hash + Copy,
{
state: &'a QueryState<'tcx, K>,
state: &'tcx QueryState<'tcx, K>,
key: K,
}
@ -170,7 +175,7 @@ where
}
CycleErrorHandling::Stash => {
let guar = if let Some(root) = cycle_error.cycle.first()
&& let Some(span) = root.query.info.span
&& let Some(span) = root.frame.info.span
{
error.stash(span, StashKey::Cycle).unwrap()
} else {
@ -181,7 +186,7 @@ where
}
}
impl<'a, 'tcx, K> JobOwner<'a, 'tcx, K>
impl<'tcx, K> JobOwner<'tcx, K>
where
K: Eq + Hash + Copy,
{
@ -218,7 +223,7 @@ where
}
}
impl<'a, 'tcx, K> Drop for JobOwner<'a, 'tcx, K>
impl<'tcx, K> Drop for JobOwner<'tcx, K>
where
K: Eq + Hash + Copy,
{
@ -253,10 +258,10 @@ pub struct CycleError<I = QueryStackFrameExtra> {
}
impl<'tcx> CycleError<QueryStackDeferred<'tcx>> {
fn lift<Qcx: QueryContext<'tcx>>(&self, qcx: Qcx) -> CycleError<QueryStackFrameExtra> {
fn lift(&self) -> CycleError<QueryStackFrameExtra> {
CycleError {
usage: self.usage.as_ref().map(|(span, frame)| (*span, frame.lift(qcx))),
cycle: self.cycle.iter().map(|info| info.lift(qcx)).collect(),
usage: self.usage.as_ref().map(|(span, frame)| (*span, frame.lift())),
cycle: self.cycle.iter().map(|info| info.lift()).collect(),
}
}
}
@ -294,10 +299,13 @@ where
{
// Ensure there was no errors collecting all active jobs.
// We need the complete map to ensure we find a cycle to break.
let query_map = qcx.collect_active_jobs(false).ok().expect("failed to collect active queries");
let query_map = qcx
.collect_active_jobs_from_all_queries(false)
.ok()
.expect("failed to collect active queries");
let error = try_execute.find_cycle_in_stack(query_map, &qcx.current_query_job(), span);
(mk_cycle(query, qcx, error.lift(qcx)), None)
(mk_cycle(query, qcx, error.lift()), None)
}
#[inline(always)]
@ -345,7 +353,7 @@ where
(v, Some(index))
}
Err(cycle) => (mk_cycle(query, qcx, cycle.lift(qcx)), None),
Err(cycle) => (mk_cycle(query, qcx, cycle.lift()), None),
}
}
@ -422,7 +430,7 @@ where
fn execute_job<'tcx, Q, const INCR: bool>(
query: Q,
qcx: Q::Qcx,
state: &QueryState<'tcx, Q::Key>,
state: &'tcx QueryState<'tcx, Q::Key>,
key: Q::Key,
key_hash: u64,
id: QueryJobId,

View file

@ -24,15 +24,15 @@ use rustc_middle::ty::TyCtxt;
use rustc_session::Session;
use rustc_session::lint::BuiltinLintDiag;
use rustc_session::lint::builtin::{
ABSOLUTE_PATHS_NOT_STARTING_WITH_CRATE, AMBIGUOUS_GLOB_IMPORTS, AMBIGUOUS_PANIC_IMPORTS,
MACRO_EXPANDED_MACRO_EXPORTS_ACCESSED_BY_ABSOLUTE_PATHS,
ABSOLUTE_PATHS_NOT_STARTING_WITH_CRATE, AMBIGUOUS_GLOB_IMPORTS, AMBIGUOUS_IMPORT_VISIBILITIES,
AMBIGUOUS_PANIC_IMPORTS, MACRO_EXPANDED_MACRO_EXPORTS_ACCESSED_BY_ABSOLUTE_PATHS,
};
use rustc_session::utils::was_invoked_from_cargo;
use rustc_span::edit_distance::find_best_match_for_name;
use rustc_span::edition::Edition;
use rustc_span::hygiene::MacroKind;
use rustc_span::source_map::{SourceMap, Spanned};
use rustc_span::{BytePos, DUMMY_SP, Ident, Span, Symbol, SyntaxContext, kw, sym};
use rustc_span::{BytePos, Ident, Span, Symbol, SyntaxContext, kw, sym};
use thin_vec::{ThinVec, thin_vec};
use tracing::{debug, instrument};
@ -41,6 +41,7 @@ use crate::errors::{
ExplicitUnsafeTraits, MacroDefinedLater, MacroRulesNot, MacroSuggMovePosition,
MaybeMissingMacroRulesName,
};
use crate::hygiene::Macros20NormalizedSyntaxContext;
use crate::imports::{Import, ImportKind};
use crate::late::{DiagMetadata, PatternSource, Rib};
use crate::{
@ -144,6 +145,7 @@ impl<'ra, 'tcx> Resolver<'ra, 'tcx> {
};
let lint = match ambiguity_warning {
_ if ambiguity_error.ambig_vis.is_some() => AMBIGUOUS_IMPORT_VISIBILITIES,
AmbiguityWarning::GlobImport => AMBIGUOUS_GLOB_IMPORTS,
AmbiguityWarning::PanicImport => AMBIGUOUS_PANIC_IMPORTS,
};
@ -1163,11 +1165,11 @@ impl<'ra, 'tcx> Resolver<'ra, 'tcx> {
suggestions: &mut Vec<TypoSuggestion>,
scope_set: ScopeSet<'ra>,
ps: &ParentScope<'ra>,
ctxt: SyntaxContext,
sp: Span,
filter_fn: &impl Fn(Res) -> bool,
) {
let ctxt = DUMMY_SP.with_ctxt(ctxt);
self.cm().visit_scopes(scope_set, ps, ctxt, None, |this, scope, use_prelude, _| {
let ctxt = Macros20NormalizedSyntaxContext::new(sp.ctxt());
self.cm().visit_scopes(scope_set, ps, ctxt, sp, None, |this, scope, use_prelude, _| {
match scope {
Scope::DeriveHelpers(expn_id) => {
let res = Res::NonMacroAttr(NonMacroAttrKind::DeriveHelper);
@ -1269,8 +1271,13 @@ impl<'ra, 'tcx> Resolver<'ra, 'tcx> {
filter_fn: &impl Fn(Res) -> bool,
) -> Option<TypoSuggestion> {
let mut suggestions = Vec::new();
let ctxt = ident.span.ctxt();
self.add_scope_set_candidates(&mut suggestions, scope_set, parent_scope, ctxt, filter_fn);
self.add_scope_set_candidates(
&mut suggestions,
scope_set,
parent_scope,
ident.span,
filter_fn,
);
// Make sure error reporting is deterministic.
suggestions.sort_by(|a, b| a.candidate.as_str().cmp(b.candidate.as_str()));
@ -1989,7 +1996,8 @@ impl<'ra, 'tcx> Resolver<'ra, 'tcx> {
}
fn ambiguity_diagnostic(&self, ambiguity_error: &AmbiguityError<'ra>) -> errors::Ambiguity {
let AmbiguityError { kind, ident, b1, b2, scope1, scope2, .. } = *ambiguity_error;
let AmbiguityError { kind, ambig_vis, ident, b1, b2, scope1, scope2, .. } =
*ambiguity_error;
let extern_prelude_ambiguity = || {
// Note: b1 may come from a module scope, as an extern crate item in module.
matches!(scope2, Scope::ExternPreludeFlags)
@ -2068,9 +2076,18 @@ impl<'ra, 'tcx> Resolver<'ra, 'tcx> {
None
};
let ambig_vis = ambig_vis.map(|(vis1, vis2)| {
format!(
"{} or {}",
vis1.to_string(CRATE_DEF_ID, self.tcx),
vis2.to_string(CRATE_DEF_ID, self.tcx)
)
});
errors::Ambiguity {
ident,
help,
ambig_vis,
kind: kind.descr(),
b1_note,
b1_help_msgs,

View file

@ -1463,6 +1463,7 @@ pub(crate) struct UnknownDiagnosticAttributeTypoSugg {
// FIXME: Make this properly translatable.
pub(crate) struct Ambiguity {
pub ident: Ident,
pub ambig_vis: Option<String>,
pub kind: &'static str,
pub help: Option<&'static [&'static str]>,
pub b1_note: Spanned<String>,
@ -1473,8 +1474,12 @@ pub(crate) struct Ambiguity {
impl Ambiguity {
fn decorate<'a>(self, diag: &mut Diag<'a, impl EmissionGuarantee>) {
diag.primary_message(format!("`{}` is ambiguous", self.ident));
diag.span_label(self.ident.span, "ambiguous name");
if let Some(ambig_vis) = self.ambig_vis {
diag.primary_message(format!("ambiguous import visibility: {ambig_vis}"));
} else {
diag.primary_message(format!("`{}` is ambiguous", self.ident));
diag.span_label(self.ident.span, "ambiguous name");
}
diag.note(format!("ambiguous because of {}", self.kind));
diag.span_note(self.b1_note.span, self.b1_note.node);
if let Some(help) = self.help {

View file

@ -5,6 +5,7 @@ use Namespace::*;
use rustc_ast::{self as ast, NodeId};
use rustc_errors::ErrorGuaranteed;
use rustc_hir::def::{DefKind, MacroKinds, Namespace, NonMacroAttrKind, PartialRes, PerNS};
use rustc_middle::ty::Visibility;
use rustc_middle::{bug, span_bug};
use rustc_session::lint::builtin::PROC_MACRO_DERIVE_RESOLUTION_FALLBACK;
use rustc_session::parse::feature_err;
@ -54,9 +55,8 @@ impl<'ra, 'tcx> Resolver<'ra, 'tcx> {
mut self: CmResolver<'r, 'ra, 'tcx>,
scope_set: ScopeSet<'ra>,
parent_scope: &ParentScope<'ra>,
// Location of the span is not significant, but pass a `Span` instead of `SyntaxContext`
// to avoid extracting and re-packaging the syntax context unnecessarily.
orig_ctxt: Span,
mut ctxt: Macros20NormalizedSyntaxContext,
orig_ident_span: Span,
derive_fallback_lint_id: Option<NodeId>,
mut visitor: impl FnMut(
CmResolver<'_, 'ra, 'tcx>,
@ -128,7 +128,6 @@ impl<'ra, 'tcx> Resolver<'ra, 'tcx> {
TypeNS | ValueNS => Scope::ModuleNonGlobs(module, None),
MacroNS => Scope::DeriveHelpers(parent_scope.expansion),
};
let mut ctxt = Macros20NormalizedSyntaxContext::new(orig_ctxt.ctxt());
let mut use_prelude = !module.no_implicit_prelude;
loop {
@ -153,7 +152,7 @@ impl<'ra, 'tcx> Resolver<'ra, 'tcx> {
true
}
Scope::ModuleNonGlobs(..) | Scope::ModuleGlobs(..) => true,
Scope::MacroUsePrelude => use_prelude || orig_ctxt.edition().is_rust_2015(),
Scope::MacroUsePrelude => use_prelude || orig_ident_span.is_rust_2015(),
Scope::BuiltinAttrs => true,
Scope::ExternPreludeItems | Scope::ExternPreludeFlags => {
use_prelude || module_and_extern_prelude || extern_prelude
@ -396,9 +395,30 @@ impl<'ra, 'tcx> Resolver<'ra, 'tcx> {
finalize: Option<Finalize>,
ignore_decl: Option<Decl<'ra>>,
ignore_import: Option<Import<'ra>>,
) -> Result<Decl<'ra>, Determinacy> {
self.resolve_ident_in_scope_set_inner(
IdentKey::new(orig_ident),
orig_ident.span,
scope_set,
parent_scope,
finalize,
ignore_decl,
ignore_import,
)
}
fn resolve_ident_in_scope_set_inner<'r>(
self: CmResolver<'r, 'ra, 'tcx>,
ident: IdentKey,
orig_ident_span: Span,
scope_set: ScopeSet<'ra>,
parent_scope: &ParentScope<'ra>,
finalize: Option<Finalize>,
ignore_decl: Option<Decl<'ra>>,
ignore_import: Option<Import<'ra>>,
) -> Result<Decl<'ra>, Determinacy> {
// Make sure `self`, `super` etc produce an error when passed to here.
if !matches!(scope_set, ScopeSet::Module(..)) && orig_ident.is_path_segment_keyword() {
if !matches!(scope_set, ScopeSet::Module(..)) && ident.name.is_path_segment_keyword() {
return Err(Determinacy::Determined);
}
@ -432,13 +452,14 @@ impl<'ra, 'tcx> Resolver<'ra, 'tcx> {
let break_result = self.visit_scopes(
scope_set,
parent_scope,
orig_ident.span,
ident.ctxt,
orig_ident_span,
derive_fallback_lint_id,
|mut this, scope, use_prelude, ctxt| {
let ident = IdentKey { name: orig_ident.name, ctxt };
let ident = IdentKey { name: ident.name, ctxt };
let res = match this.reborrow().resolve_ident_in_scope(
ident,
orig_ident.span,
orig_ident_span,
ns,
scope,
use_prelude,
@ -465,20 +486,25 @@ impl<'ra, 'tcx> Resolver<'ra, 'tcx> {
// We do not need to report them if we are either in speculative resolution,
// or in late resolution when everything is already imported and expanded
// and no ambiguities exist.
if matches!(finalize, None | Some(Finalize { stage: Stage::Late, .. })) {
return ControlFlow::Break(Ok(decl));
}
let import_vis = match finalize {
None | Some(Finalize { stage: Stage::Late, .. }) => {
return ControlFlow::Break(Ok(decl));
}
Some(Finalize { import_vis, .. }) => import_vis,
};
if let Some(&(innermost_decl, _)) = innermost_results.first() {
// Found another solution, if the first one was "weak", report an error.
if this.get_mut().maybe_push_ambiguity(
orig_ident,
ident,
orig_ident_span,
ns,
scope_set,
parent_scope,
decl,
scope,
&innermost_results,
import_vis,
) {
// No need to search for more potential ambiguities, one is enough.
return ControlFlow::Break(Ok(innermost_decl));
@ -695,8 +721,9 @@ impl<'ra, 'tcx> Resolver<'ra, 'tcx> {
Scope::StdLibPrelude => {
let mut result = Err(Determinacy::Determined);
if let Some(prelude) = self.prelude
&& let Ok(decl) = self.reborrow().resolve_ident_in_scope_set(
ident.orig(orig_ident_span.with_ctxt(*ident.ctxt)),
&& let Ok(decl) = self.reborrow().resolve_ident_in_scope_set_inner(
ident,
orig_ident_span,
ScopeSet::Module(ns, prelude),
parent_scope,
None,
@ -749,19 +776,30 @@ impl<'ra, 'tcx> Resolver<'ra, 'tcx> {
fn maybe_push_ambiguity(
&mut self,
orig_ident: Ident,
ident: IdentKey,
orig_ident_span: Span,
ns: Namespace,
scope_set: ScopeSet<'ra>,
parent_scope: &ParentScope<'ra>,
decl: Decl<'ra>,
scope: Scope<'ra>,
innermost_results: &[(Decl<'ra>, Scope<'ra>)],
import_vis: Option<Visibility>,
) -> bool {
let (innermost_decl, innermost_scope) = innermost_results[0];
let (res, innermost_res) = (decl.res(), innermost_decl.res());
if res == innermost_res {
let ambig_vis = if res != innermost_res {
None
} else if let Some(import_vis) = import_vis
&& let min =
(|d: Decl<'_>| d.vis().min(import_vis.to_def_id(), self.tcx).expect_local())
&& let (min1, min2) = (min(decl), min(innermost_decl))
&& min1 != min2
{
Some((min1, min2))
} else {
return false;
}
};
// FIXME: Use `scope` instead of `res` to detect built-in attrs and derive helpers,
// it will exclude imports, make slightly more code legal, and will require lang approval.
@ -775,7 +813,7 @@ impl<'ra, 'tcx> Resolver<'ra, 'tcx> {
} else if innermost_res == derive_helper_compat {
Some(AmbiguityKind::DeriveHelper)
} else if res == derive_helper_compat && innermost_res != derive_helper {
span_bug!(orig_ident.span, "impossible inner resolution kind")
span_bug!(orig_ident_span, "impossible inner resolution kind")
} else if matches!(innermost_scope, Scope::MacroRules(_))
&& matches!(scope, Scope::ModuleNonGlobs(..) | Scope::ModuleGlobs(..))
&& !self.disambiguate_macro_rules_vs_modularized(innermost_decl, decl)
@ -790,7 +828,7 @@ impl<'ra, 'tcx> Resolver<'ra, 'tcx> {
// we visit all macro_rules scopes (e.g. textual scope macros)
// before we visit any modules (e.g. path-based scope macros)
span_bug!(
orig_ident.span,
orig_ident_span,
"ambiguous scoped macro resolutions with path-based \
scope resolution as first candidate"
)
@ -839,8 +877,8 @@ impl<'ra, 'tcx> Resolver<'ra, 'tcx> {
} else {
// Turn ambiguity errors for core vs std panic into warnings.
// FIXME: Remove with lang team approval.
let is_issue_147319_hack = orig_ident.span.edition() <= Edition::Edition2024
&& matches!(orig_ident.name, sym::panic)
let is_issue_147319_hack = orig_ident_span.edition() <= Edition::Edition2024
&& matches!(ident.name, sym::panic)
&& matches!(scope, Scope::StdLibPrelude)
&& matches!(innermost_scope, Scope::ModuleGlobs(_, _))
&& ((self.is_specific_builtin_macro(res, sym::std_panic)
@ -848,11 +886,18 @@ impl<'ra, 'tcx> Resolver<'ra, 'tcx> {
|| (self.is_specific_builtin_macro(res, sym::core_panic)
&& self.is_specific_builtin_macro(innermost_res, sym::std_panic)));
let warning = is_issue_147319_hack.then_some(AmbiguityWarning::PanicImport);
let warning = if ambig_vis.is_some() {
Some(AmbiguityWarning::GlobImport)
} else if is_issue_147319_hack {
Some(AmbiguityWarning::PanicImport)
} else {
None
};
self.ambiguity_errors.push(AmbiguityError {
kind,
ident: orig_ident,
ambig_vis,
ident: ident.orig(orig_ident_span),
b1: innermost_decl,
b2: decl,
scope1: innermost_scope,
@ -880,46 +925,6 @@ impl<'ra, 'tcx> Resolver<'ra, 'tcx> {
#[instrument(level = "debug", skip(self))]
pub(crate) fn resolve_ident_in_module<'r>(
self: CmResolver<'r, 'ra, 'tcx>,
module: ModuleOrUniformRoot<'ra>,
mut ident: Ident,
ns: Namespace,
parent_scope: &ParentScope<'ra>,
finalize: Option<Finalize>,
ignore_decl: Option<Decl<'ra>>,
ignore_import: Option<Import<'ra>>,
) -> Result<Decl<'ra>, Determinacy> {
let tmp_parent_scope;
let mut adjusted_parent_scope = parent_scope;
match module {
ModuleOrUniformRoot::Module(m) => {
if let Some(def) = ident.span.normalize_to_macros_2_0_and_adjust(m.expansion) {
tmp_parent_scope =
ParentScope { module: self.expn_def_scope(def), ..*parent_scope };
adjusted_parent_scope = &tmp_parent_scope;
}
}
ModuleOrUniformRoot::ExternPrelude => {
ident.span.normalize_to_macros_2_0_and_adjust(ExpnId::root());
}
ModuleOrUniformRoot::ModuleAndExternPrelude(..) | ModuleOrUniformRoot::CurrentScope => {
// No adjustments
}
}
self.resolve_ident_in_virt_module_unadjusted(
module,
ident,
ns,
adjusted_parent_scope,
finalize,
ignore_decl,
ignore_import,
)
}
/// Attempts to resolve `ident` in namespace `ns` of `module`.
#[instrument(level = "debug", skip(self))]
fn resolve_ident_in_virt_module_unadjusted<'r>(
self: CmResolver<'r, 'ra, 'tcx>,
module: ModuleOrUniformRoot<'ra>,
ident: Ident,
@ -930,14 +935,22 @@ impl<'ra, 'tcx> Resolver<'ra, 'tcx> {
ignore_import: Option<Import<'ra>>,
) -> Result<Decl<'ra>, Determinacy> {
match module {
ModuleOrUniformRoot::Module(module) => self.resolve_ident_in_scope_set(
ident,
ScopeSet::Module(ns, module),
parent_scope,
finalize,
ignore_decl,
ignore_import,
),
ModuleOrUniformRoot::Module(module) => {
let (ident_key, def) = IdentKey::new_adjusted(ident, module.expansion);
let adjusted_parent_scope = match def {
Some(def) => ParentScope { module: self.expn_def_scope(def), ..*parent_scope },
None => *parent_scope,
};
self.resolve_ident_in_scope_set_inner(
ident_key,
ident.span,
ScopeSet::Module(ns, module),
&adjusted_parent_scope,
finalize,
ignore_decl,
ignore_import,
)
}
ModuleOrUniformRoot::ModuleAndExternPrelude(module) => self.resolve_ident_in_scope_set(
ident,
ScopeSet::ModuleAndExternPrelude(ns, module),
@ -950,8 +963,9 @@ impl<'ra, 'tcx> Resolver<'ra, 'tcx> {
if ns != TypeNS {
Err(Determined)
} else {
self.resolve_ident_in_scope_set(
ident,
self.resolve_ident_in_scope_set_inner(
IdentKey::new_adjusted(ident, ExpnId::root()).0,
ident.span,
ScopeSet::ExternPrelude,
parent_scope,
finalize,
@ -1145,8 +1159,11 @@ impl<'ra, 'tcx> Resolver<'ra, 'tcx> {
None => return Err(ControlFlow::Continue(Undetermined)),
};
let tmp_parent_scope;
let (mut adjusted_parent_scope, mut ctxt) = (parent_scope, *ident.ctxt);
match ctxt.glob_adjust(module.expansion, glob_import.span) {
let (mut adjusted_parent_scope, mut adjusted_ident) = (parent_scope, ident);
match adjusted_ident
.ctxt
.update_unchecked(|ctxt| ctxt.glob_adjust(module.expansion, glob_import.span))
{
Some(Some(def)) => {
tmp_parent_scope =
ParentScope { module: self.expn_def_scope(def), ..*parent_scope };
@ -1155,8 +1172,9 @@ impl<'ra, 'tcx> Resolver<'ra, 'tcx> {
Some(None) => {}
None => continue,
};
let result = self.reborrow().resolve_ident_in_scope_set(
ident.orig(orig_ident_span.with_ctxt(ctxt)),
let result = self.reborrow().resolve_ident_in_scope_set_inner(
adjusted_ident,
orig_ident_span,
ScopeSet::Module(ns, module),
adjusted_parent_scope,
None,

View file

@ -1189,7 +1189,11 @@ impl<'ra, 'tcx> Resolver<'ra, 'tcx> {
ident,
ns,
&import.parent_scope,
Some(Finalize { report_private: false, ..finalize }),
Some(Finalize {
report_private: false,
import_vis: Some(import.vis),
..finalize
}),
bindings[ns].get().decl(),
Some(import),
);

View file

@ -2677,7 +2677,7 @@ impl<'ast, 'ra, 'tcx> LateResolutionVisitor<'_, 'ast, 'ra, 'tcx> {
&mut names,
ScopeSet::All(ns),
parent_scope,
ctxt,
segment.ident.span.with_ctxt(ctxt),
filter_fn,
);
break;

View file

@ -8,6 +8,7 @@
// tidy-alphabetical-start
#![allow(internal_features)]
#![cfg_attr(bootstrap, feature(ptr_as_ref_unchecked))]
#![feature(arbitrary_self_types)]
#![feature(assert_matches)]
#![feature(box_patterns)]
@ -18,7 +19,6 @@
#![feature(default_field_values)]
#![feature(if_let_guard)]
#![feature(iter_intersperse)]
#![feature(ptr_as_ref_unchecked)]
#![feature(rustc_attrs)]
#![feature(trim_prefix_suffix)]
#![recursion_limit = "256"]
@ -575,6 +575,12 @@ impl IdentKey {
IdentKey { name: ident.name, ctxt: Macros20NormalizedSyntaxContext::new(ident.span.ctxt()) }
}
#[inline]
fn new_adjusted(ident: Ident, expn_id: ExpnId) -> (IdentKey, Option<ExpnId>) {
let (ctxt, def) = Macros20NormalizedSyntaxContext::new_adjusted(ident.span.ctxt(), expn_id);
(IdentKey { name: ident.name, ctxt }, def)
}
#[inline]
fn with_root_ctxt(name: Symbol) -> Self {
let ctxt = Macros20NormalizedSyntaxContext::new_unchecked(SyntaxContext::root());
@ -963,6 +969,7 @@ enum AmbiguityWarning {
struct AmbiguityError<'ra> {
kind: AmbiguityKind,
ambig_vis: Option<(Visibility, Visibility)>,
ident: Ident,
b1: Decl<'ra>,
b2: Decl<'ra>,
@ -1923,7 +1930,7 @@ impl<'ra, 'tcx> Resolver<'ra, 'tcx> {
&mut self,
current_trait: Option<Module<'ra>>,
parent_scope: &ParentScope<'ra>,
ctxt: Span,
sp: Span,
assoc_item: Option<(Symbol, Namespace)>,
) -> Vec<TraitCandidate> {
let mut found_traits = Vec::new();
@ -1940,7 +1947,8 @@ impl<'ra, 'tcx> Resolver<'ra, 'tcx> {
}
let scope_set = ScopeSet::All(TypeNS);
self.cm().visit_scopes(scope_set, parent_scope, ctxt, None, |mut this, scope, _, _| {
let ctxt = Macros20NormalizedSyntaxContext::new(sp.ctxt());
self.cm().visit_scopes(scope_set, parent_scope, ctxt, sp, None, |mut this, scope, _, _| {
match scope {
Scope::ModuleNonGlobs(module, _) => {
this.get_mut().traits_in_module(module, assoc_item, &mut found_traits);
@ -2080,6 +2088,7 @@ impl<'ra, 'tcx> Resolver<'ra, 'tcx> {
if let Some(b2) = used_decl.ambiguity.get() {
let ambiguity_error = AmbiguityError {
kind: AmbiguityKind::GlobVsGlob,
ambig_vis: None,
ident,
b1: used_decl,
b2,
@ -2549,6 +2558,8 @@ struct Finalize {
used: Used = Used::Other,
/// Finalizing early or late resolution.
stage: Stage = Stage::Early,
/// Nominal visibility of the import item, in case we are resolving an import's final segment.
import_vis: Option<Visibility> = None,
}
impl Finalize {
@ -2723,7 +2734,7 @@ mod ref_mut {
}
mod hygiene {
use rustc_span::SyntaxContext;
use rustc_span::{ExpnId, SyntaxContext};
/// A newtype around `SyntaxContext` that can only keep contexts produced by
/// [SyntaxContext::normalize_to_macros_2_0].
@ -2736,6 +2747,15 @@ mod hygiene {
Macros20NormalizedSyntaxContext(ctxt.normalize_to_macros_2_0())
}
#[inline]
pub(crate) fn new_adjusted(
mut ctxt: SyntaxContext,
expn_id: ExpnId,
) -> (Macros20NormalizedSyntaxContext, Option<ExpnId>) {
let def = ctxt.normalize_to_macros_2_0_and_adjust(expn_id);
(Macros20NormalizedSyntaxContext(ctxt), def)
}
#[inline]
pub(crate) fn new_unchecked(ctxt: SyntaxContext) -> Macros20NormalizedSyntaxContext {
debug_assert_eq!(ctxt, ctxt.normalize_to_macros_2_0());

View file

@ -243,24 +243,24 @@ fn trait_object_ty<'tcx>(tcx: TyCtxt<'tcx>, poly_trait_ref: ty::PolyTraitRef<'tc
.flat_map(|super_poly_trait_ref| {
tcx.associated_items(super_poly_trait_ref.def_id())
.in_definition_order()
.filter(|item| item.is_type())
.filter(|item| item.is_type() || item.is_const())
.filter(|item| !tcx.generics_require_sized_self(item.def_id))
.map(move |assoc_ty| {
.map(move |assoc_item| {
super_poly_trait_ref.map_bound(|super_trait_ref| {
let alias_ty =
ty::AliasTy::new_from_args(tcx, assoc_ty.def_id, super_trait_ref.args);
let resolved = tcx.normalize_erasing_regions(
ty::TypingEnv::fully_monomorphized(),
alias_ty.to_ty(tcx),
let projection_term = ty::AliasTerm::new_from_args(
tcx,
assoc_item.def_id,
super_trait_ref.args,
);
debug!("Resolved {:?} -> {resolved}", alias_ty.to_ty(tcx));
let term = tcx.normalize_erasing_regions(
ty::TypingEnv::fully_monomorphized(),
projection_term.to_term(tcx),
);
debug!("Projection {:?} -> {term}", projection_term.to_term(tcx),);
ty::ExistentialPredicate::Projection(
ty::ExistentialProjection::erase_self_ty(
tcx,
ty::ProjectionPredicate {
projection_term: alias_ty.into(),
term: resolved.into(),
},
ty::ProjectionPredicate { projection_term, term },
),
)
})

View file

@ -804,7 +804,7 @@ impl SyntaxContext {
/// Like `SyntaxContext::adjust`, but also normalizes `self` to macros 2.0.
#[inline]
pub(crate) fn normalize_to_macros_2_0_and_adjust(&mut self, expn_id: ExpnId) -> Option<ExpnId> {
pub fn normalize_to_macros_2_0_and_adjust(&mut self, expn_id: ExpnId) -> Option<ExpnId> {
HygieneData::with(|data| {
*self = data.normalize_to_macros_2_0(*self);
data.adjust(self, expn_id)

View file

@ -1,4 +1,4 @@
use crate::spec::{Arch, Cc, LinkerFlavor, Target, TargetMetadata, base};
use crate::spec::{Arch, Cc, LinkerFlavor, Lld, Target, TargetMetadata, base};
pub(crate) fn target() -> Target {
let mut base = base::linux_musl::opts();
@ -8,8 +8,8 @@ pub(crate) fn target() -> Target {
base.features = "-small-data,+hvx-length128b".into();
base.has_rpath = true;
base.linker = Some("rust-lld".into());
base.linker_flavor = LinkerFlavor::Unix(Cc::Yes);
base.linker = Some("hexagon-unknown-linux-musl-clang".into());
base.linker_flavor = LinkerFlavor::Gnu(Cc::Yes, Lld::No);
base.c_enum_min_bits = Some(8);

View file

@ -1,4 +1,6 @@
use crate::spec::{Arch, PanicStrategy, Target, TargetMetadata, TargetOptions};
use crate::spec::{
Arch, Cc, LinkerFlavor, Lld, PanicStrategy, Target, TargetMetadata, TargetOptions,
};
pub(crate) fn target() -> Target {
Target {
@ -28,6 +30,7 @@ pub(crate) fn target() -> Target {
emit_debug_gdb_scripts: false,
c_enum_min_bits: Some(8),
linker: Some("rust-lld".into()),
linker_flavor: LinkerFlavor::Gnu(Cc::No, Lld::Yes),
..Default::default()
},
}

View file

@ -24,8 +24,8 @@ pub(crate) fn target() -> Target {
os: Os::Qurt,
vendor: "unknown".into(),
cpu: "hexagonv69".into(),
linker: Some("rust-lld".into()),
linker_flavor: LinkerFlavor::Gnu(Cc::No, Lld::Yes),
linker: Some("hexagon-clang".into()),
linker_flavor: LinkerFlavor::Gnu(Cc::Yes, Lld::No),
exe_suffix: ".elf".into(),
dynamic_linking: true,
executables: true,

View file

@ -1629,6 +1629,7 @@ impl<'a, 'tcx> TypeErrCtxt<'a, 'tcx> {
};
if let Some(lhs) = lhs.to_alias_term()
&& let ty::AliasTermKind::ProjectionTy | ty::AliasTermKind::ProjectionConst = lhs.kind(self.tcx)
&& let Some((better_type_err, expected_term)) =
derive_better_type_error(lhs, rhs)
{
@ -1637,6 +1638,7 @@ impl<'a, 'tcx> TypeErrCtxt<'a, 'tcx> {
better_type_err,
)
} else if let Some(rhs) = rhs.to_alias_term()
&& let ty::AliasTermKind::ProjectionTy | ty::AliasTermKind::ProjectionConst = rhs.kind(self.tcx)
&& let Some((better_type_err, expected_term)) =
derive_better_type_error(rhs, lhs)
{

View file

@ -8,6 +8,7 @@ use std::ops::ControlFlow;
use rustc_errors::FatalError;
use rustc_hir::attrs::AttributeKind;
use rustc_hir::def::DefKind;
use rustc_hir::def_id::DefId;
use rustc_hir::{self as hir, LangItem, find_attr};
use rustc_middle::query::Providers;
@ -833,8 +834,10 @@ impl<'tcx> TypeVisitor<TyCtxt<'tcx>> for IllegalSelfTypeVisitor<'tcx> {
match ct.kind() {
ty::ConstKind::Unevaluated(proj) if self.tcx.features().min_generic_const_args() => {
match self.allow_self_projections {
AllowSelfProjections::Yes => {
let trait_def_id = self.tcx.parent(proj.def);
AllowSelfProjections::Yes
if let trait_def_id = self.tcx.parent(proj.def)
&& self.tcx.def_kind(trait_def_id) == DefKind::Trait =>
{
let trait_ref = ty::TraitRef::from_assoc(self.tcx, trait_def_id, proj.args);
// Only walk contained consts if the parent trait is not a supertrait.
@ -844,7 +847,7 @@ impl<'tcx> TypeVisitor<TyCtxt<'tcx>> for IllegalSelfTypeVisitor<'tcx> {
ct.super_visit_with(self)
}
}
AllowSelfProjections::No => ct.super_visit_with(self),
_ => ct.super_visit_with(self),
}
}
_ => ct.super_visit_with(self),

View file

@ -362,6 +362,13 @@ fn sort_vs_sort_by_impl<S: Sort>() {
assert_eq!(input_sort_by, expected);
}
pub fn box_value_impl<S: Sort>() {
for len in [3, 9, 35, 56, 132] {
test_is_sorted::<Box<i32>, S>(len, Box::new, patterns::random);
test_is_sorted::<Box<i32>, S>(len, Box::new, |len| patterns::random_sorted(len, 80.0));
}
}
gen_sort_test_fns_with_default_patterns!(
correct_i32,
|len, pattern_fn| test_is_sorted::<i32, S>(len, |val| val, pattern_fn),
@ -967,6 +974,7 @@ define_instantiate_sort_tests!(
[miri_yes, fixed_seed_rand_vec_prefix],
[miri_yes, int_edge],
[miri_yes, sort_vs_sort_by],
[miri_yes, box_value],
[miri_yes, correct_i32_random],
[miri_yes, correct_i32_random_z1],
[miri_yes, correct_i32_random_d2],

View file

@ -11,6 +11,7 @@
#![feature(repr_simd)]
#![feature(macro_metavar_expr_concat)]
#![feature(rustc_attrs)]
#![feature(float_bits_const)]
#![cfg_attr(f16_enabled, feature(f16))]
#![cfg_attr(f128_enabled, feature(f128))]
#![no_builtins]

View file

@ -39,62 +39,51 @@ impl_float_to_int!(f32 => u8, u16, u32, u64, u128, usize, i8, i16, i32, i64, i12
impl_float_to_int!(f64 => u8, u16, u32, u64, u128, usize, i8, i16, i32, i64, i128, isize);
impl_float_to_int!(f128 => u8, u16, u32, u64, u128, usize, i8, i16, i32, i64, i128, isize);
// Conversion traits for primitive integer and float types
// Conversions T -> T are covered by a blanket impl and therefore excluded
// Some conversions from and to usize/isize are not implemented due to portability concerns
macro_rules! impl_from {
(bool => $Int:ty $(,)?) => {
impl_from!(
bool => $Int,
#[stable(feature = "from_bool", since = "1.28.0")],
concat!(
"Converts a [`bool`] to [`", stringify!($Int), "`] losslessly.\n",
"The resulting value is `0` for `false` and `1` for `true` values.\n",
"\n",
"# Examples\n",
"\n",
"```\n",
"assert_eq!(", stringify!($Int), "::from(true), 1);\n",
"assert_eq!(", stringify!($Int), "::from(false), 0);\n",
"```\n",
),
);
};
($Small:ty => $Large:ty, #[$attr:meta] $(,)?) => {
impl_from!(
$Small => $Large,
#[$attr],
concat!("Converts [`", stringify!($Small), "`] to [`", stringify!($Large), "`] losslessly."),
);
};
($Small:ty => $Large:ty, #[$attr:meta], $doc:expr $(,)?) => {
#[$attr]
/// Implement `From<bool>` for integers
macro_rules! impl_from_bool {
($($int:ty)*) => {$(
#[stable(feature = "from_bool", since = "1.28.0")]
#[rustc_const_unstable(feature = "const_convert", issue = "143773")]
impl const From<$Small> for $Large {
// Rustdocs on the impl block show a "[+] show undocumented items" toggle.
// Rustdocs on functions do not.
#[doc = $doc]
impl const From<bool> for $int {
/// Converts from [`bool`] to
#[doc = concat!("[`", stringify!($int), "`]")]
/// , by turning `false` into `0` and `true` into `1`.
///
/// # Examples
///
/// ```
#[doc = concat!("assert_eq!(", stringify!($int), "::from(false), 0);")]
///
#[doc = concat!("assert_eq!(", stringify!($int), "::from(true), 1);")]
/// ```
#[inline(always)]
fn from(small: $Small) -> Self {
small as Self
fn from(b: bool) -> Self {
b as Self
}
}
};
)*}
}
// boolean -> integer
impl_from!(bool => u8);
impl_from!(bool => u16);
impl_from!(bool => u32);
impl_from!(bool => u64);
impl_from!(bool => u128);
impl_from!(bool => usize);
impl_from!(bool => i8);
impl_from!(bool => i16);
impl_from!(bool => i32);
impl_from!(bool => i64);
impl_from!(bool => i128);
impl_from!(bool => isize);
impl_from_bool!(u8 u16 u32 u64 u128 usize);
impl_from_bool!(i8 i16 i32 i64 i128 isize);
/// Implement `From<$small>` for `$large`
macro_rules! impl_from {
($small:ty => $large:ty, #[$attr:meta]) => {
#[$attr]
#[rustc_const_unstable(feature = "const_convert", issue = "143773")]
impl const From<$small> for $large {
#[doc = concat!("Converts from [`", stringify!($small), "`] to [`", stringify!($large), "`] losslessly.")]
#[inline(always)]
fn from(small: $small) -> Self {
debug_assert!(<$large>::MIN as i128 <= <$small>::MIN as i128);
debug_assert!(<$small>::MAX as u128 <= <$large>::MAX as u128);
small as Self
}
}
}
}
// unsigned integer -> unsigned integer
impl_from!(u8 => u16, #[stable(feature = "lossless_int_conv", since = "1.5.0")]);
@ -338,12 +327,48 @@ macro_rules! impl_try_from_both_bounded {
)*}
}
/// Implement `TryFrom<integer>` for `bool`
macro_rules! impl_try_from_integer_for_bool {
($($int:ty)+) => {$(
#[stable(feature = "try_from", since = "1.34.0")]
#[rustc_const_unstable(feature = "const_convert", issue = "143773")]
impl const TryFrom<$int> for bool {
type Error = TryFromIntError;
/// Tries to create a bool from an integer type.
/// Returns an error if the integer is not 0 or 1.
///
/// # Examples
///
/// ```
#[doc = concat!("assert_eq!(0_", stringify!($int), ".try_into(), Ok(false));")]
///
#[doc = concat!("assert_eq!(1_", stringify!($int), ".try_into(), Ok(true));")]
///
#[doc = concat!("assert!(<", stringify!($int), " as TryInto<bool>>::try_into(2).is_err());")]
/// ```
#[inline]
fn try_from(i: $int) -> Result<Self, Self::Error> {
match i {
0 => Ok(false),
1 => Ok(true),
_ => Err(TryFromIntError(())),
}
}
}
)*}
}
macro_rules! rev {
($mac:ident, $source:ty => $($target:ty),+) => {$(
$mac!($target => $source);
)*}
}
// integer -> bool
impl_try_from_integer_for_bool!(u128 u64 u32 u16 u8);
impl_try_from_integer_for_bool!(i128 i64 i32 i16 i8);
// unsigned integer -> unsigned integer
impl_try_from_upper_bounded!(u16 => u8);
impl_try_from_upper_bounded!(u32 => u8, u16);

View file

@ -97,7 +97,6 @@
// tidy-alphabetical-start
#![feature(array_ptr_get)]
#![feature(asm_experimental_arch)]
#![feature(bigint_helper_methods)]
#![feature(bstr)]
#![feature(bstr_internals)]
#![feature(cfg_select)]
@ -107,6 +106,7 @@
#![feature(const_destruct)]
#![feature(const_eval_select)]
#![feature(const_select_unpredictable)]
#![feature(const_unsigned_bigint_helpers)]
#![feature(core_intrinsics)]
#![feature(coverage_attribute)]
#![feature(disjoint_bitor)]
@ -120,6 +120,7 @@
#![feature(ptr_alignment_type)]
#![feature(ptr_metadata)]
#![feature(set_ptr_value)]
#![feature(signed_bigint_helpers)]
#![feature(slice_ptr_get)]
#![feature(str_internals)]
#![feature(str_split_inclusive_remainder)]
@ -129,6 +130,7 @@
#![feature(unsafe_pinned)]
#![feature(utf16_extra)]
#![feature(variant_count)]
#![feature(widening_mul)]
// tidy-alphabetical-end
//
// Language features:

View file

@ -108,6 +108,17 @@ pub mod consts {
pub const FRAC_1_SQRT_3: f128 =
0.577350269189625764509148780501957455647601751270126876018602_f128;
/// sqrt(5)
#[unstable(feature = "more_float_constants", issue = "146939")]
// Also, #[unstable(feature = "f128", issue = "116909")]
pub const SQRT_5: f128 = 2.23606797749978969640917366873127623544061835961152572427089_f128;
/// 1/sqrt(5)
#[unstable(feature = "more_float_constants", issue = "146939")]
// Also, #[unstable(feature = "f128", issue = "116909")]
pub const FRAC_1_SQRT_5: f128 =
0.447213595499957939281834733746255247088123671922305144854179_f128;
/// Euler's number (e)
#[unstable(feature = "f128", issue = "116909")]
pub const E: f128 = 2.71828182845904523536028747135266249775724709369995957496697_f128;
@ -143,6 +154,11 @@ impl f128 {
#[unstable(feature = "f128", issue = "116909")]
pub const RADIX: u32 = 2;
/// The size of this float type in bits.
// #[unstable(feature = "f128", issue = "116909")]
#[unstable(feature = "float_bits_const", issue = "151073")]
pub const BITS: u32 = 128;
/// Number of significant digits in base 2.
///
/// Note that the size of the mantissa in the bitwise representation is one

View file

@ -103,6 +103,16 @@ pub mod consts {
// Also, #[unstable(feature = "more_float_constants", issue = "146939")]
pub const FRAC_1_SQRT_3: f16 = 0.577350269189625764509148780501957456_f16;
/// sqrt(5)
#[unstable(feature = "more_float_constants", issue = "146939")]
// Also, #[unstable(feature = "f16", issue = "116909")]
pub const SQRT_5: f16 = 2.23606797749978969640917366873127623_f16;
/// 1/sqrt(5)
#[unstable(feature = "more_float_constants", issue = "146939")]
// Also, #[unstable(feature = "f16", issue = "116909")]
pub const FRAC_1_SQRT_5: f16 = 0.44721359549995793928183473374625524_f16;
/// Euler's number (e)
#[unstable(feature = "f16", issue = "116909")]
pub const E: f16 = 2.71828182845904523536028747135266250_f16;
@ -138,6 +148,11 @@ impl f16 {
#[unstable(feature = "f16", issue = "116909")]
pub const RADIX: u32 = 2;
/// The size of this float type in bits.
// #[unstable(feature = "f16", issue = "116909")]
#[unstable(feature = "float_bits_const", issue = "151073")]
pub const BITS: u32 = 16;
/// Number of significant digits in base 2.
///
/// Note that the size of the mantissa in the bitwise representation is one

Some files were not shown because too many files have changed in this diff Show more