Merge ref '44a5b55557' from rust-lang/rust

Pull recent changes from https://github.com/rust-lang/rust via Josh.

Upstream ref: rust-lang/rust@44a5b55557
Filtered ref: rust-lang/rust-analyzer@d2e7b71402
Upstream diff: 548e586795...44a5b55557

This merge was created using https://github.com/rust-lang/josh-sync.
This commit is contained in:
The rustc-josh-sync Cronjob Bot 2026-01-12 04:26:37 +00:00
commit 0f0951f14d
564 changed files with 7952 additions and 3719 deletions

View file

@ -289,7 +289,7 @@ jobs:
fi
# Get closest bors merge commit
PARENT_COMMIT=`git rev-list --author='bors <bors@rust-lang.org>' -n1 --first-parent HEAD^1`
PARENT_COMMIT=`git rev-list --author='122020455+rust-bors\[bot\]@users.noreply.github.com' -n1 --first-parent HEAD^1`
./build/citool/debug/citool postprocess-metrics \
--job-name ${CI_JOB_NAME} \

View file

@ -182,19 +182,6 @@ version = "0.7.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7c02d123df017efcdfbd739ef81735b36c5ba83ec3c59c80a9d7ecc718f92e50"
[[package]]
name = "askama"
version = "0.14.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f75363874b771be265f4ffe307ca705ef6f3baa19011c149da8674a87f1b75c4"
dependencies = [
"askama_derive 0.14.0",
"itoa",
"percent-encoding",
"serde",
"serde_json",
]
[[package]]
name = "askama"
version = "0.15.1"
@ -208,30 +195,13 @@ dependencies = [
"serde_json",
]
[[package]]
name = "askama_derive"
version = "0.14.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "129397200fe83088e8a68407a8e2b1f826cf0086b21ccdb866a722c8bcd3a94f"
dependencies = [
"askama_parser 0.14.0",
"basic-toml",
"memchr",
"proc-macro2",
"quote",
"rustc-hash 2.1.1",
"serde",
"serde_derive",
"syn 2.0.110",
]
[[package]]
name = "askama_derive"
version = "0.15.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8ba5e7259a1580c61571e3116ebaaa01e3c001b2132b17c4cc5c70780ca3e994"
dependencies = [
"askama_parser 0.15.1",
"askama_parser",
"basic-toml",
"memchr",
"proc-macro2",
@ -248,19 +218,7 @@ version = "0.15.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "236ce20b77cb13506eaf5024899f4af6e12e8825f390bd943c4c37fd8f322e46"
dependencies = [
"askama_derive 0.15.1",
]
[[package]]
name = "askama_parser"
version = "0.14.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d6ab5630b3d5eaf232620167977f95eb51f3432fc76852328774afbd242d4358"
dependencies = [
"memchr",
"serde",
"serde_derive",
"winnow 0.7.13",
"askama_derive",
]
[[package]]
@ -675,7 +633,7 @@ name = "clippy"
version = "0.1.94"
dependencies = [
"anstream",
"askama 0.14.0",
"askama",
"cargo_metadata 0.18.1",
"clippy_config",
"clippy_lints",
@ -1566,7 +1524,7 @@ name = "generate-copyright"
version = "0.1.0"
dependencies = [
"anyhow",
"askama 0.15.1",
"askama",
"cargo_metadata 0.21.0",
"serde",
"serde_json",
@ -3412,9 +3370,9 @@ checksum = "357703d41365b4b27c590e3ed91eabb1b663f07c4c084095e60cbed4362dff0d"
[[package]]
name = "rustc-literal-escaper"
version = "0.0.5"
version = "0.0.7"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e4ee29da77c5a54f42697493cd4c9b9f31b74df666a6c04dfc4fde77abe0438b"
checksum = "8be87abb9e40db7466e0681dc8ecd9dcfd40360cb10b4c8fe24a7c4c3669b198"
[[package]]
name = "rustc-main"
@ -4421,6 +4379,7 @@ dependencies = [
"rustc_errors",
"rustc_fluent_macro",
"rustc_hir",
"rustc_index",
"rustc_macros",
"rustc_middle",
"rustc_session",
@ -4912,7 +4871,7 @@ name = "rustdoc"
version = "0.0.0"
dependencies = [
"arrayvec",
"askama 0.15.1",
"askama",
"base64",
"expect-test",
"indexmap",

View file

@ -618,7 +618,7 @@ impl DroplessArena {
/// - Types that are `!Copy` and `Drop`: these must be specified in the
/// arguments. The `TypedArena` will be used for them.
///
#[rustc_macro_transparency = "semitransparent"]
#[rustc_macro_transparency = "semiopaque"]
pub macro declare_arena([$($a:tt $name:ident: $ty:ty,)*]) {
#[derive(Default)]
pub struct Arena<'tcx> {

View file

@ -7,7 +7,7 @@ edition = "2024"
# tidy-alphabetical-start
bitflags = "2.4.1"
memchr = "2.7.6"
rustc-literal-escaper = "0.0.5"
rustc-literal-escaper = "0.0.7"
rustc_ast_ir = { path = "../rustc_ast_ir" }
rustc_data_structures = { path = "../rustc_data_structures" }
rustc_index = { path = "../rustc_index" }

View file

@ -2117,10 +2117,9 @@ pub struct MacroDef {
#[derive(Clone, Encodable, Decodable, Debug, HashStable_Generic, Walkable)]
pub struct EiiExternTarget {
/// path to the extern item we're targetting
/// path to the extern item we're targeting
pub extern_item_path: Path,
pub impl_unsafe: bool,
pub span: Span,
}
#[derive(Clone, Encodable, Decodable, Debug, Copy, Hash, Eq, PartialEq)]
@ -3813,6 +3812,19 @@ pub struct Fn {
pub struct EiiImpl {
pub node_id: NodeId,
pub eii_macro_path: Path,
/// This field is an implementation detail that prevents a lot of bugs.
/// See <https://github.com/rust-lang/rust/issues/149981> for an example.
///
/// The problem is that if we generate a declaration *together* with its default,
/// we generate both a declaration and an implementation. The generated implementation
/// uses the same mechanism to register itself as a user-defined implementation would,
/// despite being invisible to users. What does happen is a name resolution step.
/// The invisible default implementation has to find the declaration.
/// Both are generated at the same time, so we can skip that name resolution step.
///
/// This field is that shortcut: we prefill the extern target to skip a name resolution step,
/// making sure it never fails. It'd be awful UX if we fail name resolution in code invisible to the user.
pub known_eii_macro_resolution: Option<EiiExternTarget>,
pub impl_safety: Safety,
pub span: Span,
pub inner_span: Span,

View file

@ -235,6 +235,34 @@ impl AttributeExt for Attribute {
}
}
/// Extracts the note text from a `#[deprecated]` attribute, if any.
fn deprecation_note(&self) -> Option<Symbol> {
    // Only normal `#[deprecated ...]` attributes can carry a note.
    let AttrKind::Normal(normal) = &self.kind else {
        return None;
    };
    let meta = &normal.item;
    if meta.path != sym::deprecated {
        return None;
    }
    // `#[deprecated = "..."]`: the note is the name-value string itself.
    if let Some(note) = meta.value_str() {
        return Some(note);
    }
    // `#[deprecated(note = "...")]`: scan the argument list for a `note` entry.
    meta.meta_item_list()?.into_iter().find_map(|nested| {
        let mi = nested.meta_item()?;
        if mi.path == sym::note { mi.value_str() } else { None }
    })
}
fn doc_resolution_scope(&self) -> Option<AttrStyle> {
match &self.kind {
AttrKind::DocComment(..) => Some(self.style),
@ -277,6 +305,7 @@ impl Attribute {
pub fn may_have_doc_links(&self) -> bool {
self.doc_str().is_some_and(|s| comments::may_have_doc_links(s.as_str()))
|| self.deprecation_note().is_some_and(|s| comments::may_have_doc_links(s.as_str()))
}
/// Extracts the MetaItem from inside this Attribute.
@ -873,6 +902,11 @@ pub trait AttributeExt: Debug {
/// * `#[doc(...)]` returns `None`.
fn doc_str(&self) -> Option<Symbol>;
/// Returns the deprecation note if this is a deprecation attribute.
/// * `#[deprecated = "note"]` returns `Some("note")`.
/// * `#[deprecated(note = "note", ...)]` returns `Some("note")`.
fn deprecation_note(&self) -> Option<Symbol>;
fn is_proc_macro_attr(&self) -> bool {
[sym::proc_macro, sym::proc_macro_attribute, sym::proc_macro_derive]
.iter()

View file

@ -2,7 +2,7 @@ use rustc_abi::ExternAbi;
use rustc_ast::visit::AssocCtxt;
use rustc_ast::*;
use rustc_errors::{E0570, ErrorGuaranteed, struct_span_code_err};
use rustc_hir::attrs::{AttributeKind, EiiDecl};
use rustc_hir::attrs::{AttributeKind, EiiDecl, EiiImplResolution};
use rustc_hir::def::{DefKind, PerNS, Res};
use rustc_hir::def_id::{CRATE_DEF_ID, LocalDefId};
use rustc_hir::{
@ -134,6 +134,56 @@ impl<'hir> LoweringContext<'_, 'hir> {
}
}
/// Lowers an AST [`EiiExternTarget`] to a HIR `EiiDecl`.
///
/// Returns `None` when the extern item path fails to lower to a def-id.
fn lower_eii_extern_target(
    &mut self,
    id: NodeId,
    eii_name: Ident,
    target: &EiiExternTarget,
) -> Option<EiiDecl> {
    let EiiExternTarget { extern_item_path, impl_unsafe } = target;
    let eii_extern_target = self.lower_path_simple_eii(id, extern_item_path)?;
    Some(EiiDecl { eii_extern_target, impl_unsafe: *impl_unsafe, name: eii_name })
}
/// Lowers an AST [`EiiImpl`] to its HIR counterpart, resolving which EII
/// declaration this implementation belongs to.
fn lower_eii_impl(
&mut self,
EiiImpl {
node_id,
eii_macro_path,
impl_safety,
span,
inner_span,
is_default,
known_eii_macro_resolution,
}: &EiiImpl,
) -> hir::attrs::EiiImpl {
// Resolution order matters here:
// 1. A prefilled `known_eii_macro_resolution` target skips the name
//    resolution step entirely (used for generated default impls, which
//    must never fail resolution).
// 2. Otherwise, resolve the written macro path.
// 3. If neither works, record a delayed bug — an actual resolution
//    error is expected to have been emitted already.
let resolution = if let Some(target) = known_eii_macro_resolution
&& let Some(decl) = self.lower_eii_extern_target(
*node_id,
// the expect is ok here since we always generate this path in the eii macro.
eii_macro_path.segments.last().expect("at least one segment").ident,
target,
) {
EiiImplResolution::Known(decl)
} else if let Some(macro_did) = self.lower_path_simple_eii(*node_id, eii_macro_path) {
EiiImplResolution::Macro(macro_did)
} else {
EiiImplResolution::Error(
self.dcx().span_delayed_bug(*span, "eii never resolved without errors given"),
)
};
// Lower the remaining fields; `impl_marked_unsafe` records whether the
// user wrote `unsafe` on the implementation.
hir::attrs::EiiImpl {
span: self.lower_span(*span),
inner_span: self.lower_span(*inner_span),
impl_marked_unsafe: self.lower_safety(*impl_safety, hir::Safety::Safe).is_unsafe(),
is_default: *is_default,
resolution,
}
}
fn generate_extra_attrs_for_item_kind(
&mut self,
id: NodeId,
@ -143,49 +193,14 @@ impl<'hir> LoweringContext<'_, 'hir> {
ItemKind::Fn(box Fn { eii_impls, .. }) if eii_impls.is_empty() => Vec::new(),
ItemKind::Fn(box Fn { eii_impls, .. }) => {
vec![hir::Attribute::Parsed(AttributeKind::EiiImpls(
eii_impls
.iter()
.flat_map(
|EiiImpl {
node_id,
eii_macro_path,
impl_safety,
span,
inner_span,
is_default,
}| {
self.lower_path_simple_eii(*node_id, eii_macro_path).map(|did| {
hir::attrs::EiiImpl {
eii_macro: did,
span: self.lower_span(*span),
inner_span: self.lower_span(*inner_span),
impl_marked_unsafe: self
.lower_safety(*impl_safety, hir::Safety::Safe)
.is_unsafe(),
is_default: *is_default,
}
})
},
)
.collect(),
eii_impls.iter().map(|i| self.lower_eii_impl(i)).collect(),
))]
}
ItemKind::MacroDef(
_,
MacroDef {
eii_extern_target: Some(EiiExternTarget { extern_item_path, impl_unsafe, span }),
..
},
) => self
.lower_path_simple_eii(id, extern_item_path)
.map(|did| {
vec![hir::Attribute::Parsed(AttributeKind::EiiExternTarget(EiiDecl {
eii_extern_target: did,
impl_unsafe: *impl_unsafe,
span: self.lower_span(*span),
}))]
})
ItemKind::MacroDef(name, MacroDef { eii_extern_target: Some(target), .. }) => self
.lower_eii_extern_target(id, *name, target)
.map(|decl| vec![hir::Attribute::Parsed(AttributeKind::EiiExternTarget(decl))])
.unwrap_or_default(),
ItemKind::ExternCrate(..)
| ItemKind::Use(..)
| ItemKind::Static(..)

View file

@ -1986,8 +1986,9 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> {
let (name, kind) = self.lower_generic_param_kind(param, source);
let hir_id = self.lower_node_id(param.id);
self.lower_attrs(hir_id, &param.attrs, param.span(), Target::Param);
hir::GenericParam {
let param_attrs = &param.attrs;
let param_span = param.span();
let param = hir::GenericParam {
hir_id,
def_id: self.local_def_id(param.id),
name,
@ -1996,7 +1997,9 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> {
kind,
colon_span: param.colon_span.map(|s| self.lower_span(s)),
source,
}
};
self.lower_attrs(hir_id, param_attrs, param_span, Target::from_generic_param(&param));
param
}
fn lower_generic_param_kind(
@ -2536,6 +2539,16 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> {
overly_complex_const(self)
}
ExprKind::Lit(literal) => {
let span = expr.span;
let literal = self.lower_lit(literal, span);
ConstArg {
hir_id: self.lower_node_id(expr.id),
kind: hir::ConstArgKind::Literal(literal.node),
span,
}
}
_ => overly_complex_const(self),
}
}

View file

@ -9,7 +9,7 @@ impl<S: Stage> SingleAttributeParser<S> for CrateNameParser {
const ATTRIBUTE_ORDER: AttributeOrder = AttributeOrder::KeepOutermost;
const ON_DUPLICATE: OnDuplicate<S> = OnDuplicate::WarnButFutureError;
const TEMPLATE: AttributeTemplate = template!(NameValueStr: "name");
const ALLOWED_TARGETS: AllowedTargets = AllowedTargets::CrateLevel;
const ALLOWED_TARGETS: AllowedTargets = AllowedTargets::AllowList(&[Allow(Target::Crate)]);
fn convert(cx: &mut AcceptContext<'_, '_, S>, args: &ArgParser) -> Option<AttributeKind> {
let ArgParser::NameValue(n) = args else {
@ -33,7 +33,7 @@ impl<S: Stage> SingleAttributeParser<S> for RecursionLimitParser {
const ATTRIBUTE_ORDER: AttributeOrder = AttributeOrder::KeepOutermost;
const ON_DUPLICATE: OnDuplicate<S> = OnDuplicate::WarnButFutureError;
const TEMPLATE: AttributeTemplate = template!(NameValueStr: "N", "https://doc.rust-lang.org/reference/attributes/limits.html#the-recursion_limit-attribute");
const ALLOWED_TARGETS: AllowedTargets = AllowedTargets::CrateLevel;
const ALLOWED_TARGETS: AllowedTargets = AllowedTargets::AllowList(&[Allow(Target::Crate)]);
fn convert(cx: &mut AcceptContext<'_, '_, S>, args: &ArgParser) -> Option<AttributeKind> {
let ArgParser::NameValue(nv) = args else {
@ -56,7 +56,7 @@ impl<S: Stage> SingleAttributeParser<S> for MoveSizeLimitParser {
const ATTRIBUTE_ORDER: AttributeOrder = AttributeOrder::KeepOutermost;
const ON_DUPLICATE: OnDuplicate<S> = OnDuplicate::Error;
const TEMPLATE: AttributeTemplate = template!(NameValueStr: "N");
const ALLOWED_TARGETS: AllowedTargets = AllowedTargets::CrateLevel;
const ALLOWED_TARGETS: AllowedTargets = AllowedTargets::AllowList(&[Allow(Target::Crate)]);
fn convert(cx: &mut AcceptContext<'_, '_, S>, args: &ArgParser) -> Option<AttributeKind> {
let ArgParser::NameValue(nv) = args else {
@ -79,7 +79,7 @@ impl<S: Stage> SingleAttributeParser<S> for TypeLengthLimitParser {
const ATTRIBUTE_ORDER: AttributeOrder = AttributeOrder::KeepOutermost;
const ON_DUPLICATE: OnDuplicate<S> = OnDuplicate::WarnButFutureError;
const TEMPLATE: AttributeTemplate = template!(NameValueStr: "N");
const ALLOWED_TARGETS: AllowedTargets = AllowedTargets::CrateLevel;
const ALLOWED_TARGETS: AllowedTargets = AllowedTargets::AllowList(&[Allow(Target::Crate)]);
fn convert(cx: &mut AcceptContext<'_, '_, S>, args: &ArgParser) -> Option<AttributeKind> {
let ArgParser::NameValue(nv) = args else {
@ -102,7 +102,7 @@ impl<S: Stage> SingleAttributeParser<S> for PatternComplexityLimitParser {
const ATTRIBUTE_ORDER: AttributeOrder = AttributeOrder::KeepOutermost;
const ON_DUPLICATE: OnDuplicate<S> = OnDuplicate::Error;
const TEMPLATE: AttributeTemplate = template!(NameValueStr: "N");
const ALLOWED_TARGETS: AllowedTargets = AllowedTargets::CrateLevel;
const ALLOWED_TARGETS: AllowedTargets = AllowedTargets::AllowList(&[Allow(Target::Crate)]);
fn convert(cx: &mut AcceptContext<'_, '_, S>, args: &ArgParser) -> Option<AttributeKind> {
let ArgParser::NameValue(nv) = args else {
@ -123,7 +123,7 @@ pub(crate) struct NoCoreParser;
impl<S: Stage> NoArgsAttributeParser<S> for NoCoreParser {
const PATH: &[Symbol] = &[sym::no_core];
const ON_DUPLICATE: OnDuplicate<S> = OnDuplicate::Warn;
const ALLOWED_TARGETS: AllowedTargets = AllowedTargets::CrateLevel;
const ALLOWED_TARGETS: AllowedTargets = AllowedTargets::AllowList(&[Allow(Target::Crate)]);
const CREATE: fn(Span) -> AttributeKind = AttributeKind::NoCore;
}
@ -132,7 +132,7 @@ pub(crate) struct NoStdParser;
impl<S: Stage> NoArgsAttributeParser<S> for NoStdParser {
const PATH: &[Symbol] = &[sym::no_std];
const ON_DUPLICATE: OnDuplicate<S> = OnDuplicate::Warn;
const ALLOWED_TARGETS: AllowedTargets = AllowedTargets::CrateLevel;
const ALLOWED_TARGETS: AllowedTargets = AllowedTargets::AllowList(&[Allow(Target::Crate)]);
const CREATE: fn(Span) -> AttributeKind = AttributeKind::NoStd;
}
@ -141,7 +141,7 @@ pub(crate) struct RustcCoherenceIsCoreParser;
impl<S: Stage> NoArgsAttributeParser<S> for RustcCoherenceIsCoreParser {
const PATH: &[Symbol] = &[sym::rustc_coherence_is_core];
const ON_DUPLICATE: OnDuplicate<S> = OnDuplicate::Error;
const ALLOWED_TARGETS: AllowedTargets = AllowedTargets::CrateLevel;
const ALLOWED_TARGETS: AllowedTargets = AllowedTargets::AllowList(&[Allow(Target::Crate)]);
const CREATE: fn(Span) -> AttributeKind = AttributeKind::RustcCoherenceIsCore;
}
@ -151,7 +151,7 @@ impl<S: Stage> SingleAttributeParser<S> for WindowsSubsystemParser {
const PATH: &[Symbol] = &[sym::windows_subsystem];
const ON_DUPLICATE: OnDuplicate<S> = OnDuplicate::WarnButFutureError;
const ATTRIBUTE_ORDER: AttributeOrder = AttributeOrder::KeepOutermost;
const ALLOWED_TARGETS: AllowedTargets = AllowedTargets::CrateLevel;
const ALLOWED_TARGETS: AllowedTargets = AllowedTargets::AllowList(&[Allow(Target::Crate)]);
const TEMPLATE: AttributeTemplate = template!(NameValueStr: ["windows", "console"], "https://doc.rust-lang.org/reference/runtime.html#the-windows_subsystem-attribute");
fn convert(cx: &mut AcceptContext<'_, '_, S>, args: &ArgParser) -> Option<AttributeKind> {

View file

@ -305,3 +305,18 @@ impl<S: Stage> SingleAttributeParser<S> for RustcScalableVectorParser {
Some(AttributeKind::RustcScalableVector { element_count: Some(n), span: cx.attr_span })
}
}
/// Parses the internal `#[rustc_has_incoherent_inherent_impls]` attribute,
/// which takes no arguments.
pub(crate) struct RustcHasIncoherentInherentImplsParser;
impl<S: Stage> NoArgsAttributeParser<S> for RustcHasIncoherentInherentImplsParser {
const PATH: &[Symbol] = &[sym::rustc_has_incoherent_inherent_impls];
// Repeating the attribute on the same item is a hard error.
const ON_DUPLICATE: OnDuplicate<S> = OnDuplicate::Error;
// Only type-definition-like targets may carry this attribute.
const ALLOWED_TARGETS: AllowedTargets = AllowedTargets::AllowList(&[
Allow(Target::Trait),
Allow(Target::Struct),
Allow(Target::Enum),
Allow(Target::Union),
Allow(Target::ForeignTy),
]);
// No payload to extract: the span argument is unused and the attribute maps
// directly to its HIR kind.
const CREATE: fn(Span) -> AttributeKind = |_| AttributeKind::RustcHasIncoherentInherentImpls;
}

View file

@ -1,6 +1,7 @@
use std::num::NonZero;
use rustc_errors::ErrorGuaranteed;
use rustc_hir::target::GenericParamKind;
use rustc_hir::{
DefaultBodyStability, MethodKind, PartialConstStability, Stability, StabilityLevel,
StableSince, Target, UnstableReason, VERSION_PLACEHOLDER,
@ -43,7 +44,7 @@ const ALLOWED_TARGETS: AllowedTargets = AllowedTargets::AllowList(&[
Allow(Target::TyAlias),
Allow(Target::Variant),
Allow(Target::Field),
Allow(Target::Param),
Allow(Target::GenericParam { kind: GenericParamKind::Type, has_default: true }),
Allow(Target::Static),
Allow(Target::ForeignFn),
Allow(Target::ForeignStatic),

View file

@ -15,7 +15,7 @@ impl<S: Stage> SingleAttributeParser<S> for TransparencyParser {
});
const ALLOWED_TARGETS: AllowedTargets = AllowedTargets::AllowList(&[Allow(Target::MacroDef)]);
const TEMPLATE: AttributeTemplate =
template!(NameValueStr: ["transparent", "semitransparent", "opaque"]);
template!(NameValueStr: ["transparent", "semiopaque", "opaque"]);
fn convert(cx: &mut AcceptContext<'_, '_, S>, args: &ArgParser) -> Option<AttributeKind> {
let Some(nv) = args.name_value() else {
@ -24,12 +24,12 @@ impl<S: Stage> SingleAttributeParser<S> for TransparencyParser {
};
match nv.value_as_str() {
Some(sym::transparent) => Some(Transparency::Transparent),
Some(sym::semiopaque | sym::semitransparent) => Some(Transparency::SemiOpaque),
Some(sym::semiopaque) => Some(Transparency::SemiOpaque),
Some(sym::opaque) => Some(Transparency::Opaque),
Some(_) => {
cx.expected_specific_argument_strings(
nv.value_span,
&[sym::transparent, sym::semitransparent, sym::opaque],
&[sym::transparent, sym::semiopaque, sym::opaque],
);
None
}

View file

@ -4,12 +4,12 @@ use std::ops::{Deref, DerefMut};
use std::sync::LazyLock;
use private::Sealed;
use rustc_ast::{AttrStyle, CRATE_NODE_ID, MetaItemLit, NodeId};
use rustc_ast::{AttrStyle, MetaItemLit, NodeId};
use rustc_errors::{Diag, Diagnostic, Level};
use rustc_feature::{AttrSuggestionStyle, AttributeTemplate};
use rustc_hir::attrs::AttributeKind;
use rustc_hir::lints::{AttributeLint, AttributeLintKind};
use rustc_hir::{AttrPath, CRATE_HIR_ID, HirId};
use rustc_hir::{AttrPath, HirId};
use rustc_session::Session;
use rustc_session::lint::{Lint, LintId};
use rustc_span::{ErrorGuaranteed, Span, Symbol};
@ -62,12 +62,12 @@ use crate::attributes::proc_macro_attrs::{
use crate::attributes::prototype::CustomMirParser;
use crate::attributes::repr::{AlignParser, AlignStaticParser, ReprParser};
use crate::attributes::rustc_internal::{
RustcLayoutScalarValidRangeEndParser, RustcLayoutScalarValidRangeStartParser,
RustcLegacyConstGenericsParser, RustcLintDiagnosticsParser, RustcLintOptDenyFieldAccessParser,
RustcLintOptTyParser, RustcLintQueryInstabilityParser,
RustcLintUntrackedQueryInformationParser, RustcMainParser, RustcMustImplementOneOfParser,
RustcNeverReturnsNullPointerParser, RustcNoImplicitAutorefsParser,
RustcObjectLifetimeDefaultParser, RustcScalableVectorParser,
RustcHasIncoherentInherentImplsParser, RustcLayoutScalarValidRangeEndParser,
RustcLayoutScalarValidRangeStartParser, RustcLegacyConstGenericsParser,
RustcLintDiagnosticsParser, RustcLintOptDenyFieldAccessParser, RustcLintOptTyParser,
RustcLintQueryInstabilityParser, RustcLintUntrackedQueryInformationParser, RustcMainParser,
RustcMustImplementOneOfParser, RustcNeverReturnsNullPointerParser,
RustcNoImplicitAutorefsParser, RustcObjectLifetimeDefaultParser, RustcScalableVectorParser,
RustcSimdMonomorphizeLaneLimitParser,
};
use crate::attributes::semantics::MayDangleParser;
@ -264,6 +264,7 @@ attribute_parsers!(
Single<WithoutArgs<ProcMacroParser>>,
Single<WithoutArgs<PubTransparentParser>>,
Single<WithoutArgs<RustcCoherenceIsCoreParser>>,
Single<WithoutArgs<RustcHasIncoherentInherentImplsParser>>,
Single<WithoutArgs<RustcLintDiagnosticsParser>>,
Single<WithoutArgs<RustcLintOptTyParser>>,
Single<WithoutArgs<RustcLintQueryInstabilityParser>>,
@ -303,8 +304,6 @@ pub trait Stage: Sized + 'static + Sealed {
) -> ErrorGuaranteed;
fn should_emit(&self) -> ShouldEmit;
fn id_is_crate_root(id: Self::Id) -> bool;
}
// allow because it's a sealed trait
@ -326,10 +325,6 @@ impl Stage for Early {
fn should_emit(&self) -> ShouldEmit {
self.emit_errors
}
fn id_is_crate_root(id: Self::Id) -> bool {
id == CRATE_NODE_ID
}
}
// allow because it's a sealed trait
@ -351,10 +346,6 @@ impl Stage for Late {
fn should_emit(&self) -> ShouldEmit {
ShouldEmit::ErrorsAndLints
}
fn id_is_crate_root(id: Self::Id) -> bool {
id == CRATE_HIR_ID
}
}
/// used when parsing attributes for miscellaneous things *before* ast lowering

View file

@ -15,11 +15,6 @@ use crate::session_diagnostics::InvalidTarget;
pub(crate) enum AllowedTargets {
AllowList(&'static [Policy]),
AllowListWarnRest(&'static [Policy]),
/// Special, and not the same as `AllowList(&[Allow(Target::Crate)])`.
/// For crate-level attributes we emit a specific set of lints to warn
/// people about accidentally not using them on the crate.
/// Only use this for attributes that are *exclusively* valid at the crate level.
CrateLevel,
}
pub(crate) enum AllowedResult {
@ -53,7 +48,6 @@ impl AllowedTargets {
AllowedResult::Warn
}
}
AllowedTargets::CrateLevel => AllowedResult::Allowed,
}
}
@ -61,7 +55,6 @@ impl AllowedTargets {
match self {
AllowedTargets::AllowList(list) => list,
AllowedTargets::AllowListWarnRest(list) => list,
AllowedTargets::CrateLevel => ALL_TARGETS,
}
.iter()
.filter_map(|target| match target {
@ -95,7 +88,10 @@ impl<'sess, S: Stage> AttributeParser<'sess, S> {
target: Target,
cx: &mut AcceptContext<'_, 'sess, S>,
) {
Self::check_type(matches!(allowed_targets, AllowedTargets::CrateLevel), target, cx);
if allowed_targets.allowed_targets() == &[Target::Crate] {
Self::check_crate_level(target, cx);
return;
}
match allowed_targets.is_allowed(target) {
AllowedResult::Allowed => {}
@ -149,18 +145,10 @@ impl<'sess, S: Stage> AttributeParser<'sess, S> {
}
}
pub(crate) fn check_type(
crate_level: bool,
target: Target,
cx: &mut AcceptContext<'_, 'sess, S>,
) {
let is_crate_root = S::id_is_crate_root(cx.target_id);
if is_crate_root {
return;
}
if !crate_level {
pub(crate) fn check_crate_level(target: Target, cx: &mut AcceptContext<'_, 'sess, S>) {
// For crate-level attributes we emit a specific set of lints to warn
// people about accidentally not using them on the crate.
if target == Target::Crate {
return;
}
@ -310,5 +298,29 @@ pub(crate) const ALL_TARGETS: &'static [Policy] = {
Allow(Target::Crate),
Allow(Target::Delegation { mac: false }),
Allow(Target::Delegation { mac: true }),
Allow(Target::GenericParam {
kind: rustc_hir::target::GenericParamKind::Const,
has_default: false,
}),
Allow(Target::GenericParam {
kind: rustc_hir::target::GenericParamKind::Const,
has_default: true,
}),
Allow(Target::GenericParam {
kind: rustc_hir::target::GenericParamKind::Lifetime,
has_default: false,
}),
Allow(Target::GenericParam {
kind: rustc_hir::target::GenericParamKind::Lifetime,
has_default: true,
}),
Allow(Target::GenericParam {
kind: rustc_hir::target::GenericParamKind::Type,
has_default: false,
}),
Allow(Target::GenericParam {
kind: rustc_hir::target::GenericParamKind::Type,
has_default: true,
}),
]
};

View file

@ -182,6 +182,8 @@ builtin_macros_expected_other = expected operand, {$is_inline_asm ->
builtin_macros_export_macro_rules = cannot export macro_rules! macros from a `proc-macro` crate type currently
builtin_macros_format_add_missing_colon = add a colon before the format specifier
builtin_macros_format_duplicate_arg = duplicate argument named `{$ident}`
.label1 = previously here
.label2 = duplicate argument

View file

@ -103,19 +103,19 @@ fn eii_(
// span of the declaring item without attributes
let item_span = func.sig.span;
// span of the eii attribute and the item below it, i.e. the full declaration
let decl_span = eii_attr_span.to(item_span);
let foreign_item_name = func.ident;
let mut return_items = Vec::new();
if func.body.is_some() {
return_items.push(Box::new(generate_default_impl(
ecx,
&func,
impl_unsafe,
macro_name,
eii_attr_span,
item_span,
foreign_item_name,
)))
}
@ -133,7 +133,7 @@ fn eii_(
macro_name,
foreign_item_name,
impl_unsafe,
decl_span,
&attrs_from_decl,
)));
return_items.into_iter().map(wrap_item).collect()
@ -187,11 +187,13 @@ fn filter_attrs_for_multiple_eii_attr(
}
fn generate_default_impl(
ecx: &mut ExtCtxt<'_>,
func: &ast::Fn,
impl_unsafe: bool,
macro_name: Ident,
eii_attr_span: Span,
item_span: Span,
foreign_item_name: Ident,
) -> ast::Item {
// FIXME: re-add some original attrs
let attrs = ThinVec::new();
@ -208,6 +210,21 @@ fn generate_default_impl(
},
span: eii_attr_span,
is_default: true,
known_eii_macro_resolution: Some(ast::EiiExternTarget {
extern_item_path: ast::Path {
span: foreign_item_name.span,
segments: thin_vec![
ast::PathSegment {
ident: Ident::from_str_and_span("super", foreign_item_name.span,),
id: DUMMY_NODE_ID,
args: None
},
ast::PathSegment { ident: foreign_item_name, id: DUMMY_NODE_ID, args: None },
],
tokens: None,
},
impl_unsafe,
}),
});
ast::Item {
@ -236,18 +253,66 @@ fn generate_default_impl(
stmts: thin_vec![ast::Stmt {
id: DUMMY_NODE_ID,
kind: ast::StmtKind::Item(Box::new(ast::Item {
attrs,
attrs: ThinVec::new(),
id: DUMMY_NODE_ID,
span: item_span,
vis: ast::Visibility {
span: eii_attr_span,
span: item_span,
kind: ast::VisibilityKind::Inherited,
tokens: None
},
kind: ItemKind::Fn(Box::new(default_func)),
kind: ItemKind::Mod(
ast::Safety::Default,
Ident::from_str_and_span("dflt", item_span),
ast::ModKind::Loaded(
thin_vec![
Box::new(ast::Item {
attrs: thin_vec![ecx.attr_nested_word(
sym::allow,
sym::unused_imports,
item_span
),],
id: DUMMY_NODE_ID,
span: item_span,
vis: ast::Visibility {
span: eii_attr_span,
kind: ast::VisibilityKind::Inherited,
tokens: None
},
kind: ItemKind::Use(ast::UseTree {
prefix: ast::Path::from_ident(
Ident::from_str_and_span(
"super", item_span,
)
),
kind: ast::UseTreeKind::Glob,
span: item_span,
}),
tokens: None,
}),
Box::new(ast::Item {
attrs,
id: DUMMY_NODE_ID,
span: item_span,
vis: ast::Visibility {
span: eii_attr_span,
kind: ast::VisibilityKind::Inherited,
tokens: None
},
kind: ItemKind::Fn(Box::new(default_func)),
tokens: None,
}),
],
ast::Inline::Yes,
ast::ModSpans {
inner_span: item_span,
inject_use_span: item_span,
}
)
),
tokens: None,
})),
span: eii_attr_span
span: eii_attr_span,
}],
id: DUMMY_NODE_ID,
rules: ast::BlockCheckMode::Default,
@ -352,10 +417,14 @@ fn generate_attribute_macro_to_implement(
macro_name: Ident,
foreign_item_name: Ident,
impl_unsafe: bool,
decl_span: Span,
attrs_from_decl: &[Attribute],
) -> ast::Item {
let mut macro_attrs = ThinVec::new();
// To avoid e.g. `error: attribute macro has missing stability attribute`
// errors for eii's in std.
macro_attrs.extend_from_slice(attrs_from_decl);
// #[builtin_macro(eii_shared_macro)]
macro_attrs.push(ecx.attr_nested_word(sym::rustc_builtin_macro, sym::eii_shared_macro, span));
@ -394,7 +463,6 @@ fn generate_attribute_macro_to_implement(
eii_extern_target: Some(ast::EiiExternTarget {
extern_item_path: ast::Path::from_ident(foreign_item_name),
impl_unsafe,
span: decl_span,
}),
},
),
@ -451,7 +519,7 @@ pub(crate) fn eii_extern_target(
false
};
d.eii_extern_target = Some(EiiExternTarget { extern_item_path, impl_unsafe, span });
d.eii_extern_target = Some(EiiExternTarget { extern_item_path, impl_unsafe });
// Return the original item and the new methods.
vec![item]
@ -508,6 +576,7 @@ pub(crate) fn eii_shared_macro(
impl_safety: meta_item.unsafety,
span,
is_default,
known_eii_macro_resolution: None,
});
vec![item]

View file

@ -643,6 +643,15 @@ pub(crate) enum InvalidFormatStringSuggestion {
span: Span,
replacement: String,
},
#[suggestion(
builtin_macros_format_add_missing_colon,
code = ":?",
applicability = "machine-applicable"
)]
AddMissingColon {
#[primary_span]
span: Span,
},
}
#[derive(Diagnostic)]

View file

@ -329,6 +329,10 @@ fn make_format_args(
replacement,
});
}
parse::Suggestion::AddMissingColon(span) => {
let span = fmt_span.from_inner(InnerSpan::new(span.start, span.end));
e.sugg_ = Some(errors::InvalidFormatStringSuggestion::AddMissingColon { span });
}
}
let guar = ecx.dcx().emit_err(e);
return ExpandResult::Ready(Err(guar));

View file

@ -744,43 +744,43 @@ unsafe extern "C" {
pub struct VaList<'a>(&'a mut VaListImpl);
#[rustc_builtin_macro]
#[rustc_macro_transparency = "semitransparent"]
#[rustc_macro_transparency = "semiopaque"]
pub macro stringify($($t:tt)*) {
/* compiler built-in */
}
#[rustc_builtin_macro]
#[rustc_macro_transparency = "semitransparent"]
#[rustc_macro_transparency = "semiopaque"]
pub macro file() {
/* compiler built-in */
}
#[rustc_builtin_macro]
#[rustc_macro_transparency = "semitransparent"]
#[rustc_macro_transparency = "semiopaque"]
pub macro line() {
/* compiler built-in */
}
#[rustc_builtin_macro]
#[rustc_macro_transparency = "semitransparent"]
#[rustc_macro_transparency = "semiopaque"]
pub macro cfg() {
/* compiler built-in */
}
#[rustc_builtin_macro]
#[rustc_macro_transparency = "semitransparent"]
#[rustc_macro_transparency = "semiopaque"]
pub macro asm() {
/* compiler built-in */
}
#[rustc_builtin_macro]
#[rustc_macro_transparency = "semitransparent"]
#[rustc_macro_transparency = "semiopaque"]
pub macro global_asm() {
/* compiler built-in */
}
#[rustc_builtin_macro]
#[rustc_macro_transparency = "semitransparent"]
#[rustc_macro_transparency = "semiopaque"]
pub macro naked_asm() {
/* compiler built-in */
}

View file

@ -233,7 +233,7 @@ pub(crate) fn compile_global_asm(
#![allow(internal_features)]
#![no_core]
#[rustc_builtin_macro]
#[rustc_macro_transparency = "semitransparent"]
#[rustc_macro_transparency = "semiopaque"]
macro global_asm() { /* compiler built-in */ }
global_asm!(r###"
"####,

View file

@ -748,25 +748,25 @@ extern "C" {
pub struct VaList<'a>(&'a mut VaListImpl);
#[rustc_builtin_macro]
#[rustc_macro_transparency = "semitransparent"]
#[rustc_macro_transparency = "semiopaque"]
pub macro stringify($($t:tt)*) {
/* compiler built-in */
}
#[rustc_builtin_macro]
#[rustc_macro_transparency = "semitransparent"]
#[rustc_macro_transparency = "semiopaque"]
pub macro file() {
/* compiler built-in */
}
#[rustc_builtin_macro]
#[rustc_macro_transparency = "semitransparent"]
#[rustc_macro_transparency = "semiopaque"]
pub macro line() {
/* compiler built-in */
}
#[rustc_builtin_macro]
#[rustc_macro_transparency = "semitransparent"]
#[rustc_macro_transparency = "semiopaque"]
pub macro cfg() {
/* compiler built-in */
}

View file

@ -187,6 +187,10 @@ fn check_and_apply_linkage<'ll, 'tcx>(
};
llvm::set_linkage(g1, base::linkage_to_llvm(linkage));
// Normally this is done in `get_static_inner`, but as we generate an internal global here,
// it will apply the dso_local to the internal global instead, so do it here, too.
cx.assume_dso_local(g1, true);
// Declare an internal global `extern_with_linkage_foo` which
// is initialized with the address of `foo`. If `foo` is
// discarded during linking (for example, if `foo` has weak

View file

@ -379,19 +379,19 @@ fn update_target_reliable_float_cfg(sess: &Session, cfg: &mut TargetConfig) {
{
false
}
// Unsupported <https://github.com/llvm/llvm-project/issues/94434>
(Arch::Arm64EC, _) => false,
// Unsupported <https://github.com/llvm/llvm-project/issues/94434> (fixed in llvm22)
(Arch::Arm64EC, _) if major < 22 => false,
// Selection failure <https://github.com/llvm/llvm-project/issues/50374> (fixed in llvm21)
(Arch::S390x, _) if major < 21 => false,
// MinGW ABI bugs <https://gcc.gnu.org/bugzilla/show_bug.cgi?id=115054>
(Arch::X86_64, Os::Windows) if *target_env == Env::Gnu && *target_abi != Abi::Llvm => false,
// Infinite recursion <https://github.com/llvm/llvm-project/issues/97981>
(Arch::CSky, _) => false,
(Arch::CSky, _) if major < 22 => false, // (fixed in llvm22)
(Arch::Hexagon, _) if major < 21 => false, // (fixed in llvm21)
(Arch::LoongArch32 | Arch::LoongArch64, _) if major < 21 => false, // (fixed in llvm21)
(Arch::PowerPC | Arch::PowerPC64, _) => false,
(Arch::Sparc | Arch::Sparc64, _) => false,
(Arch::Wasm32 | Arch::Wasm64, _) => false,
(Arch::PowerPC | Arch::PowerPC64, _) if major < 22 => false, // (fixed in llvm22)
(Arch::Sparc | Arch::Sparc64, _) if major < 22 => false, // (fixed in llvm22)
(Arch::Wasm32 | Arch::Wasm64, _) if major < 22 => false, // (fixed in llvm22)
// `f16` support only requires that symbols converting to and from `f32` are available. We
// provide these in `compiler-builtins`, so `f16` should be available on all platforms that
// do not have other ABI issues or LLVM crashes.

View file

@ -10,7 +10,6 @@ use rustc_middle::mir::mono::Visibility;
use rustc_middle::ty::layout::{FnAbiOf, HasTypingEnv, LayoutOf};
use rustc_middle::ty::{self, Instance, TypeVisitableExt};
use rustc_session::config::CrateType;
use rustc_span::Symbol;
use rustc_target::spec::{Arch, RelocModel};
use tracing::debug;
@ -92,17 +91,19 @@ impl<'tcx> PreDefineCodegenMethods<'tcx> for CodegenCx<'_, 'tcx> {
}
impl CodegenCx<'_, '_> {
fn add_aliases(&self, aliasee: &llvm::Value, aliases: &[(Symbol, Linkage, Visibility)]) {
fn add_aliases(&self, aliasee: &llvm::Value, aliases: &[(DefId, Linkage, Visibility)]) {
let ty = self.get_type_of_global(aliasee);
for (alias, linkage, visibility) in aliases {
let symbol_name = self.tcx.symbol_name(Instance::mono(self.tcx, *alias));
tracing::debug!("ALIAS: {alias:?} {linkage:?} {visibility:?}");
let lldecl = llvm::add_alias(
self.llmod,
ty,
AddressSpace::ZERO,
aliasee,
&CString::new(alias.as_str()).unwrap(),
&CString::new(symbol_name.name).unwrap(),
);
llvm::set_visibility(lldecl, base::visibility_to_llvm(*visibility));
@ -110,7 +111,7 @@ impl CodegenCx<'_, '_> {
}
}
/// Whether a definition or declaration can be assumed to be local to a group of
/// A definition or declaration can be assumed to be local to a group of
/// libraries that form a single DSO or executable.
/// Marks the local as DSO if so.
pub(crate) fn assume_dso_local(&self, llval: &llvm::Value, is_declaration: bool) -> bool {
@ -152,7 +153,7 @@ impl CodegenCx<'_, '_> {
return false;
}
// With pie relocation model calls of functions defined in the translation
// With pie relocation model, calls of functions defined in the translation
// unit can use copy relocations.
if self.tcx.sess.relocation_model() == RelocModel::Pie && !is_declaration {
return true;

View file

@ -1,12 +1,13 @@
use rustc_abi::{Align, BackendRepr, Endian, HasDataLayout, Primitive, Size, TyAndLayout};
use rustc_abi::{Align, BackendRepr, Endian, HasDataLayout, Primitive, Size};
use rustc_codegen_ssa::MemFlags;
use rustc_codegen_ssa::common::IntPredicate;
use rustc_codegen_ssa::mir::operand::OperandRef;
use rustc_codegen_ssa::traits::{
BaseTypeCodegenMethods, BuilderMethods, ConstCodegenMethods, LayoutTypeCodegenMethods,
};
use rustc_middle::bug;
use rustc_middle::ty::Ty;
use rustc_middle::ty::layout::{HasTyCtxt, LayoutOf};
use rustc_middle::ty::layout::{HasTyCtxt, LayoutOf, TyAndLayout};
use rustc_target::spec::{Abi, Arch, Env};
use crate::builder::Builder;
@ -82,6 +83,7 @@ enum PassMode {
/// Size, in bytes, of one `va_list` slot for the generic pointer-bump
/// `emit_ptr_va_arg` lowering (passed per-target in `emit_va_arg`).
/// The discriminant is the byte count itself.
enum SlotSize {
Bytes8 = 8,
Bytes4 = 4,
Bytes1 = 1,
}
enum AllowHigherAlign {
@ -728,7 +730,7 @@ fn emit_x86_64_sysv64_va_arg<'ll, 'tcx>(
fn copy_to_temporary_if_more_aligned<'ll, 'tcx>(
bx: &mut Builder<'_, 'll, 'tcx>,
reg_addr: &'ll Value,
layout: TyAndLayout<'tcx, Ty<'tcx>>,
layout: TyAndLayout<'tcx>,
src_align: Align,
) -> &'ll Value {
if layout.layout.align.abi > src_align {
@ -751,7 +753,7 @@ fn copy_to_temporary_if_more_aligned<'ll, 'tcx>(
fn x86_64_sysv64_va_arg_from_memory<'ll, 'tcx>(
bx: &mut Builder<'_, 'll, 'tcx>,
va_list_addr: &'ll Value,
layout: TyAndLayout<'tcx, Ty<'tcx>>,
layout: TyAndLayout<'tcx>,
) -> &'ll Value {
let dl = bx.cx.data_layout();
let ptr_align_abi = dl.data_layout().pointer_align().abi;
@ -1003,15 +1005,17 @@ fn emit_xtensa_va_arg<'ll, 'tcx>(
return bx.load(layout.llvm_type(bx), value_ptr, layout.align.abi);
}
/// Determine the va_arg implementation to use. The LLVM va_arg instruction
/// is lacking in some instances, so we should only use it as a fallback.
pub(super) fn emit_va_arg<'ll, 'tcx>(
bx: &mut Builder<'_, 'll, 'tcx>,
addr: OperandRef<'tcx, &'ll Value>,
target_ty: Ty<'tcx>,
) -> &'ll Value {
// Determine the va_arg implementation to use. The LLVM va_arg instruction
// is lacking in some instances, so we should only use it as a fallback.
let target = &bx.cx.tcx.sess.target;
let layout = bx.cx.layout_of(target_ty);
let target_ty_size = layout.layout.size().bytes();
let target = &bx.cx.tcx.sess.target;
match target.arch {
Arch::X86 => emit_ptr_va_arg(
bx,
@ -1069,23 +1073,79 @@ pub(super) fn emit_va_arg<'ll, 'tcx>(
AllowHigherAlign::Yes,
ForceRightAdjust::No,
),
Arch::LoongArch32 => emit_ptr_va_arg(
bx,
addr,
target_ty,
if target_ty_size > 2 * 4 { PassMode::Indirect } else { PassMode::Direct },
SlotSize::Bytes4,
AllowHigherAlign::Yes,
ForceRightAdjust::No,
),
Arch::LoongArch64 => emit_ptr_va_arg(
bx,
addr,
target_ty,
if target_ty_size > 2 * 8 { PassMode::Indirect } else { PassMode::Direct },
SlotSize::Bytes8,
AllowHigherAlign::Yes,
ForceRightAdjust::No,
),
Arch::AmdGpu => emit_ptr_va_arg(
bx,
addr,
target_ty,
PassMode::Direct,
SlotSize::Bytes4,
AllowHigherAlign::No,
ForceRightAdjust::No,
),
Arch::Nvptx64 => emit_ptr_va_arg(
bx,
addr,
target_ty,
PassMode::Direct,
SlotSize::Bytes1,
AllowHigherAlign::Yes,
ForceRightAdjust::No,
),
Arch::Wasm32 => emit_ptr_va_arg(
bx,
addr,
target_ty,
if layout.is_aggregate() || layout.is_zst() || layout.is_1zst() {
PassMode::Indirect
} else {
PassMode::Direct
},
SlotSize::Bytes4,
AllowHigherAlign::Yes,
ForceRightAdjust::No,
),
Arch::Wasm64 => bug!("c-variadic functions are not fully implemented for wasm64"),
Arch::CSky => emit_ptr_va_arg(
bx,
addr,
target_ty,
PassMode::Direct,
SlotSize::Bytes4,
AllowHigherAlign::Yes,
ForceRightAdjust::No,
),
// Windows x86_64
Arch::X86_64 if target.is_like_windows => {
let target_ty_size = bx.cx.size_of(target_ty).bytes();
emit_ptr_va_arg(
bx,
addr,
target_ty,
if target_ty_size > 8 || !target_ty_size.is_power_of_two() {
PassMode::Indirect
} else {
PassMode::Direct
},
SlotSize::Bytes8,
AllowHigherAlign::No,
ForceRightAdjust::No,
)
}
Arch::X86_64 if target.is_like_windows => emit_ptr_va_arg(
bx,
addr,
target_ty,
if target_ty_size > 8 || !target_ty_size.is_power_of_two() {
PassMode::Indirect
} else {
PassMode::Direct
},
SlotSize::Bytes8,
AllowHigherAlign::No,
ForceRightAdjust::No,
),
// This includes `target.is_like_darwin`, which on x86_64 targets is like sysv64.
Arch::X86_64 => emit_x86_64_sysv64_va_arg(bx, addr, target_ty),
Arch::Xtensa => emit_xtensa_va_arg(bx, addr, target_ty),

View file

@ -10,6 +10,8 @@ codegen_ssa_archive_build_failure = failed to build archive at `{$path}`: {$erro
codegen_ssa_binary_output_to_tty = option `-o` or `--emit` is used to write binary output type `{$shorthand}` to stdout, but stdout is a tty
codegen_ssa_bpf_staticlib_not_supported = linking static libraries is not supported for BPF
codegen_ssa_cgu_not_recorded =
CGU-reuse for `{$cgu_user_name}` is (mangled: `{$cgu_name}`) was not recorded

View file

@ -22,10 +22,11 @@ use tracing::trace;
use super::metadata::{create_compressed_metadata_file, search_for_section};
use crate::common;
// Re-exporting for rustc_codegen_llvm::back::archive
pub use crate::errors::{ArchiveBuildFailure, ExtractBundledLibsError, UnknownArchiveKind};
// Public for ArchiveBuilderBuilder::extract_bundled_libs
pub use crate::errors::ExtractBundledLibsError;
use crate::errors::{
DlltoolFailImportLibrary, ErrorCallingDllTool, ErrorCreatingImportLibrary, ErrorWritingDEFFile,
ArchiveBuildFailure, DlltoolFailImportLibrary, ErrorCallingDllTool, ErrorCreatingImportLibrary,
ErrorWritingDEFFile, UnknownArchiveKind,
};
/// An item to be included in an import library.

View file

@ -839,6 +839,11 @@ impl<'a> Linker for GccLinker<'a> {
self.sess.dcx().emit_fatal(errors::LibDefWriteFailure { error });
}
self.link_arg(path);
} else if self.sess.target.is_like_wasm {
self.link_arg("--no-export-dynamic");
for (sym, _) in symbols {
self.link_arg("--export").link_arg(sym);
}
} else if crate_type == CrateType::Executable && !self.sess.target.is_like_solaris {
let res: io::Result<()> = try {
let mut f = File::create_buffered(&path)?;
@ -853,11 +858,6 @@ impl<'a> Linker for GccLinker<'a> {
self.sess.dcx().emit_fatal(errors::VersionScriptWriteFailure { error });
}
self.link_arg("--dynamic-list").link_arg(path);
} else if self.sess.target.is_like_wasm {
self.link_arg("--no-export-dynamic");
for (sym, _) in symbols {
self.link_arg("--export").link_arg(sym);
}
} else {
// Write an LD version script
let res: io::Result<()> = try {
@ -2075,7 +2075,7 @@ impl<'a> Linker for BpfLinker<'a> {
}
fn link_staticlib_by_name(&mut self, _name: &str, _verbatim: bool, _whole_archive: bool) {
panic!("staticlibs not supported")
self.sess.dcx().emit_fatal(errors::BpfStaticlibNotSupported)
}
fn link_staticlib_by_path(&mut self, path: &Path, _whole_archive: bool) {

View file

@ -3,7 +3,9 @@ use std::str::FromStr;
use rustc_abi::{Align, ExternAbi};
use rustc_ast::expand::autodiff_attrs::{AutoDiffAttrs, DiffActivity, DiffMode};
use rustc_ast::{LitKind, MetaItem, MetaItemInner, attr};
use rustc_hir::attrs::{AttributeKind, InlineAttr, Linkage, RtsanSetting, UsedBy};
use rustc_hir::attrs::{
AttributeKind, EiiImplResolution, InlineAttr, Linkage, RtsanSetting, UsedBy,
};
use rustc_hir::def::DefKind;
use rustc_hir::def_id::{DefId, LOCAL_CRATE, LocalDefId};
use rustc_hir::{self as hir, Attribute, LangItem, find_attr, lang_items};
@ -13,10 +15,10 @@ use rustc_middle::middle::codegen_fn_attrs::{
use rustc_middle::mir::mono::Visibility;
use rustc_middle::query::Providers;
use rustc_middle::span_bug;
use rustc_middle::ty::{self as ty, Instance, TyCtxt};
use rustc_middle::ty::{self as ty, TyCtxt};
use rustc_session::lint;
use rustc_session::parse::feature_err;
use rustc_span::{Span, Symbol, sym};
use rustc_span::{Span, sym};
use rustc_target::spec::Os;
use crate::errors;
@ -285,13 +287,23 @@ fn process_builtin_attrs(
}
AttributeKind::EiiImpls(impls) => {
for i in impls {
let extern_item = find_attr!(
tcx.get_all_attrs(i.eii_macro),
AttributeKind::EiiExternTarget(target) => target.eii_extern_target
)
.expect("eii should have declaration macro with extern target attribute");
let symbol_name = tcx.symbol_name(Instance::mono(tcx, extern_item));
let extern_item = match i.resolution {
EiiImplResolution::Macro(def_id) => {
let Some(extern_item) = find_attr!(
tcx.get_all_attrs(def_id),
AttributeKind::EiiExternTarget(target) => target.eii_extern_target
) else {
tcx.dcx().span_delayed_bug(
i.span,
"resolved to something that's not an EII",
);
continue;
};
extern_item
}
EiiImplResolution::Known(decl) => decl.eii_extern_target,
EiiImplResolution::Error(_eg) => continue,
};
// this is to prevent a bug where a single crate defines both the default and explicit implementation
// for an EII. In that case, both of them may be part of the same final object file. I'm not 100% sure
@ -304,13 +316,13 @@ fn process_builtin_attrs(
// iterate over all implementations *in the current crate*
// (this is ok since we generate codegen fn attrs in the local crate)
// if any of them is *not default* then don't emit the alias.
&& tcx.externally_implementable_items(LOCAL_CRATE).get(&i.eii_macro).expect("at least one").1.iter().any(|(_, imp)| !imp.is_default)
&& tcx.externally_implementable_items(LOCAL_CRATE).get(&extern_item).expect("at least one").1.iter().any(|(_, imp)| !imp.is_default)
{
continue;
}
codegen_fn_attrs.foreign_item_symbol_aliases.push((
Symbol::intern(symbol_name.name),
extern_item,
if i.is_default { Linkage::LinkOnceAny } else { Linkage::External },
Visibility::Default,
));

View file

@ -661,7 +661,7 @@ pub(crate) struct RlibArchiveBuildFailure {
}
#[derive(Diagnostic)]
// Public for rustc_codegen_llvm::back::archive
// Public for ArchiveBuilderBuilder::extract_bundled_libs
pub enum ExtractBundledLibsError<'a> {
#[diag(codegen_ssa_extract_bundled_libs_open_file)]
OpenFile { rlib: &'a Path, error: Box<dyn std::error::Error> },
@ -700,19 +700,21 @@ pub(crate) struct UnsupportedLinkSelfContained;
#[derive(Diagnostic)]
#[diag(codegen_ssa_archive_build_failure)]
// Public for rustc_codegen_llvm::back::archive
pub struct ArchiveBuildFailure {
pub(crate) struct ArchiveBuildFailure {
pub path: PathBuf,
pub error: std::io::Error,
}
#[derive(Diagnostic)]
#[diag(codegen_ssa_unknown_archive_kind)]
// Public for rustc_codegen_llvm::back::archive
pub struct UnknownArchiveKind<'a> {
pub(crate) struct UnknownArchiveKind<'a> {
pub kind: &'a str,
}
/// Fatal diagnostic "linking static libraries is not supported for BPF"
/// (fluent: `codegen_ssa_bpf_staticlib_not_supported`), emitted by the BPF
/// linker instead of panicking when a staticlib link is requested.
#[derive(Diagnostic)]
#[diag(codegen_ssa_bpf_staticlib_not_supported)]
pub(crate) struct BpfStaticlibNotSupported;
#[derive(Diagnostic)]
#[diag(codegen_ssa_multiple_main_functions)]
#[help]

View file

@ -6,9 +6,7 @@
// having basically only two use-cases that act in different ways.
use rustc_errors::ErrorGuaranteed;
use rustc_hir::attrs::AttributeKind;
use rustc_hir::def::DefKind;
use rustc_hir::{LangItem, find_attr};
use rustc_hir::LangItem;
use rustc_infer::infer::TyCtxtInferExt;
use rustc_middle::mir::*;
use rustc_middle::ty::{self, AdtDef, Ty};
@ -366,14 +364,10 @@ where
// check performed after the promotion. Verify that with an assertion.
assert!(promoted.is_none() || Q::ALLOW_PROMOTED);
// Avoid looking at attrs of anon consts as that will ICE
let is_type_const_item =
matches!(cx.tcx.def_kind(def), DefKind::Const | DefKind::AssocConst)
&& find_attr!(cx.tcx.get_all_attrs(def), AttributeKind::TypeConst(_));
// Don't peek inside trait associated constants, also `#[type_const] const` items
// don't have bodies so there's nothing to look at
if promoted.is_none() && cx.tcx.trait_of_assoc(def).is_none() && !is_type_const_item {
if promoted.is_none() && cx.tcx.trait_of_assoc(def).is_none() && !cx.tcx.is_type_const(def)
{
let qualifs = cx.tcx.at(constant.span).mir_const_qualif(def);
if !Q::in_qualifs(&qualifs) {

View file

@ -8,7 +8,7 @@ use rustc_data_structures::fx::{FxHashMap, FxIndexMap, IndexEntry};
use rustc_hir::def_id::{DefId, LocalDefId};
use rustc_hir::{self as hir, CRATE_HIR_ID, LangItem};
use rustc_middle::mir::AssertMessage;
use rustc_middle::mir::interpret::ReportedErrorInfo;
use rustc_middle::mir::interpret::{Pointer, ReportedErrorInfo};
use rustc_middle::query::TyCtxtAt;
use rustc_middle::ty::layout::{HasTypingEnv, TyAndLayout, ValidityRequirement};
use rustc_middle::ty::{self, Ty, TyCtxt};
@ -22,7 +22,7 @@ use crate::errors::{LongRunning, LongRunningWarn};
use crate::fluent_generated as fluent;
use crate::interpret::{
self, AllocId, AllocInit, AllocRange, ConstAllocation, CtfeProvenance, FnArg, Frame,
GlobalAlloc, ImmTy, InterpCx, InterpResult, OpTy, PlaceTy, Pointer, RangeSet, Scalar,
GlobalAlloc, ImmTy, InterpCx, InterpResult, OpTy, PlaceTy, RangeSet, Scalar,
compile_time_machine, err_inval, interp_ok, throw_exhaust, throw_inval, throw_ub,
throw_ub_custom, throw_unsup, throw_unsup_format,
};
@ -586,6 +586,11 @@ impl<'tcx> interpret::Machine<'tcx> for CompileTimeMachine<'tcx> {
}
}
sym::type_of => {
let ty = ecx.read_type_id(&args[0])?;
ecx.write_type_info(ty, dest)?;
}
_ => {
// We haven't handled the intrinsic, let's see if we can use a fallback body.
if ecx.tcx.intrinsic(instance.def_id()).unwrap().must_be_overridden {

View file

@ -13,6 +13,7 @@ mod error;
mod eval_queries;
mod fn_queries;
mod machine;
mod type_info;
mod valtrees;
pub use self::dummy_machine::*;

View file

@ -0,0 +1,175 @@
use rustc_abi::FieldIdx;
use rustc_hir::LangItem;
use rustc_middle::mir::interpret::CtfeProvenance;
use rustc_middle::span_bug;
use rustc_middle::ty::layout::TyAndLayout;
use rustc_middle::ty::{self, ScalarInt, Ty};
use rustc_span::{Symbol, sym};
use crate::const_eval::CompileTimeMachine;
use crate::interpret::{
Immediate, InterpCx, InterpResult, MPlaceTy, MemoryKind, Writeable, interp_ok,
};
impl<'tcx> InterpCx<'tcx, CompileTimeMachine<'tcx>> {
/// Writes a `core::mem::type_info::TypeInfo` value describing `ty` into `dest`.
///
/// `dest` must already have the `Type` lang-item struct as its type (asserted below).
/// Fields of that struct are filled by name: `kind` (an enum discriminated by the
/// kind of `ty`) and `size` (an `Option`-shaped enum: `Some(bytes)` for sized types,
/// `None` otherwise). Any other field name is a `span_bug`.
pub(crate) fn write_type_info(
&mut self,
ty: Ty<'tcx>,
dest: &impl Writeable<'tcx, CtfeProvenance>,
) -> InterpResult<'tcx> {
// Resolve the `Type` lang-item struct and check the destination really has that type.
let ty_struct = self.tcx.require_lang_item(LangItem::Type, self.tcx.span);
let ty_struct = self.tcx.type_of(ty_struct).no_bound_vars().unwrap();
assert_eq!(ty_struct, dest.layout().ty);
let ty_struct = ty_struct.ty_adt_def().unwrap().non_enum_variant();
// Fill all fields of the `TypeInfo` struct.
for (idx, field) in ty_struct.fields.iter_enumerated() {
let field_dest = self.project_field(dest, idx)?;
// Looks up the enum variant called `name` in this field's type and projects
// `field_dest` down to it; panics if no variant has that name.
let downcast = |name: Symbol| {
let variants = field_dest.layout().ty.ty_adt_def().unwrap().variants();
let variant_id = variants
.iter_enumerated()
.find(|(_idx, var)| var.name == name)
.unwrap_or_else(|| panic!("got {name} but expected one of {variants:#?}"))
.0;
interp_ok((variant_id, self.project_downcast(&field_dest, variant_id)?))
};
match field.name {
sym::kind => {
let variant_index = match ty.kind() {
ty::Tuple(fields) => {
let (variant, variant_place) = downcast(sym::Tuple)?;
// project to the single tuple variant field of `type_info::Tuple` struct type
let tuple_place = self.project_field(&variant_place, FieldIdx::ZERO)?;
// The `Tuple` payload struct is expected to have exactly one field.
assert_eq!(
1,
tuple_place
.layout()
.ty
.ty_adt_def()
.unwrap()
.non_enum_variant()
.fields
.len()
);
self.write_tuple_fields(tuple_place, fields, ty)?;
variant
}
// For now just merge all primitives into one `Leaf` variant with no data
ty::Uint(_) | ty::Int(_) | ty::Float(_) | ty::Char | ty::Bool => {
downcast(sym::Leaf)?.0
}
// Every other kind is reported as the dataless `Other` variant for now.
ty::Adt(_, _)
| ty::Foreign(_)
| ty::Str
| ty::Array(_, _)
| ty::Pat(_, _)
| ty::Slice(_)
| ty::RawPtr(..)
| ty::Ref(..)
| ty::FnDef(..)
| ty::FnPtr(..)
| ty::UnsafeBinder(..)
| ty::Dynamic(..)
| ty::Closure(..)
| ty::CoroutineClosure(..)
| ty::Coroutine(..)
| ty::CoroutineWitness(..)
| ty::Never
| ty::Alias(..)
| ty::Param(_)
| ty::Bound(..)
| ty::Placeholder(_)
| ty::Infer(..)
| ty::Error(_) => downcast(sym::Other)?.0,
};
self.write_discriminant(variant_index, &field_dest)?
}
sym::size => {
let layout = self.layout_of(ty)?;
// Sized type: write `Some(size_in_bytes)`; unsized: write `None`.
let variant_index = if layout.is_sized() {
let (variant, variant_place) = downcast(sym::Some)?;
let size_field_place =
self.project_field(&variant_place, FieldIdx::ZERO)?;
self.write_scalar(
ScalarInt::try_from_target_usize(layout.size.bytes(), self.tcx.tcx)
.unwrap(),
&size_field_place,
)?;
variant
} else {
downcast(sym::None)?.0
};
self.write_discriminant(variant_index, &field_dest)?;
}
other => span_bug!(self.tcx.span, "unknown `Type` field {other}"),
}
}
interp_ok(())
}
/// Fills the `type_info::Tuple` payload in `tuple_place` for a tuple type
/// `tuple_ty` with element types `fields`: allocates an immutable array of
/// `type_info::Field` values (one per element, via `write_field`) and writes
/// a slice pointer to it into the payload's single `fields` field.
pub(crate) fn write_tuple_fields(
&mut self,
tuple_place: impl Writeable<'tcx, CtfeProvenance>,
fields: &[Ty<'tcx>],
tuple_ty: Ty<'tcx>,
) -> InterpResult<'tcx> {
// project into the `type_info::Tuple::fields` field
let fields_slice_place = self.project_field(&tuple_place, FieldIdx::ZERO)?;
// get the `type_info::Field` type from `fields: &[Field]`
let field_type = fields_slice_place
.layout()
.ty
.builtin_deref(false)
.unwrap()
.sequence_element_type(self.tcx.tcx);
// Create an array with as many elements as the number of fields in the inspected tuple
let fields_layout =
self.layout_of(Ty::new_array(self.tcx.tcx, field_type, fields.len() as u64))?;
let fields_place = self.allocate(fields_layout, MemoryKind::Stack)?;
let mut fields_places = self.project_array_fields(&fields_place)?;
let tuple_layout = self.layout_of(tuple_ty)?;
// Write one `Field` descriptor per tuple element.
while let Some((i, place)) = fields_places.next(self)? {
let field_ty = fields[i as usize];
self.write_field(field_ty, place, tuple_layout, i)?;
}
// Freeze the array and store a wide (ptr, len) slice reference to it.
let fields_place = fields_place.map_provenance(CtfeProvenance::as_immutable);
let ptr = Immediate::new_slice(fields_place.ptr(), fields.len() as u64, self);
self.write_immediate(ptr, &fields_slice_place)
}
/// Writes one `type_info::Field` value into `place`, describing field number
/// `idx` (of type `field_ty`) of the aggregate whose layout is `layout`:
/// the `ty` field gets the field type's `TypeId`, the `offset` field gets its
/// byte offset. Any other field name is a `span_bug`.
fn write_field(
&mut self,
field_ty: Ty<'tcx>,
place: MPlaceTy<'tcx>,
layout: TyAndLayout<'tcx>,
idx: u64,
) -> InterpResult<'tcx> {
for (field_idx, field_ty_field) in
place.layout.ty.ty_adt_def().unwrap().non_enum_variant().fields.iter_enumerated()
{
let field_place = self.project_field(&place, field_idx)?;
match field_ty_field.name {
sym::ty => self.write_type_id(field_ty, &field_place)?,
sym::offset => {
// Byte offset of field `idx` within the enclosing aggregate's layout.
let offset = layout.fields.offset(idx as usize);
self.write_scalar(
ScalarInt::try_from_target_usize(offset.bytes(), self.tcx.tcx).unwrap(),
&field_place,
)?;
}
other => {
span_bug!(self.tcx.def_span(field_ty_field.did), "unimplemented field {other}")
}
}
}
interp_ok(())
}
}

View file

@ -27,6 +27,7 @@ use super::{
throw_ub_custom, throw_ub_format,
};
use crate::fluent_generated as fluent;
use crate::interpret::Writeable;
#[derive(Copy, Clone, Debug, PartialEq, Eq)]
enum MulAddType {
@ -68,10 +69,10 @@ pub(crate) fn alloc_type_name<'tcx>(tcx: TyCtxt<'tcx>, ty: Ty<'tcx>) -> (AllocId
}
impl<'tcx, M: Machine<'tcx>> InterpCx<'tcx, M> {
/// Generates a value of `TypeId` for `ty` in-place.
fn write_type_id(
pub(crate) fn write_type_id(
&mut self,
ty: Ty<'tcx>,
dest: &PlaceTy<'tcx, M::Provenance>,
dest: &impl Writeable<'tcx, M::Provenance>,
) -> InterpResult<'tcx, ()> {
let tcx = self.tcx;
let type_id_hash = tcx.type_id_hash(ty).as_u128();

View file

@ -235,7 +235,7 @@ declare_features! (
(accepted, generic_param_attrs, "1.27.0", Some(48848)),
/// Allows the `#[global_allocator]` attribute.
(accepted, global_allocator, "1.28.0", Some(27389)),
// FIXME: explain `globs`.
/// Allows globs imports (`use module::*;`) to import all public items from a module.
(accepted, globs, "1.0.0", None),
/// Allows using `..=X` as a pattern.
(accepted, half_open_range_patterns, "1.66.0", Some(67264)),

View file

@ -233,8 +233,6 @@ declare_features! (
(internal, link_cfg, "1.14.0", None),
/// Allows using `?Trait` trait bounds in more contexts.
(internal, more_maybe_bounds, "1.82.0", None),
/// Allows the `multiple_supertrait_upcastable` lint.
(unstable, multiple_supertrait_upcastable, "1.69.0", None),
/// Allow negative trait bounds. This is an internal-only feature for testing the trait solver!
(internal, negative_bounds, "1.71.0", None),
/// Set the maximum pattern complexity allowed (not limited by default).
@ -569,6 +567,8 @@ declare_features! (
(unstable, more_qualified_paths, "1.54.0", Some(86935)),
/// The `movrs` target feature on x86.
(unstable, movrs_target_feature, "1.88.0", Some(137976)),
/// Allows the `multiple_supertrait_upcastable` lint.
(unstable, multiple_supertrait_upcastable, "1.69.0", Some(150833)),
/// Allows the `#[must_not_suspend]` attribute.
(unstable, must_not_suspend, "1.57.0", Some(83310)),
/// Allows `mut ref` and `mut ref mut` identifier patterns.

View file

@ -11,7 +11,7 @@ use rustc_error_messages::{DiagArgValue, IntoDiagArg};
use rustc_macros::{Decodable, Encodable, HashStable_Generic, PrintAttribute};
use rustc_span::def_id::DefId;
use rustc_span::hygiene::Transparency;
use rustc_span::{Ident, Span, Symbol};
use rustc_span::{ErrorGuaranteed, Ident, Span, Symbol};
pub use rustc_target::spec::SanitizerSet;
use thin_vec::ThinVec;
@ -19,9 +19,22 @@ use crate::attrs::pretty_printing::PrintAttribute;
use crate::limit::Limit;
use crate::{DefaultBodyStability, PartialConstStability, RustcVersion, Stability};
/// How an EII (externally implementable item) implementation was resolved to
/// the extern item it implements.
#[derive(Copy, Clone, Debug, HashStable_Generic, Encodable, Decodable, PrintAttribute)]
pub enum EiiImplResolution {
/// Usually, finding the extern item that an EII implementation implements means finding
/// the `DefId` of the associated attribute macro, and looking at *its* attributes to find
/// what foreign item it's associated with.
Macro(DefId),
/// Sometimes though, we already know statically and can skip some name resolution.
/// Stored together with the EII's name for diagnostics.
Known(EiiDecl),
/// For when resolution failed, but we want to continue compilation.
Error(ErrorGuaranteed),
}
#[derive(Copy, Clone, Debug, HashStable_Generic, Encodable, Decodable, PrintAttribute)]
pub struct EiiImpl {
pub eii_macro: DefId,
pub resolution: EiiImplResolution,
pub impl_marked_unsafe: bool,
pub span: Span,
pub inner_span: Span,
@ -33,7 +46,7 @@ pub struct EiiDecl {
pub eii_extern_target: DefId,
/// whether or not it is unsafe to implement this EII
pub impl_unsafe: bool,
pub span: Span,
pub name: Ident,
}
#[derive(Copy, Clone, PartialEq, Encodable, Decodable, Debug, HashStable_Generic, PrintAttribute)]
@ -867,6 +880,9 @@ pub enum AttributeKind {
/// Represents `#[rustc_coherence_is_core]`
RustcCoherenceIsCore(Span),
/// Represents `#[rustc_has_incoherent_inherent_impls]`
RustcHasIncoherentInherentImpls,
/// Represents `#[rustc_layout_scalar_valid_range_end]`.
RustcLayoutScalarValidRangeEnd(Box<u128>, Span),

View file

@ -95,6 +95,7 @@ impl AttributeKind {
Repr { .. } => No,
RustcBuiltinMacro { .. } => Yes,
RustcCoherenceIsCore(..) => No,
RustcHasIncoherentInherentImpls => Yes,
RustcLayoutScalarValidRangeEnd(..) => Yes,
RustcLayoutScalarValidRangeStart(..) => Yes,
RustcLegacyConstGenerics { .. } => Yes,

View file

@ -518,6 +518,7 @@ pub enum ConstArgKind<'hir, Unambig = ()> {
/// This variant is not always used to represent inference consts, sometimes
/// [`GenericArg::Infer`] is used instead.
Infer(Unambig),
Literal(LitKind),
}
#[derive(Clone, Copy, Debug, HashStable_Generic)]
@ -1400,6 +1401,14 @@ impl AttributeExt for Attribute {
}
}
#[inline]
fn deprecation_note(&self) -> Option<Symbol> {
match &self {
Attribute::Parsed(AttributeKind::Deprecation { deprecation, .. }) => deprecation.note,
_ => None,
}
}
fn is_automatically_derived_attr(&self) -> bool {
matches!(self, Attribute::Parsed(AttributeKind::AutomaticallyDerived(..)))
}
@ -2616,6 +2625,12 @@ impl Expr<'_> {
// them being used only for its side-effects.
base.can_have_side_effects()
}
ExprKind::Binary(_, lhs, rhs) => {
// This isn't exactly true for all `Binary`, but we are using this
// method exclusively for diagnostics and there's a *cultural* pressure against
// them being used only for its side-effects.
lhs.can_have_side_effects() || rhs.can_have_side_effects()
}
ExprKind::Struct(_, fields, init) => {
let init_side_effects = match init {
StructTailExpr::Base(init) => init.can_have_side_effects(),
@ -2638,13 +2653,13 @@ impl Expr<'_> {
},
args,
) => args.iter().any(|arg| arg.can_have_side_effects()),
ExprKind::Repeat(arg, _) => arg.can_have_side_effects(),
ExprKind::If(..)
| ExprKind::Match(..)
| ExprKind::MethodCall(..)
| ExprKind::Call(..)
| ExprKind::Closure { .. }
| ExprKind::Block(..)
| ExprKind::Repeat(..)
| ExprKind::Break(..)
| ExprKind::Continue(..)
| ExprKind::Ret(..)
@ -2655,7 +2670,6 @@ impl Expr<'_> {
| ExprKind::InlineAsm(..)
| ExprKind::AssignOp(..)
| ExprKind::ConstBlock(..)
| ExprKind::Binary(..)
| ExprKind::Yield(..)
| ExprKind::DropTemps(..)
| ExprKind::Err(_) => true,

View file

@ -1104,6 +1104,7 @@ pub fn walk_const_arg<'v, V: Visitor<'v>>(
ConstArgKind::Path(qpath) => visitor.visit_qpath(qpath, *hir_id, qpath.span()),
ConstArgKind::Anon(anon) => visitor.visit_anon_const(*anon),
ConstArgKind::Error(_) => V::Result::output(), // errors and spans are not important
ConstArgKind::Literal(..) => V::Result::output(), // FIXME(mcga)
}
}

View file

@ -278,6 +278,7 @@ language_item_table! {
PartialOrd, sym::partial_ord, partial_ord_trait, Target::Trait, GenericRequirement::Exact(1);
CVoid, sym::c_void, c_void, Target::Enum, GenericRequirement::None;
Type, sym::type_info, type_struct, Target::Struct, GenericRequirement::None;
TypeId, sym::type_id, type_id, Target::Struct, GenericRequirement::None;
// A number of panic-related lang items. The `panic` item corresponds to divide-by-zero and

View file

@ -32,7 +32,7 @@ pub mod lints;
pub mod pat_util;
mod stability;
mod stable_hash_impls;
mod target;
pub mod target;
pub mod weak_lang_items;
#[cfg(test)]

View file

@ -32,21 +32,21 @@ use crate::errors::{EiiWithGenerics, LifetimesOrBoundsMismatchOnEii};
pub(crate) fn compare_eii_function_types<'tcx>(
tcx: TyCtxt<'tcx>,
external_impl: LocalDefId,
declaration: DefId,
foreign_item: DefId,
eii_name: Symbol,
eii_attr_span: Span,
) -> Result<(), ErrorGuaranteed> {
check_is_structurally_compatible(tcx, external_impl, declaration, eii_name, eii_attr_span)?;
check_is_structurally_compatible(tcx, external_impl, foreign_item, eii_name, eii_attr_span)?;
let external_impl_span = tcx.def_span(external_impl);
let cause = ObligationCause::new(
external_impl_span,
external_impl,
ObligationCauseCode::CompareEii { external_impl, declaration },
ObligationCauseCode::CompareEii { external_impl, declaration: foreign_item },
);
// FIXME(eii): even if we don't support generic functions, we should support explicit outlive bounds here
let param_env = tcx.param_env(declaration);
let param_env = tcx.param_env(foreign_item);
let infcx = &tcx.infer_ctxt().build(TypingMode::non_body_analysis());
let ocx = ObligationCtxt::new_with_diagnostics(infcx);
@ -62,7 +62,7 @@ pub(crate) fn compare_eii_function_types<'tcx>(
let mut wf_tys = FxIndexSet::default();
let norm_cause = ObligationCause::misc(external_impl_span, external_impl);
let declaration_sig = tcx.fn_sig(declaration).instantiate_identity();
let declaration_sig = tcx.fn_sig(foreign_item).instantiate_identity();
let declaration_sig = tcx.liberate_late_bound_regions(external_impl.into(), declaration_sig);
debug!(?declaration_sig);
@ -103,7 +103,7 @@ pub(crate) fn compare_eii_function_types<'tcx>(
cause,
param_env,
terr,
(declaration, declaration_sig),
(foreign_item, declaration_sig),
(external_impl, external_impl_sig),
eii_attr_span,
eii_name,

View file

@ -213,6 +213,7 @@ fn intrinsic_operation_unsafety(tcx: TyCtxt<'_>, intrinsic_id: LocalDefId) -> hi
| sym::type_id
| sym::type_id_eq
| sym::type_name
| sym::type_of
| sym::ub_checks
| sym::variant_count
| sym::vtable_for
@ -308,13 +309,22 @@ pub(crate) fn check_intrinsic_type(
sym::needs_drop => (1, 0, vec![], tcx.types.bool),
sym::type_name => (1, 0, vec![], Ty::new_static_str(tcx)),
sym::type_id => {
(1, 0, vec![], tcx.type_of(tcx.lang_items().type_id().unwrap()).instantiate_identity())
}
sym::type_id => (
1,
0,
vec![],
tcx.type_of(tcx.lang_items().type_id().unwrap()).no_bound_vars().unwrap(),
),
sym::type_id_eq => {
let type_id = tcx.type_of(tcx.lang_items().type_id().unwrap()).instantiate_identity();
let type_id = tcx.type_of(tcx.lang_items().type_id().unwrap()).no_bound_vars().unwrap();
(0, 0, vec![type_id, type_id], tcx.types.bool)
}
sym::type_of => (
0,
0,
vec![tcx.type_of(tcx.lang_items().type_id().unwrap()).no_bound_vars().unwrap()],
tcx.type_of(tcx.lang_items().type_struct().unwrap()).no_bound_vars().unwrap(),
),
sym::offload => (
3,
0,

View file

@ -6,7 +6,7 @@ use rustc_abi::{ExternAbi, ScalableElt};
use rustc_data_structures::fx::{FxHashSet, FxIndexMap, FxIndexSet};
use rustc_errors::codes::*;
use rustc_errors::{Applicability, ErrorGuaranteed, pluralize, struct_span_code_err};
use rustc_hir::attrs::{AttributeKind, EiiDecl, EiiImpl};
use rustc_hir::attrs::{AttributeKind, EiiDecl, EiiImpl, EiiImplResolution};
use rustc_hir::def::{DefKind, Res};
use rustc_hir::def_id::{DefId, LocalDefId};
use rustc_hir::lang_items::LangItem;
@ -1196,27 +1196,28 @@ fn check_item_fn(
fn check_eiis(tcx: TyCtxt<'_>, def_id: LocalDefId) {
// does the function have an EiiImpl attribute? that contains the defid of a *macro*
// that was used to mark the implementation. This is a two step process.
for EiiImpl { eii_macro, span, .. } in
for EiiImpl { resolution, span, .. } in
find_attr!(tcx.get_all_attrs(def_id), AttributeKind::EiiImpls(impls) => impls)
.into_iter()
.flatten()
{
// we expect this macro to have the `EiiMacroFor` attribute, that points to a function
// signature that we'd like to compare the function we're currently checking with
if let Some(eii_extern_target) = find_attr!(tcx.get_all_attrs(*eii_macro), AttributeKind::EiiExternTarget(EiiDecl {eii_extern_target, ..}) => *eii_extern_target)
{
let _ = compare_eii_function_types(
tcx,
def_id,
eii_extern_target,
tcx.item_name(*eii_macro),
*span,
);
} else {
panic!(
"EII impl macro {eii_macro:?} did not have an eii extern target attribute pointing to a foreign function"
)
}
let (foreign_item, name) = match resolution {
EiiImplResolution::Macro(def_id) => {
// we expect this macro to have the `EiiMacroFor` attribute, that points to a function
// signature that we'd like to compare the function we're currently checking with
if let Some(foreign_item) = find_attr!(tcx.get_all_attrs(*def_id), AttributeKind::EiiExternTarget(EiiDecl {eii_extern_target: t, ..}) => *t)
{
(foreign_item, tcx.item_name(*def_id))
} else {
tcx.dcx().span_delayed_bug(*span, "resolved to something that's not an EII");
continue;
}
}
EiiImplResolution::Known(decl) => (decl.eii_extern_target, decl.name.name),
EiiImplResolution::Error(_eg) => continue,
};
let _ = compare_eii_function_types(tcx, def_id, foreign_item, name, *span);
}
}

View file

@ -15,7 +15,7 @@ use rustc_hir::find_attr;
use rustc_middle::bug;
use rustc_middle::ty::fast_reject::{SimplifiedType, TreatParams, simplify_type};
use rustc_middle::ty::{self, CrateInherentImpls, Ty, TyCtxt};
use rustc_span::{ErrorGuaranteed, sym};
use rustc_span::ErrorGuaranteed;
use crate::errors;
@ -79,13 +79,15 @@ impl<'tcx> InherentCollect<'tcx> {
}
if self.tcx.features().rustc_attrs() {
let items = self.tcx.associated_item_def_ids(impl_def_id);
if !self.tcx.has_attr(ty_def_id, sym::rustc_has_incoherent_inherent_impls) {
if !find_attr!(
self.tcx.get_all_attrs(ty_def_id),
AttributeKind::RustcHasIncoherentInherentImpls
) {
let impl_span = self.tcx.def_span(impl_def_id);
return Err(self.tcx.dcx().emit_err(errors::InherentTyOutside { span: impl_span }));
}
let items = self.tcx.associated_item_def_ids(impl_def_id);
for &impl_item in items {
if !find_attr!(
self.tcx.get_all_attrs(impl_item),

View file

@ -420,7 +420,22 @@ fn infer_placeholder_type<'tcx>(
kind: &'static str,
) -> Ty<'tcx> {
let tcx = cx.tcx();
let ty = tcx.typeck(def_id).node_type(hir_id);
// If the type is omitted on a #[type_const] we can't run
// type check on since that requires the const have a body
// which type_consts don't.
let ty = if tcx.is_type_const(def_id.to_def_id()) {
if let Some(trait_item_def_id) = tcx.trait_item_of(def_id.to_def_id()) {
tcx.type_of(trait_item_def_id).instantiate_identity()
} else {
Ty::new_error_with_message(
tcx,
ty_span,
"constant with #[type_const] requires an explicit type",
)
}
} else {
tcx.typeck(def_id).node_type(hir_id)
};
// If this came from a free `const` or `static mut?` item,
// then the user may have written e.g. `const A = 42;`.

View file

@ -22,6 +22,7 @@ pub mod generics;
use std::assert_matches::assert_matches;
use std::slice;
use rustc_ast::LitKind;
use rustc_data_structures::fx::{FxHashSet, FxIndexMap, FxIndexSet};
use rustc_errors::codes::*;
use rustc_errors::{
@ -2391,6 +2392,13 @@ impl<'tcx> dyn HirTyLowerer<'tcx> + '_ {
hir::ConstArgKind::Anon(anon) => self.lower_const_arg_anon(anon),
hir::ConstArgKind::Infer(()) => self.ct_infer(None, const_arg.span),
hir::ConstArgKind::Error(e) => ty::Const::new_error(tcx, e),
hir::ConstArgKind::Literal(kind) if let FeedConstTy::WithTy(anon_const_type) = feed => {
self.lower_const_arg_literal(&kind, anon_const_type, const_arg.span)
}
hir::ConstArgKind::Literal(..) => {
let e = self.dcx().span_err(const_arg.span, "literal of unknown type");
ty::Const::new_error(tcx, e)
}
}
}
@ -2773,6 +2781,13 @@ impl<'tcx> dyn HirTyLowerer<'tcx> + '_ {
}
}
#[instrument(skip(self), level = "debug")]
fn lower_const_arg_literal(&self, kind: &LitKind, ty: Ty<'tcx>, span: Span) -> Const<'tcx> {
let tcx = self.tcx();
let input = LitToConstInput { lit: *kind, ty, neg: false };
tcx.at(span).lit_to_const(input)
}
#[instrument(skip(self), level = "debug")]
fn try_lower_anon_const_lit(
&self,

View file

@ -21,7 +21,7 @@ use rustc_hir::{
GenericParam, GenericParamKind, HirId, ImplicitSelfKind, LifetimeParamKind, Node, PatKind,
PreciseCapturingArg, RangeEnd, Term, TyPatKind,
};
use rustc_span::source_map::SourceMap;
use rustc_span::source_map::{SourceMap, Spanned};
use rustc_span::{DUMMY_SP, FileName, Ident, Span, Symbol, kw, sym};
use {rustc_ast as ast, rustc_hir as hir};
@ -1157,6 +1157,10 @@ impl<'a> State<'a> {
ConstArgKind::Anon(anon) => self.print_anon_const(anon),
ConstArgKind::Error(_) => self.word("/*ERROR*/"),
ConstArgKind::Infer(..) => self.word("_"),
ConstArgKind::Literal(node) => {
let span = const_arg.span;
self.print_literal(&Spanned { span, node: *node })
}
}
}

View file

@ -54,7 +54,7 @@ use rustc_middle::ty::adjustment::{
};
use rustc_middle::ty::error::TypeError;
use rustc_middle::ty::{self, Ty, TyCtxt, TypeVisitableExt};
use rustc_span::{BytePos, DUMMY_SP, DesugaringKind, Span};
use rustc_span::{BytePos, DUMMY_SP, Span};
use rustc_trait_selection::infer::InferCtxtExt as _;
use rustc_trait_selection::solve::inspect::{self, InferCtxtProofTreeExt, ProofTreeVisitor};
use rustc_trait_selection::solve::{Certainty, Goal, NoSolution};
@ -1828,10 +1828,9 @@ impl<'tcx> CoerceMany<'tcx> {
// If the block is from an external macro or try (`?`) desugaring, then
// do not suggest adding a semicolon, because there's nowhere to put it.
// See issues #81943 and #87051.
&& matches!(
cond_expr.span.desugaring_kind(),
None | Some(DesugaringKind::WhileLoop)
)
// Similarly, if the block is from a loop desugaring, then also do not
// suggest adding a semicolon. See issue #150850.
&& cond_expr.span.desugaring_kind().is_none()
&& !cond_expr.span.in_external_macro(fcx.tcx.sess.source_map())
&& !matches!(
cond_expr.kind,

View file

@ -10,8 +10,8 @@ use rustc_hir::def::{CtorKind, CtorOf, DefKind, Res};
use rustc_hir::lang_items::LangItem;
use rustc_hir::{
self as hir, Arm, CoroutineDesugaring, CoroutineKind, CoroutineSource, Expr, ExprKind,
GenericBound, HirId, Node, PatExpr, PatExprKind, Path, QPath, Stmt, StmtKind, TyKind,
WherePredicateKind, expr_needs_parens, is_range_literal,
GenericBound, HirId, LoopSource, Node, PatExpr, PatExprKind, Path, QPath, Stmt, StmtKind,
TyKind, WherePredicateKind, expr_needs_parens, is_range_literal,
};
use rustc_hir_analysis::hir_ty_lowering::HirTyLowerer;
use rustc_hir_analysis::suggest_impl_trait;
@ -1170,15 +1170,23 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
}
let found = self.resolve_vars_if_possible(found);
let in_loop = self.is_loop(id)
|| self
.tcx
let innermost_loop = if self.is_loop(id) {
Some(self.tcx.hir_node(id))
} else {
self.tcx
.hir_parent_iter(id)
.take_while(|(_, node)| {
// look at parents until we find the first body owner
node.body_id().is_none()
})
.any(|(parent_id, _)| self.is_loop(parent_id));
.find_map(|(parent_id, node)| self.is_loop(parent_id).then_some(node))
};
let can_break_with_value = innermost_loop.is_some_and(|node| {
matches!(
node,
Node::Expr(Expr { kind: ExprKind::Loop(_, _, LoopSource::Loop, ..), .. })
)
});
let in_local_statement = self.is_local_statement(id)
|| self
@ -1186,7 +1194,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
.hir_parent_iter(id)
.any(|(parent_id, _)| self.is_local_statement(parent_id));
if in_loop && in_local_statement {
if can_break_with_value && in_local_statement {
err.multipart_suggestion(
"you might have meant to break the loop with this value",
vec![

View file

@ -289,7 +289,7 @@ struct Canonicalizer<'cx, 'tcx> {
/// Set to `None` to disable the resolution of inference variables.
infcx: Option<&'cx InferCtxt<'tcx>>,
tcx: TyCtxt<'tcx>,
variables: SmallVec<[CanonicalVarKind<'tcx>; 8]>,
var_kinds: SmallVec<[CanonicalVarKind<'tcx>; 8]>,
query_state: &'cx mut OriginalQueryValues<'tcx>,
// Note that indices is only used once `var_values` is big enough to be
// heap-allocated.
@ -507,7 +507,7 @@ impl<'cx, 'tcx> Canonicalizer<'cx, 'tcx> {
{
let base = Canonical {
max_universe: ty::UniverseIndex::ROOT,
variables: List::empty(),
var_kinds: List::empty(),
value: (),
};
Canonicalizer::canonicalize_with_base(
@ -548,7 +548,7 @@ impl<'cx, 'tcx> Canonicalizer<'cx, 'tcx> {
tcx,
canonicalize_mode: canonicalize_region_mode,
needs_canonical_flags,
variables: SmallVec::from_slice(base.variables),
var_kinds: SmallVec::from_slice(base.var_kinds),
query_state,
indices: FxHashMap::default(),
sub_root_lookup_table: Default::default(),
@ -569,16 +569,16 @@ impl<'cx, 'tcx> Canonicalizer<'cx, 'tcx> {
// anymore.
debug_assert!(!out_value.has_infer() && !out_value.has_placeholders());
let canonical_variables =
tcx.mk_canonical_var_kinds(&canonicalizer.universe_canonicalized_variables());
let canonical_var_kinds =
tcx.mk_canonical_var_kinds(&canonicalizer.universe_canonicalized_var_kinds());
let max_universe = canonical_variables
let max_universe = canonical_var_kinds
.iter()
.map(|cvar| cvar.universe())
.max()
.unwrap_or(ty::UniverseIndex::ROOT);
Canonical { max_universe, variables: canonical_variables, value: (base.value, out_value) }
Canonical { max_universe, var_kinds: canonical_var_kinds, value: (base.value, out_value) }
}
/// Creates a canonical variable replacing `kind` from the input,
@ -590,7 +590,7 @@ impl<'cx, 'tcx> Canonicalizer<'cx, 'tcx> {
var_kind: CanonicalVarKind<'tcx>,
value: GenericArg<'tcx>,
) -> BoundVar {
let Canonicalizer { variables, query_state, indices, .. } = self;
let Canonicalizer { var_kinds, query_state, indices, .. } = self;
let var_values = &mut query_state.var_values;
@ -607,7 +607,7 @@ impl<'cx, 'tcx> Canonicalizer<'cx, 'tcx> {
}
}
// This code is hot. `variables` and `var_values` are usually small
// This code is hot. `var_kinds` and `var_values` are usually small
// (fewer than 8 elements ~95% of the time). They are SmallVec's to
// avoid allocations in those cases. We also don't use `indices` to
// determine if a kind has been seen before until the limit of 8 has
@ -620,10 +620,10 @@ impl<'cx, 'tcx> Canonicalizer<'cx, 'tcx> {
BoundVar::new(idx)
} else {
// `kind` isn't present in `var_values`. Append it. Likewise
// for `var_kind` and `variables`.
variables.push(var_kind);
// for `var_kind` and `var_kinds`.
var_kinds.push(var_kind);
var_values.push(value);
assert_eq!(variables.len(), var_values.len());
assert_eq!(var_kinds.len(), var_values.len());
// If `var_values` has become big enough to be heap-allocated,
// fill up `indices` to facilitate subsequent lookups.
@ -641,10 +641,10 @@ impl<'cx, 'tcx> Canonicalizer<'cx, 'tcx> {
} else {
// `var_values` is large. Do a hashmap search via `indices`.
*indices.entry(value).or_insert_with(|| {
variables.push(var_kind);
var_kinds.push(var_kind);
var_values.push(value);
assert_eq!(variables.len(), var_values.len());
BoundVar::new(variables.len() - 1)
assert_eq!(var_kinds.len(), var_values.len());
BoundVar::new(var_kinds.len() - 1)
})
}
}
@ -652,16 +652,16 @@ impl<'cx, 'tcx> Canonicalizer<'cx, 'tcx> {
fn get_or_insert_sub_root(&mut self, vid: ty::TyVid) -> ty::BoundVar {
let root_vid = self.infcx.unwrap().sub_unification_table_root_var(vid);
let idx =
*self.sub_root_lookup_table.entry(root_vid).or_insert_with(|| self.variables.len());
*self.sub_root_lookup_table.entry(root_vid).or_insert_with(|| self.var_kinds.len());
ty::BoundVar::from(idx)
}
/// Replaces the universe indexes used in `var_values` with their index in
/// `query_state.universe_map`. This minimizes the maximum universe used in
/// the canonicalized value.
fn universe_canonicalized_variables(self) -> SmallVec<[CanonicalVarKind<'tcx>; 8]> {
fn universe_canonicalized_var_kinds(self) -> SmallVec<[CanonicalVarKind<'tcx>; 8]> {
if self.query_state.universe_map.len() == 1 {
return self.variables;
return self.var_kinds;
}
let reverse_universe_map: FxHashMap<ty::UniverseIndex, ty::UniverseIndex> = self
@ -672,7 +672,7 @@ impl<'cx, 'tcx> Canonicalizer<'cx, 'tcx> {
.map(|(idx, universe)| (*universe, ty::UniverseIndex::from_usize(idx)))
.collect();
self.variables
self.var_kinds
.iter()
.map(|&kind| match kind {
CanonicalVarKind::Int | CanonicalVarKind::Float => {

View file

@ -43,7 +43,7 @@ impl<'tcx, V> Canonical<'tcx, V> {
where
T: TypeFoldable<TyCtxt<'tcx>>,
{
assert_eq!(self.variables.len(), var_values.len());
assert_eq!(self.var_kinds.len(), var_values.len());
let value = projection_fn(&self.value);
instantiate_value(tcx, var_values, value)
}

View file

@ -68,7 +68,7 @@ impl<'tcx> InferCtxt<'tcx> {
.collect();
let var_values =
CanonicalVarValues::instantiate(self.tcx, &canonical.variables, |var_values, info| {
CanonicalVarValues::instantiate(self.tcx, &canonical.var_kinds, |var_values, info| {
self.instantiate_canonical_var(span, info, &var_values, |ui| universes[ui])
});
let result = canonical.instantiate(self.tcx, &var_values);

View file

@ -430,7 +430,7 @@ impl<'tcx> InferCtxt<'tcx> {
// result, then we can type the corresponding value from the
// input. See the example above.
let mut opt_values: IndexVec<BoundVar, Option<GenericArg<'tcx>>> =
IndexVec::from_elem_n(None, query_response.variables.len());
IndexVec::from_elem_n(None, query_response.var_kinds.len());
for (original_value, result_value) in iter::zip(&original_values.var_values, result_values)
{
@ -442,7 +442,7 @@ impl<'tcx> InferCtxt<'tcx> {
// more involved. They are also a lot rarer than region variables.
if let ty::Bound(index_kind, b) = *result_value.kind()
&& !matches!(
query_response.variables[b.var.as_usize()],
query_response.var_kinds[b.var.as_usize()],
CanonicalVarKind::Ty { .. }
)
{
@ -472,8 +472,8 @@ impl<'tcx> InferCtxt<'tcx> {
// given variable in the loop above, use that. Otherwise, use
// a fresh inference variable.
let tcx = self.tcx;
let variables = query_response.variables;
let var_values = CanonicalVarValues::instantiate(tcx, variables, |var_values, kind| {
let var_kinds = query_response.var_kinds;
let var_values = CanonicalVarValues::instantiate(tcx, var_kinds, |var_values, kind| {
if kind.universe() != ty::UniverseIndex::ROOT {
// A variable from inside a binder of the query. While ideally these shouldn't
// exist at all, we have to deal with them for now.

View file

@ -1190,6 +1190,8 @@ impl UnusedParens {
// `&(a..=b)`, there is a recursive `check_pat` on `a` and `b`, but we will assume
// that if there are unnecessary parens they serve a purpose of readability.
PatKind::Range(..) => return,
// Parentheses may be necessary to disambiguate precedence in guard patterns.
PatKind::Guard(..) => return,
// Avoid `p0 | .. | pn` if we should.
PatKind::Or(..) if avoid_or => return,
// Avoid `mut x` and `mut x @ p` if we should:

View file

@ -1,5 +1,5 @@
use rustc_data_structures::fx::FxIndexMap;
use rustc_hir::attrs::{AttributeKind, EiiDecl, EiiImpl};
use rustc_hir::attrs::{AttributeKind, EiiDecl, EiiImpl, EiiImplResolution};
use rustc_hir::def_id::DefId;
use rustc_hir::find_attr;
use rustc_middle::query::LocalCrate;
@ -9,7 +9,7 @@ use rustc_middle::ty::TyCtxt;
pub(crate) type EiiMapEncodedKeyValue = (DefId, (EiiDecl, Vec<(DefId, EiiImpl)>));
pub(crate) type EiiMap = FxIndexMap<
DefId, // the defid of the macro that declared the eii
DefId, // the defid of the foreign item associated with the eii
(
// the corresponding declaration
EiiDecl,
@ -28,18 +28,34 @@ pub(crate) fn collect<'tcx>(tcx: TyCtxt<'tcx>, LocalCrate: LocalCrate) -> EiiMap
for i in
find_attr!(tcx.get_all_attrs(id), AttributeKind::EiiImpls(e) => e).into_iter().flatten()
{
eiis.entry(i.eii_macro)
.or_insert_with(|| {
let decl = match i.resolution {
EiiImplResolution::Macro(macro_defid) => {
// find the decl for this one if it wasn't in yet (maybe it's from the local crate? not very useful but not illegal)
(find_attr!(tcx.get_all_attrs(i.eii_macro), AttributeKind::EiiExternTarget(d) => *d).unwrap(), Default::default())
}).1.insert(id.into(), *i);
let Some(decl) = find_attr!(tcx.get_all_attrs(macro_defid), AttributeKind::EiiExternTarget(d) => *d)
else {
// skip if it doesn't have eii_extern_target (if we resolved to another macro that's not an EII)
tcx.dcx()
.span_delayed_bug(i.span, "resolved to something that's not an EII");
continue;
};
decl
}
EiiImplResolution::Known(decl) => decl,
EiiImplResolution::Error(_eg) => continue,
};
// FIXME(eii) remove extern target from encoded decl
eiis.entry(decl.eii_extern_target)
.or_insert_with(|| (decl, Default::default()))
.1
.insert(id.into(), *i);
}
// if we find a new declaration, add it to the list without a known implementation
if let Some(decl) =
find_attr!(tcx.get_all_attrs(id), AttributeKind::EiiExternTarget(d) => *d)
{
eiis.entry(id.into()).or_insert((decl, Default::default()));
eiis.entry(decl.eii_extern_target).or_insert((decl, Default::default()));
}
}

View file

@ -1444,6 +1444,7 @@ impl<'a, 'tcx> EncodeContext<'a, 'tcx> {
| hir::ConstArgKind::TupleCall(..)
| hir::ConstArgKind::Tup(..)
| hir::ConstArgKind::Path(..)
| hir::ConstArgKind::Literal(..)
| hir::ConstArgKind::Infer(..) => true,
hir::ConstArgKind::Anon(..) => false,
},
@ -1656,9 +1657,14 @@ impl<'a, 'tcx> EncodeContext<'a, 'tcx> {
empty_proc_macro!(self);
let externally_implementable_items = self.tcx.externally_implementable_items(LOCAL_CRATE);
self.lazy_array(externally_implementable_items.iter().map(|(decl_did, (decl, impls))| {
(*decl_did, (decl.clone(), impls.iter().map(|(impl_did, i)| (*impl_did, *i)).collect()))
}))
self.lazy_array(externally_implementable_items.iter().map(
|(foreign_item, (decl, impls))| {
(
*foreign_item,
(decl.clone(), impls.iter().map(|(impl_did, i)| (*impl_did, *i)).collect()),
)
},
))
}
#[instrument(level = "trace", skip(self))]

View file

@ -169,7 +169,7 @@ impl<'tcx> CanonicalParamEnvCache<'tcx> {
) {
return Canonical {
max_universe: ty::UniverseIndex::ROOT,
variables: List::empty(),
var_kinds: List::empty(),
value: key,
};
}

View file

@ -2,6 +2,7 @@ use std::borrow::Cow;
use rustc_abi::Align;
use rustc_hir::attrs::{InlineAttr, InstructionSetAttr, Linkage, OptimizeAttr, RtsanSetting};
use rustc_hir::def_id::DefId;
use rustc_macros::{HashStable, TyDecodable, TyEncodable};
use rustc_span::Symbol;
use rustc_target::spec::SanitizerSet;
@ -72,7 +73,7 @@ pub struct CodegenFnAttrs {
/// generate this function under its real name,
/// but *also* under the same name as this foreign function so that the foreign function has an implementation.
// FIXME: make "SymbolName<'tcx>"
pub foreign_item_symbol_aliases: Vec<(Symbol, Linkage, Visibility)>,
pub foreign_item_symbol_aliases: Vec<(DefId, Linkage, Visibility)>,
/// The `#[link_ordinal = "..."]` attribute, indicating an ordinal an
/// imported function has in the dynamic library. Note that this must not
/// be set when `link_name` is set. This is for foreign items with the

View file

@ -20,8 +20,8 @@ use rustc_span::hygiene::{
};
use rustc_span::source_map::Spanned;
use rustc_span::{
BlobDecoder, BytePos, ByteSymbol, CachingSourceMapView, ExpnData, ExpnHash, Pos,
RelativeBytePos, SourceFile, Span, SpanDecoder, SpanEncoder, StableSourceFileId, Symbol,
BlobDecoder, BytePos, ByteSymbol, CachingSourceMapView, ExpnData, ExpnHash, RelativeBytePos,
SourceFile, Span, SpanDecoder, SpanEncoder, StableSourceFileId, Symbol,
};
use crate::dep_graph::{DepNodeIndex, SerializedDepNodeIndex};
@ -652,7 +652,10 @@ impl<'a, 'tcx> SpanDecoder for CacheDecoder<'a, 'tcx> {
let dto = u32::decode(self);
let enclosing = self.tcx.source_span_untracked(parent.unwrap()).data_untracked();
(enclosing.lo + BytePos::from_u32(dlo), enclosing.lo + BytePos::from_u32(dto))
(
BytePos(enclosing.lo.0.wrapping_add(dlo)),
BytePos(enclosing.lo.0.wrapping_add(dto)),
)
}
TAG_FULL_SPAN => {
let file_lo_index = SourceFileIndex::decode(self);
@ -894,30 +897,33 @@ impl<'a, 'tcx> SpanEncoder for CacheEncoder<'a, 'tcx> {
return TAG_PARTIAL_SPAN.encode(self);
}
if let Some(parent) = span_data.parent {
let enclosing = self.tcx.source_span_untracked(parent).data_untracked();
if enclosing.contains(span_data) {
TAG_RELATIVE_SPAN.encode(self);
(span_data.lo - enclosing.lo).to_u32().encode(self);
(span_data.hi - enclosing.lo).to_u32().encode(self);
return;
}
let parent =
span_data.parent.map(|parent| self.tcx.source_span_untracked(parent).data_untracked());
if let Some(parent) = parent
&& parent.contains(span_data)
{
TAG_RELATIVE_SPAN.encode(self);
(span_data.lo.0.wrapping_sub(parent.lo.0)).encode(self);
(span_data.hi.0.wrapping_sub(parent.lo.0)).encode(self);
return;
}
let pos = self.source_map.byte_pos_to_line_and_col(span_data.lo);
let partial_span = match &pos {
Some((file_lo, _, _)) => !file_lo.contains(span_data.hi),
None => true,
let Some((file_lo, line_lo, col_lo)) =
self.source_map.byte_pos_to_line_and_col(span_data.lo)
else {
return TAG_PARTIAL_SPAN.encode(self);
};
if partial_span {
return TAG_PARTIAL_SPAN.encode(self);
if let Some(parent) = parent
&& file_lo.contains(parent.lo)
{
TAG_RELATIVE_SPAN.encode(self);
(span_data.lo.0.wrapping_sub(parent.lo.0)).encode(self);
(span_data.hi.0.wrapping_sub(parent.lo.0)).encode(self);
return;
}
let (file_lo, line_lo, col_lo) = pos.unwrap();
let len = span_data.hi - span_data.lo;
let source_file_index = self.source_file_index(file_lo);
TAG_FULL_SPAN.encode(self);

View file

@ -643,10 +643,26 @@ pub struct FieldPat<'tcx> {
pub pattern: Pat<'tcx>,
}
/// Additional per-node data that is not present on most THIR pattern nodes.
#[derive(Clone, Debug, Default, HashStable, TypeVisitable)]
pub struct PatExtra<'tcx> {
/// If present, this node represents a named constant that was lowered to
/// a pattern using `const_to_pat`.
///
/// This is used by some diagnostics for non-exhaustive matches, to map
/// the pattern node back to the `DefId` of its original constant.
pub expanded_const: Option<DefId>,
/// User-written types that must be preserved into MIR so that they can be
/// checked.
pub ascriptions: Vec<Ascription<'tcx>>,
}
#[derive(Clone, Debug, HashStable, TypeVisitable)]
pub struct Pat<'tcx> {
pub ty: Ty<'tcx>,
pub span: Span,
pub extra: Option<Box<PatExtra<'tcx>>>,
pub kind: PatKind<'tcx>,
}
@ -762,11 +778,6 @@ pub enum PatKind<'tcx> {
/// A wildcard pattern: `_`.
Wild,
AscribeUserType {
ascription: Ascription<'tcx>,
subpattern: Box<Pat<'tcx>>,
},
/// `x`, `ref x`, `x @ P`, etc.
Binding {
name: Symbol,
@ -831,21 +842,6 @@ pub enum PatKind<'tcx> {
value: ty::Value<'tcx>,
},
/// Wrapper node representing a named constant that was lowered to a pattern
/// using `const_to_pat`.
///
/// This is used by some diagnostics for non-exhaustive matches, to map
/// the pattern node back to the `DefId` of its original constant.
///
/// FIXME(#150498): Can we make this an `Option<DefId>` field on `Pat`
/// instead, so that non-diagnostic code can ignore it more easily?
ExpandedConstant {
/// [DefId] of the constant item.
def_id: DefId,
/// The pattern that the constant lowered to.
subpattern: Box<Pat<'tcx>>,
},
Range(Arc<PatRange<'tcx>>),
/// Matches against a slice, checking the length and extracting elements.
@ -1119,7 +1115,7 @@ mod size_asserts {
static_assert_size!(Block, 48);
static_assert_size!(Expr<'_>, 64);
static_assert_size!(ExprKind<'_>, 40);
static_assert_size!(Pat<'_>, 64);
static_assert_size!(Pat<'_>, 72);
static_assert_size!(PatKind<'_>, 48);
static_assert_size!(Stmt<'_>, 48);
static_assert_size!(StmtKind<'_>, 48);

View file

@ -259,7 +259,7 @@ pub(crate) fn for_each_immediate_subpat<'a, 'tcx>(
pat: &'a Pat<'tcx>,
mut callback: impl FnMut(&'a Pat<'tcx>),
) {
let Pat { kind, ty: _, span: _ } = pat;
let Pat { kind, ty: _, span: _, extra: _ } = pat;
match kind {
PatKind::Missing
| PatKind::Wild
@ -269,11 +269,9 @@ pub(crate) fn for_each_immediate_subpat<'a, 'tcx>(
| PatKind::Never
| PatKind::Error(_) => {}
PatKind::AscribeUserType { subpattern, .. }
| PatKind::Binding { subpattern: Some(subpattern), .. }
PatKind::Binding { subpattern: Some(subpattern), .. }
| PatKind::Deref { subpattern }
| PatKind::DerefPattern { subpattern, .. }
| PatKind::ExpandedConstant { subpattern, .. } => callback(subpattern),
| PatKind::DerefPattern { subpattern, .. } => callback(subpattern),
PatKind::Variant { subpatterns, .. } | PatKind::Leaf { subpatterns } => {
for field_pat in subpatterns {

View file

@ -191,8 +191,7 @@ impl<'tcx> Value<'tcx> {
}
}
/// Destructures array, ADT or tuple constants into the constants
/// of their fields.
/// Destructures ADT constants into the constants of their fields.
pub fn destructure_adt_const(&self) -> ty::DestructuredAdtConst<'tcx> {
let fields = self.to_branch();

View file

@ -1891,6 +1891,12 @@ impl<'tcx> TyCtxt<'tcx> {
self.is_lang_item(self.parent(def_id), LangItem::AsyncDropInPlace)
}
/// Check if the given `def_id` is a const with the `#[type_const]` attribute.
pub fn is_type_const(self, def_id: DefId) -> bool {
matches!(self.def_kind(def_id), DefKind::Const | DefKind::AssocConst)
&& find_attr!(self.get_all_attrs(def_id), AttributeKind::TypeConst(_))
}
/// Returns the movability of the coroutine of `def_id`, or panics
/// if given a `def_id` that is not a coroutine.
pub fn coroutine_movability(self, def_id: DefId) -> hir::Movability {

View file

@ -586,11 +586,18 @@ impl<'tcx> TypeSuperFoldable<TyCtxt<'tcx>> for ty::Predicate<'tcx> {
self,
folder: &mut F,
) -> Result<Self, F::Error> {
// This method looks different to `Ty::try_super_fold_with` and `Const::super_fold_with`.
// Why is that? `PredicateKind` provides little scope for optimized folding, unlike
// `TyKind` and `ConstKind` (which have common variants that don't require recursive
// `fold_with` calls on their fields). So we just derive the `TypeFoldable` impl for
// `PredicateKind` and call it here because the derived code is as fast as hand-written
// code would be.
let new = self.kind().try_fold_with(folder)?;
Ok(folder.cx().reuse_or_mk_predicate(self, new))
}
fn super_fold_with<F: TypeFolder<TyCtxt<'tcx>>>(self, folder: &mut F) -> Self {
// See comment in `Predicate::try_super_fold_with`.
let new = self.kind().fold_with(folder);
folder.cx().reuse_or_mk_predicate(self, new)
}
@ -598,6 +605,7 @@ impl<'tcx> TypeSuperFoldable<TyCtxt<'tcx>> for ty::Predicate<'tcx> {
impl<'tcx> TypeSuperVisitable<TyCtxt<'tcx>> for ty::Predicate<'tcx> {
fn super_visit_with<V: TypeVisitor<TyCtxt<'tcx>>>(&self, visitor: &mut V) -> V::Result {
// See comment in `Predicate::try_super_fold_with`.
self.kind().visit_with(visitor)
}
}

View file

@ -2,11 +2,11 @@ use std::iter;
use rustc_data_structures::fx::FxIndexMap;
use rustc_errors::ErrorGuaranteed;
use rustc_hir as hir;
use rustc_hir::attrs::AttributeKind;
use rustc_hir::def::DefKind;
use rustc_hir::def_id::{DefId, LOCAL_CRATE};
use rustc_hir::{self as hir, find_attr};
use rustc_macros::{Decodable, Encodable, HashStable};
use rustc_span::symbol::sym;
use tracing::debug;
use crate::query::LocalCrate;
@ -241,7 +241,7 @@ pub(super) fn trait_impls_of_provider(tcx: TyCtxt<'_>, trait_id: DefId) -> Trait
/// Query provider for `incoherent_impls`.
pub(super) fn incoherent_impls_provider(tcx: TyCtxt<'_>, simp: SimplifiedType) -> &[DefId] {
if let Some(def_id) = simp.def()
&& !tcx.has_attr(def_id, sym::rustc_has_incoherent_inherent_impls)
&& !find_attr!(tcx.get_all_attrs(def_id), AttributeKind::RustcHasIncoherentInherentImpls)
{
return &[];
}

View file

@ -287,22 +287,14 @@ impl<'a, 'tcx> ParseCtxt<'a, 'tcx> {
self.parse_var(pattern)
}
fn parse_var(&mut self, mut pat: &Pat<'tcx>) -> PResult<(LocalVarId, Ty<'tcx>, Span)> {
// Make sure we throw out any `AscribeUserType` we find
loop {
match &pat.kind {
PatKind::Binding { var, ty, .. } => break Ok((*var, *ty, pat.span)),
PatKind::AscribeUserType { subpattern, .. } => {
pat = subpattern;
}
_ => {
break Err(ParseError {
span: pat.span,
item_description: format!("{:?}", pat.kind),
expected: "local".to_string(),
});
}
}
fn parse_var(&mut self, pat: &Pat<'tcx>) -> PResult<(LocalVarId, Ty<'tcx>, Span)> {
match &pat.kind {
PatKind::Binding { var, ty, .. } => Ok((*var, *ty, pat.span)),
_ => Err(ParseError {
span: pat.span,
item_description: format!("{:?}", pat.kind),
expected: "local".to_string(),
}),
}
}

View file

@ -144,11 +144,6 @@ impl<'a, 'tcx> ParseCtxt<'a, 'tcx> {
let arm = &self.thir[*arm];
let value = match arm.pattern.kind {
PatKind::Constant { value } => value,
PatKind::ExpandedConstant { ref subpattern, def_id: _ }
if let PatKind::Constant { value } = subpattern.kind =>
{
value
}
_ => {
return Err(ParseError {
span: arm.pattern.span,

View file

@ -1,7 +1,7 @@
//! See docs in build/expr/mod.rs
use rustc_abi::Size;
use rustc_ast as ast;
use rustc_ast::{self as ast};
use rustc_hir::LangItem;
use rustc_middle::mir::interpret::{CTFE_ALLOC_SALT, LitToConstInput, Scalar};
use rustc_middle::mir::*;
@ -47,6 +47,7 @@ pub(crate) fn as_constant_inner<'tcx>(
tcx: TyCtxt<'tcx>,
) -> ConstOperand<'tcx> {
let Expr { ty, temp_scope_id: _, span, ref kind } = *expr;
match *kind {
ExprKind::Literal { lit, neg } => {
let const_ = lit_to_mir_constant(tcx, LitToConstInput { lit: lit.node, ty, neg });
@ -69,6 +70,13 @@ pub(crate) fn as_constant_inner<'tcx>(
}
ExprKind::NamedConst { def_id, args, ref user_ty } => {
let user_ty = user_ty.as_ref().and_then(push_cuta);
if tcx.is_type_const(def_id) {
let uneval = ty::UnevaluatedConst::new(def_id, args);
let ct = ty::Const::new_unevaluated(tcx, uneval);
let const_ = Const::Ty(ty, ct);
return ConstOperand { span, user_ty, const_ };
}
let uneval = mir::UnevaluatedConst::new(def_id, args);
let const_ = Const::Unevaluated(uneval, ty);

View file

@ -133,6 +133,20 @@ impl<'tcx> MatchPairTree<'tcx> {
}
let place = place_builder.try_to_place(cx);
// Apply any type ascriptions to the value at `match_pair.place`.
if let Some(place) = place
&& let Some(extra) = &pattern.extra
{
for &Ascription { ref annotation, variance } in &extra.ascriptions {
extra_data.ascriptions.push(super::Ascription {
source: place,
annotation: annotation.clone(),
variance,
});
}
}
let mut subpairs = Vec::new();
let testable_case = match pattern.kind {
PatKind::Missing | PatKind::Wild | PatKind::Error(_) => None,
@ -195,28 +209,6 @@ impl<'tcx> MatchPairTree<'tcx> {
Some(TestableCase::Constant { value, kind: const_kind })
}
PatKind::AscribeUserType {
ascription: Ascription { ref annotation, variance },
ref subpattern,
..
} => {
MatchPairTree::for_pattern(
place_builder,
subpattern,
cx,
&mut subpairs,
extra_data,
);
// Apply the type ascription to the value at `match_pair.place`
if let Some(source) = place {
let annotation = annotation.clone();
extra_data.ascriptions.push(super::Ascription { source, annotation, variance });
}
None
}
PatKind::Binding { mode, var, is_shorthand, ref subpattern, .. } => {
// In order to please the borrow checker, when lowering a pattern
// like `x @ subpat` we must establish any bindings in `subpat`
@ -263,11 +255,6 @@ impl<'tcx> MatchPairTree<'tcx> {
None
}
PatKind::ExpandedConstant { subpattern: ref pattern, .. } => {
MatchPairTree::for_pattern(place_builder, pattern, cx, &mut subpairs, extra_data);
None
}
PatKind::Array { ref prefix, ref slice, ref suffix } => {
cx.prefix_slice_suffix(
&mut subpairs,

View file

@ -576,7 +576,8 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
initializer_id: ExprId,
) -> BlockAnd<()> {
match irrefutable_pat.kind {
// Optimize the case of `let x = ...` to write directly into `x`
// Optimize `let x = ...` and `let x: T = ...` to write directly into `x`,
// and then require that `T == typeof(x)` if present.
PatKind::Binding { mode: BindingMode(ByRef::No, _), var, subpattern: None, .. } => {
let place = self.storage_live_binding(
block,
@ -592,43 +593,14 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
let source_info = self.source_info(irrefutable_pat.span);
self.cfg.push_fake_read(block, source_info, FakeReadCause::ForLet(None), place);
self.schedule_drop_for_binding(var, irrefutable_pat.span, OutsideGuard);
block.unit()
}
let ascriptions: &[_] =
try { irrefutable_pat.extra.as_deref()?.ascriptions.as_slice() }
.unwrap_or_default();
for thir::Ascription { annotation, variance: _ } in ascriptions {
let ty_source_info = self.source_info(annotation.span);
// Optimize the case of `let x: T = ...` to write directly
// into `x` and then require that `T == typeof(x)`.
PatKind::AscribeUserType {
ref subpattern,
ascription: thir::Ascription { ref annotation, variance: _ },
} if let PatKind::Binding {
mode: BindingMode(ByRef::No, _),
var,
subpattern: None,
..
} = subpattern.kind =>
{
let place = self.storage_live_binding(
block,
var,
irrefutable_pat.span,
false,
OutsideGuard,
ScheduleDrops::Yes,
);
block = self.expr_into_dest(place, block, initializer_id).into_block();
// Inject a fake read, see comments on `FakeReadCause::ForLet`.
let pattern_source_info = self.source_info(irrefutable_pat.span);
let cause_let = FakeReadCause::ForLet(None);
self.cfg.push_fake_read(block, pattern_source_info, cause_let, place);
let ty_source_info = self.source_info(annotation.span);
let base = self.canonical_user_type_annotations.push(annotation.clone());
self.cfg.push(
block,
Statement::new(
let base = self.canonical_user_type_annotations.push(annotation.clone());
let stmt = Statement::new(
ty_source_info,
StatementKind::AscribeUserType(
Box::new((place, UserTypeProjection { base, projs: Vec::new() })),
@ -648,8 +620,9 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
// `<expr>`.
ty::Invariant,
),
),
);
);
self.cfg.push(block, stmt);
}
self.schedule_drop_for_binding(var, irrefutable_pat.span, OutsideGuard);
block.unit()
@ -879,6 +852,26 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
&ProjectedUserTypesNode<'_>,
),
) {
// Ascriptions correspond to user-written types like `let A::<'a>(_): A<'static> = ...;`.
//
// Caution: Pushing user types here is load-bearing even for
// patterns containing no bindings, to ensure that the type ends
// up represented in MIR _somewhere_.
let user_tys = match pattern.extra.as_deref() {
Some(PatExtra { ascriptions, .. }) if !ascriptions.is_empty() => {
let base_user_tys = ascriptions
.iter()
.map(|thir::Ascription { annotation, variance: _ }| {
// Note that the variance doesn't apply here, as we are tracking the effect
// of user types on any bindings contained with subpattern.
self.canonical_user_type_annotations.push(annotation.clone())
})
.collect();
&user_tys.push_user_types(base_user_tys)
}
_ => user_tys,
};
// Avoid having to write the full method name at each recursive call.
let visit_subpat = |this: &mut Self, subpat, user_tys: &_, f: &mut _| {
this.visit_primary_bindings_special(subpat, user_tys, f)
@ -924,31 +917,6 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
visit_subpat(self, subpattern, &ProjectedUserTypesNode::None, f);
}
PatKind::AscribeUserType {
ref subpattern,
ascription: thir::Ascription { ref annotation, variance: _ },
} => {
// This corresponds to something like
//
// ```
// let A::<'a>(_): A<'static> = ...;
// ```
//
// Note that the variance doesn't apply here, as we are tracking the effect
// of `user_ty` on any bindings contained with subpattern.
// Caution: Pushing this user type here is load-bearing even for
// patterns containing no bindings, to ensure that the type ends
// up represented in MIR _somewhere_.
let base_user_ty = self.canonical_user_type_annotations.push(annotation.clone());
let subpattern_user_tys = user_tys.push_user_type(base_user_ty);
visit_subpat(self, subpattern, &subpattern_user_tys, f)
}
PatKind::ExpandedConstant { ref subpattern, .. } => {
visit_subpat(self, subpattern, user_tys, f)
}
PatKind::Leaf { ref subpatterns } => {
for subpattern in subpatterns {
let subpattern_user_tys = user_tys.leaf(subpattern.field);

View file

@ -8,15 +8,20 @@ use std::assert_matches::assert_matches;
use std::iter;
use rustc_abi::{FieldIdx, VariantIdx};
use rustc_data_structures::smallvec::SmallVec;
use rustc_middle::mir::{ProjectionElem, UserTypeProjection, UserTypeProjections};
use rustc_middle::ty::{AdtDef, UserTypeAnnotationIndex};
use rustc_span::Symbol;
/// A single `thir::Pat` node should almost never have more than 0-2 user types.
/// We can store up to 4 inline in the same size as an ordinary `Vec`.
pub(crate) type UserTypeIndices = SmallVec<[UserTypeAnnotationIndex; 4]>;
/// One of a list of "operations" that can be used to lazily build projections
/// of user-specified types.
#[derive(Clone, Debug)]
#[derive(Debug)]
pub(crate) enum ProjectedUserTypesOp {
PushUserType { base: UserTypeAnnotationIndex },
PushUserTypes { base_types: UserTypeIndices },
Index,
Subslice { from: u64, to: u64 },
@ -32,9 +37,10 @@ pub(crate) enum ProjectedUserTypesNode<'a> {
}
impl<'a> ProjectedUserTypesNode<'a> {
pub(crate) fn push_user_type(&'a self, base: UserTypeAnnotationIndex) -> Self {
// Pushing a base user type always causes the chain to become non-empty.
Self::Chain { parent: self, op: ProjectedUserTypesOp::PushUserType { base } }
pub(crate) fn push_user_types(&'a self, base_types: UserTypeIndices) -> Self {
assert!(!base_types.is_empty());
// Pushing one or more base user types always causes the chain to become non-empty.
Self::Chain { parent: self, op: ProjectedUserTypesOp::PushUserTypes { base_types } }
}
/// Push another projection op onto the chain, but only if it is already non-empty.
@ -94,16 +100,19 @@ impl<'a> ProjectedUserTypesNode<'a> {
return None;
}
let ops_reversed = self.iter_ops_reversed().cloned().collect::<Vec<_>>();
let ops_reversed = self.iter_ops_reversed().collect::<Vec<_>>();
// The "first" op should always be `PushUserType`.
// Other projections are only added if there is at least one user type.
assert_matches!(ops_reversed.last(), Some(ProjectedUserTypesOp::PushUserType { .. }));
assert_matches!(ops_reversed.last(), Some(ProjectedUserTypesOp::PushUserTypes { .. }));
let mut projections = vec![];
for op in ops_reversed.into_iter().rev() {
match op {
ProjectedUserTypesOp::PushUserType { base } => {
projections.push(UserTypeProjection { base, projs: vec![] })
match *op {
ProjectedUserTypesOp::PushUserTypes { ref base_types } => {
assert!(!base_types.is_empty());
for &base in base_types {
projections.push(UserTypeProjection { base, projs: vec![] })
}
}
ProjectedUserTypesOp::Index => {

View file

@ -342,8 +342,6 @@ impl<'a, 'tcx> Visitor<'a, 'tcx> for UnsafetyVisitor<'a, 'tcx> {
PatKind::Wild |
// these just wrap other patterns, which we recurse on below.
PatKind::Or { .. } |
PatKind::ExpandedConstant { .. } |
PatKind::AscribeUserType { .. } |
PatKind::Error(_) => {}
}
};

View file

@ -680,20 +680,13 @@ impl<'p, 'tcx> MatchVisitor<'p, 'tcx> {
let mut interpreted_as_const = None;
let mut interpreted_as_const_sugg = None;
// These next few matches want to peek through `AscribeUserType` to see
// the underlying pattern.
let mut unpeeled_pat = pat;
while let PatKind::AscribeUserType { ref subpattern, .. } = unpeeled_pat.kind {
unpeeled_pat = subpattern;
}
if let Some(def_id) = is_const_pat_that_looks_like_binding(self.tcx, unpeeled_pat) {
if let Some(def_id) = is_const_pat_that_looks_like_binding(self.tcx, pat) {
let span = self.tcx.def_span(def_id);
let variable = self.tcx.item_name(def_id).to_string();
// When we encounter a constant as the binding name, point at the `const` definition.
interpreted_as_const = Some(InterpretedAsConst { span, variable: variable.clone() });
interpreted_as_const_sugg = Some(InterpretedAsConstSugg { span: pat.span, variable });
} else if let PatKind::Constant { .. } = unpeeled_pat.kind
} else if let PatKind::Constant { .. } = pat.kind
&& let Ok(snippet) = self.tcx.sess.source_map().span_to_snippet(pat.span)
{
// If the pattern to match is an integer literal:
@ -1213,7 +1206,7 @@ fn is_const_pat_that_looks_like_binding<'tcx>(tcx: TyCtxt<'tcx>, pat: &Pat<'tcx>
// The pattern must be a named constant, and the name that appears in
// the pattern's source text must resemble a plain identifier without any
// `::` namespace separators or other non-identifier characters.
if let PatKind::ExpandedConstant { def_id, .. } = pat.kind
if let Some(def_id) = try { pat.extra.as_deref()?.expanded_const? }
&& matches!(tcx.def_kind(def_id), DefKind::Const)
&& let Ok(snippet) = tcx.sess.source_map().span_to_snippet(pat.span)
&& snippet.chars().all(|c| c.is_alphanumeric() || c == '_')

View file

@ -90,7 +90,7 @@ impl<'tcx> ConstToPat<'tcx> {
);
}
}
Box::new(Pat { span: self.span, ty, kind: PatKind::Error(err.emit()) })
Box::new(Pat { span: self.span, ty, kind: PatKind::Error(err.emit()), extra: None })
}
fn unevaluated_to_pat(
@ -174,10 +174,10 @@ impl<'tcx> ConstToPat<'tcx> {
}
};
// Convert the valtree to a const.
let inlined_const_as_pat = self.valtree_to_pat(valtree, ty);
// Lower the valtree to a THIR pattern.
let mut thir_pat = self.valtree_to_pat(valtree, ty);
if !inlined_const_as_pat.references_error() {
if !thir_pat.references_error() {
// Always check for `PartialEq` if we had no other errors yet.
if !type_has_partial_eq_impl(self.tcx, typing_env, ty).has_impl {
let mut err = self.tcx.dcx().create_err(TypeNotPartialEq { span: self.span, ty });
@ -186,10 +186,10 @@ impl<'tcx> ConstToPat<'tcx> {
}
}
// Wrap the pattern in a marker node to indicate that it is the result of lowering a
// Mark the pattern to indicate that it is the result of lowering a named
// constant. This is used for diagnostics.
let kind = PatKind::ExpandedConstant { subpattern: inlined_const_as_pat, def_id: uv.def };
Box::new(Pat { kind, ty, span: self.span })
thir_pat.extra.get_or_insert_default().expanded_const = Some(uv.def);
thir_pat
}
fn field_pats(
@ -351,7 +351,7 @@ impl<'tcx> ConstToPat<'tcx> {
}
};
Box::new(Pat { span, ty, kind })
Box::new(Pat { span, ty, kind, extra: None })
}
}

View file

@ -71,14 +71,11 @@ pub(super) fn pat_from_hir<'tcx>(
span: let_stmt_type.span,
inferred_ty: typeck_results.node_type(let_stmt_type.hir_id),
};
thir_pat = Box::new(Pat {
ty: thir_pat.ty,
span: thir_pat.span,
kind: PatKind::AscribeUserType {
ascription: Ascription { annotation, variance: ty::Covariant },
subpattern: thir_pat,
},
});
thir_pat
.extra
.get_or_insert_default()
.ascriptions
.push(Ascription { annotation, variance: ty::Covariant });
}
if let Some(m) = pcx.rust_2024_migration {
@ -142,7 +139,7 @@ impl<'tcx> PatCtxt<'tcx> {
}
PatAdjust::PinDeref => PatKind::Deref { subpattern: thir_pat },
};
Box::new(Pat { span, ty: adjust.source, kind })
Box::new(Pat { span, ty: adjust.source, kind, extra: None })
});
if let Some(s) = &mut self.rust_2024_migration
@ -167,25 +164,14 @@ impl<'tcx> PatCtxt<'tcx> {
// Return None in that case; the caller will use NegInfinity or PosInfinity instead.
let Some(expr) = expr else { return Ok(None) };
// Lower the endpoint into a temporary `PatKind` that will then be
// Lower the endpoint into a temporary `thir::Pat` that will then be
// deconstructed to obtain the constant value and other data.
let mut kind: PatKind<'tcx> = self.lower_pat_expr(pat, expr);
let endpoint_pat: Box<Pat<'tcx>> = self.lower_pat_expr(pat, expr);
let box Pat { ref kind, extra, .. } = endpoint_pat;
// Unpeel any ascription or inline-const wrapper nodes.
loop {
match kind {
PatKind::AscribeUserType { ascription, subpattern } => {
ascriptions.push(ascription);
kind = subpattern.kind;
}
PatKind::ExpandedConstant { def_id: _, subpattern } => {
// Expanded-constant nodes are currently only needed by
// diagnostics that don't apply to range patterns, so we
// can just discard them here.
kind = subpattern.kind;
}
_ => break,
}
// Preserve any ascriptions from endpoint constants.
if let Some(extra) = extra {
ascriptions.extend(extra.ascriptions);
}
// The unpeeled kind should now be a constant, giving us the endpoint value.
@ -250,7 +236,7 @@ impl<'tcx> PatCtxt<'tcx> {
lo_expr: Option<&'tcx hir::PatExpr<'tcx>>,
hi_expr: Option<&'tcx hir::PatExpr<'tcx>>,
end: RangeEnd,
) -> Result<PatKind<'tcx>, ErrorGuaranteed> {
) -> Result<Box<Pat<'tcx>>, ErrorGuaranteed> {
let ty = self.typeck_results.node_type(pat.hir_id);
let span = pat.span;
@ -306,27 +292,28 @@ impl<'tcx> PatCtxt<'tcx> {
return Err(e);
}
}
let mut thir_pat = Box::new(Pat { ty, span, kind, extra: None });
// If we are handling a range with associated constants (e.g.
// `Foo::<'a>::A..=Foo::B`), we need to put the ascriptions for the associated
// constants somewhere. Have them on the range pattern.
for ascription in ascriptions {
let subpattern = Box::new(Pat { span, ty, kind });
kind = PatKind::AscribeUserType { ascription, subpattern };
}
// `PatKind::ExpandedConstant` wrappers from range endpoints used to
thir_pat.extra.get_or_insert_default().ascriptions.extend(ascriptions);
// IDs of expanded constants from range endpoints used to
// also be preserved here, but that was only needed for unsafeck of
// inline `const { .. }` patterns, which were removed by
// <https://github.com/rust-lang/rust/pull/138492>.
Ok(kind)
Ok(thir_pat)
}
#[instrument(skip(self), level = "debug")]
fn lower_pattern_unadjusted(&mut self, pat: &'tcx hir::Pat<'tcx>) -> Box<Pat<'tcx>> {
let mut ty = self.typeck_results.node_type(pat.hir_id);
let mut span = pat.span;
let ty = self.typeck_results.node_type(pat.hir_id);
let span = pat.span;
// Some of these match arms return a `Box<Pat>` early, while others
// evaluate to a `PatKind` that will become a `Box<Pat>` at the end of
// this function.
let kind = match pat.kind {
hir::PatKind::Missing => PatKind::Missing,
@ -334,10 +321,13 @@ impl<'tcx> PatCtxt<'tcx> {
hir::PatKind::Never => PatKind::Never,
hir::PatKind::Expr(value) => self.lower_pat_expr(pat, value),
hir::PatKind::Expr(value) => return self.lower_pat_expr(pat, value),
hir::PatKind::Range(lo_expr, hi_expr, end) => {
self.lower_pattern_range(pat, lo_expr, hi_expr, end).unwrap_or_else(PatKind::Error)
match self.lower_pattern_range(pat, lo_expr, hi_expr, end) {
Ok(thir_pat) => return thir_pat,
Err(e) => PatKind::Error(e),
}
}
hir::PatKind::Deref(subpattern) => {
@ -360,7 +350,7 @@ impl<'tcx> PatCtxt<'tcx> {
},
hir::PatKind::Slice(prefix, slice, suffix) => {
self.slice_or_array_pattern(pat, prefix, slice, suffix)
return self.slice_or_array_pattern(pat, prefix, slice, suffix);
}
hir::PatKind::Tuple(pats, ddpos) => {
@ -372,8 +362,9 @@ impl<'tcx> PatCtxt<'tcx> {
}
hir::PatKind::Binding(explicit_ba, id, ident, sub) => {
let mut thir_pat_span = span;
if let Some(ident_span) = ident.span.find_ancestor_inside(span) {
span = span.with_hi(ident_span.hi());
thir_pat_span = span.with_hi(ident_span.hi());
}
let mode = *self
@ -389,22 +380,23 @@ impl<'tcx> PatCtxt<'tcx> {
// A ref x pattern is the same node used for x, and as such it has
// x's type, which is &T, where we want T (the type being matched).
let var_ty = ty;
let mut thir_pat_ty = ty;
if let hir::ByRef::Yes(pinnedness, _) = mode.0 {
match pinnedness {
hir::Pinnedness::Pinned
if let Some(pty) = ty.pinned_ty()
&& let &ty::Ref(_, rty, _) = pty.kind() =>
{
ty = rty;
thir_pat_ty = rty;
}
hir::Pinnedness::Not if let &ty::Ref(_, rty, _) = ty.kind() => {
ty = rty;
thir_pat_ty = rty;
}
_ => bug!("`ref {}` has wrong type {}", ident, ty),
}
};
PatKind::Binding {
let kind = PatKind::Binding {
mode,
name: ident.name,
var: LocalVarId(id),
@ -412,7 +404,10 @@ impl<'tcx> PatCtxt<'tcx> {
subpattern: self.lower_opt_pattern(sub),
is_primary: id == pat.hir_id,
is_shorthand: false,
}
};
// We might have modified the type or span, so use the modified
// values in the THIR pattern node.
return Box::new(Pat { ty: thir_pat_ty, span: thir_pat_span, kind, extra: None });
}
hir::PatKind::TupleStruct(ref qpath, pats, ddpos) => {
@ -422,7 +417,7 @@ impl<'tcx> PatCtxt<'tcx> {
};
let variant_def = adt_def.variant_of_res(res);
let subpatterns = self.lower_tuple_subpats(pats, variant_def.fields.len(), ddpos);
self.lower_variant_or_leaf(pat, None, res, subpatterns)
return self.lower_variant_or_leaf(pat, None, res, subpatterns);
}
hir::PatKind::Struct(ref qpath, fields, _) => {
@ -439,7 +434,7 @@ impl<'tcx> PatCtxt<'tcx> {
})
.collect();
self.lower_variant_or_leaf(pat, None, res, subpatterns)
return self.lower_variant_or_leaf(pat, None, res, subpatterns);
}
hir::PatKind::Or(pats) => PatKind::Or { pats: self.lower_patterns(pats) },
@ -450,7 +445,9 @@ impl<'tcx> PatCtxt<'tcx> {
hir::PatKind::Err(guar) => PatKind::Error(guar),
};
Box::new(Pat { span, ty, kind })
// For pattern kinds that haven't already returned, create a `thir::Pat`
// with the HIR pattern node's type and span.
Box::new(Pat { span, ty, kind, extra: None })
}
fn lower_tuple_subpats(
@ -482,13 +479,14 @@ impl<'tcx> PatCtxt<'tcx> {
prefix: &'tcx [hir::Pat<'tcx>],
slice: Option<&'tcx hir::Pat<'tcx>>,
suffix: &'tcx [hir::Pat<'tcx>],
) -> PatKind<'tcx> {
) -> Box<Pat<'tcx>> {
let ty = self.typeck_results.node_type(pat.hir_id);
let span = pat.span;
let prefix = self.lower_patterns(prefix);
let slice = self.lower_opt_pattern(slice);
let suffix = self.lower_patterns(suffix);
match ty.kind() {
let kind = match ty.kind() {
// Matching a slice, `[T]`.
ty::Slice(..) => PatKind::Slice { prefix, slice, suffix },
// Fixed-length array, `[T; len]`.
@ -499,8 +497,9 @@ impl<'tcx> PatCtxt<'tcx> {
assert!(len >= prefix.len() as u64 + suffix.len() as u64);
PatKind::Array { prefix, slice, suffix }
}
_ => span_bug!(pat.span, "bad slice pattern type {ty:?}"),
}
_ => span_bug!(span, "bad slice pattern type {ty:?}"),
};
Box::new(Pat { ty, span, kind, extra: None })
}
fn lower_variant_or_leaf(
@ -509,7 +508,7 @@ impl<'tcx> PatCtxt<'tcx> {
expr: Option<&'tcx hir::PatExpr<'tcx>>,
res: Res,
subpatterns: Vec<FieldPat<'tcx>>,
) -> PatKind<'tcx> {
) -> Box<Pat<'tcx>> {
// Check whether the caller should have provided an `expr` for this pattern kind.
assert_matches!(
(pat.kind, expr),
@ -533,7 +532,7 @@ impl<'tcx> PatCtxt<'tcx> {
res => res,
};
let mut kind = match res {
let kind = match res {
Res::Def(DefKind::Variant, variant_id) => {
let enum_id = self.tcx.parent(variant_id);
let adt_def = self.tcx.adt_def(enum_id);
@ -542,7 +541,12 @@ impl<'tcx> PatCtxt<'tcx> {
ty::Adt(_, args) | ty::FnDef(_, args) => args,
ty::Error(e) => {
// Avoid ICE (#50585)
return PatKind::Error(*e);
return Box::new(Pat {
ty,
span,
kind: PatKind::Error(*e),
extra: None,
});
}
_ => bug!("inappropriate type for def: {:?}", ty),
};
@ -583,21 +587,23 @@ impl<'tcx> PatCtxt<'tcx> {
PatKind::Error(e)
}
};
let mut thir_pat = Box::new(Pat { ty, span, kind, extra: None });
if let Some(user_ty) = self.user_args_applied_to_ty_of_hir_id(hir_id) {
debug!("lower_variant_or_leaf: kind={:?} user_ty={:?} span={:?}", kind, user_ty, span);
debug!(?thir_pat, ?user_ty, ?span, "lower_variant_or_leaf: applying ascription");
let annotation = CanonicalUserTypeAnnotation {
user_ty: Box::new(user_ty),
span,
inferred_ty: self.typeck_results.node_type(hir_id),
};
kind = PatKind::AscribeUserType {
subpattern: Box::new(Pat { span, ty, kind }),
ascription: Ascription { annotation, variance: ty::Covariant },
};
thir_pat
.extra
.get_or_insert_default()
.ascriptions
.push(Ascription { annotation, variance: ty::Covariant });
}
kind
thir_pat
}
fn user_args_applied_to_ty_of_hir_id(
@ -632,8 +638,7 @@ impl<'tcx> PatCtxt<'tcx> {
_ => {
// The path isn't the name of a constant, so it must actually
// be a unit struct or unit variant (e.g. `Option::None`).
let kind = self.lower_variant_or_leaf(pat, Some(expr), res, vec![]);
return Box::new(Pat { span, ty, kind });
return self.lower_variant_or_leaf(pat, Some(expr), res, vec![]);
}
};
@ -652,16 +657,13 @@ impl<'tcx> PatCtxt<'tcx> {
span,
inferred_ty: self.typeck_results.node_type(id),
};
let kind = PatKind::AscribeUserType {
subpattern: pattern,
ascription: Ascription {
annotation,
// Note that we use `Contravariant` here. See the
// `variance` field documentation for details.
variance: ty::Contravariant,
},
};
pattern = Box::new(Pat { span, kind, ty });
// Note that we use `Contravariant` here. See the
// `variance` field documentation for details.
pattern
.extra
.get_or_insert_default()
.ascriptions
.push(Ascription { annotation, variance: ty::Contravariant });
}
pattern
@ -674,10 +676,10 @@ impl<'tcx> PatCtxt<'tcx> {
&mut self,
pat: &'tcx hir::Pat<'tcx>, // Pattern that directly contains `expr`
expr: &'tcx hir::PatExpr<'tcx>,
) -> PatKind<'tcx> {
) -> Box<Pat<'tcx>> {
assert_matches!(pat.kind, hir::PatKind::Expr(..) | hir::PatKind::Range(..));
match &expr.kind {
hir::PatExprKind::Path(qpath) => self.lower_path(pat, expr, qpath).kind,
hir::PatExprKind::Path(qpath) => self.lower_path(pat, expr, qpath),
hir::PatExprKind::Lit { lit, negated } => {
// We handle byte string literal patterns by using the pattern's type instead of the
// literal's type in `const_to_pat`: if the literal `b"..."` matches on a slice reference,
@ -691,7 +693,7 @@ impl<'tcx> PatCtxt<'tcx> {
let pat_ty = self.typeck_results.node_type(pat.hir_id);
let lit_input = LitToConstInput { lit: lit.node, ty: pat_ty, neg: *negated };
let constant = self.tcx.at(expr.span).lit_to_const(lit_input);
self.const_to_pat(constant, pat_ty, expr.hir_id, lit.span).kind
self.const_to_pat(constant, pat_ty, expr.hir_id, lit.span)
}
}
}

View file

@ -73,6 +73,24 @@ impl<'a, 'tcx> ThirPrinter<'a, 'tcx> {
self.fmt
}
fn print_list<T>(
&mut self,
label: &str,
list: &[T],
depth_lvl: usize,
print_fn: impl Fn(&mut Self, &T, usize),
) {
if list.is_empty() {
print_indented!(self, format_args!("{label}: []"), depth_lvl);
} else {
print_indented!(self, format_args!("{label}: ["), depth_lvl);
for item in list {
print_fn(self, item, depth_lvl + 1)
}
print_indented!(self, "]", depth_lvl);
}
}
fn print_param(&mut self, param: &Param<'tcx>, depth_lvl: usize) {
let Param { pat, ty, ty_span, self_kind, hir_id } = param;
@ -663,15 +681,37 @@ impl<'a, 'tcx> ThirPrinter<'a, 'tcx> {
}
fn print_pat(&mut self, pat: &Pat<'tcx>, depth_lvl: usize) {
let &Pat { ty, span, ref kind } = pat;
let &Pat { ty, span, ref kind, ref extra } = pat;
print_indented!(self, "Pat: {", depth_lvl);
print_indented!(self, format!("ty: {:?}", ty), depth_lvl + 1);
print_indented!(self, format!("span: {:?}", span), depth_lvl + 1);
self.print_pat_extra(extra.as_deref(), depth_lvl + 1);
self.print_pat_kind(kind, depth_lvl + 1);
print_indented!(self, "}", depth_lvl);
}
fn print_pat_extra(&mut self, extra: Option<&PatExtra<'tcx>>, depth_lvl: usize) {
let Some(extra) = extra else {
// Skip printing in the common case of a pattern node with no extra data.
return;
};
let PatExtra { expanded_const, ascriptions } = extra;
print_indented!(self, "extra: PatExtra {", depth_lvl);
print_indented!(self, format_args!("expanded_const: {expanded_const:?}"), depth_lvl + 1);
self.print_list(
"ascriptions",
ascriptions,
depth_lvl + 1,
|this, ascription, depth_lvl| {
print_indented!(this, format_args!("{ascription:?}"), depth_lvl);
},
);
print_indented!(self, "}", depth_lvl);
}
fn print_pat_kind(&mut self, pat_kind: &PatKind<'tcx>, depth_lvl: usize) {
print_indented!(self, "kind: PatKind {", depth_lvl);
@ -685,13 +725,6 @@ impl<'a, 'tcx> ThirPrinter<'a, 'tcx> {
PatKind::Never => {
print_indented!(self, "Never", depth_lvl + 1);
}
PatKind::AscribeUserType { ascription, subpattern } => {
print_indented!(self, "AscribeUserType: {", depth_lvl + 1);
print_indented!(self, format!("ascription: {:?}", ascription), depth_lvl + 2);
print_indented!(self, "subpattern: ", depth_lvl + 2);
self.print_pat(subpattern, depth_lvl + 3);
print_indented!(self, "}", depth_lvl + 1);
}
PatKind::Binding { name, mode, var, ty, subpattern, is_primary, is_shorthand } => {
print_indented!(self, "Binding {", depth_lvl + 1);
print_indented!(self, format!("name: {:?}", name), depth_lvl + 2);
@ -756,13 +789,6 @@ impl<'a, 'tcx> ThirPrinter<'a, 'tcx> {
print_indented!(self, format!("value: {}", value), depth_lvl + 2);
print_indented!(self, "}", depth_lvl + 1);
}
PatKind::ExpandedConstant { def_id, subpattern } => {
print_indented!(self, "ExpandedConstant {", depth_lvl + 1);
print_indented!(self, format!("def_id: {def_id:?}"), depth_lvl + 2);
print_indented!(self, "subpattern:", depth_lvl + 2);
self.print_pat(subpattern, depth_lvl + 2);
print_indented!(self, "}", depth_lvl + 1);
}
PatKind::Range(pat_range) => {
print_indented!(self, format!("Range ( {:?} )", pat_range), depth_lvl + 1);
}

View file

@ -177,8 +177,9 @@ pub struct MoveData<'tcx> {
pub rev_lookup: MovePathLookup<'tcx>,
pub inits: IndexVec<InitIndex, Init>,
/// Each Location `l` is mapped to the Inits that are effects
/// of executing the code at `l`.
pub init_loc_map: LocationMap<SmallVec<[InitIndex; 4]>>,
/// of executing the code at `l`. Only very rarely (e.g. inline asm)
/// is there more than one Init at any `l`.
pub init_loc_map: LocationMap<SmallVec<[InitIndex; 1]>>,
pub init_path_map: IndexVec<MovePathIndex, SmallVec<[InitIndex; 4]>>,
}

View file

@ -19,14 +19,14 @@ pub(crate) fn emit_inline_always_target_feature_diagnostic<'a, 'tcx>(
caller_def_id: DefId,
callee_only: &[&'a str],
) {
let callee = tcx.def_path_str(callee_def_id);
let caller = tcx.def_path_str(caller_def_id);
tcx.node_span_lint(
lint::builtin::INLINE_ALWAYS_MISMATCHING_TARGET_FEATURES,
tcx.local_def_id_to_hir_id(caller_def_id.as_local().unwrap()),
call_span,
|lint| {
let callee = tcx.def_path_str(callee_def_id);
let caller = tcx.def_path_str(caller_def_id);
lint.primary_message(format!(
"call to `#[inline(always)]`-annotated `{callee}` \
requires the same target features to be inlined"

View file

@ -10,6 +10,7 @@ rustc_data_structures = { path = "../rustc_data_structures" }
rustc_errors = { path = "../rustc_errors" }
rustc_fluent_macro = { path = "../rustc_fluent_macro" }
rustc_hir = { path = "../rustc_hir" }
rustc_index = { path = "../rustc_index" }
rustc_macros = { path = "../rustc_macros" }
rustc_middle = { path = "../rustc_middle" }
rustc_session = { path = "../rustc_session" }

View file

@ -75,4 +75,8 @@ monomorphize_recursion_limit =
monomorphize_start_not_found = using `fn main` requires the standard library
.help = use `#![no_main]` to bypass the Rust generated entrypoint and declare a platform specific entrypoint yourself, usually with `#[no_mangle]`
monomorphize_static_initializer_cyclic = static initializer forms a cycle involving `{$head}`
.label = part of this cycle
.note = cyclic static initializers are not supported for target `{$target}`
monomorphize_symbol_already_defined = symbol `{$symbol}` is already defined

View file

@ -267,7 +267,8 @@ pub(crate) struct UsageMap<'tcx> {
// Maps every mono item to the mono items used by it.
pub used_map: UnordMap<MonoItem<'tcx>, Vec<MonoItem<'tcx>>>,
// Maps every mono item to the mono items that use it.
// Maps each mono item with users to the mono items that use it.
// Be careful: subsets `used_map`, so unused items are vacant.
user_map: UnordMap<MonoItem<'tcx>, Vec<MonoItem<'tcx>>>,
}

View file

@ -117,3 +117,15 @@ pub(crate) struct AbiRequiredTargetFeature<'a> {
/// Whether this is a problem at a call site or at a declaration.
pub is_call: bool,
}
#[derive(Diagnostic)]
#[diag(monomorphize_static_initializer_cyclic)]
#[note]
pub(crate) struct StaticInitializerCyclic<'a> {
#[primary_span]
pub span: Span,
#[label]
pub labels: Vec<Span>,
pub head: &'a str,
pub target: &'a str,
}

View file

@ -0,0 +1,18 @@
//! Checks that need to operate on the entire mono item graph
use rustc_middle::mir::mono::MonoItem;
use rustc_middle::ty::TyCtxt;
use crate::collector::UsageMap;
use crate::graph_checks::statics::check_static_initializers_are_acyclic;
mod statics;
pub(super) fn target_specific_checks<'tcx, 'a, 'b>(
tcx: TyCtxt<'tcx>,
mono_items: &'a [MonoItem<'tcx>],
usage_map: &'b UsageMap<'tcx>,
) {
if tcx.sess.target.options.static_initializer_must_be_acyclic {
check_static_initializers_are_acyclic(tcx, mono_items, usage_map);
}
}

View file

@ -0,0 +1,115 @@
use rustc_data_structures::fx::FxIndexSet;
use rustc_data_structures::graph::scc::Sccs;
use rustc_data_structures::graph::{DirectedGraph, Successors};
use rustc_data_structures::unord::UnordMap;
use rustc_hir::def_id::DefId;
use rustc_index::{Idx, IndexVec, newtype_index};
use rustc_middle::mir::mono::MonoItem;
use rustc_middle::ty::TyCtxt;
use crate::collector::UsageMap;
use crate::errors;
#[derive(Clone, Copy, Debug, Eq, Hash, PartialEq)]
struct StaticNodeIdx(usize);
impl Idx for StaticNodeIdx {
fn new(idx: usize) -> Self {
Self(idx)
}
fn index(self) -> usize {
self.0
}
}
impl From<usize> for StaticNodeIdx {
fn from(value: usize) -> Self {
StaticNodeIdx(value)
}
}
newtype_index! {
#[derive(Ord, PartialOrd)]
struct StaticSccIdx {}
}
// Adjacency-list graph for statics using `StaticNodeIdx` as node type.
// We cannot use `DefId` as the node type directly because each node must be
// represented by an index in the range `0..num_nodes`.
struct StaticRefGraph<'a, 'b, 'tcx> {
// maps from `StaticNodeIdx` to `DefId` and vice versa
statics: &'a FxIndexSet<DefId>,
// contains for each `MonoItem` the `MonoItem`s it uses
used_map: &'b UnordMap<MonoItem<'tcx>, Vec<MonoItem<'tcx>>>,
}
impl<'a, 'b, 'tcx> DirectedGraph for StaticRefGraph<'a, 'b, 'tcx> {
type Node = StaticNodeIdx;
fn num_nodes(&self) -> usize {
self.statics.len()
}
}
impl<'a, 'b, 'tcx> Successors for StaticRefGraph<'a, 'b, 'tcx> {
fn successors(&self, node_idx: StaticNodeIdx) -> impl Iterator<Item = StaticNodeIdx> {
let def_id = self.statics[node_idx.index()];
self.used_map[&MonoItem::Static(def_id)].iter().filter_map(|&mono_item| match mono_item {
MonoItem::Static(def_id) => self.statics.get_index_of(&def_id).map(|idx| idx.into()),
_ => None,
})
}
}
pub(super) fn check_static_initializers_are_acyclic<'tcx, 'a, 'b>(
tcx: TyCtxt<'tcx>,
mono_items: &'a [MonoItem<'tcx>],
usage_map: &'b UsageMap<'tcx>,
) {
// Collect statics
let statics: FxIndexSet<DefId> = mono_items
.iter()
.filter_map(|&mono_item| match mono_item {
MonoItem::Static(def_id) => Some(def_id),
_ => None,
})
.collect();
// If we don't have any statics the check is not necessary
if statics.is_empty() {
return;
}
// Create a subgraph from the mono item graph, which only contains statics
let graph = StaticRefGraph { statics: &statics, used_map: &usage_map.used_map };
// Calculate its SCCs
let sccs: Sccs<StaticNodeIdx, StaticSccIdx> = Sccs::new(&graph);
// Group statics by SCCs
let mut nodes_of_sccs: IndexVec<StaticSccIdx, Vec<StaticNodeIdx>> =
IndexVec::from_elem_n(Vec::new(), sccs.num_sccs());
for i in graph.iter_nodes() {
nodes_of_sccs[sccs.scc(i)].push(i);
}
let is_cyclic = |nodes_of_scc: &[StaticNodeIdx]| -> bool {
match nodes_of_scc.len() {
0 => false,
1 => graph.successors(nodes_of_scc[0]).any(|x| x == nodes_of_scc[0]),
2.. => true,
}
};
// Emit errors for all cycles
for nodes in nodes_of_sccs.iter_mut().filter(|nodes| is_cyclic(nodes)) {
// We sort the nodes by their Span to have consistent error line numbers
nodes.sort_by_key(|node| tcx.def_span(statics[node.index()]));
let head_def = statics[nodes[0].index()];
let head_span = tcx.def_span(head_def);
tcx.dcx().emit_err(errors::StaticInitializerCyclic {
span: head_span,
labels: nodes.iter().map(|&n| tcx.def_span(statics[n.index()])).collect(),
head: &tcx.def_path_str(head_def),
target: &tcx.sess.target.llvm_target,
});
}
}

View file

@ -16,6 +16,7 @@ use rustc_span::ErrorGuaranteed;
mod collector;
mod errors;
mod graph_checks;
mod mono_checks;
mod partitioning;
mod util;

View file

@ -124,6 +124,7 @@ use tracing::debug;
use crate::collector::{self, MonoItemCollectionStrategy, UsageMap};
use crate::errors::{CouldntDumpMonoStats, SymbolAlreadyDefined};
use crate::graph_checks::target_specific_checks;
struct PartitioningCx<'a, 'tcx> {
tcx: TyCtxt<'tcx>,
@ -1135,6 +1136,8 @@ fn collect_and_partition_mono_items(tcx: TyCtxt<'_>, (): ()) -> MonoItemPartitio
};
let (items, usage_map) = collector::collect_crate_mono_items(tcx, collection_strategy);
// Perform checks that need to operate on the entire mono item graph
target_specific_checks(tcx, &items, &usage_map);
// If there was an error during collection (e.g. from one of the constants we evaluated),
// then we stop here. This way codegen does not have to worry about failing constants.

View file

@ -64,7 +64,7 @@ pub(super) struct Canonicalizer<'a, D: SolverDelegate<Interner = I>, I: Interner
canonicalize_mode: CanonicalizeMode,
// Mutable fields.
variables: &'a mut Vec<I::GenericArg>,
variables: Vec<I::GenericArg>,
var_kinds: Vec<CanonicalVarKind<I>>,
variable_lookup_table: HashMap<I::GenericArg, usize>,
/// Maps each `sub_unification_table_root_var` to the index of the first
@ -84,14 +84,13 @@ impl<'a, D: SolverDelegate<Interner = I>, I: Interner> Canonicalizer<'a, D, I> {
pub(super) fn canonicalize_response<T: TypeFoldable<I>>(
delegate: &'a D,
max_input_universe: ty::UniverseIndex,
variables: &'a mut Vec<I::GenericArg>,
value: T,
) -> ty::Canonical<I, T> {
let mut canonicalizer = Canonicalizer {
delegate,
canonicalize_mode: CanonicalizeMode::Response { max_input_universe },
variables,
variables: Vec::new(),
variable_lookup_table: Default::default(),
sub_root_lookup_table: Default::default(),
var_kinds: Vec::new(),
@ -106,16 +105,17 @@ impl<'a, D: SolverDelegate<Interner = I>, I: Interner> Canonicalizer<'a, D, I> {
};
debug_assert!(!value.has_infer(), "unexpected infer in {value:?}");
debug_assert!(!value.has_placeholders(), "unexpected placeholders in {value:?}");
let (max_universe, variables) = canonicalizer.finalize();
Canonical { max_universe, variables, value }
let (max_universe, _variables, var_kinds) = canonicalizer.finalize();
Canonical { max_universe, var_kinds, value }
}
fn canonicalize_param_env(
delegate: &'a D,
variables: &'a mut Vec<I::GenericArg>,
param_env: I::ParamEnv,
) -> (I::ParamEnv, HashMap<I::GenericArg, usize>, Vec<CanonicalVarKind<I>>) {
) -> (I::ParamEnv, Vec<I::GenericArg>, Vec<CanonicalVarKind<I>>, HashMap<I::GenericArg, usize>)
{
if !param_env.has_type_flags(NEEDS_CANONICAL) {
return (param_env, Default::default(), Vec::new());
return (param_env, Vec::new(), Vec::new(), Default::default());
}
// Check whether we can use the global cache for this param_env. As we only use
@ -129,12 +129,11 @@ impl<'a, D: SolverDelegate<Interner = I>, I: Interner> Canonicalizer<'a, D, I> {
delegate.cx().canonical_param_env_cache_get_or_insert(
param_env,
|| {
let mut variables = Vec::new();
let mut env_canonicalizer = Canonicalizer {
delegate,
canonicalize_mode: CanonicalizeMode::Input(CanonicalizeInputKind::ParamEnv),
variables: &mut variables,
variables: Vec::new(),
variable_lookup_table: Default::default(),
sub_root_lookup_table: Default::default(),
var_kinds: Vec::new(),
@ -147,7 +146,7 @@ impl<'a, D: SolverDelegate<Interner = I>, I: Interner> Canonicalizer<'a, D, I> {
param_env,
variable_lookup_table: env_canonicalizer.variable_lookup_table,
var_kinds: env_canonicalizer.var_kinds,
variables,
variables: env_canonicalizer.variables,
}
},
|&CanonicalParamEnvCacheEntry {
@ -156,9 +155,12 @@ impl<'a, D: SolverDelegate<Interner = I>, I: Interner> Canonicalizer<'a, D, I> {
ref variable_lookup_table,
ref var_kinds,
}| {
debug_assert!(variables.is_empty());
// FIXME(nnethercote): for reasons I don't understand, this `new`+`extend`
// combination is faster than `variables.clone()`, because it somehow avoids
// some allocations.
let mut variables = Vec::new();
variables.extend(cache_variables.iter().copied());
(param_env, variable_lookup_table.clone(), var_kinds.clone())
(param_env, variables, var_kinds.clone(), variable_lookup_table.clone())
},
)
} else {
@ -166,7 +168,7 @@ impl<'a, D: SolverDelegate<Interner = I>, I: Interner> Canonicalizer<'a, D, I> {
delegate,
canonicalize_mode: CanonicalizeMode::Input(CanonicalizeInputKind::ParamEnv),
variables,
variables: Vec::new(),
variable_lookup_table: Default::default(),
sub_root_lookup_table: Default::default(),
var_kinds: Vec::new(),
@ -175,7 +177,12 @@ impl<'a, D: SolverDelegate<Interner = I>, I: Interner> Canonicalizer<'a, D, I> {
};
let param_env = param_env.fold_with(&mut env_canonicalizer);
debug_assert!(env_canonicalizer.sub_root_lookup_table.is_empty());
(param_env, env_canonicalizer.variable_lookup_table, env_canonicalizer.var_kinds)
(
param_env,
env_canonicalizer.variables,
env_canonicalizer.var_kinds,
env_canonicalizer.variable_lookup_table,
)
}
}
@ -189,12 +196,11 @@ impl<'a, D: SolverDelegate<Interner = I>, I: Interner> Canonicalizer<'a, D, I> {
/// variable in the future by changing the way we detect global where-bounds.
pub(super) fn canonicalize_input<P: TypeFoldable<I>>(
delegate: &'a D,
variables: &'a mut Vec<I::GenericArg>,
input: QueryInput<I, P>,
) -> ty::Canonical<I, QueryInput<I, P>> {
) -> (Vec<I::GenericArg>, ty::Canonical<I, QueryInput<I, P>>) {
// First canonicalize the `param_env` while keeping `'static`
let (param_env, variable_lookup_table, var_kinds) =
Canonicalizer::canonicalize_param_env(delegate, variables, input.goal.param_env);
let (param_env, variables, var_kinds, variable_lookup_table) =
Canonicalizer::canonicalize_param_env(delegate, input.goal.param_env);
// Then canonicalize the rest of the input without keeping `'static`
// while *mostly* reusing the canonicalizer from above.
let mut rest_canonicalizer = Canonicalizer {
@ -234,8 +240,8 @@ impl<'a, D: SolverDelegate<Interner = I>, I: Interner> Canonicalizer<'a, D, I> {
debug_assert!(!value.has_infer(), "unexpected infer in {value:?}");
debug_assert!(!value.has_placeholders(), "unexpected placeholders in {value:?}");
let (max_universe, variables) = rest_canonicalizer.finalize();
Canonical { max_universe, variables, value }
let (max_universe, variables, var_kinds) = rest_canonicalizer.finalize();
(variables, Canonical { max_universe, var_kinds, value })
}
fn get_or_insert_bound_var(
@ -243,8 +249,9 @@ impl<'a, D: SolverDelegate<Interner = I>, I: Interner> Canonicalizer<'a, D, I> {
arg: impl Into<I::GenericArg>,
kind: CanonicalVarKind<I>,
) -> ty::BoundVar {
// FIXME: 16 is made up and arbitrary. We should look at some
// perf data here.
// The exact value of 16 here doesn't matter that much (8 and 32 give extremely similar
// results). So long as we have protection against the rare cases where the length reaches
// 1000+ (e.g. `wg-grammar`).
let arg = arg.into();
let idx = if self.variables.len() > 16 {
if self.variable_lookup_table.is_empty() {
@ -276,19 +283,18 @@ impl<'a, D: SolverDelegate<Interner = I>, I: Interner> Canonicalizer<'a, D, I> {
ty::BoundVar::from(idx)
}
fn finalize(self) -> (ty::UniverseIndex, I::CanonicalVarKinds) {
fn finalize(self) -> (ty::UniverseIndex, Vec<I::GenericArg>, I::CanonicalVarKinds) {
let mut var_kinds = self.var_kinds;
// See the rustc-dev-guide section about how we deal with universes
// during canonicalization in the new solver.
match self.canonicalize_mode {
let max_universe = match self.canonicalize_mode {
// All placeholders and vars are canonicalized in the root universe.
CanonicalizeMode::Input { .. } => {
debug_assert!(
var_kinds.iter().all(|var| var.universe() == ty::UniverseIndex::ROOT),
"expected all vars to be canonicalized in root universe: {var_kinds:#?}"
);
let var_kinds = self.delegate.cx().mk_canonical_var_kinds(&var_kinds);
(ty::UniverseIndex::ROOT, var_kinds)
ty::UniverseIndex::ROOT
}
// When canonicalizing a response we map a universes already entered
// by the caller to the root universe and only return useful universe
@ -302,15 +308,15 @@ impl<'a, D: SolverDelegate<Interner = I>, I: Interner> Canonicalizer<'a, D, I> {
);
*var = var.with_updated_universe(new_uv);
}
let max_universe = var_kinds
var_kinds
.iter()
.map(|kind| kind.universe())
.max()
.unwrap_or(ty::UniverseIndex::ROOT);
let var_kinds = self.delegate.cx().mk_canonical_var_kinds(&var_kinds);
(max_universe, var_kinds)
.unwrap_or(ty::UniverseIndex::ROOT)
}
}
};
let var_kinds = self.delegate.cx().mk_canonical_var_kinds(&var_kinds);
(max_universe, self.variables, var_kinds)
}
fn inner_fold_ty(&mut self, t: I::Ty) -> I::Ty {
@ -417,7 +423,7 @@ impl<D: SolverDelegate<Interner = I>, I: Interner> TypeFolder<I> for Canonicaliz
// We don't canonicalize `ReStatic` in the `param_env` as we use it
// when checking whether a `ParamEnv` candidate is global.
ty::ReStatic => match self.canonicalize_mode {
CanonicalizeMode::Input(CanonicalizeInputKind::Predicate { .. }) => {
CanonicalizeMode::Input(CanonicalizeInputKind::Predicate) => {
CanonicalVarKind::Region(ty::UniverseIndex::ROOT)
}
CanonicalizeMode::Input(CanonicalizeInputKind::ParamEnv)
@ -545,7 +551,7 @@ impl<D: SolverDelegate<Interner = I>, I: Interner> TypeFolder<I> for Canonicaliz
match self.canonicalize_mode {
CanonicalizeMode::Input(CanonicalizeInputKind::ParamEnv)
| CanonicalizeMode::Response { max_input_universe: _ } => {}
CanonicalizeMode::Input(CanonicalizeInputKind::Predicate { .. }) => {
CanonicalizeMode::Input(CanonicalizeInputKind::Predicate) => {
panic!("erasing 'static in env")
}
}

View file

@ -59,10 +59,8 @@ where
D: SolverDelegate<Interner = I>,
I: Interner,
{
let mut orig_values = Default::default();
let canonical = Canonicalizer::canonicalize_input(
let (orig_values, canonical) = Canonicalizer::canonicalize_input(
delegate,
&mut orig_values,
QueryInput {
goal,
predefined_opaques_in_body: delegate.cx().mk_predefined_opaques_in_body(opaque_types),
@ -82,10 +80,7 @@ where
I: Interner,
T: TypeFoldable<I>,
{
let mut orig_values = Default::default();
let canonical =
Canonicalizer::canonicalize_response(delegate, max_input_universe, &mut orig_values, value);
canonical
Canonicalizer::canonicalize_response(delegate, max_input_universe, value)
}
/// After calling a canonical query, we apply the constraints returned
@ -157,7 +152,7 @@ where
//
// We therefore instantiate the existential variable in the canonical response with the
// inference variable of the input right away, which is more performant.
let mut opt_values = IndexVec::from_elem_n(None, response.variables.len());
let mut opt_values = IndexVec::from_elem_n(None, response.var_kinds.len());
for (original_value, result_value) in iter::zip(original_values, var_values.var_values.iter()) {
match result_value.kind() {
ty::GenericArgKind::Type(t) => {
@ -167,7 +162,7 @@ where
// more involved. They are also a lot rarer than region variables.
if let ty::Bound(index_kind, b) = t.kind()
&& !matches!(
response.variables.get(b.var().as_usize()).unwrap(),
response.var_kinds.get(b.var().as_usize()).unwrap(),
CanonicalVarKind::Ty { .. }
)
{
@ -189,7 +184,7 @@ where
}
}
}
CanonicalVarValues::instantiate(delegate.cx(), response.variables, |var_values, kind| {
CanonicalVarValues::instantiate(delegate.cx(), response.var_kinds, |var_values, kind| {
if kind.universe() != ty::UniverseIndex::ROOT {
// A variable from inside a binder of the query. While ideally these shouldn't
// exist at all (see the FIXME at the start of this method), we have to deal with
@ -308,7 +303,7 @@ where
let var_values = CanonicalVarValues { var_values: delegate.cx().mk_args(var_values) };
let state = inspect::State { var_values, data };
let state = eager_resolve_vars(delegate, state);
Canonicalizer::canonicalize_response(delegate, max_input_universe, &mut vec![], state)
Canonicalizer::canonicalize_response(delegate, max_input_universe, state)
}
// FIXME: needs to be pub to be accessed by downstream
@ -345,14 +340,14 @@ where
pub fn response_no_constraints_raw<I: Interner>(
cx: I,
max_universe: ty::UniverseIndex,
variables: I::CanonicalVarKinds,
var_kinds: I::CanonicalVarKinds,
certainty: Certainty,
) -> CanonicalResponse<I> {
ty::Canonical {
max_universe,
variables,
var_kinds,
value: Response {
var_values: ty::CanonicalVarValues::make_identity(cx, variables),
var_values: ty::CanonicalVarValues::make_identity(cx, var_kinds),
// FIXME: maybe we should store the "no response" version in cx, like
// we do for cx.types and stuff.
external_constraints: cx.mk_external_constraints(ExternalConstraintsData::default()),

View file

@ -97,7 +97,7 @@ where
/// The variable info for the `var_values`, only used to make an ambiguous response
/// with no constraints.
variables: I::CanonicalVarKinds,
var_kinds: I::CanonicalVarKinds,
/// What kind of goal we're currently computing, see the enum definition
/// for more info.
@ -325,7 +325,7 @@ where
// which we don't do within this evaluation context.
max_input_universe: ty::UniverseIndex::ROOT,
initial_opaque_types_storage_num_entries: Default::default(),
variables: Default::default(),
var_kinds: Default::default(),
var_values: CanonicalVarValues::dummy(),
current_goal_kind: CurrentGoalKind::Misc,
origin_span,
@ -376,7 +376,7 @@ where
let initial_opaque_types_storage_num_entries = delegate.opaque_types_storage_num_entries();
let mut ecx = EvalCtxt {
delegate,
variables: canonical_input.canonical.variables,
var_kinds: canonical_input.canonical.var_kinds,
var_values,
current_goal_kind: CurrentGoalKind::from_query_input(cx, input),
max_input_universe: canonical_input.canonical.max_universe,
@ -1323,7 +1323,7 @@ where
response_no_constraints_raw(
self.cx(),
self.max_input_universe,
self.variables,
self.var_kinds,
Certainty::Maybe { cause, opaque_types_jank },
)
}

View file

@ -47,7 +47,7 @@ where
let max_input_universe = outer.max_input_universe;
let mut nested = EvalCtxt {
delegate,
variables: outer.variables,
var_kinds: outer.var_kinds,
var_values: outer.var_values,
current_goal_kind: outer.current_goal_kind,
max_input_universe,

View file

@ -143,7 +143,7 @@ fn response_no_constraints<I: Interner>(
Ok(response_no_constraints_raw(
cx,
input.canonical.max_universe,
input.canonical.variables,
input.canonical.var_kinds,
certainty,
))
}

View file

@ -6,7 +6,7 @@ edition = "2024"
[dependencies]
# tidy-alphabetical-start
bitflags = "2.4.1"
rustc-literal-escaper = "0.0.5"
rustc-literal-escaper = "0.0.7"
rustc_ast = { path = "../rustc_ast" }
rustc_ast_pretty = { path = "../rustc_ast_pretty" }
rustc_data_structures = { path = "../rustc_data_structures" }

View file

@ -5,7 +5,7 @@ edition = "2024"
[dependencies]
# tidy-alphabetical-start
rustc-literal-escaper = "0.0.5"
rustc-literal-escaper = "0.0.7"
rustc_lexer = { path = "../rustc_lexer" }
# tidy-alphabetical-end

View file

@ -184,6 +184,9 @@ pub enum Suggestion {
/// `format!("{foo:?x}")` -> `format!("{foo:x?}")`
/// `format!("{foo:?X}")` -> `format!("{foo:X?}")`
ReorderFormatParameter(Range<usize>, String),
/// Add missing colon:
/// `format!("{foo?}")` -> `format!("{foo:?}")`
AddMissingColon(Range<usize>),
}
/// The parser structure for interpreting the input format string. This is
@ -453,10 +456,11 @@ impl<'input> Parser<'input> {
suggestion: Suggestion::None,
});
if let Some((_, _, c)) = self.peek() {
match c {
'?' => self.suggest_format_debug(),
'<' | '^' | '>' => self.suggest_format_align(c),
if let (Some((_, _, c)), Some((_, _, nc))) = (self.peek(), self.peek_ahead()) {
match (c, nc) {
('?', '}') => self.missing_colon_before_debug_formatter(),
('?', _) => self.suggest_format_debug(),
('<' | '^' | '>', _) => self.suggest_format_align(c),
_ => self.suggest_positional_arg_instead_of_captured_arg(arg),
}
}
@ -849,6 +853,23 @@ impl<'input> Parser<'input> {
}
}
fn missing_colon_before_debug_formatter(&mut self) {
if let Some((range, _)) = self.consume_pos('?') {
let span = range.clone();
self.errors.insert(
0,
ParseError {
description: "expected `}`, found `?`".to_owned(),
note: Some(format!("to print `{{`, you can escape it using `{{{{`",)),
label: "expected `:` before `?` to format with `Debug`".to_owned(),
span: range,
secondary_label: None,
suggestion: Suggestion::AddMissingColon(span),
},
);
}
}
fn suggest_format_align(&mut self, alignment: char) {
if let Some((range, _)) = self.consume_pos(alignment) {
self.errors.insert(

View file

@ -244,10 +244,6 @@ passes_function_not_have_default_implementation = function doesn't have a defaul
passes_functions_names_duplicated = functions names are duplicated
.note = all `#[rustc_must_implement_one_of]` arguments must be unique
passes_has_incoherent_inherent_impl =
`rustc_has_incoherent_inherent_impls` attribute should be applied to types or traits
.label = only adts, extern types and traits are supported
passes_ignored_derived_impls =
`{$name}` has {$trait_list_len ->
[one] a derived impl

View file

@ -21,8 +21,8 @@ use rustc_feature::{
BuiltinAttribute,
};
use rustc_hir::attrs::{
AttributeKind, DocAttribute, DocInline, EiiDecl, EiiImpl, InlineAttr, MirDialect, MirPhase,
ReprAttr, SanitizerSet,
AttributeKind, DocAttribute, DocInline, EiiDecl, EiiImpl, EiiImplResolution, InlineAttr,
MirDialect, MirPhase, ReprAttr, SanitizerSet,
};
use rustc_hir::def::DefKind;
use rustc_hir::def_id::LocalModDefId;
@ -306,6 +306,7 @@ impl<'tcx> CheckAttrVisitor<'tcx> {
| AttributeKind::CfgAttrTrace
| AttributeKind::ThreadLocal
| AttributeKind::CfiEncoding { .. }
| AttributeKind::RustcHasIncoherentInherentImpls
) => { /* do nothing */ }
Attribute::Unparsed(attr_item) => {
style = Some(attr_item.style);
@ -325,9 +326,6 @@ impl<'tcx> CheckAttrVisitor<'tcx> {
| [sym::rustc_then_this_would_need, ..] => self.check_rustc_dirty_clean(attr),
[sym::collapse_debuginfo, ..] => self.check_collapse_debuginfo(attr, span, target),
[sym::must_not_suspend, ..] => self.check_must_not_suspend(attr, span, target),
[sym::rustc_has_incoherent_inherent_impls, ..] => {
self.check_has_incoherent_inherent_impls(attr, span, target)
}
[sym::autodiff_forward, ..] | [sym::autodiff_reverse, ..] => {
self.check_autodiff(hir_id, attr, span, target)
}
@ -506,7 +504,7 @@ impl<'tcx> CheckAttrVisitor<'tcx> {
}
fn check_eii_impl(&self, impls: &[EiiImpl], target: Target) {
for EiiImpl { span, inner_span, eii_macro, impl_marked_unsafe, is_default: _ } in impls {
for EiiImpl { span, inner_span, resolution, impl_marked_unsafe, is_default: _ } in impls {
match target {
Target::Fn => {}
_ => {
@ -514,7 +512,8 @@ impl<'tcx> CheckAttrVisitor<'tcx> {
}
}
if find_attr!(self.tcx.get_all_attrs(*eii_macro), AttributeKind::EiiExternTarget(EiiDecl { impl_unsafe, .. }) if *impl_unsafe)
if let EiiImplResolution::Macro(eii_macro) = resolution
&& find_attr!(self.tcx.get_all_attrs(*eii_macro), AttributeKind::EiiExternTarget(EiiDecl { impl_unsafe, .. }) if *impl_unsafe)
&& !impl_marked_unsafe
{
self.dcx().emit_err(errors::EiiImplRequiresUnsafe {
@ -758,9 +757,14 @@ impl<'tcx> CheckAttrVisitor<'tcx> {
if let Some(impls) = find_attr!(attrs, AttributeKind::EiiImpls(impls) => impls) {
let sig = self.tcx.hir_node(hir_id).fn_sig().unwrap();
for i in impls {
let name = match i.resolution {
EiiImplResolution::Macro(def_id) => self.tcx.item_name(def_id),
EiiImplResolution::Known(decl) => decl.name.name,
EiiImplResolution::Error(_eg) => continue,
};
self.dcx().emit_err(errors::EiiWithTrackCaller {
attr_span,
name: self.tcx.item_name(i.eii_macro),
name,
sig_span: sig.span,
});
}
@ -1158,17 +1162,6 @@ impl<'tcx> CheckAttrVisitor<'tcx> {
}
}
fn check_has_incoherent_inherent_impls(&self, attr: &Attribute, span: Span, target: Target) {
match target {
Target::Trait | Target::Struct | Target::Enum | Target::Union | Target::ForeignTy => {}
_ => {
self.tcx
.dcx()
.emit_err(errors::HasIncoherentInherentImpl { attr_span: attr.span(), span });
}
}
}
fn check_ffi_pure(&self, attr_span: Span, attrs: &[Attribute]) {
if find_attr!(attrs, AttributeKind::FfiConst(_)) {
// `#[ffi_const]` functions cannot be `#[ffi_pure]`

View file

@ -81,7 +81,7 @@ pub(crate) fn check_externally_implementable_items<'tcx>(tcx: TyCtxt<'tcx>, ():
}
// now we have all eiis! For each of them, choose one we want to actually generate.
for (decl_did, FoundEii { decl, decl_crate, impls }) in eiis {
for (foreign_item, FoundEii { decl, decl_crate, impls }) in eiis {
let mut default_impls = Vec::new();
let mut explicit_impls = Vec::new();
@ -97,7 +97,7 @@ pub(crate) fn check_externally_implementable_items<'tcx>(tcx: TyCtxt<'tcx>, ():
// is instantly an error.
if explicit_impls.len() > 1 {
tcx.dcx().emit_err(DuplicateEiiImpls {
name: tcx.item_name(decl_did),
name: decl.name.name,
first_span: tcx.def_span(explicit_impls[0].0),
first_crate: tcx.crate_name(explicit_impls[0].1),
second_span: tcx.def_span(explicit_impls[1].0),
@ -116,7 +116,7 @@ pub(crate) fn check_externally_implementable_items<'tcx>(tcx: TyCtxt<'tcx>, ():
}
if default_impls.len() > 1 {
let decl_span = tcx.def_ident_span(decl_did).unwrap();
let decl_span = tcx.def_ident_span(foreign_item).unwrap();
tcx.dcx().span_delayed_bug(decl_span, "multiple not supported right now");
}
@ -139,8 +139,9 @@ pub(crate) fn check_externally_implementable_items<'tcx>(tcx: TyCtxt<'tcx>, ():
tcx.dcx().emit_err(EiiWithoutImpl {
current_crate_name: tcx.crate_name(LOCAL_CRATE),
decl_crate_name: tcx.crate_name(decl_crate),
name: tcx.item_name(decl_did),
span: decl.span,
// FIXME: shouldn't call `item_name`
name: decl.name.name,
span: decl.name.span,
help: (),
});

Some files were not shown because too many files have changed in this diff Show more