Merge pull request #2617 from rust-lang/rustc-pull

Rustc pull update
This commit is contained in:
Tshepang Mbambo 2025-10-27 15:42:42 +02:00 committed by GitHub
commit 044bc7a09d
No known key found for this signature in database
GPG key ID: B5690EEEBB952194
557 changed files with 9650 additions and 7125 deletions

View file

@ -427,6 +427,7 @@ Marcell Pardavi <marcell.pardavi@gmail.com>
Marco Ieni <11428655+MarcoIeni@users.noreply.github.com>
Marcus Klaas de Vries <mail@marcusklaas.nl>
Margaret Meyerhofer <mmeyerho@andrew.cmu.edu> <mmeyerho@andrew>
Marijn Schouten <mhkbst@gmail.com> <hkBst@users.noreply.github.com>
Mark Mansi <markm@cs.wisc.edu>
Mark Mansi <markm@cs.wisc.edu> <m.mim95@gmail.com>
Mark Rousskov <mark.simulacrum@gmail.com>

View file

@ -75,7 +75,17 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "710e8eae58854cdc1790fcb56cca04d712a17be849eeb81da2a724bf4bae2bc4"
dependencies = [
"anstyle",
"unicode-width 0.2.1",
"unicode-width 0.2.2",
]
[[package]]
name = "annotate-snippets"
version = "0.12.7"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "47224528f74de27d1d06aad6a5dda4f865b6ebe2e56c538943d746a7270cb67e"
dependencies = [
"anstyle",
"unicode-width 0.2.2",
]
[[package]]
@ -95,9 +105,9 @@ dependencies = [
[[package]]
name = "anstyle"
version = "1.0.11"
version = "1.0.13"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "862ed96ca487e809f1c8e5a8447f6ee2cf102f846893800b20cebdf541fc6bbd"
checksum = "5192cca8006f1fd4f7237516f40fa183bb07f8fbdfedaa0036de5ea9b0b45e78"
[[package]]
name = "anstyle-lossy"
@ -136,7 +146,7 @@ dependencies = [
"anstyle-lossy",
"anstyle-parse",
"html-escape",
"unicode-width 0.2.1",
"unicode-width 0.2.2",
]
[[package]]
@ -677,7 +687,7 @@ checksum = "fe6d2e5af09e8c8ad56c969f2157a3d4238cebc7c55f0a517728c38f7b200f81"
dependencies = [
"serde",
"termcolor",
"unicode-width 0.2.1",
"unicode-width 0.2.2",
]
[[package]]
@ -808,7 +818,7 @@ dependencies = [
"encode_unicode",
"libc",
"once_cell",
"unicode-width 0.2.1",
"unicode-width 0.2.2",
"windows-sys 0.59.0",
]
@ -1485,7 +1495,7 @@ version = "0.2.24"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "cfe4fbac503b8d1f88e6676011885f34b7174f46e59956bba534ba83abded4df"
dependencies = [
"unicode-width 0.2.1",
"unicode-width 0.2.2",
]
[[package]]
@ -1887,7 +1897,7 @@ dependencies = [
"console",
"number_prefix",
"portable-atomic",
"unicode-width 0.2.1",
"unicode-width 0.2.2",
"web-time",
]
@ -3756,7 +3766,9 @@ dependencies = [
name = "rustc_errors"
version = "0.0.0"
dependencies = [
"annotate-snippets 0.11.5",
"annotate-snippets 0.12.7",
"anstream",
"anstyle",
"derive_setters",
"rustc_abi",
"rustc_ast",
@ -3773,7 +3785,6 @@ dependencies = [
"rustc_span",
"serde",
"serde_json",
"termcolor",
"termize",
"tracing",
"windows 0.61.3",
@ -4327,11 +4338,10 @@ dependencies = [
"rustc_macros",
"rustc_session",
"rustc_span",
"termcolor",
"thin-vec",
"tracing",
"unicode-normalization",
"unicode-width 0.2.1",
"unicode-width 0.2.2",
]
[[package]]
@ -4590,7 +4600,7 @@ dependencies = [
"sha1",
"sha2",
"tracing",
"unicode-width 0.2.1",
"unicode-width 0.2.2",
]
[[package]]
@ -5936,9 +5946,9 @@ checksum = "7dd6e30e90baa6f72411720665d41d89b9a3d039dc45b8faea1ddd07f617f6af"
[[package]]
name = "unicode-width"
version = "0.2.1"
version = "0.2.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "4a1a07cc7db3810833284e8d372ccdc6da29741639ecc70c9ec107df0fa6154c"
checksum = "b4ac048d71ede7ee76d585517add45da530660ef4390e49b098733c6e897f254"
[[package]]
name = "unicode-xid"
@ -6223,7 +6233,7 @@ dependencies = [
"bumpalo",
"leb128fmt",
"memchr",
"unicode-width 0.2.1",
"unicode-width 0.2.2",
"wasm-encoder 0.240.0",
]

View file

@ -2579,6 +2579,9 @@ pub enum TyPatKind {
/// A range pattern (e.g., `1...2`, `1..2`, `1..`, `..2`, `1..=2`, `..=2`).
Range(Option<Box<AnonConst>>, Option<Box<AnonConst>>, Spanned<RangeEnd>),
/// A `!null` pattern for raw pointers.
NotNull,
Or(ThinVec<TyPat>),
/// Placeholder for a pattern that wasn't syntactically well formed in some way.
@ -3633,49 +3636,26 @@ pub struct Trait {
pub items: ThinVec<Box<AssocItem>>,
}
/// The location of a where clause on a `TyAlias` (`Span`) and whether there was
/// a `where` keyword (`bool`). This is split out from `WhereClause`, since there
/// are two locations for where clause on type aliases, but their predicates
/// are concatenated together.
///
/// Take this example:
/// ```ignore (only-for-syntax-highlight)
/// trait Foo {
/// type Assoc<'a, 'b> where Self: 'a, Self: 'b;
/// }
/// impl Foo for () {
/// type Assoc<'a, 'b> where Self: 'a = () where Self: 'b;
/// // ^^^^^^^^^^^^^^ first where clause
/// // ^^^^^^^^^^^^^^ second where clause
/// }
/// ```
///
/// If there is no where clause, then this is `false` with `DUMMY_SP`.
#[derive(Copy, Clone, Encodable, Decodable, Debug, Default, Walkable)]
pub struct TyAliasWhereClause {
pub has_where_token: bool,
pub span: Span,
}
/// The span information for the two where clauses on a `TyAlias`.
#[derive(Copy, Clone, Encodable, Decodable, Debug, Default, Walkable)]
pub struct TyAliasWhereClauses {
/// Before the equals sign.
pub before: TyAliasWhereClause,
/// After the equals sign.
pub after: TyAliasWhereClause,
/// The index in `TyAlias.generics.where_clause.predicates` that would split
/// into predicates from the where clause before the equals sign and the ones
/// from the where clause after the equals sign.
pub split: usize,
}
#[derive(Clone, Encodable, Decodable, Debug, Walkable)]
pub struct TyAlias {
pub defaultness: Defaultness,
pub ident: Ident,
pub generics: Generics,
pub where_clauses: TyAliasWhereClauses,
/// There are two locations for where clause on type aliases. This represents the second
/// where clause, before the semicolon. The first where clause is stored inside `generics`.
///
/// Take this example:
/// ```ignore (only-for-syntax-highlight)
/// trait Foo {
/// type Assoc<'a, 'b> where Self: 'a, Self: 'b;
/// }
/// impl Foo for () {
/// type Assoc<'a, 'b> where Self: 'a = () where Self: 'b;
/// // ^^^^^^^^^^^^^^ before where clause
/// // ^^^^^^^^^^^^^^ after where clause
/// }
/// ```
pub after_where_clause: WhereClause,
#[visitable(extra = BoundKind::Bound)]
pub bounds: GenericBounds,
pub ty: Option<Box<Ty>>,

View file

@ -471,8 +471,6 @@ macro_rules! common_visitor_and_walkers {
TraitBoundModifiers,
TraitObjectSyntax,
TyAlias,
TyAliasWhereClause,
TyAliasWhereClauses,
TyKind,
TyPatKind,
UnOp,

View file

@ -7,6 +7,7 @@ use rustc_data_structures::stack::ensure_sufficient_stack;
use rustc_hir as hir;
use rustc_hir::attrs::AttributeKind;
use rustc_hir::def::{DefKind, Res};
use rustc_hir::definitions::DefPathData;
use rustc_hir::{HirId, Target, find_attr};
use rustc_middle::span_bug;
use rustc_middle::ty::TyCtxt;
@ -461,7 +462,13 @@ impl<'hir> LoweringContext<'_, 'hir> {
for (idx, arg) in args.iter().cloned().enumerate() {
if legacy_args_idx.contains(&idx) {
let node_id = self.next_node_id();
self.create_def(node_id, None, DefKind::AnonConst, f.span);
self.create_def(
node_id,
None,
DefKind::AnonConst,
DefPathData::LateAnonConst,
f.span,
);
let mut visitor = WillCreateDefIdsVisitor {};
let const_value = if let ControlFlow::Break(span) = visitor.visit_expr(&arg) {
Box::new(Expr {

View file

@ -36,20 +36,18 @@ pub(super) struct ItemLowerer<'a, 'hir> {
/// clause if it exists.
fn add_ty_alias_where_clause(
generics: &mut ast::Generics,
mut where_clauses: TyAliasWhereClauses,
after_where_clause: &ast::WhereClause,
prefer_first: bool,
) {
generics.where_clause.predicates.extend_from_slice(&after_where_clause.predicates);
let mut before = (generics.where_clause.has_where_token, generics.where_clause.span);
let mut after = (after_where_clause.has_where_token, after_where_clause.span);
if !prefer_first {
(where_clauses.before, where_clauses.after) = (where_clauses.after, where_clauses.before);
(before, after) = (after, before);
}
let where_clause =
if where_clauses.before.has_where_token || !where_clauses.after.has_where_token {
where_clauses.before
} else {
where_clauses.after
};
generics.where_clause.has_where_token = where_clause.has_where_token;
generics.where_clause.span = where_clause.span;
(generics.where_clause.has_where_token, generics.where_clause.span) =
if before.0 || !after.0 { before } else { after };
}
impl<'a, 'hir> ItemLowerer<'a, 'hir> {
@ -271,7 +269,7 @@ impl<'hir> LoweringContext<'_, 'hir> {
self.lower_body(|this| (&[], this.expr(span, hir::ExprKind::InlineAsm(asm))));
hir::ItemKind::GlobalAsm { asm, fake_body }
}
ItemKind::TyAlias(box TyAlias { ident, generics, where_clauses, ty, .. }) => {
ItemKind::TyAlias(box TyAlias { ident, generics, after_where_clause, ty, .. }) => {
// We lower
//
// type Foo = impl Trait
@ -282,7 +280,7 @@ impl<'hir> LoweringContext<'_, 'hir> {
// opaque type Foo1: Trait
let ident = self.lower_ident(*ident);
let mut generics = generics.clone();
add_ty_alias_where_clause(&mut generics, *where_clauses, true);
add_ty_alias_where_clause(&mut generics, after_where_clause, true);
let (generics, ty) = self.lower_generics(
&generics,
id,
@ -901,10 +899,15 @@ impl<'hir> LoweringContext<'_, 'hir> {
)
}
AssocItemKind::Type(box TyAlias {
ident, generics, where_clauses, bounds, ty, ..
ident,
generics,
after_where_clause,
bounds,
ty,
..
}) => {
let mut generics = generics.clone();
add_ty_alias_where_clause(&mut generics, *where_clauses, false);
add_ty_alias_where_clause(&mut generics, after_where_clause, false);
let (generics, kind) = self.lower_generics(
&generics,
i.id,
@ -1070,9 +1073,11 @@ impl<'hir> LoweringContext<'_, 'hir> {
(*ident, (generics, hir::ImplItemKind::Fn(sig, body_id)))
}
AssocItemKind::Type(box TyAlias { ident, generics, where_clauses, ty, .. }) => {
AssocItemKind::Type(box TyAlias {
ident, generics, after_where_clause, ty, ..
}) => {
let mut generics = generics.clone();
add_ty_alias_where_clause(&mut generics, *where_clauses, false);
add_ty_alias_where_clause(&mut generics, after_where_clause, false);
(
*ident,
self.lower_generics(

View file

@ -51,6 +51,7 @@ use rustc_data_structures::tagged_ptr::TaggedRef;
use rustc_errors::{DiagArgFromDisplay, DiagCtxtHandle};
use rustc_hir::def::{DefKind, LifetimeRes, Namespace, PartialRes, PerNS, Res};
use rustc_hir::def_id::{CRATE_DEF_ID, LOCAL_CRATE, LocalDefId};
use rustc_hir::definitions::{DefPathData, DisambiguatorState};
use rustc_hir::lints::DelayedLint;
use rustc_hir::{
self as hir, AngleBrackets, ConstArg, GenericArg, HirId, ItemLocalMap, LifetimeSource,
@ -93,6 +94,7 @@ rustc_fluent_macro::fluent_messages! { "../messages.ftl" }
struct LoweringContext<'a, 'hir> {
tcx: TyCtxt<'hir>,
resolver: &'a mut ResolverAstLowering,
disambiguator: DisambiguatorState,
/// Used to allocate HIR nodes.
arena: &'hir hir::Arena<'hir>,
@ -155,6 +157,7 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> {
// Pseudo-globals.
tcx,
resolver,
disambiguator: DisambiguatorState::new(),
arena: tcx.hir_arena,
// HirId handling.
@ -546,6 +549,7 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> {
node_id: ast::NodeId,
name: Option<Symbol>,
def_kind: DefKind,
def_path_data: DefPathData,
span: Span,
) -> LocalDefId {
let parent = self.current_hir_id_owner.def_id;
@ -561,7 +565,7 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> {
let def_id = self
.tcx
.at(span)
.create_def(parent, name, def_kind, None, &mut self.resolver.disambiguator)
.create_def(parent, name, def_kind, Some(def_path_data), &mut self.disambiguator)
.def_id();
debug!("create_def: def_id_to_node_id[{:?}] <-> {:?}", def_id, node_id);
@ -846,6 +850,7 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> {
param,
Some(kw::UnderscoreLifetime),
DefKind::LifetimeParam,
DefPathData::DesugaredAnonymousLifetime,
ident.span,
);
debug!(?_def_id);
@ -2290,7 +2295,13 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> {
// We're lowering a const argument that was originally thought to be a type argument,
// so the def collector didn't create the def ahead of time. That's why we have to do
// it here.
let def_id = self.create_def(node_id, None, DefKind::AnonConst, span);
let def_id = self.create_def(
node_id,
None,
DefKind::AnonConst,
DefPathData::LateAnonConst,
span,
);
let hir_id = self.lower_node_id(node_id);
let path_expr = Expr {

View file

@ -3,6 +3,7 @@ use std::sync::Arc;
use rustc_ast::*;
use rustc_data_structures::stack::ensure_sufficient_stack;
use rustc_hir::def::{DefKind, Res};
use rustc_hir::definitions::DefPathData;
use rustc_hir::{self as hir, LangItem, Target};
use rustc_middle::span_bug;
use rustc_span::source_map::{Spanned, respan};
@ -143,7 +144,9 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> {
}
// return inner to be processed in next loop
PatKind::Paren(inner) => pattern = inner,
PatKind::MacCall(_) => panic!("{:?} shouldn't exist here", pattern.span),
PatKind::MacCall(_) => {
panic!("{pattern:#?} shouldn't exist here")
}
PatKind::Err(guar) => break hir::PatKind::Err(*guar),
}
};
@ -460,6 +463,7 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> {
)
}),
),
TyPatKind::NotNull => hir::TyPatKind::NotNull,
TyPatKind::Or(variants) => {
hir::TyPatKind::Or(self.arena.alloc_from_iter(
variants.iter().map(|pat| self.lower_ty_pat_mut(pat, base_type)),
@ -524,7 +528,8 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> {
// We're generating a range end that didn't exist in the AST,
// so the def collector didn't create the def ahead of time. That's why we have to do
// it here.
let def_id = self.create_def(node_id, None, DefKind::AnonConst, span);
let def_id =
self.create_def(node_id, None, DefKind::AnonConst, DefPathData::LateAnonConst, span);
let hir_id = self.lower_node_id(node_id);
let unstable_span = self.mark_span_with_reason(

View file

@ -145,25 +145,24 @@ impl<'a> AstValidator<'a> {
&mut self,
ty_alias: &TyAlias,
) -> Result<(), errors::WhereClauseBeforeTypeAlias> {
if ty_alias.ty.is_none() || !ty_alias.where_clauses.before.has_where_token {
if ty_alias.ty.is_none() || !ty_alias.generics.where_clause.has_where_token {
return Ok(());
}
let (before_predicates, after_predicates) =
ty_alias.generics.where_clause.predicates.split_at(ty_alias.where_clauses.split);
let span = ty_alias.where_clauses.before.span;
let span = ty_alias.generics.where_clause.span;
let sugg = if !before_predicates.is_empty() || !ty_alias.where_clauses.after.has_where_token
let sugg = if !ty_alias.generics.where_clause.predicates.is_empty()
|| !ty_alias.after_where_clause.has_where_token
{
let mut state = State::new();
if !ty_alias.where_clauses.after.has_where_token {
if !ty_alias.after_where_clause.has_where_token {
state.space();
state.word_space("where");
}
let mut first = after_predicates.is_empty();
for p in before_predicates {
let mut first = ty_alias.after_where_clause.predicates.is_empty();
for p in &ty_alias.generics.where_clause.predicates {
if !first {
state.word_space(",");
}
@ -174,7 +173,7 @@ impl<'a> AstValidator<'a> {
errors::WhereClauseBeforeTypeAliasSugg::Move {
left: span,
snippet: state.s.eof(),
right: ty_alias.where_clauses.after.span.shrink_to_hi(),
right: ty_alias.after_where_clause.span.shrink_to_hi(),
}
} else {
errors::WhereClauseBeforeTypeAliasSugg::Remove { span }
@ -566,11 +565,7 @@ impl<'a> AstValidator<'a> {
self.dcx().emit_err(errors::BoundInContext { span, ctx });
}
fn check_foreign_ty_genericless(
&self,
generics: &Generics,
where_clauses: &TyAliasWhereClauses,
) {
fn check_foreign_ty_genericless(&self, generics: &Generics, after_where_clause: &WhereClause) {
let cannot_have = |span, descr, remove_descr| {
self.dcx().emit_err(errors::ExternTypesCannotHave {
span,
@ -584,14 +579,14 @@ impl<'a> AstValidator<'a> {
cannot_have(generics.span, "generic parameters", "generic parameters");
}
let check_where_clause = |where_clause: TyAliasWhereClause| {
let check_where_clause = |where_clause: &WhereClause| {
if where_clause.has_where_token {
cannot_have(where_clause.span, "`where` clauses", "`where` clause");
}
};
check_where_clause(where_clauses.before);
check_where_clause(where_clauses.after);
check_where_clause(&generics.where_clause);
check_where_clause(&after_where_clause);
}
fn check_foreign_kind_bodyless(&self, ident: Ident, kind: &str, body_span: Option<Span>) {
@ -1261,7 +1256,7 @@ impl<'a> Visitor<'a> for AstValidator<'a> {
visit::walk_item(self, item);
}
ItemKind::TyAlias(
ty_alias @ box TyAlias { defaultness, bounds, where_clauses, ty, .. },
ty_alias @ box TyAlias { defaultness, bounds, after_where_clause, ty, .. },
) => {
self.check_defaultness(item.span, *defaultness);
if ty.is_none() {
@ -1276,9 +1271,9 @@ impl<'a> Visitor<'a> for AstValidator<'a> {
if let Err(err) = self.check_type_alias_where_clause_location(ty_alias) {
self.dcx().emit_err(err);
}
} else if where_clauses.after.has_where_token {
} else if after_where_clause.has_where_token {
self.dcx().emit_err(errors::WhereClauseAfterTypeAlias {
span: where_clauses.after.span,
span: after_where_clause.span,
help: self.sess.is_nightly_build(),
});
}
@ -1308,7 +1303,7 @@ impl<'a> Visitor<'a> for AstValidator<'a> {
defaultness,
ident,
generics,
where_clauses,
after_where_clause,
bounds,
ty,
..
@ -1316,7 +1311,7 @@ impl<'a> Visitor<'a> for AstValidator<'a> {
self.check_defaultness(fi.span, *defaultness);
self.check_foreign_kind_bodyless(*ident, "type", ty.as_ref().map(|b| b.span));
self.check_type_no_bounds(bounds, "`extern` blocks");
self.check_foreign_ty_genericless(generics, where_clauses);
self.check_foreign_ty_genericless(generics, after_where_clause);
self.check_foreign_item_ascii_only(*ident);
}
ForeignItemKind::Static(box StaticItem { ident, safety, expr, .. }) => {

View file

@ -1232,6 +1232,7 @@ impl<'a> State<'a> {
self.print_expr_anon_const(end, &[]);
}
}
rustc_ast::TyPatKind::NotNull => self.word("!null"),
rustc_ast::TyPatKind::Or(variants) => {
let mut first = true;
for pat in variants {

View file

@ -59,14 +59,14 @@ impl<'a> State<'a> {
defaultness,
ident,
generics,
where_clauses,
after_where_clause,
bounds,
ty,
}) => {
self.print_associated_type(
*ident,
generics,
*where_clauses,
after_where_clause,
bounds,
ty.as_deref(),
vis,
@ -127,14 +127,12 @@ impl<'a> State<'a> {
&mut self,
ident: Ident,
generics: &ast::Generics,
where_clauses: ast::TyAliasWhereClauses,
after_where_clause: &ast::WhereClause,
bounds: &ast::GenericBounds,
ty: Option<&ast::Ty>,
vis: &ast::Visibility,
defaultness: ast::Defaultness,
) {
let (before_predicates, after_predicates) =
generics.where_clause.predicates.split_at(where_clauses.split);
let (cb, ib) = self.head("");
self.print_visibility(vis);
self.print_defaultness(defaultness);
@ -145,13 +143,13 @@ impl<'a> State<'a> {
self.word_nbsp(":");
self.print_type_bounds(bounds);
}
self.print_where_clause_parts(where_clauses.before.has_where_token, before_predicates);
self.print_where_clause(&generics.where_clause);
if let Some(ty) = ty {
self.space();
self.word_space("=");
self.print_type(ty);
}
self.print_where_clause_parts(where_clauses.after.has_where_token, after_predicates);
self.print_where_clause(&after_where_clause);
self.word(";");
self.end(ib);
self.end(cb);
@ -283,14 +281,14 @@ impl<'a> State<'a> {
defaultness,
ident,
generics,
where_clauses,
after_where_clause,
bounds,
ty,
}) => {
self.print_associated_type(
*ident,
generics,
*where_clauses,
after_where_clause,
bounds,
ty.as_deref(),
&item.vis,
@ -585,14 +583,14 @@ impl<'a> State<'a> {
defaultness,
ident,
generics,
where_clauses,
after_where_clause,
bounds,
ty,
}) => {
self.print_associated_type(
*ident,
generics,
*where_clauses,
after_where_clause,
bounds,
ty.as_deref(),
vis,
@ -759,14 +757,7 @@ impl<'a> State<'a> {
}
fn print_where_clause(&mut self, where_clause: &ast::WhereClause) {
self.print_where_clause_parts(where_clause.has_where_token, &where_clause.predicates);
}
fn print_where_clause_parts(
&mut self,
has_where_token: bool,
predicates: &[ast::WherePredicate],
) {
let ast::WhereClause { has_where_token, ref predicates, span: _ } = *where_clause;
if predicates.is_empty() && !has_where_token {
return;
}

View file

@ -326,7 +326,8 @@ pub fn parse_cfg_attr(
}) {
Ok(r) => return Some(r),
Err(e) => {
let suggestions = CFG_ATTR_TEMPLATE.suggestions(cfg_attr.style, sym::cfg_attr);
let suggestions =
CFG_ATTR_TEMPLATE.suggestions(Some(cfg_attr.style), sym::cfg_attr);
e.with_span_suggestions(
cfg_attr.span,
"must be of the form",
@ -356,7 +357,7 @@ pub fn parse_cfg_attr(
template: CFG_ATTR_TEMPLATE,
attribute: AttrPath::from_ast(&cfg_attr.get_normal_item().path),
reason,
attr_style: cfg_attr.style,
suggestions: CFG_ATTR_TEMPLATE.suggestions(Some(cfg_attr.style), sym::cfg_attr),
});
}
}
@ -388,6 +389,7 @@ fn parse_cfg_attr_internal<'a>(
let cfg_predicate = AttributeParser::parse_single_args(
sess,
attribute.span,
attribute.get_normal_item().span(),
attribute.style,
AttrPath {
segments: attribute

View file

@ -20,12 +20,7 @@ impl<S: Stage> SingleAttributeParser<S> for CrateNameParser {
return None;
};
Some(AttributeKind::CrateName {
name,
name_span: n.value_span,
attr_span: cx.attr_span,
style: cx.attr_style,
})
Some(AttributeKind::CrateName { name, name_span: n.value_span, attr_span: cx.attr_span })
}
}

View file

@ -56,8 +56,7 @@ impl<S: Stage> SingleAttributeParser<S> for InlineParser {
}
}
ArgParser::NameValue(_) => {
let suggestions = <Self as SingleAttributeParser<S>>::TEMPLATE
.suggestions(cx.attr_style, "inline");
let suggestions = cx.suggestions();
let span = cx.attr_span;
cx.emit_lint(AttributeLintKind::IllFormedAttributeInput { suggestions }, span);
return None;

View file

@ -71,8 +71,7 @@ impl<S: Stage> CombineAttributeParser<S> for LinkParser {
// Specifically `#[link = "dl"]` is accepted with a FCW
// For more information, see https://github.com/rust-lang/rust/pull/143193
ArgParser::NameValue(nv) if nv.value_as_str().is_some_and(|v| v == sym::dl) => {
let suggestions = <Self as CombineAttributeParser<S>>::TEMPLATE
.suggestions(cx.attr_style, "link");
let suggestions = cx.suggestions();
let span = cx.attr_span;
cx.emit_lint(AttributeLintKind::IllFormedAttributeInput { suggestions }, span);
return None;

View file

@ -1,4 +1,3 @@
use rustc_ast::AttrStyle;
use rustc_errors::DiagArgValue;
use rustc_hir::attrs::MacroUseArgs;
@ -102,7 +101,7 @@ impl<S: Stage> AttributeParser<S> for MacroUseParser {
}
}
ArgParser::NameValue(_) => {
let suggestions = MACRO_USE_TEMPLATE.suggestions(cx.attr_style, sym::macro_use);
let suggestions = cx.suggestions();
cx.emit_err(IllFormedAttributeInputLint {
num_suggestions: suggestions.len(),
suggestions: DiagArgValue::StrListSepByAnd(
@ -149,19 +148,14 @@ impl<S: Stage> SingleAttributeParser<S> for MacroExportParser {
]);
fn convert(cx: &mut AcceptContext<'_, '_, S>, args: &ArgParser<'_>) -> Option<AttributeKind> {
let suggestions = || {
<Self as SingleAttributeParser<S>>::TEMPLATE
.suggestions(AttrStyle::Inner, "macro_export")
};
let local_inner_macros = match args {
ArgParser::NoArgs => false,
ArgParser::List(list) => {
let Some(l) = list.single() else {
let span = cx.attr_span;
let suggestions = cx.suggestions();
cx.emit_lint(
AttributeLintKind::InvalidMacroExportArguments {
suggestions: suggestions(),
},
AttributeLintKind::InvalidMacroExportArguments { suggestions },
span,
);
return None;
@ -170,10 +164,9 @@ impl<S: Stage> SingleAttributeParser<S> for MacroExportParser {
Some(sym::local_inner_macros) => true,
_ => {
let span = cx.attr_span;
let suggestions = cx.suggestions();
cx.emit_lint(
AttributeLintKind::InvalidMacroExportArguments {
suggestions: suggestions(),
},
AttributeLintKind::InvalidMacroExportArguments { suggestions },
span,
);
return None;
@ -182,7 +175,7 @@ impl<S: Stage> SingleAttributeParser<S> for MacroExportParser {
}
ArgParser::NameValue(_) => {
let span = cx.attr_span;
let suggestions = suggestions();
let suggestions = cx.suggestions();
cx.emit_err(IllFormedAttributeInputLint {
num_suggestions: suggestions.len(),
suggestions: DiagArgValue::StrListSepByAnd(

View file

@ -45,8 +45,7 @@ impl<S: Stage> SingleAttributeParser<S> for MustUseParser {
Some(value_str)
}
ArgParser::List(_) => {
let suggestions = <Self as SingleAttributeParser<S>>::TEMPLATE
.suggestions(cx.attr_style, "must_use");
let suggestions = cx.suggestions();
cx.emit_err(IllFormedAttributeInputLint {
num_suggestions: suggestions.len(),
suggestions: DiagArgValue::StrListSepByAnd(

View file

@ -20,8 +20,7 @@ impl<S: Stage> SingleAttributeParser<S> for IgnoreParser {
ArgParser::NoArgs => None,
ArgParser::NameValue(name_value) => {
let Some(str_value) = name_value.value_as_str() else {
let suggestions = <Self as SingleAttributeParser<S>>::TEMPLATE
.suggestions(cx.attr_style, "ignore");
let suggestions = cx.suggestions();
let span = cx.attr_span;
cx.emit_lint(
AttributeLintKind::IllFormedAttributeInput { suggestions },
@ -32,8 +31,7 @@ impl<S: Stage> SingleAttributeParser<S> for IgnoreParser {
Some(str_value)
}
ArgParser::List(_) => {
let suggestions = <Self as SingleAttributeParser<S>>::TEMPLATE
.suggestions(cx.attr_style, "ignore");
let suggestions = cx.suggestions();
let span = cx.attr_span;
cx.emit_lint(AttributeLintKind::IllFormedAttributeInput { suggestions }, span);
return None;

View file

@ -337,8 +337,16 @@ pub struct Late;
/// Gives [`AttributeParser`]s enough information to create errors, for example.
pub struct AcceptContext<'f, 'sess, S: Stage> {
pub(crate) shared: SharedContext<'f, 'sess, S>,
/// The span of the attribute currently being parsed
/// The outer span of the attribute currently being parsed
/// #[attribute(...)]
/// ^^^^^^^^^^^^^^^^^ outer span
/// For attributes in `cfg_attr`, the outer span and inner spans are equal.
pub(crate) attr_span: Span,
/// The inner span of the attribute currently being parsed
/// #[attribute(...)]
/// ^^^^^^^^^^^^^^ inner span
pub(crate) inner_span: Span,
/// Whether it is an inner or outer attribute
pub(crate) attr_style: AttrStyle,
@ -427,7 +435,7 @@ impl<'f, 'sess: 'f, S: Stage> AcceptContext<'f, 'sess, S> {
i.kind.is_bytestr().then(|| self.sess().source_map().start_point(i.span))
}),
},
attr_style: self.attr_style,
suggestions: self.suggestions(),
})
}
@ -438,7 +446,7 @@ impl<'f, 'sess: 'f, S: Stage> AcceptContext<'f, 'sess, S> {
template: self.template.clone(),
attribute: self.attr_path.clone(),
reason: AttributeParseErrorReason::ExpectedIntegerLiteral,
attr_style: self.attr_style,
suggestions: self.suggestions(),
})
}
@ -449,7 +457,7 @@ impl<'f, 'sess: 'f, S: Stage> AcceptContext<'f, 'sess, S> {
template: self.template.clone(),
attribute: self.attr_path.clone(),
reason: AttributeParseErrorReason::ExpectedList,
attr_style: self.attr_style,
suggestions: self.suggestions(),
})
}
@ -460,7 +468,7 @@ impl<'f, 'sess: 'f, S: Stage> AcceptContext<'f, 'sess, S> {
template: self.template.clone(),
attribute: self.attr_path.clone(),
reason: AttributeParseErrorReason::ExpectedNoArgs,
attr_style: self.attr_style,
suggestions: self.suggestions(),
})
}
@ -472,7 +480,7 @@ impl<'f, 'sess: 'f, S: Stage> AcceptContext<'f, 'sess, S> {
template: self.template.clone(),
attribute: self.attr_path.clone(),
reason: AttributeParseErrorReason::ExpectedIdentifier,
attr_style: self.attr_style,
suggestions: self.suggestions(),
})
}
@ -485,7 +493,7 @@ impl<'f, 'sess: 'f, S: Stage> AcceptContext<'f, 'sess, S> {
template: self.template.clone(),
attribute: self.attr_path.clone(),
reason: AttributeParseErrorReason::ExpectedNameValue(name),
attr_style: self.attr_style,
suggestions: self.suggestions(),
})
}
@ -497,7 +505,7 @@ impl<'f, 'sess: 'f, S: Stage> AcceptContext<'f, 'sess, S> {
template: self.template.clone(),
attribute: self.attr_path.clone(),
reason: AttributeParseErrorReason::DuplicateKey(key),
attr_style: self.attr_style,
suggestions: self.suggestions(),
})
}
@ -510,7 +518,7 @@ impl<'f, 'sess: 'f, S: Stage> AcceptContext<'f, 'sess, S> {
template: self.template.clone(),
attribute: self.attr_path.clone(),
reason: AttributeParseErrorReason::UnexpectedLiteral,
attr_style: self.attr_style,
suggestions: self.suggestions(),
})
}
@ -521,7 +529,7 @@ impl<'f, 'sess: 'f, S: Stage> AcceptContext<'f, 'sess, S> {
template: self.template.clone(),
attribute: self.attr_path.clone(),
reason: AttributeParseErrorReason::ExpectedSingleArgument,
attr_style: self.attr_style,
suggestions: self.suggestions(),
})
}
@ -532,7 +540,7 @@ impl<'f, 'sess: 'f, S: Stage> AcceptContext<'f, 'sess, S> {
template: self.template.clone(),
attribute: self.attr_path.clone(),
reason: AttributeParseErrorReason::ExpectedAtLeastOneArgument,
attr_style: self.attr_style,
suggestions: self.suggestions(),
})
}
@ -552,7 +560,7 @@ impl<'f, 'sess: 'f, S: Stage> AcceptContext<'f, 'sess, S> {
strings: false,
list: false,
},
attr_style: self.attr_style,
suggestions: self.suggestions(),
})
}
@ -573,7 +581,7 @@ impl<'f, 'sess: 'f, S: Stage> AcceptContext<'f, 'sess, S> {
strings: false,
list: true,
},
attr_style: self.attr_style,
suggestions: self.suggestions(),
})
}
@ -593,7 +601,7 @@ impl<'f, 'sess: 'f, S: Stage> AcceptContext<'f, 'sess, S> {
strings: true,
list: false,
},
attr_style: self.attr_style,
suggestions: self.suggestions(),
})
}
@ -605,6 +613,13 @@ impl<'f, 'sess: 'f, S: Stage> AcceptContext<'f, 'sess, S> {
span,
);
}
pub(crate) fn suggestions(&self) -> Vec<String> {
// If the outer and inner spans are equal, we are parsing an attribute from `cfg_attr`,
// So don't display an attribute style in the suggestions
let style = (self.attr_span != self.inner_span).then_some(self.attr_style);
self.template.suggestions(style, &self.attr_path)
}
}
impl<'f, 'sess, S: Stage> Deref for AcceptContext<'f, 'sess, S> {

View file

@ -142,6 +142,7 @@ impl<'sess> AttributeParser<'sess, Early> {
Self::parse_single_args(
sess,
attr.span,
normal_attr.item.span(),
attr.style,
path.get_attribute_path(),
target_span,
@ -159,6 +160,7 @@ impl<'sess> AttributeParser<'sess, Early> {
pub fn parse_single_args<T, I>(
sess: &'sess Session,
attr_span: Span,
inner_span: Span,
attr_style: AttrStyle,
attr_path: AttrPath,
target_span: Span,
@ -186,6 +188,7 @@ impl<'sess> AttributeParser<'sess, Early> {
},
},
attr_span,
inner_span,
attr_style,
template,
attr_path,
@ -305,6 +308,7 @@ impl<'sess, S: Stage> AttributeParser<'sess, S> {
emit_lint: &mut emit_lint,
},
attr_span: lower_span(attr.span),
inner_span: lower_span(attr.get_normal_item().span()),
attr_style: attr.style,
template: &accept.template,
attr_path: path.get_attribute_path(),

View file

@ -1,6 +1,6 @@
use std::num::IntErrorKind;
use rustc_ast::{self as ast, AttrStyle, Path};
use rustc_ast::{self as ast, Path};
use rustc_errors::codes::*;
use rustc_errors::{
Applicability, Diag, DiagArgValue, DiagCtxtHandle, Diagnostic, EmissionGuarantee, Level,
@ -613,10 +613,10 @@ pub(crate) enum AttributeParseErrorReason<'a> {
pub(crate) struct AttributeParseError<'a> {
pub(crate) span: Span,
pub(crate) attr_span: Span,
pub(crate) attr_style: AttrStyle,
pub(crate) template: AttributeTemplate,
pub(crate) attribute: AttrPath,
pub(crate) reason: AttributeParseErrorReason<'a>,
pub(crate) suggestions: Vec<String>,
}
impl<'a, G: EmissionGuarantee> Diagnostic<'a, G> for AttributeParseError<'_> {
@ -752,16 +752,15 @@ impl<'a, G: EmissionGuarantee> Diagnostic<'a, G> for AttributeParseError<'_> {
if let Some(link) = self.template.docs {
diag.note(format!("for more information, visit <{link}>"));
}
let suggestions = self.template.suggestions(self.attr_style, &name);
diag.span_suggestions(
self.attr_span,
if suggestions.len() == 1 {
if self.suggestions.len() == 1 {
"must be of the form"
} else {
"try changing it to one of the following valid forms of the attribute"
},
suggestions,
self.suggestions,
Applicability::HasPlaceholders,
);

View file

@ -1046,16 +1046,6 @@ impl<'a, 'tcx> Visitor<'tcx> for TypeChecker<'a, 'tcx> {
}
}
&Rvalue::NullaryOp(NullOp::SizeOf | NullOp::AlignOf, ty) => {
let trait_ref =
ty::TraitRef::new(tcx, tcx.require_lang_item(LangItem::Sized, span), [ty]);
self.prove_trait_ref(
trait_ref,
location.to_locations(),
ConstraintCategory::SizedBound,
);
}
&Rvalue::NullaryOp(NullOp::ContractChecks, _) => {}
&Rvalue::NullaryOp(NullOp::UbChecks, _) => {}

View file

@ -283,7 +283,7 @@ builtin_macros_requires_cfg_pattern =
macro requires a cfg-pattern as an argument
.label = cfg-pattern required
builtin_macros_source_uitls_expected_item = expected item, found `{$token}`
builtin_macros_source_utils_expected_item = expected item, found `{$token}`
builtin_macros_takes_no_arguments = {$name} takes no arguments

View file

@ -610,7 +610,7 @@ impl<'a> TraitDef<'a> {
defaultness: ast::Defaultness::Final,
ident,
generics: Generics::default(),
where_clauses: ast::TyAliasWhereClauses::default(),
after_where_clause: ast::WhereClause::default(),
bounds: Vec::new(),
ty: Some(type_def.to_ty(cx, self.span, type_ident, generics)),
})),

View file

@ -952,7 +952,7 @@ pub(crate) struct AttributeOnlyUsableWithCrateType<'a> {
}
#[derive(Diagnostic)]
#[diag(builtin_macros_source_uitls_expected_item)]
#[diag(builtin_macros_source_utils_expected_item)]
pub(crate) struct ExpectedItem<'a> {
#[primary_span]
pub span: Span,

View file

@ -30,15 +30,21 @@ fn parse_pat_ty<'a>(
let ty = parser.parse_ty()?;
parser.expect_keyword(exp!(Is))?;
let pat = pat_to_ty_pat(
cx,
parser.parse_pat_no_top_guard(
None,
RecoverComma::No,
RecoverColon::No,
CommaRecoveryMode::EitherTupleOrPipe,
)?,
);
let start = parser.token.span;
let pat = if parser.eat(exp!(Bang)) {
parser.expect_keyword(exp!(Null))?;
ty_pat(TyPatKind::NotNull, start.to(parser.token.span))
} else {
pat_to_ty_pat(
cx,
parser.parse_pat_no_top_guard(
None,
RecoverComma::No,
RecoverColon::No,
CommaRecoveryMode::EitherTupleOrPipe,
)?,
)
};
if parser.token != token::Eof {
parser.unexpected()?;

View file

@ -72,10 +72,6 @@ pub fn debug_tuple() -> DebugTuple {
DebugTuple(())
}
pub fn size_of<T>() -> usize {
intrinsics::size_of::<T>()
}
pub fn use_size_of() -> usize {
size_of::<u64>()
}

View file

@ -6,6 +6,7 @@
extern_types,
decl_macro,
rustc_attrs,
rustc_private,
transparent_unions,
auto_traits,
freeze_impls,
@ -594,7 +595,7 @@ impl<T: ?Sized + Unsize<U>, U: ?Sized> CoerceUnsized<Box<U>> for Box<T> {}
impl<T> Box<T> {
pub fn new(val: T) -> Box<T> {
unsafe {
let size = intrinsics::size_of::<T>();
let size = size_of::<T>();
let ptr = libc::malloc(size);
intrinsics::copy(&val as *const T as *const u8, ptr, size);
Box(Unique { pointer: NonNull(ptr as *const T), _marker: PhantomData }, Global)
@ -646,11 +647,11 @@ pub mod intrinsics {
#[rustc_intrinsic]
pub fn abort() -> !;
#[rustc_intrinsic]
pub fn size_of<T>() -> usize;
pub const fn size_of<T>() -> usize;
#[rustc_intrinsic]
pub unsafe fn size_of_val<T: ?::Sized>(val: *const T) -> usize;
#[rustc_intrinsic]
pub fn align_of<T>() -> usize;
pub const fn align_of<T>() -> usize;
#[rustc_intrinsic]
pub unsafe fn align_of_val<T: ?::Sized>(val: *const T) -> usize;
#[rustc_intrinsic]
@ -715,6 +716,23 @@ impl<T> Index<usize> for [T] {
}
}
pub const fn size_of<T>() -> usize {
<T as SizedTypeProperties>::SIZE
}
pub const fn align_of<T>() -> usize {
<T as SizedTypeProperties>::ALIGN
}
trait SizedTypeProperties: Sized {
#[lang = "mem_size_const"]
const SIZE: usize = intrinsics::size_of::<Self>();
#[lang = "mem_align_const"]
const ALIGN: usize = intrinsics::align_of::<Self>();
}
impl<T> SizedTypeProperties for T {}
extern "C" {
type VaListImpl;
}

View file

@ -109,10 +109,10 @@ fn start<T: Termination + 'static>(
puts(*argv as *const i8);
}
unsafe {
puts(*((argv as usize + intrinsics::size_of::<*const u8>()) as *const *const i8));
puts(*((argv as usize + size_of::<*const u8>()) as *const *const i8));
}
unsafe {
puts(*((argv as usize + 2 * intrinsics::size_of::<*const u8>()) as *const *const i8));
puts(*((argv as usize + 2 * size_of::<*const u8>()) as *const *const i8));
}
}
@ -213,8 +213,8 @@ fn main() {
assert_eq!(intrinsics::size_of_val(a) as u8, 16);
assert_eq!(intrinsics::size_of_val(&0u32) as u8, 4);
assert_eq!(intrinsics::align_of::<u16>() as u8, 2);
assert_eq!(intrinsics::align_of_val(&a) as u8, intrinsics::align_of::<&str>() as u8);
assert_eq!(align_of::<u16>() as u8, 2);
assert_eq!(intrinsics::align_of_val(&a) as u8, align_of::<&str>() as u8);
let u8_needs_drop = const { intrinsics::needs_drop::<u8>() };
assert!(!u8_needs_drop);

View file

@ -829,19 +829,10 @@ fn codegen_stmt<'tcx>(fx: &mut FunctionCx<'_, '_, 'tcx>, cur_block: Block, stmt:
fx.bcx.ins().nop();
}
}
Rvalue::ShallowInitBox(ref operand, content_ty) => {
let content_ty = fx.monomorphize(content_ty);
let box_layout = fx.layout_of(Ty::new_box(fx.tcx, content_ty));
let operand = codegen_operand(fx, operand);
let operand = operand.load_scalar(fx);
lval.write_cvalue(fx, CValue::by_val(operand, box_layout));
}
Rvalue::NullaryOp(ref null_op, ty) => {
assert!(lval.layout().ty.is_sized(fx.tcx, fx.typing_env()));
let layout = fx.layout_of(fx.monomorphize(ty));
let val = match null_op {
NullOp::SizeOf => layout.size.bytes(),
NullOp::AlignOf => layout.align.bytes(),
NullOp::OffsetOf(fields) => fx
.tcx
.offset_of_subfield(
@ -924,6 +915,7 @@ fn codegen_stmt<'tcx>(fx: &mut FunctionCx<'_, '_, 'tcx>, cur_block: Block, stmt:
lval.write_cvalue_transmute(fx, operand);
}
Rvalue::CopyForDeref(_) => bug!("`CopyForDeref` in codegen"),
Rvalue::ShallowInitBox(..) => bug!("`ShallowInitBox` in codegen"),
}
}
StatementKind::StorageLive(_)

View file

@ -131,6 +131,11 @@ pub(crate) fn coerce_unsized_into<'tcx>(
dst.write_cvalue(fx, CValue::by_val_pair(base, info, dst.layout()));
};
match (&src_ty.kind(), &dst_ty.kind()) {
(ty::Pat(a, _), ty::Pat(b, _)) => {
let src = src.cast_pat_ty_to_base(fx.layout_of(*a));
let dst = dst.place_transmute_type(fx, *b);
return coerce_unsized_into(fx, src, dst);
}
(&ty::Ref(..), &ty::Ref(..))
| (&ty::Ref(..), &ty::RawPtr(..))
| (&ty::RawPtr(..), &ty::RawPtr(..)) => coerce_ptr(),

View file

@ -342,6 +342,14 @@ impl<'tcx> CValue<'tcx> {
assert_eq!(self.layout().backend_repr, layout.backend_repr);
CValue(self.0, layout)
}
pub(crate) fn cast_pat_ty_to_base(self, layout: TyAndLayout<'tcx>) -> Self {
let ty::Pat(base, _) = *self.layout().ty.kind() else {
panic!("not a pattern type: {:#?}", self.layout())
};
assert_eq!(layout.ty, base);
CValue(self.0, layout)
}
}
/// A place where you can write a value to or read a value from

View file

@ -1,7 +1,5 @@
/*
* TODO(antoyo): implement equality in libgccjit based on https://zpz.github.io/blog/overloading-equality-operator-in-cpp-class-hierarchy/ (for type equality?)
* TODO(antoyo): support #[inline] attributes.
* TODO(antoyo): support LTO (gcc's equivalent to Full LTO is -flto -flto-partition=one https://documentation.suse.com/sbp/all/html/SBP-GCC-10/index.html).
* For Thin LTO, this might be helpful:
// cspell:disable-next-line
* In gcc 4.6 -fwhopr was removed and became default with -flto. The non-whopr path can still be executed via -flto-partition=none.

View file

@ -39,13 +39,11 @@ trait ArgAttributesExt {
const ABI_AFFECTING_ATTRIBUTES: [(ArgAttribute, llvm::AttributeKind); 1] =
[(ArgAttribute::InReg, llvm::AttributeKind::InReg)];
const OPTIMIZATION_ATTRIBUTES: [(ArgAttribute, llvm::AttributeKind); 6] = [
const OPTIMIZATION_ATTRIBUTES: [(ArgAttribute, llvm::AttributeKind); 4] = [
(ArgAttribute::NoAlias, llvm::AttributeKind::NoAlias),
(ArgAttribute::CapturesAddress, llvm::AttributeKind::CapturesAddress),
(ArgAttribute::NonNull, llvm::AttributeKind::NonNull),
(ArgAttribute::ReadOnly, llvm::AttributeKind::ReadOnly),
(ArgAttribute::NoUndef, llvm::AttributeKind::NoUndef),
(ArgAttribute::CapturesReadOnly, llvm::AttributeKind::CapturesReadOnly),
];
fn get_attrs<'ll>(this: &ArgAttributes, cx: &CodegenCx<'ll, '_>) -> SmallVec<[&'ll Attribute; 8]> {
@ -81,15 +79,23 @@ fn get_attrs<'ll>(this: &ArgAttributes, cx: &CodegenCx<'ll, '_>) -> SmallVec<[&'
}
for (attr, llattr) in OPTIMIZATION_ATTRIBUTES {
if regular.contains(attr) {
// captures(...) is only available since LLVM 21.
if (attr == ArgAttribute::CapturesReadOnly || attr == ArgAttribute::CapturesAddress)
&& llvm_util::get_version() < (21, 0, 0)
{
continue;
}
attrs.push(llattr.create_attr(cx.llcx));
}
}
// captures(...) is only available since LLVM 21.
if (21, 0, 0) <= llvm_util::get_version() {
const CAPTURES_ATTRIBUTES: [(ArgAttribute, llvm::AttributeKind); 3] = [
(ArgAttribute::CapturesNone, llvm::AttributeKind::CapturesNone),
(ArgAttribute::CapturesAddress, llvm::AttributeKind::CapturesAddress),
(ArgAttribute::CapturesReadOnly, llvm::AttributeKind::CapturesReadOnly),
];
for (attr, llattr) in CAPTURES_ATTRIBUTES {
if regular.contains(attr) {
attrs.push(llattr.create_attr(cx.llcx));
break;
}
}
}
} else if cx.tcx.sess.opts.unstable_opts.sanitizer.contains(SanitizerSet::MEMORY) {
// If we're not optimising, *but* memory sanitizer is on, emit noundef, since it affects
// memory sanitizer's behavior.

View file

@ -1,37 +0,0 @@
#[cfg(test)]
mod tests;
/// Joins command-line arguments into a single space-separated string, quoting
/// and escaping individual arguments as necessary.
///
/// The result is intended to be informational, for embedding in debug metadata,
/// and might not be properly quoted/escaped for actual command-line use.
pub(crate) fn quote_command_line_args(args: &[String]) -> String {
// Start with a decent-sized buffer, since rustc invocations tend to be long.
let mut buf = String::with_capacity(128);
for arg in args {
if !buf.is_empty() {
buf.push(' ');
}
print_arg_quoted(&mut buf, arg);
}
buf
}
/// Equivalent to LLVM's `sys::printArg` with quoting always enabled
/// (see llvm/lib/Support/Program.cpp).
fn print_arg_quoted(buf: &mut String, arg: &str) {
buf.reserve(arg.len() + 2);
buf.push('"');
for ch in arg.chars() {
if matches!(ch, '"' | '\\' | '$') {
buf.push('\\');
}
buf.push(ch);
}
buf.push('"');
}

View file

@ -1,25 +0,0 @@
#[test]
fn quote_command_line_args() {
use super::quote_command_line_args;
struct Case<'a> {
args: &'a [&'a str],
expected: &'a str,
}
let cases = &[
Case { args: &[], expected: "" },
Case { args: &["--hello", "world"], expected: r#""--hello" "world""# },
Case { args: &["--hello world"], expected: r#""--hello world""# },
Case {
args: &["plain", "$dollar", "spa ce", r"back\slash", r#""quote""#, "plain"],
expected: r#""plain" "\$dollar" "spa ce" "back\\slash" "\"quote\"" "plain""#,
},
];
for &Case { args, expected } in cases {
let args = args.iter().copied().map(str::to_owned).collect::<Vec<_>>();
let actual = quote_command_line_args(&args);
assert_eq!(actual, expected, "args {args:?}");
}
}

View file

@ -1,5 +1,4 @@
pub(crate) mod archive;
mod command_line_args;
pub(crate) mod lto;
pub(crate) mod owned_target_machine;
mod profiling;

View file

@ -38,8 +38,6 @@ impl OwnedTargetMachine {
output_obj_file: &CStr,
debug_info_compression: &CStr,
use_emulated_tls: bool,
argv0: &str,
command_line_args: &str,
use_wasm_eh: bool,
) -> Result<Self, LlvmError<'static>> {
// SAFETY: llvm::LLVMRustCreateTargetMachine copies pointed to data
@ -66,10 +64,6 @@ impl OwnedTargetMachine {
output_obj_file.as_ptr(),
debug_info_compression.as_ptr(),
use_emulated_tls,
argv0.as_ptr(),
argv0.len(),
command_line_args.as_ptr(),
command_line_args.len(),
use_wasm_eh,
)
};

View file

@ -31,7 +31,6 @@ use rustc_span::{BytePos, InnerSpan, Pos, SpanData, SyntaxContext, sym};
use rustc_target::spec::{CodeModel, FloatAbi, RelocModel, SanitizerSet, SplitDebuginfo, TlsModel};
use tracing::{debug, trace};
use crate::back::command_line_args::quote_command_line_args;
use crate::back::lto::ThinBuffer;
use crate::back::owned_target_machine::OwnedTargetMachine;
use crate::back::profiling::{
@ -253,19 +252,6 @@ pub(crate) fn target_machine_factory(
let use_emulated_tls = matches!(sess.tls_model(), TlsModel::Emulated);
// Command-line information to be included in the target machine.
// This seems to only be used for embedding in PDB debuginfo files.
// FIXME(Zalathar): Maybe skip this for non-PDB targets?
let argv0 = std::env::current_exe()
.unwrap_or_default()
.into_os_string()
.into_string()
.unwrap_or_default();
let command_line_args = quote_command_line_args(&sess.expanded_args);
// Self-profile counter for the number of bytes produced by command-line quoting.
// Values are summed, so the summary result is cumulative across all TM factories.
sess.prof.artifact_size("quoted_command_line_args", "-", command_line_args.len() as u64);
let debuginfo_compression = sess.opts.debuginfo_compression.to_string();
match sess.opts.debuginfo_compression {
rustc_session::config::DebugInfoCompression::Zlib => {
@ -326,8 +312,6 @@ pub(crate) fn target_machine_factory(
&output_obj_file,
&debuginfo_compression,
use_emulated_tls,
&argv0,
&command_line_args,
use_wasm_eh,
)
})
@ -358,7 +342,7 @@ fn write_bitcode_to_file(module: &ModuleCodegen<ModuleLlvm>, path: &Path) {
}
}
/// In what context is a dignostic handler being attached to a codegen unit?
/// In what context is a diagnostic handler being attached to a codegen unit?
pub(crate) enum CodegenDiagnosticsStage {
/// Prelink optimization stage.
Opt,

View file

@ -1325,6 +1325,8 @@ fn generic_simd_intrinsic<'ll, 'tcx>(
};
}
let llvm_version = crate::llvm_util::get_version();
/// Converts a vector mask, where each element has a bit width equal to the data elements it is used with,
/// down to an i1 based mask that can be used by llvm intrinsics.
///
@ -1808,7 +1810,7 @@ fn generic_simd_intrinsic<'ll, 'tcx>(
);
// Alignment of T, must be a constant integer value:
let alignment = bx.const_i32(bx.align_of(in_elem).bytes() as i32);
let alignment = bx.align_of(in_elem).bytes();
// Truncate the mask vector to a vector of i1s:
let mask = vector_mask_to_bitmask(bx, args[2].immediate(), mask_elem_bitwidth, in_len);
@ -1819,11 +1821,23 @@ fn generic_simd_intrinsic<'ll, 'tcx>(
// Type of the vector of elements:
let llvm_elem_vec_ty = llvm_vector_ty(bx, element_ty0, in_len);
return Ok(bx.call_intrinsic(
"llvm.masked.gather",
&[llvm_elem_vec_ty, llvm_pointer_vec_ty],
&[args[1].immediate(), alignment, mask, args[0].immediate()],
));
let args: &[&'ll Value] = if llvm_version < (22, 0, 0) {
let alignment = bx.const_i32(alignment as i32);
&[args[1].immediate(), alignment, mask, args[0].immediate()]
} else {
&[args[1].immediate(), mask, args[0].immediate()]
};
let call =
bx.call_intrinsic("llvm.masked.gather", &[llvm_elem_vec_ty, llvm_pointer_vec_ty], args);
if llvm_version >= (22, 0, 0) {
crate::attributes::apply_to_callsite(
call,
crate::llvm::AttributePlace::Argument(0),
&[crate::llvm::CreateAlignmentAttr(bx.llcx, alignment)],
)
}
return Ok(call);
}
if name == sym::simd_masked_load {
@ -1891,18 +1905,30 @@ fn generic_simd_intrinsic<'ll, 'tcx>(
let mask = vector_mask_to_bitmask(bx, args[0].immediate(), m_elem_bitwidth, mask_len);
// Alignment of T, must be a constant integer value:
let alignment = bx.const_i32(bx.align_of(values_elem).bytes() as i32);
let alignment = bx.align_of(values_elem).bytes();
let llvm_pointer = bx.type_ptr();
// Type of the vector of elements:
let llvm_elem_vec_ty = llvm_vector_ty(bx, values_elem, values_len);
return Ok(bx.call_intrinsic(
"llvm.masked.load",
&[llvm_elem_vec_ty, llvm_pointer],
&[args[1].immediate(), alignment, mask, args[2].immediate()],
));
let args: &[&'ll Value] = if llvm_version < (22, 0, 0) {
let alignment = bx.const_i32(alignment as i32);
&[args[1].immediate(), alignment, mask, args[2].immediate()]
} else {
&[args[1].immediate(), mask, args[2].immediate()]
};
let call = bx.call_intrinsic("llvm.masked.load", &[llvm_elem_vec_ty, llvm_pointer], args);
if llvm_version >= (22, 0, 0) {
crate::attributes::apply_to_callsite(
call,
crate::llvm::AttributePlace::Argument(0),
&[crate::llvm::CreateAlignmentAttr(bx.llcx, alignment)],
)
}
return Ok(call);
}
if name == sym::simd_masked_store {
@ -1964,18 +1990,29 @@ fn generic_simd_intrinsic<'ll, 'tcx>(
let mask = vector_mask_to_bitmask(bx, args[0].immediate(), m_elem_bitwidth, mask_len);
// Alignment of T, must be a constant integer value:
let alignment = bx.const_i32(bx.align_of(values_elem).bytes() as i32);
let alignment = bx.align_of(values_elem).bytes();
let llvm_pointer = bx.type_ptr();
// Type of the vector of elements:
let llvm_elem_vec_ty = llvm_vector_ty(bx, values_elem, values_len);
return Ok(bx.call_intrinsic(
"llvm.masked.store",
&[llvm_elem_vec_ty, llvm_pointer],
&[args[2].immediate(), args[1].immediate(), alignment, mask],
));
let args: &[&'ll Value] = if llvm_version < (22, 0, 0) {
let alignment = bx.const_i32(alignment as i32);
&[args[2].immediate(), args[1].immediate(), alignment, mask]
} else {
&[args[2].immediate(), args[1].immediate(), mask]
};
let call = bx.call_intrinsic("llvm.masked.store", &[llvm_elem_vec_ty, llvm_pointer], args);
if llvm_version >= (22, 0, 0) {
crate::attributes::apply_to_callsite(
call,
crate::llvm::AttributePlace::Argument(1),
&[crate::llvm::CreateAlignmentAttr(bx.llcx, alignment)],
)
}
return Ok(call);
}
if name == sym::simd_scatter {
@ -2040,7 +2077,7 @@ fn generic_simd_intrinsic<'ll, 'tcx>(
);
// Alignment of T, must be a constant integer value:
let alignment = bx.const_i32(bx.align_of(in_elem).bytes() as i32);
let alignment = bx.align_of(in_elem).bytes();
// Truncate the mask vector to a vector of i1s:
let mask = vector_mask_to_bitmask(bx, args[2].immediate(), mask_elem_bitwidth, in_len);
@ -2050,12 +2087,25 @@ fn generic_simd_intrinsic<'ll, 'tcx>(
// Type of the vector of elements:
let llvm_elem_vec_ty = llvm_vector_ty(bx, element_ty0, in_len);
return Ok(bx.call_intrinsic(
let args: &[&'ll Value] = if llvm_version < (22, 0, 0) {
let alignment = bx.const_i32(alignment as i32);
&[args[0].immediate(), args[1].immediate(), alignment, mask]
} else {
&[args[0].immediate(), args[1].immediate(), mask]
};
let call = bx.call_intrinsic(
"llvm.masked.scatter",
&[llvm_elem_vec_ty, llvm_pointer_vec_ty],
&[args[0].immediate(), args[1].immediate(), alignment, mask],
));
args,
);
if llvm_version >= (22, 0, 0) {
crate::attributes::apply_to_callsite(
call,
crate::llvm::AttributePlace::Argument(1),
&[crate::llvm::CreateAlignmentAttr(bx.llcx, alignment)],
)
}
return Ok(call);
}
macro_rules! arith_red {

View file

@ -289,6 +289,7 @@ pub(crate) enum AttributeKind {
DeadOnUnwind = 43,
DeadOnReturn = 44,
CapturesReadOnly = 45,
CapturesNone = 46,
}
/// LLVMIntPredicate
@ -2330,10 +2331,6 @@ unsafe extern "C" {
OutputObjFile: *const c_char,
DebugInfoCompression: *const c_char,
UseEmulatedTls: bool,
Argv0: *const c_uchar, // See "PTR_LEN_STR".
Argv0Len: size_t,
CommandLineArgs: *const c_uchar, // See "PTR_LEN_STR".
CommandLineArgsLen: size_t,
UseWasmEH: bool,
) -> *mut TargetMachine;

View file

@ -15,8 +15,8 @@ use rustc_data_structures::profiling::{SelfProfilerRef, VerboseTimingGuard};
use rustc_errors::emitter::Emitter;
use rustc_errors::translation::Translator;
use rustc_errors::{
Diag, DiagArgMap, DiagCtxt, DiagMessage, ErrCode, FatalErrorMarker, Level, MultiSpan, Style,
Suggestions,
Diag, DiagArgMap, DiagCtxt, DiagMessage, ErrCode, FatalError, FatalErrorMarker, Level,
MultiSpan, Style, Suggestions,
};
use rustc_fs_util::link_or_copy;
use rustc_incremental::{
@ -346,12 +346,6 @@ pub struct CodegenContext<B: WriteBackendMethods> {
pub split_dwarf_kind: rustc_session::config::SplitDwarfKind,
pub pointer_size: Size,
/// All commandline args used to invoke the compiler, with @file args fully expanded.
/// This will only be used within debug info, e.g. in the pdb file on windows
/// This is mainly useful for other tools that reads that debuginfo to figure out
/// how to call the compiler with the same arguments.
pub expanded_args: Vec<String>,
/// Emitter to use for diagnostics produced during codegen.
pub diag_emitter: SharedEmitter,
/// LLVM optimizations for which we want to print remarks.
@ -380,7 +374,7 @@ fn generate_thin_lto_work<B: ExtraBackendMethods>(
each_linked_rlib_for_lto: &[PathBuf],
needs_thin_lto: Vec<(String, B::ThinBuffer)>,
import_only_modules: Vec<(SerializedModule<B::ModuleBuffer>, WorkProduct)>,
) -> Vec<(WorkItem<B>, u64)> {
) -> Vec<(ThinLtoWorkItem<B>, u64)> {
let _prof_timer = cgcx.prof.generic_activity("codegen_thin_generate_lto_work");
let (lto_modules, copy_jobs) = B::run_thin_lto(
@ -394,11 +388,11 @@ fn generate_thin_lto_work<B: ExtraBackendMethods>(
.into_iter()
.map(|module| {
let cost = module.cost();
(WorkItem::ThinLto(module), cost)
(ThinLtoWorkItem::ThinLto(module), cost)
})
.chain(copy_jobs.into_iter().map(|wp| {
(
WorkItem::CopyPostLtoArtifacts(CachedModuleCodegen {
ThinLtoWorkItem::CopyPostLtoArtifacts(CachedModuleCodegen {
name: wp.cgu_name.clone(),
source: wp,
}),
@ -703,64 +697,73 @@ pub(crate) enum WorkItem<B: WriteBackendMethods> {
/// Copy the post-LTO artifacts from the incremental cache to the output
/// directory.
CopyPostLtoArtifacts(CachedModuleCodegen),
/// Performs fat LTO on the given module.
FatLto {
exported_symbols_for_lto: Arc<Vec<String>>,
each_linked_rlib_for_lto: Vec<PathBuf>,
needs_fat_lto: Vec<FatLtoInput<B>>,
import_only_modules: Vec<(SerializedModule<B::ModuleBuffer>, WorkProduct)>,
},
}
enum ThinLtoWorkItem<B: WriteBackendMethods> {
/// Copy the post-LTO artifacts from the incremental cache to the output
/// directory.
CopyPostLtoArtifacts(CachedModuleCodegen),
/// Performs thin-LTO on the given module.
ThinLto(lto::ThinModule<B>),
}
// `pthread_setname()` on *nix ignores anything beyond the first 15
// bytes. Use short descriptions to maximize the space available for
// the module name.
#[cfg(not(windows))]
fn desc(short: &str, _long: &str, name: &str) -> String {
// The short label is three bytes, and is followed by a space. That
// leaves 11 bytes for the CGU name. How we obtain those 11 bytes
// depends on the CGU name form.
//
// - Non-incremental, e.g. `regex.f10ba03eb5ec7975-cgu.0`: the part
// before the `-cgu.0` is the same for every CGU, so use the
// `cgu.0` part. The number suffix will be different for each
// CGU.
//
// - Incremental (normal), e.g. `2i52vvl2hco29us0`: use the whole
// name because each CGU will have a unique ASCII hash, and the
// first 11 bytes will be enough to identify it.
//
// - Incremental (with `-Zhuman-readable-cgu-names`), e.g.
// `regex.f10ba03eb5ec7975-re_builder.volatile`: use the whole
// name. The first 11 bytes won't be enough to uniquely identify
// it, but no obvious substring will, and this is a rarely used
// option so it doesn't matter much.
//
assert_eq!(short.len(), 3);
let name = if let Some(index) = name.find("-cgu.") {
&name[index + 1..] // +1 skips the leading '-'.
} else {
name
};
format!("{short} {name}")
}
// Windows has no thread name length limit, so use more descriptive names.
#[cfg(windows)]
fn desc(_short: &str, long: &str, name: &str) -> String {
format!("{long} {name}")
}
impl<B: WriteBackendMethods> WorkItem<B> {
/// Generate a short description of this work item suitable for use as a thread name.
fn short_description(&self) -> String {
// `pthread_setname()` on *nix ignores anything beyond the first 15
// bytes. Use short descriptions to maximize the space available for
// the module name.
#[cfg(not(windows))]
fn desc(short: &str, _long: &str, name: &str) -> String {
// The short label is three bytes, and is followed by a space. That
// leaves 11 bytes for the CGU name. How we obtain those 11 bytes
// depends on the CGU name form.
//
// - Non-incremental, e.g. `regex.f10ba03eb5ec7975-cgu.0`: the part
// before the `-cgu.0` is the same for every CGU, so use the
// `cgu.0` part. The number suffix will be different for each
// CGU.
//
// - Incremental (normal), e.g. `2i52vvl2hco29us0`: use the whole
// name because each CGU will have a unique ASCII hash, and the
// first 11 bytes will be enough to identify it.
//
// - Incremental (with `-Zhuman-readable-cgu-names`), e.g.
// `regex.f10ba03eb5ec7975-re_builder.volatile`: use the whole
// name. The first 11 bytes won't be enough to uniquely identify
// it, but no obvious substring will, and this is a rarely used
// option so it doesn't matter much.
//
assert_eq!(short.len(), 3);
let name = if let Some(index) = name.find("-cgu.") {
&name[index + 1..] // +1 skips the leading '-'.
} else {
name
};
format!("{short} {name}")
}
// Windows has no thread name length limit, so use more descriptive names.
#[cfg(windows)]
fn desc(_short: &str, long: &str, name: &str) -> String {
format!("{long} {name}")
}
match self {
WorkItem::Optimize(m) => desc("opt", "optimize module", &m.name),
WorkItem::CopyPostLtoArtifacts(m) => desc("cpy", "copy LTO artifacts for", &m.name),
WorkItem::FatLto { .. } => desc("lto", "fat LTO module", "everything"),
WorkItem::ThinLto(m) => desc("lto", "thin-LTO module", m.name()),
}
}
}
impl<B: WriteBackendMethods> ThinLtoWorkItem<B> {
/// Generate a short description of this work item suitable for use as a thread name.
fn short_description(&self) -> String {
match self {
ThinLtoWorkItem::CopyPostLtoArtifacts(m) => {
desc("cpy", "copy LTO artifacts for", &m.name)
}
ThinLtoWorkItem::ThinLto(m) => desc("lto", "thin-LTO module", m.name()),
}
}
}
@ -891,7 +894,7 @@ fn execute_optimize_work_item<B: ExtraBackendMethods>(
fn execute_copy_from_cache_work_item<B: ExtraBackendMethods>(
cgcx: &CodegenContext<B>,
module: CachedModuleCodegen,
) -> WorkItemResult<B> {
) -> CompiledModule {
let _timer = cgcx
.prof
.generic_activity_with_arg("codegen_copy_artifacts_from_incr_cache", &*module.name);
@ -964,7 +967,7 @@ fn execute_copy_from_cache_work_item<B: ExtraBackendMethods>(
cgcx.create_dcx().handle().emit_fatal(errors::NoSavedObjectFile { cgu_name: &module.name })
}
WorkItemResult::Finished(CompiledModule {
CompiledModule {
links_from_incr_cache,
kind: ModuleKind::Regular,
name: module.name,
@ -973,17 +976,19 @@ fn execute_copy_from_cache_work_item<B: ExtraBackendMethods>(
bytecode,
assembly,
llvm_ir,
})
}
}
fn execute_fat_lto_work_item<B: ExtraBackendMethods>(
fn do_fat_lto<B: ExtraBackendMethods>(
cgcx: &CodegenContext<B>,
exported_symbols_for_lto: &[String],
each_linked_rlib_for_lto: &[PathBuf],
mut needs_fat_lto: Vec<FatLtoInput<B>>,
import_only_modules: Vec<(SerializedModule<B::ModuleBuffer>, WorkProduct)>,
) -> WorkItemResult<B> {
let _timer = cgcx.prof.generic_activity_with_arg("codegen_module_perform_lto", "everything");
) -> CompiledModule {
let _timer = cgcx.prof.verbose_generic_activity("LLVM_fatlto");
check_lto_allowed(&cgcx);
for (module, wp) in import_only_modules {
needs_fat_lto.push(FatLtoInput::Serialized { name: wp.cgu_name, buffer: module })
@ -995,19 +1000,155 @@ fn execute_fat_lto_work_item<B: ExtraBackendMethods>(
each_linked_rlib_for_lto,
needs_fat_lto,
);
let module = B::codegen(cgcx, module, &cgcx.module_config);
WorkItemResult::Finished(module)
B::codegen(cgcx, module, &cgcx.module_config)
}
fn do_thin_lto<'a, B: ExtraBackendMethods>(
cgcx: &'a CodegenContext<B>,
exported_symbols_for_lto: Arc<Vec<String>>,
each_linked_rlib_for_lto: Vec<PathBuf>,
needs_thin_lto: Vec<(String, <B as WriteBackendMethods>::ThinBuffer)>,
lto_import_only_modules: Vec<(
SerializedModule<<B as WriteBackendMethods>::ModuleBuffer>,
WorkProduct,
)>,
) -> Vec<CompiledModule> {
let _timer = cgcx.prof.verbose_generic_activity("LLVM_thinlto");
check_lto_allowed(&cgcx);
let (coordinator_send, coordinator_receive) = channel();
// First up, convert our jobserver into a helper thread so we can use normal
// mpsc channels to manage our messages and such.
// After we've requested tokens then we'll, when we can,
// get tokens on `coordinator_receive` which will
// get managed in the main loop below.
let coordinator_send2 = coordinator_send.clone();
let helper = jobserver::client()
.into_helper_thread(move |token| {
drop(coordinator_send2.send(ThinLtoMessage::Token(token)));
})
.expect("failed to spawn helper thread");
let mut work_items = vec![];
// We have LTO work to do. Perform the serial work here of
// figuring out what we're going to LTO and then push a
// bunch of work items onto our queue to do LTO. This all
// happens on the coordinator thread but it's very quick so
// we don't worry about tokens.
for (work, cost) in generate_thin_lto_work(
cgcx,
&exported_symbols_for_lto,
&each_linked_rlib_for_lto,
needs_thin_lto,
lto_import_only_modules,
) {
let insertion_index =
work_items.binary_search_by_key(&cost, |&(_, cost)| cost).unwrap_or_else(|e| e);
work_items.insert(insertion_index, (work, cost));
if cgcx.parallel {
helper.request_token();
}
}
let mut codegen_aborted = None;
// These are the Jobserver Tokens we currently hold. Does not include
// the implicit Token the compiler process owns no matter what.
let mut tokens = vec![];
// Amount of tokens that are used (including the implicit token).
let mut used_token_count = 0;
let mut compiled_modules = vec![];
// Run the message loop while there's still anything that needs message
// processing. Note that as soon as codegen is aborted we simply want to
// wait for all existing work to finish, so many of the conditions here
// only apply if codegen hasn't been aborted as they represent pending
// work to be done.
loop {
if codegen_aborted.is_none() {
if used_token_count == 0 && work_items.is_empty() {
// All codegen work is done.
break;
}
// Spin up what work we can, only doing this while we've got available
// parallelism slots and work left to spawn.
while used_token_count < tokens.len() + 1
&& let Some((item, _)) = work_items.pop()
{
spawn_thin_lto_work(&cgcx, coordinator_send.clone(), item);
used_token_count += 1;
}
} else {
// Don't queue up any more work if codegen was aborted, we're
// just waiting for our existing children to finish.
if used_token_count == 0 {
break;
}
}
// Relinquish accidentally acquired extra tokens. Subtract 1 for the implicit token.
tokens.truncate(used_token_count.saturating_sub(1));
match coordinator_receive.recv().unwrap() {
// Save the token locally and the next turn of the loop will use
// this to spawn a new unit of work, or it may get dropped
// immediately if we have no more work to spawn.
ThinLtoMessage::Token(token) => match token {
Ok(token) => {
tokens.push(token);
}
Err(e) => {
let msg = &format!("failed to acquire jobserver token: {e}");
cgcx.diag_emitter.fatal(msg);
codegen_aborted = Some(FatalError);
}
},
ThinLtoMessage::WorkItem { result } => {
// If a thread exits successfully then we drop a token associated
// with that worker and update our `used_token_count` count.
// We may later re-acquire a token to continue running more work.
// We may also not actually drop a token here if the worker was
// running with an "ephemeral token".
used_token_count -= 1;
match result {
Ok(compiled_module) => compiled_modules.push(compiled_module),
Err(Some(WorkerFatalError)) => {
// Like `CodegenAborted`, wait for remaining work to finish.
codegen_aborted = Some(FatalError);
}
Err(None) => {
// If the thread failed that means it panicked, so
// we abort immediately.
bug!("worker thread panicked");
}
}
}
}
}
if let Some(codegen_aborted) = codegen_aborted {
codegen_aborted.raise();
}
compiled_modules
}
fn execute_thin_lto_work_item<B: ExtraBackendMethods>(
cgcx: &CodegenContext<B>,
module: lto::ThinModule<B>,
) -> WorkItemResult<B> {
) -> CompiledModule {
let _timer = cgcx.prof.generic_activity_with_arg("codegen_module_perform_lto", module.name());
let module = B::optimize_thin(cgcx, module);
let module = B::codegen(cgcx, module, &cgcx.module_config);
WorkItemResult::Finished(module)
B::codegen(cgcx, module, &cgcx.module_config)
}
/// Messages sent to the coordinator.
@ -1041,6 +1182,17 @@ pub(crate) enum Message<B: WriteBackendMethods> {
CodegenAborted,
}
/// Messages sent to the coordinator.
pub(crate) enum ThinLtoMessage {
/// A jobserver token has become available. Sent from the jobserver helper
/// thread.
Token(io::Result<Acquired>),
/// The backend has finished processing a work item for a codegen unit.
/// Sent from a backend worker thread.
WorkItem { result: Result<CompiledModule, Option<WorkerFatalError>> },
}
/// A message sent from the coordinator thread to the main thread telling it to
/// process another codegen unit.
pub struct CguMessage;
@ -1092,9 +1244,8 @@ fn start_executing_work<B: ExtraBackendMethods>(
regular_config: Arc<ModuleConfig>,
allocator_config: Arc<ModuleConfig>,
allocator_module: Option<ModuleCodegen<B::Module>>,
tx_to_llvm_workers: Sender<Message<B>>,
coordinator_send: Sender<Message<B>>,
) -> thread::JoinHandle<Result<CompiledModules, ()>> {
let coordinator_send = tx_to_llvm_workers;
let sess = tcx.sess;
let mut each_linked_rlib_for_lto = Vec::new();
@ -1153,7 +1304,6 @@ fn start_executing_work<B: ExtraBackendMethods>(
remark: sess.opts.cg.remark.clone(),
remark_dir,
incr_comp_session_dir: sess.incr_comp_session_dir_opt().map(|r| r.clone()),
expanded_args: tcx.sess.expanded_args.clone(),
diag_emitter: shared_emitter.clone(),
output_filenames: Arc::clone(tcx.output_filenames(())),
module_config: regular_config,
@ -1314,7 +1464,6 @@ fn start_executing_work<B: ExtraBackendMethods>(
let mut needs_fat_lto = Vec::new();
let mut needs_thin_lto = Vec::new();
let mut lto_import_only_modules = Vec::new();
let mut started_lto = false;
/// Possible state transitions:
/// - Ongoing -> Completed
@ -1404,63 +1553,8 @@ fn start_executing_work<B: ExtraBackendMethods>(
if running_with_any_token(main_thread_state, running_with_own_token) == 0
&& work_items.is_empty()
{
// All codegen work is done. Do we have LTO work to do?
if needs_fat_lto.is_empty()
&& needs_thin_lto.is_empty()
&& lto_import_only_modules.is_empty()
{
// Nothing more to do!
break;
}
// We have LTO work to do. Perform the serial work here of
// figuring out what we're going to LTO and then push a
// bunch of work items onto our queue to do LTO. This all
// happens on the coordinator thread but it's very quick so
// we don't worry about tokens.
assert!(!started_lto);
started_lto = true;
let needs_fat_lto = mem::take(&mut needs_fat_lto);
let needs_thin_lto = mem::take(&mut needs_thin_lto);
let import_only_modules = mem::take(&mut lto_import_only_modules);
let each_linked_rlib_file_for_lto =
mem::take(&mut each_linked_rlib_file_for_lto);
check_lto_allowed(&cgcx);
if !needs_fat_lto.is_empty() {
assert!(needs_thin_lto.is_empty());
work_items.push((
WorkItem::FatLto {
exported_symbols_for_lto: Arc::clone(&exported_symbols_for_lto),
each_linked_rlib_for_lto: each_linked_rlib_file_for_lto,
needs_fat_lto,
import_only_modules,
},
0,
));
if cgcx.parallel {
helper.request_token();
}
} else {
for (work, cost) in generate_thin_lto_work(
&cgcx,
&exported_symbols_for_lto,
&each_linked_rlib_file_for_lto,
needs_thin_lto,
import_only_modules,
) {
let insertion_index = work_items
.binary_search_by_key(&cost, |&(_, cost)| cost)
.unwrap_or_else(|e| e);
work_items.insert(insertion_index, (work, cost));
if cgcx.parallel {
helper.request_token();
}
}
}
// All codegen work is done.
break;
}
// In this branch, we know that everything has been codegened,
@ -1598,12 +1692,10 @@ fn start_executing_work<B: ExtraBackendMethods>(
compiled_modules.push(compiled_module);
}
Ok(WorkItemResult::NeedsFatLto(fat_lto_input)) => {
assert!(!started_lto);
assert!(needs_thin_lto.is_empty());
needs_fat_lto.push(fat_lto_input);
}
Ok(WorkItemResult::NeedsThinLto(name, thin_buffer)) => {
assert!(!started_lto);
assert!(needs_fat_lto.is_empty());
needs_thin_lto.push((name, thin_buffer));
}
@ -1620,7 +1712,6 @@ fn start_executing_work<B: ExtraBackendMethods>(
}
Message::AddImportOnlyModule { module_data, work_product } => {
assert!(!started_lto);
assert_eq!(codegen_state, Ongoing);
assert_eq!(main_thread_state, MainThreadState::Codegenning);
lto_import_only_modules.push((module_data, work_product));
@ -1629,12 +1720,43 @@ fn start_executing_work<B: ExtraBackendMethods>(
}
}
// Drop to print timings
drop(llvm_start_time);
if codegen_state == Aborted {
return Err(());
}
// Drop to print timings
drop(llvm_start_time);
drop(codegen_state);
drop(tokens);
drop(helper);
assert!(work_items.is_empty());
if !needs_fat_lto.is_empty() {
assert!(compiled_modules.is_empty());
assert!(needs_thin_lto.is_empty());
// This uses the implicit token
let module = do_fat_lto(
&cgcx,
&exported_symbols_for_lto,
&each_linked_rlib_file_for_lto,
needs_fat_lto,
lto_import_only_modules,
);
compiled_modules.push(module);
} else if !needs_thin_lto.is_empty() || !lto_import_only_modules.is_empty() {
assert!(compiled_modules.is_empty());
assert!(needs_fat_lto.is_empty());
compiled_modules.extend(do_thin_lto(
&cgcx,
exported_symbols_for_lto,
each_linked_rlib_file_for_lto,
needs_thin_lto,
lto_import_only_modules,
));
}
// Regardless of what order these modules completed in, report them to
// the backend in the same order every time to ensure that we're handing
@ -1725,20 +1847,9 @@ fn spawn_work<'a, B: ExtraBackendMethods>(
B::spawn_named_thread(cgcx.time_trace, work.short_description(), move || {
let result = std::panic::catch_unwind(AssertUnwindSafe(|| match work {
WorkItem::Optimize(m) => execute_optimize_work_item(&cgcx, m),
WorkItem::CopyPostLtoArtifacts(m) => execute_copy_from_cache_work_item(&cgcx, m),
WorkItem::FatLto {
exported_symbols_for_lto,
each_linked_rlib_for_lto,
needs_fat_lto,
import_only_modules,
} => execute_fat_lto_work_item(
&cgcx,
&exported_symbols_for_lto,
&each_linked_rlib_for_lto,
needs_fat_lto,
import_only_modules,
),
WorkItem::ThinLto(m) => execute_thin_lto_work_item(&cgcx, m),
WorkItem::CopyPostLtoArtifacts(m) => {
WorkItemResult::Finished(execute_copy_from_cache_work_item(&cgcx, m))
}
}));
let msg = match result {
@ -1758,6 +1869,36 @@ fn spawn_work<'a, B: ExtraBackendMethods>(
.expect("failed to spawn work thread");
}
/// Spawns a dedicated worker thread that executes a single thin-LTO work item
/// and reports the outcome back to the coordinator over `coordinator_send`.
///
/// Panics in the worker are caught and converted into a `ThinLtoMessage`
/// rather than unwinding through the thread boundary.
fn spawn_thin_lto_work<'a, B: ExtraBackendMethods>(
    cgcx: &'a CodegenContext<B>,
    coordinator_send: Sender<ThinLtoMessage>,
    work: ThinLtoWorkItem<B>,
) {
    // The spawned thread needs its own handle to the codegen context.
    let cgcx = cgcx.clone();
    B::spawn_named_thread(cgcx.time_trace, work.short_description(), move || {
        // Catch panics so a crashing worker is surfaced to the coordinator
        // as a message instead of unwinding out of the worker thread.
        let result = std::panic::catch_unwind(AssertUnwindSafe(|| match work {
            ThinLtoWorkItem::CopyPostLtoArtifacts(m) => execute_copy_from_cache_work_item(&cgcx, m),
            ThinLtoWorkItem::ThinLto(m) => execute_thin_lto_work_item(&cgcx, m),
        }));
        let msg = match result {
            Ok(result) => ThinLtoMessage::WorkItem { result: Ok(result) },
            // We ignore any `FatalError` coming out of `execute_work_item`, as a
            // diagnostic was already sent off to the main thread - just surface
            // that there was an error in this worker.
            Err(err) if err.is::<FatalErrorMarker>() => {
                ThinLtoMessage::WorkItem { result: Err(Some(WorkerFatalError)) }
            }
            Err(_) => ThinLtoMessage::WorkItem { result: Err(None) },
        };
        // A failed send means the coordinator is already gone; there is
        // nobody left to act on the message, so the error is discarded.
        drop(coordinator_send.send(msg));
    })
    .expect("failed to spawn work thread");
}
enum SharedEmitterMessage {
Diagnostic(Diagnostic),
InlineAsmError(SpanData, String, Level, Option<(String, Vec<InnerSpan>)>),

View file

@ -228,6 +228,7 @@ pub(crate) fn unsize_ptr<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>>(
) -> (Bx::Value, Bx::Value) {
debug!("unsize_ptr: {:?} => {:?}", src_ty, dst_ty);
match (src_ty.kind(), dst_ty.kind()) {
(&ty::Pat(a, _), &ty::Pat(b, _)) => unsize_ptr(bx, src, a, b, old_info),
(&ty::Ref(_, a, _), &ty::Ref(_, b, _) | &ty::RawPtr(b, _))
| (&ty::RawPtr(a, _), &ty::RawPtr(b, _)) => {
assert_eq!(bx.cx().type_is_sized(a), old_info.is_none());

View file

@ -120,8 +120,15 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
| sym::atomic_singlethreadfence
| sym::caller_location => {}
_ => {
span_bug!(span, "nullary intrinsic {name} must either be in a const block or explicitly opted out because it is inherently a runtime intrinsic
");
span_bug!(
span,
"Nullary intrinsic {name} must be called in a const block. \
If you are seeing this message from code outside the standard library, the \
unstable implementation details of the relevant intrinsic may have changed. \
Consider using stable APIs instead. \
If you are adding a new nullary intrinsic that is inherently a runtime \
intrinsic, update this check."
);
}
}
}

View file

@ -611,16 +611,6 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
let ty = self.monomorphize(ty);
let layout = bx.cx().layout_of(ty);
let val = match null_op {
mir::NullOp::SizeOf => {
assert!(bx.cx().type_is_sized(ty));
let val = layout.size.bytes();
bx.cx().const_usize(val)
}
mir::NullOp::AlignOf => {
assert!(bx.cx().type_is_sized(ty));
let val = layout.align.bytes();
bx.cx().const_usize(val)
}
mir::NullOp::OffsetOf(fields) => {
let val = bx
.tcx()
@ -724,15 +714,6 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
}
}
}
mir::Rvalue::ShallowInitBox(ref operand, content_ty) => {
let operand = self.codegen_operand(bx, operand);
let val = operand.immediate();
let content_ty = self.monomorphize(content_ty);
let box_layout = bx.cx().layout_of(Ty::new_box(bx.tcx(), content_ty));
OperandRef { val: OperandValue::Immediate(val), layout: box_layout }
}
mir::Rvalue::WrapUnsafeBinder(ref operand, binder_ty) => {
let operand = self.codegen_operand(bx, operand);
let binder_ty = self.monomorphize(binder_ty);
@ -740,6 +721,7 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
OperandRef { val: operand.val, layout }
}
mir::Rvalue::CopyForDeref(_) => bug!("`CopyForDeref` in codegen"),
mir::Rvalue::ShallowInitBox(..) => bug!("`ShallowInitBox` in codegen"),
}
}

View file

@ -646,11 +646,7 @@ impl<'tcx> Visitor<'tcx> for Checker<'_, 'tcx> {
Rvalue::Cast(_, _, _) => {}
Rvalue::NullaryOp(
NullOp::SizeOf
| NullOp::AlignOf
| NullOp::OffsetOf(_)
| NullOp::UbChecks
| NullOp::ContractChecks,
NullOp::OffsetOf(_) | NullOp::UbChecks | NullOp::ContractChecks,
_,
) => {}
Rvalue::ShallowInitBox(_, _) => {}

View file

@ -414,8 +414,6 @@ fn report_eval_error<'tcx>(
let (error, backtrace) = error.into_parts();
backtrace.print_backtrace();
let instance = with_no_trimmed_paths!(cid.instance.to_string());
super::report(
ecx,
error,
@ -430,7 +428,7 @@ fn report_eval_error<'tcx>(
diag.subdiagnostic(frame);
}
// Add after the frame rendering above, as it adds its own `instance` args.
diag.arg("instance", instance);
diag.arg("instance", with_no_trimmed_paths!(cid.instance.to_string()));
diag.arg("num_frames", num_frames);
},
)

View file

@ -127,7 +127,7 @@ impl<'tcx, M: Machine<'tcx>> InterpCx<'tcx, M> {
} else if all_fields_1zst(def.variant(var1))? {
def.variant(var0)
} else {
// No varant is all-1-ZST, so no NPO.
// No variant is all-1-ZST, so no NPO.
return interp_ok(layout);
};
// The "relevant" variant must have exactly one field, and its type is the "inner" type.

View file

@ -466,6 +466,12 @@ impl<'tcx, M: Machine<'tcx>> InterpCx<'tcx, M> {
) -> InterpResult<'tcx> {
trace!("Unsizing {:?} of type {} into {}", *src, src.layout.ty, cast_ty.ty);
match (src.layout.ty.kind(), cast_ty.ty.kind()) {
(&ty::Pat(_, s_pat), &ty::Pat(cast_ty, c_pat)) if s_pat == c_pat => {
let src = self.project_field(src, FieldIdx::ZERO)?;
let dest = self.project_field(dest, FieldIdx::ZERO)?;
let cast_ty = self.layout_of(cast_ty)?;
self.unsize_into(&src, cast_ty, &dest)
}
(&ty::Ref(_, s, _), &ty::Ref(_, c, _) | &ty::RawPtr(c, _))
| (&ty::RawPtr(s, _), &ty::RawPtr(c, _)) => self.unsize_into_ptr(src, dest, s, c),
(&ty::Adt(def_a, _), &ty::Adt(def_b, _)) => {

View file

@ -156,6 +156,24 @@ impl<'tcx, M: Machine<'tcx>> InterpCx<'tcx, M> {
let b_ty = self.read_type_id(&args[1])?;
self.write_scalar(Scalar::from_bool(a_ty == b_ty), dest)?;
}
sym::size_of => {
let tp_ty = instance.args.type_at(0);
let layout = self.layout_of(tp_ty)?;
if !layout.is_sized() {
span_bug!(self.cur_span(), "unsized type for `size_of`");
}
let val = layout.size.bytes();
self.write_scalar(Scalar::from_target_usize(val, self), dest)?;
}
sym::align_of => {
let tp_ty = instance.args.type_at(0);
let layout = self.layout_of(tp_ty)?;
if !layout.is_sized() {
span_bug!(self.cur_span(), "unsized type for `align_of`");
}
let val = layout.align.bytes();
self.write_scalar(Scalar::from_target_usize(val, self), dest)?;
}
sym::variant_count => {
let tp_ty = instance.args.type_at(0);
let ty = match tp_ty.kind() {

View file

@ -517,20 +517,6 @@ impl<'tcx, M: Machine<'tcx>> InterpCx<'tcx, M> {
let usize_layout = || self.layout_of(self.tcx.types.usize).unwrap();
interp_ok(match null_op {
SizeOf => {
if !layout.is_sized() {
span_bug!(self.cur_span(), "unsized type for `NullaryOp::SizeOf`");
}
let val = layout.size.bytes();
ImmTy::from_uint(val, usize_layout())
}
AlignOf => {
if !layout.is_sized() {
span_bug!(self.cur_span(), "unsized type for `NullaryOp::AlignOf`");
}
let val = layout.align.bytes();
ImmTy::from_uint(val, usize_layout())
}
OffsetOf(fields) => {
let val =
self.tcx.offset_of_subfield(self.typing_env, layout, fields.iter()).bytes();

View file

@ -2,7 +2,7 @@
//!
//! OpTy and PlaceTy generally work by "let's see if we are actually an MPlaceTy, and do something custom if not".
//! For PlaceTy, the custom thing is basically always to call `force_allocation` and then use the MPlaceTy logic anyway.
//! For OpTy, the custom thing on field pojections has to be pretty clever (since `Operand::Immediate` can have fields),
//! For OpTy, the custom thing on field projections has to be pretty clever (since `Operand::Immediate` can have fields),
//! but for array/slice operations it only has to worry about `Operand::Uninit`. That makes the value part trivial,
//! but we still need to do bounds checking and adjust the layout. To not duplicate that with MPlaceTy, we actually
//! implement the logic on OpTy, and MPlaceTy calls that.

View file

@ -74,7 +74,7 @@ impl EnteredTraceSpan for tracing::span::EnteredSpan {
}
}
/// Shortand for calling [crate::interpret::Machine::enter_trace_span] on a [tracing::info_span!].
/// Shorthand for calling [crate::interpret::Machine::enter_trace_span] on a [tracing::info_span!].
/// This is supposed to be compiled out when [crate::interpret::Machine::enter_trace_span] has the
/// default implementation (i.e. when it does not actually enter the span but instead returns `()`).
/// This macro takes a type implementing the [crate::interpret::Machine] trait as its first argument

View file

@ -1261,9 +1261,10 @@ impl<'rt, 'tcx, M: Machine<'tcx>> ValueVisitor<'tcx, M> for ValidityVisitor<'rt,
// When you extend this match, make sure to also add tests to
// tests/ui/type/pattern_types/validity.rs((
match **pat {
// Range patterns are precisely reflected into `valid_range` and thus
// Range and non-null patterns are precisely reflected into `valid_range` and thus
// handled fully by `visit_scalar` (called below).
ty::PatternKind::Range { .. } => {},
ty::PatternKind::NotNull => {},
// FIXME(pattern_types): check that the value is covered by one of the variants.
// For now, we rely on layout computation setting the scalar's `valid_range` to

View file

@ -269,7 +269,6 @@ pub fn run_compiler(at_args: &[String], callbacks: &mut (dyn Callbacks + Send))
make_codegen_backend: None,
registry: diagnostics_registry(),
using_internal_features: &USING_INTERNAL_FEATURES,
expanded_args: args,
};
callbacks.config(&mut config);
@ -521,11 +520,11 @@ fn show_md_content_with_pager(content: &str, color: ColorConfig) {
};
// Try to prettify the raw markdown text. The result can be used by the pager or on stdout.
let pretty_data = {
let mut pretty_data = {
let mdstream = markdown::MdStream::parse_str(content);
let bufwtr = markdown::create_stdout_bufwtr();
let mut mdbuf = bufwtr.buffer();
if mdstream.write_termcolor_buf(&mut mdbuf).is_ok() { Some((bufwtr, mdbuf)) } else { None }
let mut mdbuf = Vec::new();
if mdstream.write_anstream_buf(&mut mdbuf).is_ok() { Some((bufwtr, mdbuf)) } else { None }
};
// Try to print via the pager, pretty output if possible.
@ -546,8 +545,8 @@ fn show_md_content_with_pager(content: &str, color: ColorConfig) {
}
// The pager failed. Try to print pretty output to stdout.
if let Some((bufwtr, mdbuf)) = &pretty_data
&& bufwtr.print(mdbuf).is_ok()
if let Some((bufwtr, mdbuf)) = &mut pretty_data
&& bufwtr.write_all(&mdbuf).is_ok()
{
return;
}

View file

@ -5,7 +5,9 @@ edition = "2024"
[dependencies]
# tidy-alphabetical-start
annotate-snippets = "0.11"
annotate-snippets = "0.12.7"
anstream = "0.6.20"
anstyle = "1.0.13"
derive_setters = "0.1.6"
rustc_abi = { path = "../rustc_abi" }
rustc_ast = { path = "../rustc_ast" }
@ -22,7 +24,6 @@ rustc_serialize = { path = "../rustc_serialize" }
rustc_span = { path = "../rustc_span" }
serde = { version = "1.0.125", features = ["derive"] }
serde_json = "1.0.59"
termcolor = "1.2.0"
termize = "0.2"
tracing = "0.1"
# tidy-alphabetical-end

View file

@ -5,32 +5,70 @@
//!
//! [annotate_snippets]: https://docs.rs/crate/annotate-snippets/
use std::borrow::Cow;
use std::error::Report;
use std::fmt::Debug;
use std::io;
use std::io::Write;
use std::sync::Arc;
use annotate_snippets::{Renderer, Snippet};
use rustc_error_messages::FluentArgs;
use rustc_span::SourceFile;
use annotate_snippets::renderer::DEFAULT_TERM_WIDTH;
use annotate_snippets::{AnnotationKind, Group, Origin, Padding, Patch, Renderer, Snippet};
use anstream::ColorChoice;
use derive_setters::Setters;
use rustc_data_structures::sync::IntoDynSyncSend;
use rustc_error_messages::{FluentArgs, SpanLabel};
use rustc_lint_defs::pluralize;
use rustc_span::source_map::SourceMap;
use rustc_span::{BytePos, FileName, Pos, SourceFile, Span};
use tracing::debug;
use crate::emitter::FileWithAnnotatedLines;
use crate::emitter::{
ConfusionType, Destination, MAX_SUGGESTIONS, OutputTheme, detect_confusion_type, is_different,
normalize_whitespace, should_show_source_code,
};
use crate::registry::Registry;
use crate::snippet::Line;
use crate::translation::{Translator, to_fluent_args};
use crate::{
CodeSuggestion, DiagInner, DiagMessage, Emitter, ErrCode, Level, MultiSpan, Style, Subdiag,
SuggestionStyle, TerminalUrl,
};
/// Generates diagnostics using annotate-snippet
#[derive(Setters)]
pub struct AnnotateSnippetEmitter {
source_map: Option<Arc<SourceMap>>,
#[setters(skip)]
dst: IntoDynSyncSend<Destination>,
sm: Option<Arc<SourceMap>>,
#[setters(skip)]
translator: Translator,
/// If true, hides the longer explanation text
short_message: bool,
/// If true, will normalize line numbers with `LL` to prevent noise in UI test diffs.
ui_testing: bool,
ignored_directories_in_source_blocks: Vec<String>,
diagnostic_width: Option<usize>,
macro_backtrace: bool,
track_diagnostics: bool,
terminal_url: TerminalUrl,
theme: OutputTheme,
}
impl Debug for AnnotateSnippetEmitter {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
f.debug_struct("AnnotateSnippetEmitter")
.field("short_message", &self.short_message)
.field("ui_testing", &self.ui_testing)
.field(
"ignored_directories_in_source_blocks",
&self.ignored_directories_in_source_blocks,
)
.field("diagnostic_width", &self.diagnostic_width)
.field("macro_backtrace", &self.macro_backtrace)
.field("track_diagnostics", &self.track_diagnostics)
.field("terminal_url", &self.terminal_url)
.field("theme", &self.theme)
.finish()
}
}
impl Emitter for AnnotateSnippetEmitter {
@ -38,6 +76,10 @@ impl Emitter for AnnotateSnippetEmitter {
fn emit_diagnostic(&mut self, mut diag: DiagInner, _registry: &Registry) {
let fluent_args = to_fluent_args(diag.args.iter());
if self.track_diagnostics && diag.span.has_primary_spans() && !diag.span.is_dummy() {
diag.children.insert(0, diag.emitted_at_sub_diag());
}
let mut suggestions = diag.suggestions.unwrap_tag();
self.primary_span_formatted(&mut diag.span, &mut suggestions, &fluent_args);
@ -55,12 +97,12 @@ impl Emitter for AnnotateSnippetEmitter {
&diag.code,
&diag.span,
&diag.children,
&suggestions,
suggestions,
);
}
fn source_map(&self) -> Option<&SourceMap> {
self.source_map.as_deref()
self.sm.as_deref()
}
fn should_show_explain(&self) -> bool {
@ -70,128 +112,648 @@ impl Emitter for AnnotateSnippetEmitter {
fn translator(&self) -> &Translator {
&self.translator
}
fn supports_color(&self) -> bool {
false
}
}
/// Provides the source string for the given `line` of `file`.
///
/// `line.line_index` is adjusted by `- 1` for the 0-based `get_line` lookup
/// (i.e. it is treated as 1-based here). Returns an empty string when the
/// line cannot be retrieved (e.g. the file's source is unavailable).
fn source_string(file: Arc<SourceFile>, line: &Line) -> String {
    file.get_line(line.line_index - 1).map(|a| a.to_string()).unwrap_or_default()
}
/// Maps [`crate::Level`] to [`annotate_snippets::Level`]
fn annotation_level_for_level(level: Level) -> annotate_snippets::Level {
fn annotation_level_for_level(level: Level) -> annotate_snippets::level::Level<'static> {
match level {
Level::Bug | Level::Fatal | Level::Error | Level::DelayedBug => {
annotate_snippets::Level::Error
Level::Bug | Level::DelayedBug => {
annotate_snippets::Level::ERROR.with_name("error: internal compiler error")
}
Level::ForceWarning | Level::Warning => annotate_snippets::Level::Warning,
Level::Note | Level::OnceNote => annotate_snippets::Level::Note,
Level::Help | Level::OnceHelp => annotate_snippets::Level::Help,
// FIXME(#59346): Not sure how to map this level
Level::FailureNote => annotate_snippets::Level::Error,
Level::Fatal | Level::Error => annotate_snippets::level::ERROR,
Level::ForceWarning | Level::Warning => annotate_snippets::Level::WARNING,
Level::Note | Level::OnceNote => annotate_snippets::Level::NOTE,
Level::Help | Level::OnceHelp => annotate_snippets::Level::HELP,
Level::FailureNote => annotate_snippets::Level::NOTE.no_name(),
Level::Allow => panic!("Should not call with Allow"),
Level::Expect => panic!("Should not call with Expect"),
}
}
impl AnnotateSnippetEmitter {
pub fn new(
source_map: Option<Arc<SourceMap>>,
translator: Translator,
short_message: bool,
macro_backtrace: bool,
) -> Self {
Self { source_map, translator, short_message, ui_testing: false, macro_backtrace }
}
/// Allows to modify `Self` to enable or disable the `ui_testing` flag.
///
/// If this is set to true, line numbers will be normalized as `LL` in the output.
pub fn ui_testing(mut self, ui_testing: bool) -> Self {
self.ui_testing = ui_testing;
self
pub fn new(dst: Destination, translator: Translator) -> Self {
Self {
dst: IntoDynSyncSend(dst),
sm: None,
translator,
short_message: false,
ui_testing: false,
ignored_directories_in_source_blocks: Vec::new(),
diagnostic_width: None,
macro_backtrace: false,
track_diagnostics: false,
terminal_url: TerminalUrl::No,
theme: OutputTheme::Ascii,
}
}
fn emit_messages_default(
&mut self,
level: &Level,
messages: &[(DiagMessage, Style)],
msgs: &[(DiagMessage, Style)],
args: &FluentArgs<'_>,
code: &Option<ErrCode>,
msp: &MultiSpan,
_children: &[Subdiag],
_suggestions: &[CodeSuggestion],
children: &[Subdiag],
suggestions: Vec<CodeSuggestion>,
) {
let message = self.translator.translate_messages(messages, args);
if let Some(source_map) = &self.source_map {
// Make sure our primary file comes first
let primary_lo = if let Some(primary_span) = msp.primary_span().as_ref() {
if primary_span.is_dummy() {
// FIXME(#59346): Not sure when this is the case and what
// should be done if it happens
return;
} else {
source_map.lookup_char_pos(primary_span.lo())
}
} else {
// FIXME(#59346): Not sure when this is the case and what
// should be done if it happens
return;
};
let mut annotated_files = FileWithAnnotatedLines::collect_annotations(self, args, msp);
if let Ok(pos) =
annotated_files.binary_search_by(|x| x.file.name.cmp(&primary_lo.file.name))
{
annotated_files.swap(0, pos);
}
// owned: file name, line source, line index, annotations
type Owned = (String, String, usize, Vec<crate::snippet::Annotation>);
let annotated_files: Vec<Owned> = annotated_files
.into_iter()
.flat_map(|annotated_file| {
let file = annotated_file.file;
annotated_file
.lines
.into_iter()
.map(|line| {
// Ensure the source file is present before we try
// to load a string from it.
// FIXME(#115869): support -Z ignore-directory-in-diagnostics-source-blocks
source_map.ensure_source_file_source_present(&file);
(
format!("{}", source_map.filename_for_diagnostics(&file.name)),
source_string(Arc::clone(&file), &line),
line.line_index,
line.annotations,
)
})
.collect::<Vec<Owned>>()
})
.collect();
let code = code.map(|code| code.to_string());
let renderer = self.renderer();
let annotation_level = annotation_level_for_level(*level);
let snippets =
annotated_files.iter().map(|(file_name, source, line_index, annotations)| {
Snippet::source(source)
.line_start(*line_index)
.origin(file_name)
// FIXME(#59346): Not really sure when `fold` should be true or false
.fold(false)
.annotations(annotations.iter().map(|annotation| {
annotation_level_for_level(*level)
.span(annotation.start_col.display..annotation.end_col.display)
.label(annotation.label.as_deref().unwrap_or_default())
}))
});
let mut message = annotation_level_for_level(*level).title(&message).snippets(snippets);
if let Some(code) = code.as_deref() {
message = message.id(code)
// If at least one portion of the message is styled, we need to
// "pre-style" the message
let mut title = if msgs.iter().any(|(_, style)| style != &crate::Style::NoStyle) {
annotation_level
.clone()
.secondary_title(Cow::Owned(self.pre_style_msgs(msgs, *level, args)))
} else {
annotation_level.clone().primary_title(self.translator.translate_messages(msgs, args))
};
if let Some(c) = code {
title = title.id(c.to_string());
if let TerminalUrl::Yes = self.terminal_url {
title = title.id_url(format!("https://doc.rust-lang.org/error_codes/{c}.html"));
}
// FIXME(#59346): Figure out if we can _always_ print to stderr or not.
// `emitter.rs` has the `Destination` enum that lists various possible output
// destinations.
let renderer = Renderer::plain().anonymized_line_numbers(self.ui_testing);
eprintln!("{}", renderer.render(message))
}
// FIXME(#59346): Is it ok to return None if there's no source_map?
let mut report = vec![];
let mut group = Group::with_title(title);
// If we don't have span information, emit and exit
let Some(sm) = self.sm.as_ref() else {
group = group.elements(children.iter().map(|c| {
let msg = self.translator.translate_messages(&c.messages, args).to_string();
let level = annotation_level_for_level(c.level);
level.message(msg)
}));
report.push(group);
if let Err(e) = emit_to_destination(
renderer.render(&report),
level,
&mut self.dst,
self.short_message,
) {
panic!("failed to emit error: {e}");
}
return;
};
let mut file_ann = collect_annotations(args, msp, sm, &self.translator);
// Make sure our primary file comes first
let primary_span = msp.primary_span().unwrap_or_default();
if !primary_span.is_dummy() {
let primary_lo = sm.lookup_char_pos(primary_span.lo());
if let Ok(pos) = file_ann.binary_search_by(|(f, _)| f.name.cmp(&primary_lo.file.name)) {
file_ann.swap(0, pos);
}
for (file_idx, (file, annotations)) in file_ann.into_iter().enumerate() {
if should_show_source_code(&self.ignored_directories_in_source_blocks, sm, &file) {
if let Some(snippet) = self.annotated_snippet(annotations, &file.name, sm) {
group = group.element(snippet);
}
// we can't annotate anything if the source is unavailable.
} else if !self.short_message {
// We'll just print unannotated messages
group = self.unannotated_messages(
annotations,
&file.name,
sm,
file_idx,
&mut report,
group,
&annotation_level,
);
// If this is the last annotation for a file, and
// this is the last file, and the first child is a
// "secondary" message, we need to add padding
// ╭▸ /rustc/FAKE_PREFIX/library/core/src/clone.rs:236:13
// │
// ├ note: the late bound lifetime parameter
// │ (<- It adds *this*)
// ╰ warning: this was previously accepted
if let Some(c) = children.first()
&& (!c.span.has_primary_spans() && !c.span.has_span_labels())
{
group = group.element(Padding);
}
}
}
}
for c in children {
let level = annotation_level_for_level(c.level);
// If at least one portion of the message is styled, we need to
// "pre-style" the message
let msg = if c.messages.iter().any(|(_, style)| style != &crate::Style::NoStyle) {
Cow::Owned(self.pre_style_msgs(&c.messages, c.level, args))
} else {
self.translator.translate_messages(&c.messages, args)
};
// This is a secondary message with no span info
if !c.span.has_primary_spans() && !c.span.has_span_labels() {
group = group.element(level.clone().message(msg));
continue;
}
report.push(std::mem::replace(
&mut group,
Group::with_title(level.clone().secondary_title(msg)),
));
let mut file_ann = collect_annotations(args, &c.span, sm, &self.translator);
let primary_span = c.span.primary_span().unwrap_or_default();
if !primary_span.is_dummy() {
let primary_lo = sm.lookup_char_pos(primary_span.lo());
if let Ok(pos) =
file_ann.binary_search_by(|(f, _)| f.name.cmp(&primary_lo.file.name))
{
file_ann.swap(0, pos);
}
}
for (file_idx, (file, annotations)) in file_ann.into_iter().enumerate() {
if should_show_source_code(&self.ignored_directories_in_source_blocks, sm, &file) {
if let Some(snippet) = self.annotated_snippet(annotations, &file.name, sm) {
group = group.element(snippet);
}
// we can't annotate anything if the source is unavailable.
} else if !self.short_message {
// We'll just print unannotated messages
group = self.unannotated_messages(
annotations,
&file.name,
sm,
file_idx,
&mut report,
group,
&level,
);
}
}
}
let suggestions_expected = suggestions
.iter()
.filter(|s| {
matches!(
s.style,
SuggestionStyle::HideCodeInline
| SuggestionStyle::ShowCode
| SuggestionStyle::ShowAlways
)
})
.count();
for suggestion in suggestions {
match suggestion.style {
SuggestionStyle::CompletelyHidden => {
// do not display this suggestion, it is meant only for tools
}
SuggestionStyle::HideCodeAlways => {
let msg = self
.translator
.translate_messages(&[(suggestion.msg.to_owned(), Style::HeaderMsg)], args);
group = group.element(annotate_snippets::Level::HELP.message(msg));
}
SuggestionStyle::HideCodeInline
| SuggestionStyle::ShowCode
| SuggestionStyle::ShowAlways => {
let substitutions = suggestion
.substitutions
.into_iter()
.filter_map(|mut subst| {
// Suggestions coming from macros can have malformed spans. This is a heavy
// handed approach to avoid ICEs by ignoring the suggestion outright.
let invalid =
subst.parts.iter().any(|item| sm.is_valid_span(item.span).is_err());
if invalid {
debug!("suggestion contains an invalid span: {:?}", subst);
}
// Assumption: all spans are in the same file, and all spans
// are disjoint. Sort in ascending order.
subst.parts.sort_by_key(|part| part.span.lo());
// Verify the assumption that all spans are disjoint
assert_eq!(
subst.parts.array_windows().find(|[a, b]| a.span.overlaps(b.span)),
None,
"all spans must be disjoint",
);
// Account for cases where we are suggesting the same code that's already
// there. This shouldn't happen often, but in some cases for multipart
// suggestions it's much easier to handle it here than in the origin.
subst.parts.retain(|p| is_different(sm, &p.snippet, p.span));
let item_span = subst.parts.first()?;
let file = sm.lookup_source_file(item_span.span.lo());
if !invalid
&& should_show_source_code(
&self.ignored_directories_in_source_blocks,
sm,
&file,
)
{
Some(subst)
} else {
None
}
})
.collect::<Vec<_>>();
if substitutions.is_empty() {
continue;
}
let mut msg = self
.translator
.translate_message(&suggestion.msg, args)
.map_err(Report::new)
.unwrap()
.to_string();
let lo = substitutions
.iter()
.find_map(|sub| sub.parts.first().map(|p| p.span.lo()))
.unwrap();
let file = sm.lookup_source_file(lo);
let filename =
sm.filename_for_diagnostics(&file.name).to_string_lossy().to_string();
let other_suggestions = substitutions.len().saturating_sub(MAX_SUGGESTIONS);
let subs = substitutions
.into_iter()
.take(MAX_SUGGESTIONS)
.filter_map(|sub| {
let mut confusion_type = ConfusionType::None;
for part in &sub.parts {
let part_confusion =
detect_confusion_type(sm, &part.snippet, part.span);
confusion_type = confusion_type.combine(part_confusion);
}
if !matches!(confusion_type, ConfusionType::None) {
msg.push_str(confusion_type.label_text());
}
let mut parts = sub
.parts
.into_iter()
.filter_map(|p| {
if is_different(sm, &p.snippet, p.span) {
Some((p.span, p.snippet))
} else {
None
}
})
.collect::<Vec<_>>();
if parts.is_empty() {
None
} else {
let spans = parts.iter().map(|(span, _)| *span).collect::<Vec<_>>();
// The suggestion adds an entire line of code, ending on a newline, so we'll also
// print the *following* line, to provide context of what we're advising people to
// do. Otherwise you would only see contextless code that can be confused for
// already existing code, despite the colors and UI elements.
// We special case `#[derive(_)]\n` and other attribute suggestions, because those
// are the ones where context is most useful.
let fold = if let [(p, snippet)] = &mut parts[..]
&& snippet.trim().starts_with("#[")
// This allows for spaces to come between the attribute and the newline
&& snippet.trim().ends_with("]")
&& snippet.ends_with('\n')
&& p.hi() == p.lo()
&& let Ok(b) = sm.span_to_prev_source(*p)
&& let b = b.rsplit_once('\n').unwrap_or_else(|| ("", &b)).1
&& b.trim().is_empty()
{
// FIXME: This is a hack:
// The span for attribute suggestions often times points to the
// beginning of an item, disregarding leading whitespace. This
// causes the attribute to be properly indented, but leaves original
// item without indentation when rendered.
// This fixes that problem by adjusting the span to point to the start
// of the whitespace, and adds the whitespace to the replacement.
//
// Source: " extern "custom" fn negate(a: i64) -> i64 {\n"
// Span: 4..4
// Replacement: "#[unsafe(naked)]\n"
//
// Before:
// help: convert this to an `#[unsafe(naked)]` function
// |
// LL + #[unsafe(naked)]
// LL | extern "custom" fn negate(a: i64) -> i64 {
// |
//
// After
// help: convert this to an `#[unsafe(naked)]` function
// |
// LL + #[unsafe(naked)]
// LL | extern "custom" fn negate(a: i64) -> i64 {
// |
if !b.is_empty() && !snippet.ends_with(b) {
snippet.insert_str(0, b);
let offset = BytePos(b.len() as u32);
*p = p.with_lo(p.lo() - offset).shrink_to_lo();
}
false
} else {
true
};
if let Some((bounding_span, source, line_offset)) =
shrink_file(spans.as_slice(), &file.name, sm)
{
let adj_lo = bounding_span.lo().to_usize();
Some(
Snippet::source(source)
.line_start(line_offset)
.path(filename.clone())
.fold(fold)
.patches(parts.into_iter().map(
|(span, replacement)| {
let lo =
span.lo().to_usize().saturating_sub(adj_lo);
let hi =
span.hi().to_usize().saturating_sub(adj_lo);
Patch::new(lo..hi, replacement)
},
)),
)
} else {
None
}
}
})
.collect::<Vec<_>>();
if !subs.is_empty() {
report.push(std::mem::replace(
&mut group,
Group::with_title(annotate_snippets::Level::HELP.secondary_title(msg)),
));
group = group.elements(subs);
if other_suggestions > 0 {
group = group.element(
annotate_snippets::Level::NOTE.no_name().message(format!(
"and {} other candidate{}",
other_suggestions,
pluralize!(other_suggestions)
)),
);
}
}
}
}
}
// FIXME: This hack should be removed once annotate_snippets is the
// default emitter.
if suggestions_expected > 0 && report.is_empty() {
group = group.element(Padding);
}
if !group.is_empty() {
report.push(group);
}
if let Err(e) =
emit_to_destination(renderer.render(&report), level, &mut self.dst, self.short_message)
{
panic!("failed to emit error: {e}");
}
}
/// Builds the `annotate-snippets` renderer configured from this emitter's
/// settings: terminal width, color choice, decoration theme, and the
/// short-message / anonymized-line-number flags used by UI tests.
fn renderer(&self) -> Renderer {
    // Resolve the target width: an explicit override wins; UI tests and
    // Miri pin a fixed width for reproducible output; otherwise probe the
    // terminal, falling back to the default when that fails.
    let width = match self.diagnostic_width {
        Some(w) => w,
        None if self.ui_testing || cfg!(miri) => DEFAULT_TERM_WIDTH,
        None => termize::dimensions().map_or(DEFAULT_TERM_WIDTH, |(w, _)| w),
    };
    let decor_style = if let OutputTheme::Unicode = self.theme {
        annotate_snippets::renderer::DecorStyle::Unicode
    } else {
        annotate_snippets::renderer::DecorStyle::Ascii
    };
    // Only `Never` disables styling; every other choice renders styled.
    let base = if matches!(self.dst.current_choice(), ColorChoice::Never) {
        Renderer::plain()
    } else {
        Renderer::styled()
    };
    base.term_width(width)
        .anonymized_line_numbers(self.ui_testing)
        .decor_style(decor_style)
        .short_message(self.short_message)
}
/// Translates each `(message, style)` pair and wraps the translated text in
/// the ANSI escape sequence for its style, concatenating the results into a
/// single pre-styled string. Empty translations are dropped so they do not
/// emit bare style/reset escape pairs.
fn pre_style_msgs(
    &self,
    msgs: &[(DiagMessage, Style)],
    level: Level,
    args: &FluentArgs<'_>,
) -> String {
    let mut out = String::new();
    for (m, style) in msgs {
        // A translation failure here is a compiler bug; fail loudly.
        let text = self.translator.translate_message(m, args).map_err(Report::new).unwrap();
        if !text.is_empty() {
            let style = style.anstyle(level);
            out.push_str(&format!("{style}{text}{style:#}"));
        }
    }
    out
}
/// Converts a file's collected annotations into an `annotate-snippets`
/// `Snippet`, shrinking the displayed source down to the lines the spans
/// actually cover. Returns `None` when there is nothing to show (e.g. no
/// spans, so no bounding region could be computed).
fn annotated_snippet<'a>(
    &self,
    annotations: Vec<Annotation>,
    file_name: &FileName,
    sm: &Arc<SourceMap>,
) -> Option<Snippet<'a, annotate_snippets::Annotation<'a>>> {
    let spans: Vec<_> = annotations.iter().map(|a| a.span).collect();
    let (bounding_span, source, offset_line) = shrink_file(&spans, file_name, sm)?;
    // Annotation offsets are expressed relative to the start of the
    // shrunken excerpt, not to the whole file.
    let adj_lo = bounding_span.lo().to_usize();
    let filename = sm.filename_for_diagnostics(file_name).to_string_lossy().to_string();
    let anns = annotations.into_iter().map(move |a| {
        let lo = a.span.lo().to_usize().saturating_sub(adj_lo);
        let hi = a.span.hi().to_usize().saturating_sub(adj_lo);
        let ann = a.kind.span(lo..hi);
        match a.label {
            Some(label) => ann.label(label),
            None => ann,
        }
    });
    Some(Snippet::source(source).line_start(offset_line).path(filename).annotations(anns))
}
/// Renders annotations that have no source snippet to show, emitting only
/// their file:line:col origins plus any label text into `group`.
///
/// Used for spans whose source should not be displayed (e.g. ignored
/// directories / external sources). Each file after the first gets its own
/// `Group`, pushed onto `report`; the (possibly replaced) current group is
/// returned for the caller to keep building on.
///
/// NOTE(review): ordering here is load-bearing — origins, padding, and
/// labels must be appended in exactly this sequence to reproduce the layouts
/// sketched in the ASCII diagrams below.
fn unannotated_messages<'a>(
&self,
annotations: Vec<Annotation>,
file_name: &FileName,
sm: &Arc<SourceMap>,
file_idx: usize,
report: &mut Vec<Group<'a>>,
mut group: Group<'a>,
level: &annotate_snippets::level::Level<'static>,
) -> Group<'a> {
let filename = sm.filename_for_diagnostics(file_name).to_string_lossy().to_string();
// Lines already given an `Origin` element, so each line is printed once.
let mut line_tracker = vec![];
for (i, a) in annotations.into_iter().enumerate() {
let lo = sm.lookup_char_pos(a.span.lo());
let hi = sm.lookup_char_pos(a.span.hi());
// Only the first annotation, or ones carrying a label, produce output.
if i == 0 || (a.label.is_some()) {
// Render each new file after the first in its own Group
// ╭▸ $DIR/deriving-meta-unknown-trait.rs:1:10
// │
// LL │ #[derive(Eqr)]
// │ ━━━
// ╰╴ (<- It makes it so *this* will get printed)
// ╭▸ $SRC_DIR/core/src/option.rs:594:0
// ⸬ $SRC_DIR/core/src/option.rs:602:4
// │
// ╰ note: not covered
if i == 0 && file_idx != 0 {
report.push(std::mem::replace(&mut group, Group::with_level(level.clone())));
}
if !line_tracker.contains(&lo.line) {
line_tracker.push(lo.line);
// ╭▸ $SRC_DIR/core/src/option.rs:594:0 (<- It adds *this*)
// ⸬ $SRC_DIR/core/src/option.rs:602:4
// │
// ╰ note: not covered
group = group.element(
Origin::path(filename.clone())
.line(sm.doctest_offset_line(file_name, lo.line))
.char_column(lo.col_display),
);
}
// Multi-line spans with a label also show where the span ends.
if hi.line > lo.line
&& a.label.as_ref().is_some_and(|l| !l.is_empty())
&& !line_tracker.contains(&hi.line)
{
line_tracker.push(hi.line);
// ╭▸ $SRC_DIR/core/src/option.rs:594:0
// ⸬ $SRC_DIR/core/src/option.rs:602:4 (<- It adds *this*)
// │
// ╰ note: not covered
group = group.element(
Origin::path(filename.clone())
.line(sm.doctest_offset_line(file_name, hi.line))
.char_column(hi.col_display),
);
}
if let Some(label) = a.label
&& !label.is_empty()
{
// ╭▸ $SRC_DIR/core/src/option.rs:594:0
// ⸬ $SRC_DIR/core/src/option.rs:602:4
// │ (<- It adds *this*)
// ╰ note: not covered (<- and *this*)
group = group
.element(Padding)
.element(annotate_snippets::Level::NOTE.message(label));
}
}
}
group
}
}
fn emit_to_destination(
rendered: String,
lvl: &Level,
dst: &mut Destination,
short_message: bool,
) -> io::Result<()> {
use crate::lock;
let _buffer_lock = lock::acquire_global_lock("rustc_errors");
writeln!(dst, "{rendered}")?;
if !short_message && !lvl.is_failure_note() {
writeln!(dst)?;
}
dst.flush()?;
Ok(())
}
/// A single span-plus-label pairing resolved from a `MultiSpan`, ready to be
/// turned into an `annotate-snippets` annotation.
#[derive(Debug)]
struct Annotation {
// Whether this is a primary or a contextual annotation.
kind: AnnotationKind,
// The source region being annotated.
span: Span,
// Translated, whitespace-normalized label text; `None` renders unlabeled.
label: Option<String>,
}
/// Resolves a `MultiSpan`'s labels into per-file annotation lists, with
/// label messages translated and whitespace-normalized.
///
/// Files are matched by `stable_id` rather than by name, so multiple
/// versions of the same crate in the dependency graph don't collide.
/// Annotations whose span fails source-map validation are dropped.
fn collect_annotations(
    args: &FluentArgs<'_>,
    msp: &MultiSpan,
    sm: &Arc<SourceMap>,
    translator: &Translator,
) -> Vec<(Arc<SourceFile>, Vec<Annotation>)> {
    let mut output: Vec<(Arc<SourceFile>, Vec<Annotation>)> = vec![];
    for SpanLabel { span, is_primary, label } in msp.span_labels() {
        // A dummy span carries no useful position; fall back to the primary
        // span when one exists. Worst case the error lands at the top of the
        // main file.
        let span = if span.is_dummy() { msp.primary_span().unwrap_or(span) } else { span };
        let file = sm.lookup_source_file(span.lo());
        let kind = match is_primary {
            true => AnnotationKind::Primary,
            false => AnnotationKind::Context,
        };
        let label = label.as_ref().map(|m| {
            normalize_whitespace(
                &translator.translate_message(m, args).map_err(Report::new).unwrap(),
            )
        });
        let ann = Annotation { kind, span, label };
        if sm.is_valid_span(ann.span).is_err() {
            continue;
        }
        // Group by file via `stable_id` (see doc comment above for why).
        match output.iter_mut().find(|(f, _)| f.stable_id == file.stable_id) {
            Some((_, annotations)) => annotations.push(ann),
            None => output.push((file, vec![ann])),
        }
    }
    output
}
/// Given a set of spans (assumed to all lie in the file named by
/// `file_name`), computes the smallest whole-line span covering every span,
/// the corresponding source text, and the (doctest-adjusted) line number of
/// the first covered line.
///
/// Returns `None` when `spans` is empty.
fn shrink_file(
    spans: &[Span],
    file_name: &FileName,
    sm: &Arc<SourceMap>,
) -> Option<(Span, String, usize)> {
    let lo_byte = spans.iter().map(|s| s.lo()).min()?;
    let lo_loc = sm.lookup_char_pos(lo_byte);
    // Widen the bounds to full lines so the rendered snippet never starts or
    // ends mid-line. `Loc::line` is 1-based while `line_bounds` is 0-based.
    let lo = lo_loc.file.line_bounds(lo_loc.line.saturating_sub(1)).start;
    let hi_byte = spans.iter().map(|s| s.hi()).max()?;
    let hi_loc = sm.lookup_char_pos(hi_byte);
    // Fix: look up the end bound in `hi_loc`'s own file — `hi_loc.line`
    // indexes into the file containing the span end. The previous code used
    // `lo_loc.file`, which is only correct under the same-file assumption.
    let hi = hi_loc.file.line_bounds(hi_loc.line.saturating_sub(1)).end;
    let bounding_span = Span::with_root_ctxt(lo, hi);
    let source = sm.span_to_snippet(bounding_span).unwrap_or_default();
    let offset_line = sm.doctest_offset_line(file_name, lo_loc.line);
    Some((bounding_span, source, offset_line))
}

View file

@ -945,6 +945,11 @@ impl<'a, G: EmissionGuarantee> Diag<'a, G> {
None,
"Span must not be empty and have no suggestion",
);
debug_assert_eq!(
parts.array_windows().find(|[a, b]| a.span.overlaps(b.span)),
None,
"suggestion must not have overlapping parts",
);
self.push_suggestion(CodeSuggestion {
substitutions: vec![Substitution { parts }],

View file

@ -16,6 +16,8 @@ use std::iter;
use std::path::Path;
use std::sync::Arc;
use anstream::{AutoStream, ColorChoice};
use anstyle::{AnsiColor, Effects};
use derive_setters::Setters;
use rustc_data_structures::fx::{FxIndexMap, FxIndexSet};
use rustc_data_structures::sync::{DynSend, IntoDynSyncSend};
@ -25,7 +27,6 @@ use rustc_lint_defs::pluralize;
use rustc_span::hygiene::{ExpnKind, MacroKind};
use rustc_span::source_map::SourceMap;
use rustc_span::{FileLines, FileName, SourceFile, Span, char_width, str_width};
use termcolor::{Buffer, BufferWriter, Color, ColorChoice, ColorSpec, StandardStream, WriteColor};
use tracing::{debug, instrument, trace, warn};
use crate::registry::Registry;
@ -525,10 +526,6 @@ impl Emitter for HumanEmitter {
!self.short_message
}
fn supports_color(&self) -> bool {
self.dst.supports_color()
}
fn translator(&self) -> &Translator {
&self.translator
}
@ -1701,7 +1698,6 @@ impl HumanEmitter {
} else {
col_sep_before_no_show_source = true;
}
// print out the span location and spacer before we print the annotated source
// to do this, we need to know if this span will be primary
let is_primary = primary_lo.file.name == annotated_file.file.name;
@ -2354,6 +2350,7 @@ impl HumanEmitter {
.sum();
let underline_start = (span_start_pos + start) as isize + offset;
let underline_end = (span_start_pos + start + sub_len) as isize + offset;
assert!(underline_start >= 0 && underline_end >= 0);
let padding: usize = max_line_num_len + 3;
for p in underline_start..underline_end {
if let DisplaySuggestion::Underline = show_code_change
@ -2702,8 +2699,7 @@ impl HumanEmitter {
[SubstitutionHighlight { start: 0, end }] if *end == line_to_add.len() => {
buffer.puts(*row_num, max_line_num_len + 1, "+ ", Style::Addition);
}
[] => {
// FIXME: needed? Doesn't get exercised in any test.
[] | [SubstitutionHighlight { start: 0, end: 0 }] => {
self.draw_col_separator_no_space(buffer, *row_num, max_line_num_len + 1);
}
_ => {
@ -3127,7 +3123,6 @@ impl FileWithAnnotatedLines {
multiline_depth: 0,
});
}
let mut output = vec![];
let mut multiline_annotations = vec![];
@ -3361,7 +3356,7 @@ const OUTPUT_REPLACEMENTS: &[(char, &str)] = &[
('\u{2069}', "<EFBFBD>"),
];
fn normalize_whitespace(s: &str) -> String {
pub(crate) fn normalize_whitespace(s: &str) -> String {
const {
let mut i = 1;
while i < OUTPUT_REPLACEMENTS.len() {
@ -3406,7 +3401,7 @@ fn overlaps(a1: &Annotation, a2: &Annotation, padding: usize) -> bool {
)
}
fn emit_to_destination(
pub(crate) fn emit_to_destination(
rendered_buffer: &[Vec<StyledString>],
lvl: &Level,
dst: &mut Destination,
@ -3429,10 +3424,8 @@ fn emit_to_destination(
let _buffer_lock = lock::acquire_global_lock("rustc_errors");
for (pos, line) in rendered_buffer.iter().enumerate() {
for part in line {
let style = part.style.color_spec(*lvl);
dst.set_color(&style)?;
write!(dst, "{}", part.text)?;
dst.reset()?;
let style = part.style.anstyle(*lvl);
write!(dst, "{style}{}{style:#}", part.text)?;
}
if !short_message && (!lvl.is_failure_note() || pos != rendered_buffer.len() - 1) {
writeln!(dst)?;
@ -3442,11 +3435,11 @@ fn emit_to_destination(
Ok(())
}
pub type Destination = Box<dyn WriteColor + Send>;
pub type Destination = AutoStream<Box<dyn Write + Send>>;
struct Buffy {
buffer_writer: BufferWriter,
buffer: Buffer,
buffer_writer: std::io::Stderr,
buffer: Vec<u8>,
}
impl Write for Buffy {
@ -3455,7 +3448,7 @@ impl Write for Buffy {
}
fn flush(&mut self) -> io::Result<()> {
self.buffer_writer.print(&self.buffer)?;
self.buffer_writer.write_all(&self.buffer)?;
self.buffer.clear();
Ok(())
}
@ -3470,22 +3463,16 @@ impl Drop for Buffy {
}
}
impl WriteColor for Buffy {
fn supports_color(&self) -> bool {
self.buffer.supports_color()
}
fn set_color(&mut self, spec: &ColorSpec) -> io::Result<()> {
self.buffer.set_color(spec)
}
fn reset(&mut self) -> io::Result<()> {
self.buffer.reset()
}
}
pub fn stderr_destination(color: ColorConfig) -> Destination {
let buffer_writer = std::io::stderr();
let choice = color.to_color_choice();
// We need to resolve `ColorChoice::Auto` before `Box`ing since
// `ColorChoice::Auto` on `dyn Write` will always resolve to `Never`
let choice = if matches!(choice, ColorChoice::Auto) {
AutoStream::choice(&buffer_writer)
} else {
choice
};
// On Windows we'll be performing global synchronization on the entire
// system for emitting rustc errors, so there's no need to buffer
// anything.
@ -3493,60 +3480,42 @@ pub fn stderr_destination(color: ColorConfig) -> Destination {
// On non-Windows we rely on the atomicity of `write` to ensure errors
// don't get all jumbled up.
if cfg!(windows) {
Box::new(StandardStream::stderr(choice))
AutoStream::new(Box::new(buffer_writer), choice)
} else {
let buffer_writer = BufferWriter::stderr(choice);
let buffer = buffer_writer.buffer();
Box::new(Buffy { buffer_writer, buffer })
let buffer = Vec::new();
AutoStream::new(Box::new(Buffy { buffer_writer, buffer }), choice)
}
}
/// On Windows, BRIGHT_BLUE is hard to read on black. Use cyan instead.
///
/// See #36178.
const BRIGHT_BLUE: Color = if cfg!(windows) { Color::Cyan } else { Color::Blue };
const BRIGHT_BLUE: anstyle::Style = if cfg!(windows) {
AnsiColor::BrightCyan.on_default()
} else {
AnsiColor::BrightBlue.on_default()
};
impl Style {
fn color_spec(&self, lvl: Level) -> ColorSpec {
let mut spec = ColorSpec::new();
pub(crate) fn anstyle(&self, lvl: Level) -> anstyle::Style {
match self {
Style::Addition => {
spec.set_fg(Some(Color::Green)).set_intense(true);
}
Style::Removal => {
spec.set_fg(Some(Color::Red)).set_intense(true);
}
Style::LineAndColumn => {}
Style::LineNumber => {
spec.set_bold(true);
spec.set_intense(true);
spec.set_fg(Some(BRIGHT_BLUE));
}
Style::Quotation => {}
Style::MainHeaderMsg => {
spec.set_bold(true);
if cfg!(windows) {
spec.set_intense(true).set_fg(Some(Color::White));
}
}
Style::UnderlinePrimary | Style::LabelPrimary => {
spec = lvl.color();
spec.set_bold(true);
}
Style::UnderlineSecondary | Style::LabelSecondary => {
spec.set_bold(true).set_intense(true);
spec.set_fg(Some(BRIGHT_BLUE));
}
Style::HeaderMsg | Style::NoStyle => {}
Style::Level(lvl) => {
spec = lvl.color();
spec.set_bold(true);
}
Style::Highlight => {
spec.set_bold(true).set_fg(Some(Color::Magenta));
Style::Addition => AnsiColor::BrightGreen.on_default(),
Style::Removal => AnsiColor::BrightRed.on_default(),
Style::LineAndColumn => anstyle::Style::new(),
Style::LineNumber => BRIGHT_BLUE.effects(Effects::BOLD),
Style::Quotation => anstyle::Style::new(),
Style::MainHeaderMsg => if cfg!(windows) {
AnsiColor::BrightWhite.on_default()
} else {
anstyle::Style::new()
}
.effects(Effects::BOLD),
Style::UnderlinePrimary | Style::LabelPrimary => lvl.color().effects(Effects::BOLD),
Style::UnderlineSecondary | Style::LabelSecondary => BRIGHT_BLUE.effects(Effects::BOLD),
Style::HeaderMsg | Style::NoStyle => anstyle::Style::new(),
Style::Level(lvl) => lvl.color().effects(Effects::BOLD),
Style::Highlight => AnsiColor::Magenta.on_default().effects(Effects::BOLD),
}
spec
}
}

View file

@ -15,6 +15,7 @@ use std::path::Path;
use std::sync::{Arc, Mutex};
use std::vec;
use anstream::{AutoStream, ColorChoice};
use derive_setters::Setters;
use rustc_data_structures::sync::IntoDynSyncSend;
use rustc_error_messages::FluentArgs;
@ -23,7 +24,6 @@ use rustc_span::Span;
use rustc_span::hygiene::ExpnData;
use rustc_span::source_map::{FilePathMapping, SourceMap};
use serde::Serialize;
use termcolor::{ColorSpec, WriteColor};
use crate::diagnostic::IsLint;
use crate::emitter::{
@ -333,7 +333,7 @@ impl Diagnostic {
// generate regular command line output and store it in the json
// A threadsafe buffer for writing.
#[derive(Default, Clone)]
#[derive(Clone)]
struct BufWriter(Arc<Mutex<Vec<u8>>>);
impl Write for BufWriter {
@ -344,19 +344,6 @@ impl Diagnostic {
self.0.lock().unwrap().flush()
}
}
impl WriteColor for BufWriter {
fn supports_color(&self) -> bool {
false
}
fn set_color(&mut self, _spec: &ColorSpec) -> io::Result<()> {
Ok(())
}
fn reset(&mut self) -> io::Result<()> {
Ok(())
}
}
let translated_message = je.translator.translate_messages(&diag.messages, &args);
@ -382,13 +369,15 @@ impl Diagnostic {
children
.insert(0, Diagnostic::from_sub_diagnostic(&diag.emitted_at_sub_diag(), &args, je));
}
let buf = BufWriter::default();
let mut dst: Destination = Box::new(buf.clone());
let buf = BufWriter(Arc::new(Mutex::new(Vec::new())));
let short = je.json_rendered.short();
match je.color_config {
ColorConfig::Always | ColorConfig::Auto => dst = Box::new(termcolor::Ansi::new(dst)),
ColorConfig::Never => {}
}
let dst: Destination = AutoStream::new(
Box::new(buf.clone()),
match je.color_config.to_color_choice() {
ColorChoice::Auto => ColorChoice::Always,
choice => choice,
},
);
HumanEmitter::new(dst, je.translator.clone())
.short_message(short)
.sm(je.sm.clone())

View file

@ -39,6 +39,12 @@ use std::path::{Path, PathBuf};
use std::{fmt, panic};
use Level::*;
// Used by external projects such as `rust-gpu`.
// See https://github.com/rust-lang/rust/pull/115393.
pub use anstream::{AutoStream, ColorChoice};
pub use anstyle::{
Ansi256Color, AnsiColor, Color, EffectIter, Effects, Reset, RgbColor, Style as Anstyle,
};
pub use codes::*;
pub use decorate_diag::{BufferedEarlyLint, DecorateDiagCompat, LintBuffer};
pub use diagnostic::{
@ -69,9 +75,6 @@ pub use rustc_span::fatal_error::{FatalError, FatalErrorMarker};
use rustc_span::source_map::SourceMap;
use rustc_span::{BytePos, DUMMY_SP, Loc, Span};
pub use snippet::Style;
// Used by external projects such as `rust-gpu`.
// See https://github.com/rust-lang/rust/pull/115393.
pub use termcolor::{Color, ColorSpec, WriteColor};
use tracing::debug;
use crate::emitter::TimingEvent;
@ -397,17 +400,6 @@ impl CodeSuggestion {
// Assumption: all spans are in the same file, and all spans
// are disjoint. Sort in ascending order.
substitution.parts.sort_by_key(|part| part.span.lo());
// Verify the assumption that all spans are disjoint
assert_eq!(
substitution.parts.array_windows().find(|[a, b]| a.span.overlaps(b.span)),
None,
"all spans must be disjoint",
);
// Account for cases where we are suggesting the same code that's already
// there. This shouldn't happen often, but in some cases for multipart
// suggestions it's much easier to handle it here than in the origin.
substitution.parts.retain(|p| is_different(sm, &p.snippet, p.span));
// Find the bounding span.
let lo = substitution.parts.iter().map(|part| part.span.lo()).min()?;
@ -502,12 +494,16 @@ impl CodeSuggestion {
_ => 1,
})
.sum();
line_highlight.push(SubstitutionHighlight {
start: (cur_lo.col.0 as isize + acc) as usize,
end: (cur_lo.col.0 as isize + acc + len) as usize,
});
if !is_different(sm, &part.snippet, part.span) {
// Account for cases where we are suggesting the same code that's already
// there. This shouldn't happen often, but in some cases for multipart
// suggestions it's much easier to handle it here than in the origin.
} else {
line_highlight.push(SubstitutionHighlight {
start: (cur_lo.col.0 as isize + acc) as usize,
end: (cur_lo.col.0 as isize + acc + len) as usize,
});
}
buf.push_str(&part.snippet);
let cur_hi = sm.lookup_char_pos(part.span.hi());
// Account for the difference between the width of the current code and the
@ -1982,25 +1978,21 @@ impl fmt::Display for Level {
}
impl Level {
fn color(self) -> ColorSpec {
let mut spec = ColorSpec::new();
fn color(self) -> anstyle::Style {
match self {
Bug | Fatal | Error | DelayedBug => {
spec.set_fg(Some(Color::Red)).set_intense(true);
}
Bug | Fatal | Error | DelayedBug => AnsiColor::BrightRed.on_default(),
ForceWarning | Warning => {
spec.set_fg(Some(Color::Yellow)).set_intense(cfg!(windows));
if cfg!(windows) {
AnsiColor::BrightYellow.on_default()
} else {
AnsiColor::Yellow.on_default()
}
}
Note | OnceNote => {
spec.set_fg(Some(Color::Green)).set_intense(true);
}
Help | OnceHelp => {
spec.set_fg(Some(Color::Cyan)).set_intense(true);
}
FailureNote => {}
Note | OnceNote => AnsiColor::BrightGreen.on_default(),
Help | OnceHelp => AnsiColor::BrightCyan.on_default(),
FailureNote => anstyle::Style::new(),
Allow | Expect => unreachable!(),
}
spec
}
pub fn to_str(self) -> &'static str {

View file

@ -4,7 +4,6 @@
use std::io;
use termcolor::{Buffer, BufferWriter, ColorChoice};
mod parse;
mod term;
@ -19,15 +18,15 @@ impl<'a> MdStream<'a> {
parse::entrypoint(s)
}
/// Write formatted output to a termcolor buffer
pub fn write_termcolor_buf(&self, buf: &mut Buffer) -> io::Result<()> {
/// Write formatted output to an anstream buffer
pub fn write_anstream_buf(&self, buf: &mut Vec<u8>) -> io::Result<()> {
term::entrypoint(self, buf)
}
}
/// Create a termcolor buffer with the `Always` color choice
pub fn create_stdout_bufwtr() -> BufferWriter {
BufferWriter::stdout(ColorChoice::Always)
/// Create an anstream buffer with the `Always` color choice
pub fn create_stdout_bufwtr() -> anstream::Stdout {
anstream::Stdout::always(std::io::stdout())
}
/// A single tokentree within a Markdown document

View file

@ -1,7 +1,7 @@
use std::cell::Cell;
use std::io::{self, Write};
use termcolor::{Buffer, Color, ColorSpec, WriteColor};
use anstyle::{AnsiColor, Effects, Style};
use crate::markdown::{MdStream, MdTree};
@ -15,7 +15,7 @@ thread_local! {
}
/// Print to terminal output to a buffer
pub(crate) fn entrypoint(stream: &MdStream<'_>, buf: &mut Buffer) -> io::Result<()> {
pub(crate) fn entrypoint(stream: &MdStream<'_>, buf: &mut Vec<u8>) -> io::Result<()> {
#[cfg(not(test))]
if let Some((w, _)) = termize::dimensions() {
WIDTH.set(std::cmp::min(w, DEFAULT_COLUMN_WIDTH));
@ -23,57 +23,65 @@ pub(crate) fn entrypoint(stream: &MdStream<'_>, buf: &mut Buffer) -> io::Result<
write_stream(stream, buf, None, 0)?;
buf.write_all(b"\n")
}
/// Write the buffer, reset to the default style after each
fn write_stream(
MdStream(stream): &MdStream<'_>,
buf: &mut Buffer,
default: Option<&ColorSpec>,
buf: &mut Vec<u8>,
default: Option<Style>,
indent: usize,
) -> io::Result<()> {
match default {
Some(c) => buf.set_color(c)?,
None => buf.reset()?,
}
for tt in stream {
write_tt(tt, buf, indent)?;
if let Some(c) = default {
buf.set_color(c)?;
}
write_tt(tt, buf, default, indent)?;
}
reset_opt_style(buf, default)?;
buf.reset()?;
Ok(())
}
fn write_tt(tt: &MdTree<'_>, buf: &mut Buffer, indent: usize) -> io::Result<()> {
fn write_tt(
tt: &MdTree<'_>,
buf: &mut Vec<u8>,
default: Option<Style>,
indent: usize,
) -> io::Result<()> {
match tt {
MdTree::CodeBlock { txt, lang: _ } => {
buf.set_color(ColorSpec::new().set_dimmed(true))?;
buf.write_all(txt.as_bytes())?;
reset_opt_style(buf, default)?;
let style = Style::new().effects(Effects::DIMMED);
write!(buf, "{style}{txt}{style:#}")?;
render_opt_style(buf, default)?;
}
MdTree::CodeInline(txt) => {
buf.set_color(ColorSpec::new().set_dimmed(true))?;
write_wrapping(buf, txt, indent, None)?;
reset_opt_style(buf, default)?;
write_wrapping(buf, txt, indent, None, Some(Style::new().effects(Effects::DIMMED)))?;
render_opt_style(buf, default)?;
}
MdTree::Strong(txt) => {
buf.set_color(ColorSpec::new().set_bold(true))?;
write_wrapping(buf, txt, indent, None)?;
reset_opt_style(buf, default)?;
write_wrapping(buf, txt, indent, None, Some(Style::new().effects(Effects::BOLD)))?;
render_opt_style(buf, default)?;
}
MdTree::Emphasis(txt) => {
buf.set_color(ColorSpec::new().set_italic(true))?;
write_wrapping(buf, txt, indent, None)?;
reset_opt_style(buf, default)?;
write_wrapping(buf, txt, indent, None, Some(Style::new().effects(Effects::ITALIC)))?;
render_opt_style(buf, default)?;
}
MdTree::Strikethrough(txt) => {
buf.set_color(ColorSpec::new().set_strikethrough(true))?;
write_wrapping(buf, txt, indent, None)?;
reset_opt_style(buf, default)?;
write_wrapping(
buf,
txt,
indent,
None,
Some(Style::new().effects(Effects::STRIKETHROUGH)),
)?;
render_opt_style(buf, default)?;
}
MdTree::PlainText(txt) => {
write_wrapping(buf, txt, indent, None)?;
write_wrapping(buf, txt, indent, None, None)?;
}
MdTree::Link { disp, link } => {
write_wrapping(buf, disp, indent, Some(link))?;
write_wrapping(buf, disp, indent, Some(link), None)?;
}
MdTree::ParagraphBreak => {
buf.write_all(b"\n\n")?;
@ -88,34 +96,48 @@ fn write_tt(tt: &MdTree<'_>, buf: &mut Buffer, indent: usize) -> io::Result<()>
reset_cursor();
}
MdTree::Heading(n, stream) => {
let mut cs = ColorSpec::new();
cs.set_fg(Some(Color::Cyan));
match n {
1 => cs.set_intense(true).set_bold(true).set_underline(true),
2 => cs.set_intense(true).set_underline(true),
3 => cs.set_intense(true).set_italic(true),
4.. => cs.set_underline(true).set_italic(true),
let cs = match n {
1 => AnsiColor::BrightCyan.on_default().effects(Effects::BOLD | Effects::UNDERLINE),
2 => AnsiColor::BrightCyan.on_default().effects(Effects::UNDERLINE),
3 => AnsiColor::BrightCyan.on_default().effects(Effects::ITALIC),
4.. => AnsiColor::Cyan.on_default().effects(Effects::UNDERLINE | Effects::ITALIC),
0 => unreachable!(),
};
write_stream(stream, buf, Some(&cs), 0)?;
reset_opt_style(buf, default)?;
write!(buf, "{cs}")?;
write_stream(stream, buf, Some(cs), 0)?;
write!(buf, "{cs:#}")?;
render_opt_style(buf, default)?;
buf.write_all(b"\n")?;
}
MdTree::OrderedListItem(n, stream) => {
let base = format!("{n}. ");
write_wrapping(buf, &format!("{base:<4}"), indent, None)?;
write_wrapping(buf, &format!("{base:<4}"), indent, None, None)?;
write_stream(stream, buf, None, indent + 4)?;
}
MdTree::UnorderedListItem(stream) => {
let base = "* ";
write_wrapping(buf, &format!("{base:<4}"), indent, None)?;
write_wrapping(buf, &format!("{base:<4}"), indent, None, None)?;
write_stream(stream, buf, None, indent + 4)?;
}
// Patterns popped in previous step
MdTree::Comment(_) | MdTree::LinkDef { .. } | MdTree::RefLink { .. } => unreachable!(),
}
buf.reset()?;
Ok(())
}
fn render_opt_style(buf: &mut Vec<u8>, style: Option<Style>) -> io::Result<()> {
if let Some(style) = &style {
write!(buf, "{style}")?;
}
Ok(())
}
fn reset_opt_style(buf: &mut Vec<u8>, style: Option<Style>) -> io::Result<()> {
if let Some(style) = &style {
write!(buf, "{style:#}")?;
}
Ok(())
}
@ -126,12 +148,15 @@ fn reset_cursor() {
/// Change to be generic on Write for testing. If we have a link URL, we don't
/// count the extra tokens to make it clickable.
fn write_wrapping<B: io::Write>(
buf: &mut B,
fn write_wrapping(
buf: &mut Vec<u8>,
text: &str,
indent: usize,
link_url: Option<&str>,
style: Option<Style>,
) -> io::Result<()> {
render_opt_style(buf, style)?;
let ind_ws = &b" "[..indent];
let mut to_write = text;
if let Some(url) = link_url {
@ -179,7 +204,7 @@ fn write_wrapping<B: io::Write>(
if link_url.is_some() {
buf.write_all(b"\x1b]8;;\x1b\\")?;
}
reset_opt_style(buf, style)?;
Ok(())
})
}

View file

@ -1,35 +1,35 @@
H1 Heading ]8;;http://docs.rs\with a link]8;;\
H1 content: some words in bold and so does inline code
H1 Heading ]8;;http://docs.rs\with a link]8;;\
H1 content: some words in bold and so does inline code
H2 Heading
H2 content: some words in italic
H2 Heading
H2 content: some words in italic
H3 Heading
H3 content: strikethrough text
H3 Heading
H3 content: strikethrough text
H4 Heading
H4 content: A ]8;;https://docs.rs\simple link]8;;\ and a ]8;;http://docs.rs\remote-link]8;;\.
--------------------------------------------------------------------------------------------------------------------------------------------
A section break was above. We can also do paragraph breaks:
H4 Heading
H4 content: A ]8;;https://docs.rs\simple link]8;;\ and a ]8;;http://docs.rs\remote-link]8;;\.
--------------------------------------------------------------------------------------------------------------------------------------------
A section break was above. We can also do paragraph breaks:
(new paragraph) and unordered lists:
(new paragraph) and unordered lists:
* Item 1 in code
* Item 2 in italics
* Item 1 in code
* Item 2 in italics
Or ordered:
Or ordered:
1. Item 1 in bold
2. Item 2 with some long lines that should wrap: Lorem ipsum dolor sit amet, consectetur adipiscing elit. Aenean ac mattis nunc. Phasellus
elit quam, pulvinar ac risus in, dictum vehicula turpis. Vestibulum neque est, accumsan in cursus sit amet, dictum a nunc. Suspendisse
aliquet, lorem eu eleifend accumsan, magna neque sodales nisi, a aliquet lectus leo eu sem.
--------------------------------------------------------------------------------------------------------------------------------------------
Code
Both inline code and code blocks are supported:
1. Item 1 in bold
2. Item 2 with some long lines that should wrap: Lorem ipsum dolor sit amet, consectetur adipiscing elit. Aenean ac mattis nunc. Phasellus
elit quam, pulvinar ac risus in, dictum vehicula turpis. Vestibulum neque est, accumsan in cursus sit amet, dictum a nunc. Suspendisse
aliquet, lorem eu eleifend accumsan, magna neque sodales nisi, a aliquet lectus leo eu sem.
--------------------------------------------------------------------------------------------------------------------------------------------
Code
Both inline code and code blocks are supported:
/// A rust enum
/// A rust enum
#[derive(Debug, PartialEq, Clone)]
enum Foo {
/// Start of line
Bar
}
}

View file

@ -1,8 +1,5 @@
use std::io::BufWriter;
use std::path::PathBuf;
use termcolor::{BufferWriter, ColorChoice};
use super::*;
const INPUT: &str = include_str!("input.md");
@ -35,19 +32,20 @@ quis dolor non venenatis. Aliquam ut. ";
#[test]
fn test_wrapping_write() {
WIDTH.with(|w| w.set(TEST_WIDTH));
let mut buf = BufWriter::new(Vec::new());
let mut buf = Vec::new();
let txt = TXT.replace("-\n", "-").replace("_\n", "_").replace('\n', " ").replace(" ", "");
write_wrapping(&mut buf, &txt, 0, None).unwrap();
write_wrapping(&mut buf, &txt, 4, None).unwrap();
write_wrapping(&mut buf, &txt, 0, None, None).unwrap();
write_wrapping(&mut buf, &txt, 4, None, None).unwrap();
write_wrapping(
&mut buf,
"Sample link lorem ipsum dolor sit amet. ",
4,
Some("link-address-placeholder"),
None,
)
.unwrap();
write_wrapping(&mut buf, &txt, 0, None).unwrap();
let out = String::from_utf8(buf.into_inner().unwrap()).unwrap();
write_wrapping(&mut buf, &txt, 0, None, None).unwrap();
let out = String::from_utf8(buf).unwrap();
let out = out
.replace("\x1b\\", "")
.replace('\x1b', "")
@ -66,18 +64,17 @@ fn test_output() {
// Capture `--bless` when run via ./x
let bless = std::env::var_os("RUSTC_BLESS").is_some_and(|v| v != "0");
let ast = MdStream::parse_str(INPUT);
let bufwtr = BufferWriter::stderr(ColorChoice::Always);
let mut buffer = bufwtr.buffer();
ast.write_termcolor_buf(&mut buffer).unwrap();
let mut buffer = Vec::new();
ast.write_anstream_buf(&mut buffer).unwrap();
let mut blessed = PathBuf::new();
blessed.extend(OUTPUT_PATH);
if bless {
std::fs::write(&blessed, buffer.into_inner()).unwrap();
std::fs::write(&blessed, buffer.as_slice()).unwrap();
eprintln!("blessed output at {}", blessed.display());
} else {
let output = buffer.into_inner();
let output = buffer.as_slice();
if std::fs::read(blessed).unwrap() != output {
// hack: I don't know any way to write bytes to the captured stdout
// that cargo test uses

View file

@ -133,24 +133,29 @@ pub struct AttributeTemplate {
}
impl AttributeTemplate {
pub fn suggestions(&self, style: AttrStyle, name: impl std::fmt::Display) -> Vec<String> {
pub fn suggestions(
&self,
style: Option<AttrStyle>,
name: impl std::fmt::Display,
) -> Vec<String> {
let mut suggestions = vec![];
let inner = match style {
AttrStyle::Outer => "",
AttrStyle::Inner => "!",
let (start, end) = match style {
Some(AttrStyle::Outer) => ("#[", "]"),
Some(AttrStyle::Inner) => ("#![", "]"),
None => ("", ""),
};
if self.word {
suggestions.push(format!("#{inner}[{name}]"));
suggestions.push(format!("{start}{name}{end}"));
}
if let Some(descr) = self.list {
for descr in descr {
suggestions.push(format!("#{inner}[{name}({descr})]"));
suggestions.push(format!("{start}{name}({descr}){end}"));
}
}
suggestions.extend(self.one_of.iter().map(|&word| format!("#{inner}[{name}({word})]")));
suggestions.extend(self.one_of.iter().map(|&word| format!("{start}{name}({word}){end}")));
if let Some(descr) = self.name_value_str {
for descr in descr {
suggestions.push(format!("#{inner}[{name} = \"{descr}\"]"));
suggestions.push(format!("{start}{name} = \"{descr}\"{end}"));
}
}
suggestions.sort();

View file

@ -499,7 +499,7 @@ pub enum AttributeKind {
Coverage(Span, CoverageAttrKind),
/// Represents `#[crate_name = ...]`
CrateName { name: Symbol, name_span: Span, attr_span: Span, style: AttrStyle },
CrateName { name: Symbol, name_span: Span, attr_span: Span },
/// Represents `#[custom_mir]`.
CustomMir(Option<(MirDialect, Span)>, Option<(MirPhase, Span)>, Span),

View file

@ -302,6 +302,10 @@ pub enum DefPathData {
Ctor,
/// A constant expression (see `{ast,hir}::AnonConst`).
AnonConst,
/// A constant expression created during AST->HIR lowering..
LateAnonConst,
/// A fresh anonymous lifetime created by desugaring elided lifetimes.
DesugaredAnonymousLifetime,
/// An existential `impl Trait` type node.
/// Argument position `impl Trait` have a `TypeNs` with their pretty-printed name.
OpaqueTy,
@ -454,6 +458,8 @@ impl DefPathData {
TypeNs(name) | ValueNs(name) | MacroNs(name) | LifetimeNs(name)
| OpaqueLifetime(name) => Some(name),
DesugaredAnonymousLifetime => Some(kw::UnderscoreLifetime),
Impl
| ForeignMod
| CrateRoot
@ -462,6 +468,7 @@ impl DefPathData {
| Closure
| Ctor
| AnonConst
| LateAnonConst
| OpaqueTy
| AnonAssocTy(..)
| SyntheticCoroutineBody
@ -475,6 +482,8 @@ impl DefPathData {
TypeNs(name) | ValueNs(name) | MacroNs(name) | LifetimeNs(name) | AnonAssocTy(name)
| OpaqueLifetime(name) => Some(name),
DesugaredAnonymousLifetime => Some(kw::UnderscoreLifetime),
Impl
| ForeignMod
| CrateRoot
@ -483,6 +492,7 @@ impl DefPathData {
| Closure
| Ctor
| AnonConst
| LateAnonConst
| OpaqueTy
| SyntheticCoroutineBody
| NestedStatic => None,
@ -502,7 +512,8 @@ impl DefPathData {
GlobalAsm => DefPathDataName::Anon { namespace: sym::global_asm },
Closure => DefPathDataName::Anon { namespace: sym::closure },
Ctor => DefPathDataName::Anon { namespace: sym::constructor },
AnonConst => DefPathDataName::Anon { namespace: sym::constant },
AnonConst | LateAnonConst => DefPathDataName::Anon { namespace: sym::constant },
DesugaredAnonymousLifetime => DefPathDataName::Named(kw::UnderscoreLifetime),
OpaqueTy => DefPathDataName::Anon { namespace: sym::opaque },
AnonAssocTy(..) => DefPathDataName::Anon { namespace: sym::anon_assoc },
SyntheticCoroutineBody => DefPathDataName::Anon { namespace: sym::synthetic },

View file

@ -1854,6 +1854,9 @@ pub enum TyPatKind<'hir> {
/// A range pattern (e.g., `1..=2` or `1..2`).
Range(&'hir ConstArg<'hir>, &'hir ConstArg<'hir>),
/// A pattern that excludes null pointers
NotNull,
/// A list of patterns where only one needs to be satisfied
Or(&'hir [TyPat<'hir>]),

View file

@ -725,7 +725,7 @@ pub fn walk_ty_pat<'v, V: Visitor<'v>>(visitor: &mut V, pattern: &'v TyPat<'v>)
try_visit!(visitor.visit_const_arg_unambig(upper_bound));
}
TyPatKind::Or(patterns) => walk_list!(visitor, visit_pattern_type_pattern, patterns),
TyPatKind::Err(_) => (),
TyPatKind::NotNull | TyPatKind::Err(_) => (),
}
V::Result::output()
}

View file

@ -168,6 +168,8 @@ language_item_table! {
MetaSized, sym::meta_sized, meta_sized_trait, Target::Trait, GenericRequirement::Exact(0);
PointeeSized, sym::pointee_sized, pointee_sized_trait, Target::Trait, GenericRequirement::Exact(0);
Unsize, sym::unsize, unsize_trait, Target::Trait, GenericRequirement::Minimum(1);
AlignOf, sym::mem_align_const, align_const, Target::AssocConst, GenericRequirement::Exact(0);
SizeOf, sym::mem_size_const, size_const, Target::AssocConst, GenericRequirement::Exact(0);
/// Trait injected by `#[derive(PartialEq)]`, (i.e. "Partial EQ").
StructuralPeq, sym::structural_peq, structural_peq_trait, Target::Trait, GenericRequirement::None;
Copy, sym::copy, copy_trait, Target::Trait, GenericRequirement::Exact(0);

View file

@ -103,6 +103,8 @@ hir_analysis_coerce_pointee_not_struct = `derive(CoercePointee)` is only applica
hir_analysis_coerce_pointee_not_transparent = `derive(CoercePointee)` is only applicable to `struct` with `repr(transparent)` layout
hir_analysis_coerce_same_pat_kind = only pattern types with the same pattern can be coerced between each other
hir_analysis_coerce_unsized_field_validity = for `{$ty}` to have a valid implementation of `{$trait_name}`, it must be possible to coerce the field of type `{$field_ty}`
.label = `{$field_ty}` must be a pointer, reference, or smart pointer that is allowed to be unsized

View file

@ -243,6 +243,18 @@ fn visit_implementation_of_dispatch_from_dyn(checker: &Checker<'_>) -> Result<()
// in the compiler (in particular, all the call ABI logic) will treat them as repr(transparent)
// even if they do not carry that attribute.
match (source.kind(), target.kind()) {
(&ty::Pat(_, pat_a), &ty::Pat(_, pat_b)) => {
if pat_a != pat_b {
return Err(tcx.dcx().emit_err(errors::CoerceSamePatKind {
span,
trait_name,
pat_a: pat_a.to_string(),
pat_b: pat_b.to_string(),
}));
}
Ok(())
}
(&ty::Ref(r_a, _, mutbl_a), ty::Ref(r_b, _, mutbl_b))
if r_a == *r_b && mutbl_a == *mutbl_b =>
{
@ -408,6 +420,18 @@ pub(crate) fn coerce_unsized_info<'tcx>(
(mt_a.ty, mt_b.ty, unsize_trait, None, span)
};
let (source, target, trait_def_id, kind, field_span) = match (source.kind(), target.kind()) {
(&ty::Pat(ty_a, pat_a), &ty::Pat(ty_b, pat_b)) => {
if pat_a != pat_b {
return Err(tcx.dcx().emit_err(errors::CoerceSamePatKind {
span,
trait_name,
pat_a: pat_a.to_string(),
pat_b: pat_b.to_string(),
}));
}
(ty_a, ty_b, coerce_unsized_trait, None, span)
}
(&ty::Ref(r_a, ty_a, mutbl_a), &ty::Ref(r_b, ty_b, mutbl_b)) => {
infcx.sub_regions(SubregionOrigin::RelateObjectBound(span), r_b, r_a);
let mt_a = ty::TypeAndMut { ty: ty_a, mutbl: mutbl_a };

View file

@ -206,12 +206,8 @@ pub(crate) fn orphan_check_impl(
(LocalImpl::Disallow { problematic_kind }, NonlocalImpl::DisallowOther)
}
ty::Pat(..) => (
LocalImpl::Disallow { problematic_kind: "pattern type" },
NonlocalImpl::DisallowOther,
),
ty::Bool
| ty::Pat(..)
| ty::Char
| ty::Int(..)
| ty::Uint(..)

View file

@ -12,7 +12,9 @@ use tracing::{debug, instrument};
use super::ItemCtxt;
use super::predicates_of::assert_only_contains_predicates_from;
use crate::hir_ty_lowering::{HirTyLowerer, OverlappingAsssocItemConstraints, PredicateFilter};
use crate::hir_ty_lowering::{
HirTyLowerer, ImpliedBoundsContext, OverlappingAsssocItemConstraints, PredicateFilter,
};
/// For associated types we include both bounds written on the type
/// (`type X: Trait`) and predicates from the trait: `where Self::X: Trait`.
@ -52,15 +54,20 @@ fn associated_type_bounds<'tcx>(
| PredicateFilter::SelfTraitThatDefines(_)
| PredicateFilter::SelfAndAssociatedTypeBounds => {
// Implicit bounds are added to associated types unless a `?Trait` bound is found.
icx.lowerer().add_sizedness_bounds(
icx.lowerer().add_implicit_sizedness_bounds(
&mut bounds,
item_ty,
hir_bounds,
None,
None,
ImpliedBoundsContext::AssociatedTypeOrImplTrait,
span,
);
icx.lowerer().add_default_traits(
&mut bounds,
item_ty,
hir_bounds,
ImpliedBoundsContext::AssociatedTypeOrImplTrait,
span,
);
icx.lowerer().add_default_traits(&mut bounds, item_ty, hir_bounds, None, span);
// Also collect `where Self::Assoc: Trait` from the parent trait's where clauses.
let trait_def_id = tcx.local_parent(assoc_item_def_id);
@ -372,15 +379,20 @@ fn opaque_type_bounds<'tcx>(
| PredicateFilter::SelfOnly
| PredicateFilter::SelfTraitThatDefines(_)
| PredicateFilter::SelfAndAssociatedTypeBounds => {
icx.lowerer().add_sizedness_bounds(
icx.lowerer().add_implicit_sizedness_bounds(
&mut bounds,
item_ty,
hir_bounds,
None,
None,
ImpliedBoundsContext::AssociatedTypeOrImplTrait,
span,
);
icx.lowerer().add_default_traits(
&mut bounds,
item_ty,
hir_bounds,
ImpliedBoundsContext::AssociatedTypeOrImplTrait,
span,
);
icx.lowerer().add_default_traits(&mut bounds, item_ty, hir_bounds, None, span);
}
//`ConstIfConst` is only interested in `[const]` bounds.
PredicateFilter::ConstIfConst | PredicateFilter::SelfConstIfConst => {}

View file

@ -19,7 +19,8 @@ use crate::collect::ItemCtxt;
use crate::constrained_generic_params as cgp;
use crate::delegation::inherit_predicates_for_delegation_item;
use crate::hir_ty_lowering::{
HirTyLowerer, OverlappingAsssocItemConstraints, PredicateFilter, RegionInferReason,
HirTyLowerer, ImpliedBoundsContext, OverlappingAsssocItemConstraints, PredicateFilter,
RegionInferReason,
};
/// Returns a list of all type predicates (explicit and implicit) for the definition with
@ -189,19 +190,18 @@ fn gather_explicit_predicates_of(tcx: TyCtxt<'_>, def_id: LocalDefId) -> ty::Gen
PredicateFilter::All,
OverlappingAsssocItemConstraints::Allowed,
);
icx.lowerer().add_sizedness_bounds(
icx.lowerer().add_implicit_sizedness_bounds(
&mut bounds,
tcx.types.self_param,
self_bounds,
None,
Some(def_id),
ImpliedBoundsContext::TraitDef(def_id),
span,
);
icx.lowerer().add_default_super_traits(
def_id,
icx.lowerer().add_default_traits(
&mut bounds,
tcx.types.self_param,
self_bounds,
hir_generics,
ImpliedBoundsContext::TraitDef(def_id),
span,
);
predicates.extend(bounds);
@ -229,19 +229,18 @@ fn gather_explicit_predicates_of(tcx: TyCtxt<'_>, def_id: LocalDefId) -> ty::Gen
let param_ty = icx.lowerer().lower_ty_param(param.hir_id);
let mut bounds = Vec::new();
// Implicit bounds are added to type params unless a `?Trait` bound is found
icx.lowerer().add_sizedness_bounds(
icx.lowerer().add_implicit_sizedness_bounds(
&mut bounds,
param_ty,
&[],
Some((param.def_id, hir_generics.predicates)),
None,
ImpliedBoundsContext::TyParam(param.def_id, hir_generics.predicates),
param.span,
);
icx.lowerer().add_default_traits(
&mut bounds,
param_ty,
&[],
Some((param.def_id, hir_generics.predicates)),
ImpliedBoundsContext::TyParam(param.def_id, hir_generics.predicates),
param.span,
);
trace!(?bounds);
@ -676,11 +675,18 @@ pub(super) fn implied_predicates_with_filter<'tcx>(
| PredicateFilter::SelfOnly
| PredicateFilter::SelfTraitThatDefines(_)
| PredicateFilter::SelfAndAssociatedTypeBounds => {
icx.lowerer().add_default_super_traits(
trait_def_id,
icx.lowerer().add_implicit_sizedness_bounds(
&mut bounds,
self_param_ty,
superbounds,
generics,
ImpliedBoundsContext::TraitDef(trait_def_id),
item.span,
);
icx.lowerer().add_default_traits(
&mut bounds,
self_param_ty,
superbounds,
ImpliedBoundsContext::TraitDef(trait_def_id),
item.span,
);
}

View file

@ -1258,6 +1258,16 @@ pub(crate) struct CoerceUnsizedNonStruct {
pub trait_name: &'static str,
}
#[derive(Diagnostic)]
#[diag(hir_analysis_coerce_same_pat_kind)]
pub(crate) struct CoerceSamePatKind {
#[primary_span]
pub span: Span,
pub trait_name: &'static str,
pub pat_a: String,
pub pat_b: String,
}
#[derive(Diagnostic)]
#[diag(hir_analysis_coerce_unsized_may, code = E0377)]
pub(crate) struct CoerceSameStruct {

View file

@ -1,4 +1,3 @@
use std::assert_matches::assert_matches;
use std::ops::ControlFlow;
use rustc_data_structures::fx::{FxIndexMap, FxIndexSet};
@ -7,7 +6,7 @@ use rustc_errors::struct_span_code_err;
use rustc_hir as hir;
use rustc_hir::PolyTraitRef;
use rustc_hir::def::{DefKind, Res};
use rustc_hir::def_id::{CRATE_DEF_ID, DefId, LocalDefId};
use rustc_hir::def_id::{CRATE_DEF_ID, DefId};
use rustc_middle::bug;
use rustc_middle::ty::{
self as ty, IsSuggestable, Ty, TyCtxt, TypeSuperVisitable, TypeVisitable, TypeVisitableExt,
@ -18,11 +17,10 @@ use rustc_trait_selection::traits;
use smallvec::SmallVec;
use tracing::{debug, instrument};
use super::errors::GenericsArgsErrExtend;
use crate::errors;
use crate::hir_ty_lowering::{
AssocItemQSelf, FeedConstTy, HirTyLowerer, OverlappingAsssocItemConstraints, PredicateFilter,
RegionInferReason,
AssocItemQSelf, FeedConstTy, GenericsArgsErrExtend, HirTyLowerer, ImpliedBoundsContext,
OverlappingAsssocItemConstraints, PredicateFilter, RegionInferReason,
};
#[derive(Debug, Default)]
@ -62,7 +60,7 @@ impl CollectedSizednessBounds {
fn search_bounds_for<'tcx>(
hir_bounds: &'tcx [hir::GenericBound<'tcx>],
self_ty_where_predicates: Option<(LocalDefId, &'tcx [hir::WherePredicate<'tcx>])>,
context: ImpliedBoundsContext<'tcx>,
mut f: impl FnMut(&'tcx PolyTraitRef<'tcx>),
) {
let mut search_bounds = |hir_bounds: &'tcx [hir::GenericBound<'tcx>]| {
@ -76,7 +74,7 @@ fn search_bounds_for<'tcx>(
};
search_bounds(hir_bounds);
if let Some((self_ty, where_clause)) = self_ty_where_predicates {
if let ImpliedBoundsContext::TyParam(self_ty, where_clause) = context {
for clause in where_clause {
if let hir::WherePredicateKind::BoundPredicate(pred) = clause.kind
&& pred.is_param_bound(self_ty.to_def_id())
@ -89,10 +87,10 @@ fn search_bounds_for<'tcx>(
fn collect_relaxed_bounds<'tcx>(
hir_bounds: &'tcx [hir::GenericBound<'tcx>],
self_ty_where_predicates: Option<(LocalDefId, &'tcx [hir::WherePredicate<'tcx>])>,
context: ImpliedBoundsContext<'tcx>,
) -> SmallVec<[&'tcx PolyTraitRef<'tcx>; 1]> {
let mut relaxed_bounds: SmallVec<[_; 1]> = SmallVec::new();
search_bounds_for(hir_bounds, self_ty_where_predicates, |ptr| {
search_bounds_for(hir_bounds, context, |ptr| {
if matches!(ptr.modifiers.polarity, hir::BoundPolarity::Maybe(_)) {
relaxed_bounds.push(ptr);
}
@ -102,11 +100,11 @@ fn collect_relaxed_bounds<'tcx>(
fn collect_bounds<'a, 'tcx>(
hir_bounds: &'a [hir::GenericBound<'tcx>],
self_ty_where_predicates: Option<(LocalDefId, &'tcx [hir::WherePredicate<'tcx>])>,
context: ImpliedBoundsContext<'tcx>,
target_did: DefId,
) -> CollectedBound {
let mut collect_into = CollectedBound::default();
search_bounds_for(hir_bounds, self_ty_where_predicates, |ptr| {
search_bounds_for(hir_bounds, context, |ptr| {
if !matches!(ptr.trait_ref.path.res, Res::Def(DefKind::Trait, did) if did == target_did) {
return;
}
@ -123,17 +121,17 @@ fn collect_bounds<'a, 'tcx>(
fn collect_sizedness_bounds<'tcx>(
tcx: TyCtxt<'tcx>,
hir_bounds: &'tcx [hir::GenericBound<'tcx>],
self_ty_where_predicates: Option<(LocalDefId, &'tcx [hir::WherePredicate<'tcx>])>,
context: ImpliedBoundsContext<'tcx>,
span: Span,
) -> CollectedSizednessBounds {
let sized_did = tcx.require_lang_item(hir::LangItem::Sized, span);
let sized = collect_bounds(hir_bounds, self_ty_where_predicates, sized_did);
let sized = collect_bounds(hir_bounds, context, sized_did);
let meta_sized_did = tcx.require_lang_item(hir::LangItem::MetaSized, span);
let meta_sized = collect_bounds(hir_bounds, self_ty_where_predicates, meta_sized_did);
let meta_sized = collect_bounds(hir_bounds, context, meta_sized_did);
let pointee_sized_did = tcx.require_lang_item(hir::LangItem::PointeeSized, span);
let pointee_sized = collect_bounds(hir_bounds, self_ty_where_predicates, pointee_sized_did);
let pointee_sized = collect_bounds(hir_bounds, context, pointee_sized_did);
CollectedSizednessBounds { sized, meta_sized, pointee_sized }
}
@ -161,13 +159,12 @@ impl<'tcx> dyn HirTyLowerer<'tcx> + '_ {
/// bounds are present.
/// - On parameters, opaque type, associated types and trait aliases, add a `MetaSized` bound if
/// a `?Sized` bound is present.
pub(crate) fn add_sizedness_bounds(
pub(crate) fn add_implicit_sizedness_bounds(
&self,
bounds: &mut Vec<(ty::Clause<'tcx>, Span)>,
self_ty: Ty<'tcx>,
hir_bounds: &'tcx [hir::GenericBound<'tcx>],
self_ty_where_predicates: Option<(LocalDefId, &'tcx [hir::WherePredicate<'tcx>])>,
trait_did: Option<LocalDefId>,
context: ImpliedBoundsContext<'tcx>,
span: Span,
) {
let tcx = self.tcx();
@ -181,33 +178,36 @@ impl<'tcx> dyn HirTyLowerer<'tcx> + '_ {
let pointee_sized_did = tcx.require_lang_item(hir::LangItem::PointeeSized, span);
// If adding sizedness bounds to a trait, then there are some relevant early exits
if let Some(trait_did) = trait_did {
let trait_did = trait_did.to_def_id();
// Never add a default supertrait to `PointeeSized`.
if trait_did == pointee_sized_did {
return;
match context {
ImpliedBoundsContext::TraitDef(trait_did) => {
let trait_did = trait_did.to_def_id();
// Never add a default supertrait to `PointeeSized`.
if trait_did == pointee_sized_did {
return;
}
// Don't add default sizedness supertraits to auto traits because it isn't possible to
// relax an automatically added supertrait on the defn itself.
if tcx.trait_is_auto(trait_did) {
return;
}
}
// Don't add default sizedness supertraits to auto traits because it isn't possible to
// relax an automatically added supertrait on the defn itself.
if tcx.trait_is_auto(trait_did) {
return;
ImpliedBoundsContext::TyParam(..) | ImpliedBoundsContext::AssociatedTypeOrImplTrait => {
// Report invalid relaxed bounds.
// FIXME: Since we only call this validation function here in this function, we only
// fully validate relaxed bounds in contexts where we perform
// "sized elaboration". In most cases that doesn't matter because we *usually*
// reject such relaxed bounds outright during AST lowering.
// However, this can easily get out of sync! Ideally, we would perform this step
// where we are guaranteed to catch *all* bounds like in
// `Self::lower_poly_trait_ref`. List of concrete issues:
// FIXME(more_maybe_bounds): We don't call this for trait object tys, supertrait
// bounds, trait alias bounds, assoc type bounds (ATB)!
let bounds = collect_relaxed_bounds(hir_bounds, context);
self.reject_duplicate_relaxed_bounds(bounds);
}
} else {
// Report invalid relaxed bounds.
// FIXME: Since we only call this validation function here in this function, we only
// fully validate relaxed bounds in contexts where we perform
// "sized elaboration". In most cases that doesn't matter because we *usually*
// reject such relaxed bounds outright during AST lowering.
// However, this can easily get out of sync! Ideally, we would perform this step
// where we are guaranteed to catch *all* bounds like in
// `Self::lower_poly_trait_ref`. List of concrete issues:
// FIXME(more_maybe_bounds): We don't call this for trait object tys, supertrait
// bounds, trait alias bounds, assoc type bounds (ATB)!
let bounds = collect_relaxed_bounds(hir_bounds, self_ty_where_predicates);
self.reject_duplicate_relaxed_bounds(bounds);
}
let collected = collect_sizedness_bounds(tcx, hir_bounds, self_ty_where_predicates, span);
let collected = collect_sizedness_bounds(tcx, hir_bounds, context, span);
if (collected.sized.maybe || collected.sized.negative)
&& !collected.sized.positive
&& !collected.meta_sized.any()
@ -217,62 +217,33 @@ impl<'tcx> dyn HirTyLowerer<'tcx> + '_ {
// other explicit ones) - this can happen for trait aliases as well as bounds.
add_trait_bound(tcx, bounds, self_ty, meta_sized_did, span);
} else if !collected.any() {
if trait_did.is_some() {
// If there are no explicit sizedness bounds on a trait then add a default
// `MetaSized` supertrait.
add_trait_bound(tcx, bounds, self_ty, meta_sized_did, span);
} else {
// If there are no explicit sizedness bounds on a parameter then add a default
// `Sized` bound.
let sized_did = tcx.require_lang_item(hir::LangItem::Sized, span);
add_trait_bound(tcx, bounds, self_ty, sized_did, span);
match context {
ImpliedBoundsContext::TraitDef(..) => {
// If there are no explicit sizedness bounds on a trait then add a default
// `MetaSized` supertrait.
add_trait_bound(tcx, bounds, self_ty, meta_sized_did, span);
}
ImpliedBoundsContext::TyParam(..)
| ImpliedBoundsContext::AssociatedTypeOrImplTrait => {
// If there are no explicit sizedness bounds on a parameter then add a default
// `Sized` bound.
let sized_did = tcx.require_lang_item(hir::LangItem::Sized, span);
add_trait_bound(tcx, bounds, self_ty, sized_did, span);
}
}
}
}
/// Adds `experimental_default_bounds` bounds to the supertrait bounds.
pub(crate) fn add_default_super_traits(
&self,
trait_def_id: LocalDefId,
bounds: &mut Vec<(ty::Clause<'tcx>, Span)>,
hir_bounds: &'tcx [hir::GenericBound<'tcx>],
hir_generics: &'tcx hir::Generics<'tcx>,
span: Span,
) {
assert_matches!(self.tcx().def_kind(trait_def_id), DefKind::Trait | DefKind::TraitAlias);
// Supertraits for auto trait are unsound according to the unstable book:
// https://doc.rust-lang.org/beta/unstable-book/language-features/auto-traits.html#supertraits
if self.tcx().trait_is_auto(trait_def_id.to_def_id()) {
return;
}
self.add_default_traits(
bounds,
self.tcx().types.self_param,
hir_bounds,
Some((trait_def_id, hir_generics.predicates)),
span,
);
}
pub(crate) fn add_default_traits(
&self,
bounds: &mut Vec<(ty::Clause<'tcx>, Span)>,
self_ty: Ty<'tcx>,
hir_bounds: &[hir::GenericBound<'tcx>],
self_ty_where_predicates: Option<(LocalDefId, &'tcx [hir::WherePredicate<'tcx>])>,
context: ImpliedBoundsContext<'tcx>,
span: Span,
) {
self.tcx().default_traits().iter().for_each(|default_trait| {
self.add_default_trait(
*default_trait,
bounds,
self_ty,
hir_bounds,
self_ty_where_predicates,
span,
);
self.add_default_trait(*default_trait, bounds, self_ty, hir_bounds, context, span);
});
}
@ -285,15 +256,23 @@ impl<'tcx> dyn HirTyLowerer<'tcx> + '_ {
bounds: &mut Vec<(ty::Clause<'tcx>, Span)>,
self_ty: Ty<'tcx>,
hir_bounds: &[hir::GenericBound<'tcx>],
self_ty_where_predicates: Option<(LocalDefId, &'tcx [hir::WherePredicate<'tcx>])>,
context: ImpliedBoundsContext<'tcx>,
span: Span,
) {
let tcx = self.tcx();
let trait_id = tcx.lang_items().get(trait_);
if let Some(trait_id) = trait_id
&& self.should_add_default_traits(trait_id, hir_bounds, self_ty_where_predicates)
// Supertraits for auto trait are unsound according to the unstable book:
// https://doc.rust-lang.org/beta/unstable-book/language-features/auto-traits.html#supertraits
if let ImpliedBoundsContext::TraitDef(trait_did) = context
&& self.tcx().trait_is_auto(trait_did.into())
{
add_trait_bound(tcx, bounds, self_ty, trait_id, span);
return;
}
if let Some(trait_did) = tcx.lang_items().get(trait_)
&& self.should_add_default_traits(trait_did, hir_bounds, context)
{
add_trait_bound(tcx, bounds, self_ty, trait_did, span);
}
}
@ -302,9 +281,9 @@ impl<'tcx> dyn HirTyLowerer<'tcx> + '_ {
&self,
trait_def_id: DefId,
hir_bounds: &'a [hir::GenericBound<'tcx>],
self_ty_where_predicates: Option<(LocalDefId, &'tcx [hir::WherePredicate<'tcx>])>,
context: ImpliedBoundsContext<'tcx>,
) -> bool {
let collected = collect_bounds(hir_bounds, self_ty_where_predicates, trait_def_id);
let collected = collect_bounds(hir_bounds, context, trait_def_id);
!self.tcx().has_attr(CRATE_DEF_ID, sym::rustc_no_implicit_bounds) && !collected.any()
}

View file

@ -4,9 +4,9 @@ use rustc_errors::codes::*;
use rustc_errors::{
Applicability, Diag, EmissionGuarantee, StashKey, Suggestions, struct_span_code_err,
};
use rustc_hir as hir;
use rustc_hir::def::{DefKind, Res};
use rustc_hir::def_id::DefId;
use rustc_hir::{self as hir, LangItem};
use rustc_lint_defs::builtin::{BARE_TRAIT_OBJECTS, UNUSED_ASSOCIATED_TYPE_BOUNDS};
use rustc_middle::ty::elaborate::ClauseWithSupertraitSpan;
use rustc_middle::ty::{
@ -24,7 +24,8 @@ use tracing::{debug, instrument};
use super::HirTyLowerer;
use crate::errors::SelfInTypeAlias;
use crate::hir_ty_lowering::{
GenericArgCountMismatch, OverlappingAsssocItemConstraints, PredicateFilter, RegionInferReason,
GenericArgCountMismatch, ImpliedBoundsContext, OverlappingAsssocItemConstraints,
PredicateFilter, RegionInferReason,
};
impl<'tcx> dyn HirTyLowerer<'tcx> + '_ {
@ -76,12 +77,26 @@ impl<'tcx> dyn HirTyLowerer<'tcx> + '_ {
.iter()
.map(|&trait_ref| hir::GenericBound::Trait(trait_ref))
.collect::<Vec<_>>(),
None,
ImpliedBoundsContext::AssociatedTypeOrImplTrait,
span,
);
let (elaborated_trait_bounds, elaborated_projection_bounds) =
let (mut elaborated_trait_bounds, elaborated_projection_bounds) =
traits::expand_trait_aliases(tcx, user_written_bounds.iter().copied());
// FIXME(sized-hierarchy): https://github.com/rust-lang/rust/pull/142712#issuecomment-3013231794
debug!(?user_written_bounds, ?elaborated_trait_bounds);
let meta_sized_did = tcx.require_lang_item(LangItem::MetaSized, span);
// Don't strip out `MetaSized` when the user wrote it explicitly, only when it was
// elaborated
if user_written_bounds
.iter()
.all(|(clause, _)| clause.as_trait_clause().map(|p| p.def_id()) != Some(meta_sized_did))
{
elaborated_trait_bounds.retain(|(pred, _)| pred.def_id() != meta_sized_did);
}
debug!(?user_written_bounds, ?elaborated_trait_bounds);
let (regular_traits, mut auto_traits): (Vec<_>, Vec<_>) = elaborated_trait_bounds
.into_iter()
.partition(|(trait_ref, _)| !tcx.trait_is_auto(trait_ref.def_id()));

View file

@ -56,6 +56,19 @@ use crate::hir_ty_lowering::errors::{GenericsArgsErrExtend, prohibit_assoc_item_
use crate::hir_ty_lowering::generics::{check_generic_arg_count, lower_generic_args};
use crate::middle::resolve_bound_vars as rbv;
/// The context in which an implied bound is being added to a item being lowered (i.e. a sizedness
/// trait or a default trait)
#[derive(Clone, Copy)]
pub(crate) enum ImpliedBoundsContext<'tcx> {
/// An implied bound is added to a trait definition (i.e. a new supertrait), used when adding
/// a default `MetaSized` supertrait
TraitDef(LocalDefId),
/// An implied bound is added to a type parameter
TyParam(LocalDefId, &'tcx [hir::WherePredicate<'tcx>]),
/// An implied bound being added in any other context
AssociatedTypeOrImplTrait,
}
/// A path segment that is semantically allowed to have generic arguments.
#[derive(Debug)]
pub struct GenericPathSegment(pub DefId, pub usize);
@ -2513,12 +2526,11 @@ impl<'tcx> dyn HirTyLowerer<'tcx> + '_ {
PredicateFilter::All,
OverlappingAsssocItemConstraints::Allowed,
);
self.add_sizedness_bounds(
self.add_implicit_sizedness_bounds(
&mut bounds,
self_ty,
hir_bounds,
None,
None,
ImpliedBoundsContext::AssociatedTypeOrImplTrait,
hir_ty.span,
);
self.register_trait_ascription_bounds(bounds, hir_ty.hir_id, hir_ty.span);
@ -2611,6 +2623,7 @@ impl<'tcx> dyn HirTyLowerer<'tcx> + '_ {
.span_delayed_bug(ty_span, "invalid base type for range pattern")),
}
}
hir::TyPatKind::NotNull => Ok(ty::PatternKind::NotNull),
hir::TyPatKind::Or(patterns) => {
self.tcx()
.mk_patterns_from_iter(patterns.iter().map(|pat| {

View file

@ -340,6 +340,7 @@ impl<'a, 'tcx> ConstraintContext<'a, 'tcx> {
self.add_constraints_from_const(current, start, variance);
self.add_constraints_from_const(current, end, variance);
}
ty::PatternKind::NotNull => {}
ty::PatternKind::Or(patterns) => {
for pat in patterns {
self.add_constraints_from_pat(current, variance, pat)

View file

@ -1888,6 +1888,10 @@ impl<'a> State<'a> {
self.word("..=");
self.print_const_arg(end);
}
TyPatKind::NotNull => {
self.word_space("not");
self.word("null");
}
TyPatKind::Or(patterns) => {
self.popen();
let mut first = true;

View file

@ -1,7 +1,6 @@
//! A utility module to inspect currently ambiguous obligations in the current context.
use rustc_infer::traits::{self, ObligationCause, PredicateObligations};
use rustc_middle::traits::solve::GoalSource;
use rustc_middle::ty::{self, Ty, TypeVisitableExt};
use rustc_span::Span;
use rustc_trait_selection::solve::Certainty;
@ -127,21 +126,7 @@ impl<'a, 'tcx> ProofTreeVisitor<'tcx> for NestedObligationsForSelfTy<'a, 'tcx> {
let tcx = self.fcx.tcx;
let goal = inspect_goal.goal();
if self.fcx.predicate_has_self_ty(goal.predicate, self.self_ty)
// We do not push the instantiated forms of goals as it would cause any
// aliases referencing bound vars to go from having escaping bound vars to
// being able to be normalized to an inference variable.
//
// This is mostly just a hack as arbitrary nested goals could still contain
// such aliases while having a different `GoalSource`. Closure signature inference
// however can't really handle *every* higher ranked `Fn` goal also being present
// in the form of `?c: Fn<(<?x as Trait<'!a>>::Assoc)`.
//
// This also just better matches the behaviour of the old solver where we do not
// encounter instantiated forms of goals, only nested goals that referred to bound
// vars from instantiated goals.
&& !matches!(inspect_goal.source(), GoalSource::InstantiateHigherRanked)
{
if self.fcx.predicate_has_self_ty(goal.predicate, self.self_ty) {
self.obligations_for_self_ty.push(traits::Obligation::new(
tcx,
self.root_cause.clone(),

View file

@ -1953,7 +1953,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
}
}
}
self.suggest_derive(diag, &[(trait_ref.upcast(self.tcx), None, None)]);
self.suggest_derive(diag, &vec![(trait_ref.upcast(self.tcx), None, None)]);
}
}
}

View file

@ -26,6 +26,7 @@ use tracing::{debug, instrument};
pub(crate) use self::MethodError::*;
use self::probe::{IsSuggestion, ProbeScope};
use crate::FnCtxt;
use crate::method::probe::UnsatisfiedPredicates;
#[derive(Clone, Copy, Debug)]
pub(crate) struct MethodCallee<'tcx> {
@ -71,8 +72,7 @@ pub(crate) enum MethodError<'tcx> {
#[derive(Debug)]
pub(crate) struct NoMatchData<'tcx> {
pub static_candidates: Vec<CandidateSource>,
pub unsatisfied_predicates:
Vec<(ty::Predicate<'tcx>, Option<ty::Predicate<'tcx>>, Option<ObligationCause<'tcx>>)>,
pub unsatisfied_predicates: UnsatisfiedPredicates<'tcx>,
pub out_of_scope_traits: Vec<DefId>,
pub similar_candidate: Option<ty::AssocItem>,
pub mode: probe::Mode,

View file

@ -165,13 +165,12 @@ struct PickDiagHints<'a, 'tcx> {
/// Collects near misses when trait bounds for type parameters are unsatisfied and is only used
/// for error reporting
unsatisfied_predicates: &'a mut Vec<(
ty::Predicate<'tcx>,
Option<ty::Predicate<'tcx>>,
Option<ObligationCause<'tcx>>,
)>,
unsatisfied_predicates: &'a mut UnsatisfiedPredicates<'tcx>,
}
pub(crate) type UnsatisfiedPredicates<'tcx> =
Vec<(ty::Predicate<'tcx>, Option<ty::Predicate<'tcx>>, Option<ObligationCause<'tcx>>)>;
/// Criteria to apply when searching for a given Pick. This is used during
/// the search for potentially shadowed methods to ensure we don't search
/// more candidates than strictly necessary.
@ -1212,11 +1211,7 @@ impl<'a, 'tcx> ProbeContext<'a, 'tcx> {
fn pick_core(
&self,
unsatisfied_predicates: &mut Vec<(
ty::Predicate<'tcx>,
Option<ty::Predicate<'tcx>>,
Option<ObligationCause<'tcx>>,
)>,
unsatisfied_predicates: &mut UnsatisfiedPredicates<'tcx>,
) -> Option<PickResult<'tcx>> {
// Pick stable methods only first, and consider unstable candidates if not found.
self.pick_all_method(&mut PickDiagHints {
@ -1889,11 +1884,7 @@ impl<'a, 'tcx> ProbeContext<'a, 'tcx> {
self_ty: Ty<'tcx>,
instantiate_self_ty_obligations: &[PredicateObligation<'tcx>],
probe: &Candidate<'tcx>,
possibly_unsatisfied_predicates: &mut Vec<(
ty::Predicate<'tcx>,
Option<ty::Predicate<'tcx>>,
Option<ObligationCause<'tcx>>,
)>,
possibly_unsatisfied_predicates: &mut UnsatisfiedPredicates<'tcx>,
) -> ProbeResult {
self.probe(|snapshot| {
let outer_universe = self.universe();

File diff suppressed because it is too large Load diff

View file

@ -50,3 +50,13 @@ macro_rules! static_assert_size {
const _: (usize, usize) = ($size, ::std::mem::size_of::<$ty>());
};
}
#[macro_export]
macro_rules! indexvec {
($expr:expr; $n:expr) => {
IndexVec::from_raw(vec![$expr; $n])
};
($($expr:expr),* $(,)?) => {
IndexVec::from_raw(vec![$($expr),*])
};
}

View file

@ -376,12 +376,6 @@ pub struct Config {
/// enabled. Makes it so that "please report a bug" is hidden, as ICEs with
/// internal features are wontfix, and they are usually the cause of the ICEs.
pub using_internal_features: &'static std::sync::atomic::AtomicBool,
/// All commandline args used to invoke the compiler, with @file args fully expanded.
/// This will only be used within debug info, e.g. in the pdb file on windows
/// This is mainly useful for other tools that reads that debuginfo to figure out
/// how to call the compiler with the same arguments.
pub expanded_args: Vec<String>,
}
/// Initialize jobserver before getting `jobserver::client` and `build_session`.
@ -480,7 +474,6 @@ pub fn run_compiler<R: Send>(config: Config, f: impl FnOnce(&Compiler) -> R + Se
util::rustc_version_str().unwrap_or("unknown"),
config.ice_file,
config.using_internal_features,
config.expanded_args,
);
codegen_backend.init(&sess);

View file

@ -78,7 +78,6 @@ where
"",
None,
&USING_INTERNAL_FEATURES,
Default::default(),
);
let cfg = parse_cfg(sess.dcx(), matches.opt_strs("cfg"));
let cfg = build_configuration(&sess, cfg);

View file

@ -2959,7 +2959,7 @@ impl<'tcx> LateLintPass<'tcx> for AsmLabels {
for c in chars {
// Inside a template format arg, any character is permitted for the
// puproses of label detection because we assume that it can be
// purposes of label detection because we assume that it can be
// replaced with some other valid label string later. `options(raw)`
// asm blocks cannot have format args, so they are excluded from this
// special case.

View file

@ -758,6 +758,7 @@ fn pat_ty_is_known_nonnull<'tcx>(
// to ensure we aren't wrapping over zero.
start > 0 && end >= start
}
ty::PatternKind::NotNull => true,
ty::PatternKind::Or(patterns) => {
patterns.iter().all(|pat| pat_ty_is_known_nonnull(tcx, typing_env, pat))
}
@ -918,7 +919,9 @@ fn get_nullable_type_from_pat<'tcx>(
pat: ty::Pattern<'tcx>,
) -> Option<Ty<'tcx>> {
match *pat {
ty::PatternKind::Range { .. } => get_nullable_type(tcx, typing_env, base),
ty::PatternKind::NotNull | ty::PatternKind::Range { .. } => {
get_nullable_type(tcx, typing_env, base)
}
ty::PatternKind::Or(patterns) => {
let first = get_nullable_type_from_pat(tcx, typing_env, base, patterns[0])?;
for &pat in &patterns[1..] {

View file

@ -271,9 +271,7 @@ extern "C" LLVMTargetMachineRef LLVMRustCreateTargetMachine(
bool TrapUnreachable, bool Singlethread, bool VerboseAsm,
bool EmitStackSizeSection, bool RelaxELFRelocations, bool UseInitArray,
const char *SplitDwarfFile, const char *OutputObjFile,
const char *DebugInfoCompression, bool UseEmulatedTls, const char *Argv0,
size_t Argv0Len, const char *CommandLineArgs, size_t CommandLineArgsLen,
bool UseWasmEH) {
const char *DebugInfoCompression, bool UseEmulatedTls, bool UseWasmEH) {
auto OptLevel = fromRust(RustOptLevel);
auto RM = fromRust(RustReloc);
@ -348,11 +346,6 @@ extern "C" LLVMTargetMachineRef LLVMRustCreateTargetMachine(
Options.EmitStackSizeSection = EmitStackSizeSection;
if (Argv0 != nullptr)
Options.MCOptions.Argv0 = {Argv0, Argv0Len};
if (CommandLineArgs != nullptr)
Options.MCOptions.CommandlineArgs = {CommandLineArgs, CommandLineArgsLen};
#if LLVM_VERSION_GE(21, 0)
TargetMachine *TM = TheTarget->createTargetMachine(Trip, CPU, Feature,
Options, RM, CM, OptLevel);
@ -1141,8 +1134,8 @@ struct LLVMRustThinLTOModule {
// This is copied from `lib/LTO/ThinLTOCodeGenerator.cpp`, not sure what it
// does.
static const GlobalValueSummary *
getFirstDefinitionForLinker(const GlobalValueSummaryList &GVSummaryList) {
static const GlobalValueSummary *getFirstDefinitionForLinker(
ArrayRef<std::unique_ptr<GlobalValueSummary>> GVSummaryList) {
auto StrongDefForLinker = llvm::find_if(
GVSummaryList, [](const std::unique_ptr<GlobalValueSummary> &Summary) {
auto Linkage = Summary->linkage();
@ -1220,9 +1213,13 @@ LLVMRustCreateThinLTOData(LLVMRustThinLTOModule *modules, size_t num_modules,
// being lifted from `lib/LTO/LTO.cpp` as well
DenseMap<GlobalValue::GUID, const GlobalValueSummary *> PrevailingCopy;
for (auto &I : Ret->Index) {
if (I.second.SummaryList.size() > 1)
PrevailingCopy[I.first] =
getFirstDefinitionForLinker(I.second.SummaryList);
#if LLVM_VERSION_GE(22, 0)
const auto &SummaryList = I.second.getSummaryList();
#else
const auto &SummaryList = I.second.SummaryList;
#endif
if (SummaryList.size() > 1)
PrevailingCopy[I.first] = getFirstDefinitionForLinker(SummaryList);
}
auto isPrevailing = [&](GlobalValue::GUID GUID, const GlobalValueSummary *S) {
const auto &Prevailing = PrevailingCopy.find(GUID);
@ -1253,7 +1250,12 @@ LLVMRustCreateThinLTOData(LLVMRustThinLTOModule *modules, size_t num_modules,
// linkage will stay as external, and internal will stay as internal.
std::set<GlobalValue::GUID> ExportedGUIDs;
for (auto &List : Ret->Index) {
for (auto &GVS : List.second.SummaryList) {
#if LLVM_VERSION_GE(22, 0)
const auto &SummaryList = List.second.getSummaryList();
#else
const auto &SummaryList = List.second.SummaryList;
#endif
for (auto &GVS : SummaryList) {
if (GlobalValue::isLocalLinkage(GVS->linkage()))
continue;
auto GUID = GVS->getOriginalName();

View file

@ -245,6 +245,7 @@ enum class LLVMRustAttributeKind {
DeadOnUnwind = 43,
DeadOnReturn = 44,
CapturesReadOnly = 45,
CapturesNone = 46,
};
static Attribute::AttrKind fromRust(LLVMRustAttributeKind Kind) {
@ -339,6 +340,7 @@ static Attribute::AttrKind fromRust(LLVMRustAttributeKind Kind) {
#endif
case LLVMRustAttributeKind::CapturesAddress:
case LLVMRustAttributeKind::CapturesReadOnly:
case LLVMRustAttributeKind::CapturesNone:
report_fatal_error("Should be handled separately");
}
report_fatal_error("bad LLVMRustAttributeKind");
@ -390,6 +392,9 @@ extern "C" void LLVMRustEraseInstFromParent(LLVMValueRef Instr) {
extern "C" LLVMAttributeRef
LLVMRustCreateAttrNoValue(LLVMContextRef C, LLVMRustAttributeKind RustAttr) {
#if LLVM_VERSION_GE(21, 0)
if (RustAttr == LLVMRustAttributeKind::CapturesNone) {
return wrap(Attribute::getWithCaptureInfo(*unwrap(C), CaptureInfo::none()));
}
if (RustAttr == LLVMRustAttributeKind::CapturesAddress) {
return wrap(Attribute::getWithCaptureInfo(
*unwrap(C), CaptureInfo(CaptureComponents::Address)));

View file

@ -2,19 +2,21 @@ use rustc_macros::{Decodable, Encodable, HashStable};
use crate::ty::{Ty, TyCtxt, TypingEnv};
/// Flags that dictate how a parameter is mutated. If the flags are empty, the param is
/// read-only. If non-empty, it is read-only if *all* flags' conditions are met.
/// Summarizes how a parameter (a return place or an argument) is used inside a MIR body.
#[derive(Clone, Copy, PartialEq, Debug, Decodable, Encodable, HashStable)]
pub struct DeducedReadOnlyParam(u8);
pub struct UsageSummary(u8);
bitflags::bitflags! {
impl DeducedReadOnlyParam: u8 {
/// This parameter is dropped. It is read-only if `!needs_drop`.
const IF_NO_DROP = 1 << 0;
/// This parameter is borrowed. It is read-only if `Freeze`.
const IF_FREEZE = 1 << 1;
/// This parameter is mutated. It is never read-only.
const MUTATED = 1 << 2;
impl UsageSummary: u8 {
/// This parameter is dropped when it `needs_drop`.
const DROP = 1 << 0;
/// There is a shared borrow to this parameter.
/// It allows for mutation unless parameter is `Freeze`.
const SHARED_BORROW = 1 << 1;
/// This parameter is mutated (excluding through a drop or a shared borrow).
const MUTATE = 1 << 2;
/// This parameter is captured (excluding through a drop).
const CAPTURE = 1 << 3;
}
}
@ -24,43 +26,53 @@ bitflags::bitflags! {
/// These can be useful for optimization purposes when a function is directly called. We compute
/// them and store them into the crate metadata so that downstream crates can make use of them.
///
/// Right now, we only have `read_only`, but `no_capture` and `no_alias` might be useful in the
/// Right now, we have `readonly` and `captures(none)`, but `no_alias` might be useful in the
/// future.
#[derive(Clone, Copy, PartialEq, Debug, Decodable, Encodable, HashStable)]
pub struct DeducedParamAttrs {
/// The parameter is marked immutable in the function.
pub read_only: DeducedReadOnlyParam,
}
// By default, consider the parameters to be mutated.
impl Default for DeducedParamAttrs {
#[inline]
fn default() -> DeducedParamAttrs {
DeducedParamAttrs { read_only: DeducedReadOnlyParam::MUTATED }
}
pub usage: UsageSummary,
}
impl DeducedParamAttrs {
/// Returns true if no attributes have been deduced.
#[inline]
pub fn is_default(self) -> bool {
self.read_only.contains(DeducedReadOnlyParam::MUTATED)
self.usage.contains(UsageSummary::MUTATE | UsageSummary::CAPTURE)
}
/// For parameters passed indirectly, returns true if pointer is never written through.
pub fn read_only<'tcx>(
&self,
tcx: TyCtxt<'tcx>,
typing_env: TypingEnv<'tcx>,
ty: Ty<'tcx>,
) -> bool {
let read_only = self.read_only;
// We have to check *all* set bits; only if all checks pass is this truly read-only.
if read_only.contains(DeducedReadOnlyParam::MUTATED) {
// Only if all checks pass is this truly read-only.
if self.usage.contains(UsageSummary::MUTATE) {
return false;
}
if read_only.contains(DeducedReadOnlyParam::IF_NO_DROP) && ty.needs_drop(tcx, typing_env) {
if self.usage.contains(UsageSummary::DROP) && ty.needs_drop(tcx, typing_env) {
return false;
}
if read_only.contains(DeducedReadOnlyParam::IF_FREEZE) && !ty.is_freeze(tcx, typing_env) {
if self.usage.contains(UsageSummary::SHARED_BORROW) && !ty.is_freeze(tcx, typing_env) {
return false;
}
true
}
/// For parameters passed indirectly, returns true if pointer is not captured, i.e., its
/// address is not captured, and pointer is used neither for reads nor writes after function
/// returns.
pub fn captures_none<'tcx>(
&self,
tcx: TyCtxt<'tcx>,
typing_env: TypingEnv<'tcx>,
ty: Ty<'tcx>,
) -> bool {
if self.usage.contains(UsageSummary::CAPTURE) {
return false;
}
if self.usage.contains(UsageSummary::DROP) && ty.needs_drop(tcx, typing_env) {
return false;
}
true

View file

@ -1092,8 +1092,6 @@ impl<'tcx> Debug for Rvalue<'tcx> {
NullaryOp(ref op, ref t) => {
let t = with_no_trimmed_paths!(format!("{}", t));
match op {
NullOp::SizeOf => write!(fmt, "SizeOf({t})"),
NullOp::AlignOf => write!(fmt, "AlignOf({t})"),
NullOp::OffsetOf(fields) => write!(fmt, "OffsetOf({t}, {fields:?})"),
NullOp::UbChecks => write!(fmt, "UbChecks()"),
NullOp::ContractChecks => write!(fmt, "ContractChecks()"),

View file

@ -597,6 +597,18 @@ impl<'tcx> Operand<'tcx> {
}))
}
/// Convenience helper to make a constant that refers to the given `DefId` and args. Since this
/// is used to synthesize MIR, assumes `user_ty` is None.
pub fn unevaluated_constant(
tcx: TyCtxt<'tcx>,
def_id: DefId,
args: &[GenericArg<'tcx>],
span: Span,
) -> Self {
let const_ = Const::from_unevaluated(tcx, def_id).instantiate(tcx, args);
Operand::Constant(Box::new(ConstOperand { span, user_ty: None, const_ }))
}
pub fn is_move(&self) -> bool {
matches!(self, Operand::Move(..))
}
@ -782,9 +794,7 @@ impl<'tcx> Rvalue<'tcx> {
op.ty(tcx, arg_ty)
}
Rvalue::Discriminant(ref place) => place.ty(local_decls, tcx).ty.discriminant_ty(tcx),
Rvalue::NullaryOp(NullOp::SizeOf | NullOp::AlignOf | NullOp::OffsetOf(..), _) => {
tcx.types.usize
}
Rvalue::NullaryOp(NullOp::OffsetOf(..), _) => tcx.types.usize,
Rvalue::NullaryOp(NullOp::ContractChecks, _)
| Rvalue::NullaryOp(NullOp::UbChecks, _) => tcx.types.bool,
Rvalue::Aggregate(ref ak, ref ops) => match **ak {
@ -853,7 +863,7 @@ impl BorrowKind {
impl<'tcx> NullOp<'tcx> {
pub fn ty(&self, tcx: TyCtxt<'tcx>) -> Ty<'tcx> {
match self {
NullOp::SizeOf | NullOp::AlignOf | NullOp::OffsetOf(_) => tcx.types.usize,
NullOp::OffsetOf(_) => tcx.types.usize,
NullOp::UbChecks | NullOp::ContractChecks => tcx.types.bool,
}
}

View file

@ -1563,10 +1563,6 @@ pub enum AggregateKind<'tcx> {
#[derive(Copy, Clone, Debug, PartialEq, Eq, TyEncodable, TyDecodable, Hash, HashStable)]
pub enum NullOp<'tcx> {
/// Returns the size of a value of that type
SizeOf,
/// Returns the minimum alignment of a type
AlignOf,
/// Returns the offset of a field
OffsetOf(&'tcx List<(VariantIdx, FieldIdx)>),
/// Returns whether we should perform some UB-checking at runtime.

View file

@ -501,6 +501,8 @@ pub use helper::*;
mod helper {
use super::*;
// Note: the methods below use a `slice.chain(Option).chain(Option)` pattern so that all paths
// produce an iterator with the same concrete type.
pub type Successors<'a> = impl DoubleEndedIterator<Item = BasicBlock> + 'a;
impl SwitchTargets {
@ -510,7 +512,7 @@ mod helper {
#[define_opaque(Successors)]
pub fn successors_for_value(&self, value: u128) -> Successors<'_> {
let target = self.target_for_value(value);
(&[]).into_iter().copied().chain(Some(target).into_iter().chain(None))
(&[]).into_iter().copied().chain(Some(target)).chain(None)
}
}
@ -522,10 +524,7 @@ mod helper {
match *self {
// 3-successors for async drop: target, unwind, dropline (parent coroutine drop)
Drop { target: ref t, unwind: UnwindAction::Cleanup(u), drop: Some(d), .. } => {
slice::from_ref(t)
.into_iter()
.copied()
.chain(Some(u).into_iter().chain(Some(d)))
slice::from_ref(t).into_iter().copied().chain(Some(u)).chain(Some(d))
}
// 2-successors
Call { target: Some(ref t), unwind: UnwindAction::Cleanup(u), .. }
@ -534,7 +533,7 @@ mod helper {
| Drop { target: ref t, unwind: _, drop: Some(u), .. }
| Assert { target: ref t, unwind: UnwindAction::Cleanup(u), .. }
| FalseUnwind { real_target: ref t, unwind: UnwindAction::Cleanup(u) } => {
slice::from_ref(t).into_iter().copied().chain(Some(u).into_iter().chain(None))
slice::from_ref(t).into_iter().copied().chain(Some(u)).chain(None)
}
// single successor
Goto { target: ref t }
@ -544,7 +543,7 @@ mod helper {
| Drop { target: ref t, unwind: _, .. }
| Assert { target: ref t, unwind: _, .. }
| FalseUnwind { real_target: ref t, unwind: _ } => {
slice::from_ref(t).into_iter().copied().chain(None.into_iter().chain(None))
slice::from_ref(t).into_iter().copied().chain(None).chain(None)
}
// No successors
UnwindResume
@ -554,23 +553,24 @@ mod helper {
| Unreachable
| TailCall { .. }
| Call { target: None, unwind: _, .. } => {
(&[]).into_iter().copied().chain(None.into_iter().chain(None))
(&[]).into_iter().copied().chain(None).chain(None)
}
// Multiple successors
InlineAsm { ref targets, unwind: UnwindAction::Cleanup(u), .. } => {
targets.iter().copied().chain(Some(u).into_iter().chain(None))
targets.iter().copied().chain(Some(u)).chain(None)
}
InlineAsm { ref targets, unwind: _, .. } => {
targets.iter().copied().chain(None.into_iter().chain(None))
targets.iter().copied().chain(None).chain(None)
}
SwitchInt { ref targets, .. } => {
targets.targets.iter().copied().chain(None.into_iter().chain(None))
targets.targets.iter().copied().chain(None).chain(None)
}
// FalseEdge
FalseEdge { ref real_target, imaginary_target } => slice::from_ref(real_target)
.into_iter()
.copied()
.chain(Some(imaginary_target).into_iter().chain(None)),
.chain(Some(imaginary_target))
.chain(None),
}
}

View file

@ -160,6 +160,9 @@ pub enum SelectionCandidate<'tcx> {
/// types generated for a fn pointer type (e.g., `fn(int) -> int`)
FnPointerCandidate,
/// Builtin impl of the `PointerLike` trait.
PointerLikeCandidate,
TraitAliasCandidate,
/// Matching `dyn Trait` with a supertrait of `Trait`. The index is the

Some files were not shown because too many files have changed in this diff Show more