Auto merge of #145601 - jieyouxu:rollup-t5mbqhc, r=jieyouxu

Rollup of 10 pull requests

Successful merges:

 - rust-lang/rust#145538 (bufreader::Buffer::backshift: don't move the uninit bytes)
 - rust-lang/rust#145542 (triagebot: Don't warn no-mentions on subtree updates)
 - rust-lang/rust#145549 (Update rust maintainers in openharmony.md)
 - rust-lang/rust#145550 (Avoid using `()` in `derive(From)` output.)
 - rust-lang/rust#145556 (Allow stability attributes on extern crates)
 - rust-lang/rust#145560 (Remove unused `PartialOrd`/`Ord` from bootstrap)
 - rust-lang/rust#145568 (ignore frontmatters in `TokenStream::new`)
 - rust-lang/rust#145571 (remove myself from some adhoc-groups and pings)
 - rust-lang/rust#145576 (Add change tracker entry for `--timings`)
 - rust-lang/rust#145578 (Add VEXos "linked files" support to `armv7a-vex-v5`)

r? `@ghost`
`@rustbot` modify labels: rollup
This commit is contained in:
bors 2025-08-19 23:52:06 +00:00
commit f605b57042
22 changed files with 127 additions and 133 deletions

View file

@ -54,6 +54,7 @@ const ALLOWED_TARGETS: AllowedTargets = AllowedTargets::AllowList(&[
Allow(Target::Static),
Allow(Target::ForeignFn),
Allow(Target::ForeignStatic),
Allow(Target::ExternCrate),
]);
#[derive(Default)]

View file

@ -27,21 +27,39 @@ pub(crate) fn expand_deriving_from(
cx.dcx().bug("derive(From) used on something else than an item");
};
// #[derive(From)] is currently usable only on structs with exactly one field.
let field = if let ItemKind::Struct(_, _, data) = &item.kind
&& let [field] = data.fields()
{
Some(field.clone())
} else {
None
let err_span = || {
let item_span = item.kind.ident().map(|ident| ident.span).unwrap_or(item.span);
MultiSpan::from_spans(vec![span, item_span])
};
let from_type = match &field {
Some(field) => Ty::AstTy(field.ty.clone()),
// We don't have a type to put into From<...> if we don't have a single field, so just put
// unit there.
None => Ty::Unit,
// `#[derive(From)]` is currently usable only on structs with exactly one field.
let field = match &item.kind {
ItemKind::Struct(_, _, data) => {
if let [field] = data.fields() {
Ok(field.clone())
} else {
let guar = cx.dcx().emit_err(errors::DeriveFromWrongFieldCount {
span: err_span(),
multiple_fields: data.fields().len() > 1,
});
Err(guar)
}
}
ItemKind::Enum(_, _, _) | ItemKind::Union(_, _, _) => {
let guar = cx.dcx().emit_err(errors::DeriveFromWrongTarget {
span: err_span(),
kind: &format!("{} {}", item.kind.article(), item.kind.descr()),
});
Err(guar)
}
_ => cx.dcx().bug("Invalid derive(From) ADT input"),
};
let from_type = Ty::AstTy(match field {
Ok(ref field) => field.ty.clone(),
Err(guar) => cx.ty(span, ast::TyKind::Err(guar)),
});
let path =
Path::new_(pathvec_std!(convert::From), vec![Box::new(from_type.clone())], PathKind::Std);
@ -71,34 +89,17 @@ pub(crate) fn expand_deriving_from(
attributes: thin_vec![cx.attr_word(sym::inline, span)],
fieldless_variants_strategy: FieldlessVariantsStrategy::Default,
combine_substructure: combine_substructure(Box::new(|cx, span, substructure| {
let Some(field) = &field else {
let item_span = item.kind.ident().map(|ident| ident.span).unwrap_or(item.span);
let err_span = MultiSpan::from_spans(vec![span, item_span]);
let error = match &item.kind {
ItemKind::Struct(_, _, data) => {
cx.dcx().emit_err(errors::DeriveFromWrongFieldCount {
span: err_span,
multiple_fields: data.fields().len() > 1,
})
}
ItemKind::Enum(_, _, _) | ItemKind::Union(_, _, _) => {
cx.dcx().emit_err(errors::DeriveFromWrongTarget {
span: err_span,
kind: &format!("{} {}", item.kind.article(), item.kind.descr()),
})
}
_ => cx.dcx().bug("Invalid derive(From) ADT input"),
};
return BlockOrExpr::new_expr(DummyResult::raw_expr(span, Some(error)));
let field = match field {
Ok(ref field) => field,
Err(guar) => {
return BlockOrExpr::new_expr(DummyResult::raw_expr(span, Some(guar)));
}
};
let self_kw = Ident::new(kw::SelfUpper, span);
let expr: Box<ast::Expr> = match substructure.fields {
SubstructureFields::StaticStruct(variant, _) => match variant {
// Self {
// field: value
// }
// Self { field: value }
VariantData::Struct { .. } => cx.expr_struct_ident(
span,
self_kw,

View file

@ -2435,6 +2435,13 @@ fn linker_with_args(
// Passed after compiler-generated options to support manual overriding when necessary.
add_user_defined_link_args(cmd, sess);
// ------------ Builtin configurable linker scripts ------------
// The user's link args should be able to overwrite symbols in the compiler's
// linker script that were weakly defined (i.e. defined with `PROVIDE()`). For this
// to work correctly, the user needs to be able to specify linker arguments like
// `--defsym` and `--script` *before* any builtin linker scripts are evaluated.
add_link_script(cmd, sess, tmpdir, crate_type);
// ------------ Object code and libraries, order-dependent ------------
// Post-link CRT objects.
@ -2469,8 +2476,6 @@ fn add_order_independent_options(
let apple_sdk_root = add_apple_sdk(cmd, sess, flavor);
add_link_script(cmd, sess, tmpdir, crate_type);
if sess.target.os == "fuchsia"
&& crate_type == CrateType::Executable
&& !matches!(flavor, LinkerFlavor::Gnu(Cc::Yes, _))

View file

@ -49,6 +49,7 @@ pub(crate) fn lex_token_trees<'psess, 'src>(
mut src: &'src str,
mut start_pos: BytePos,
override_span: Option<Span>,
frontmatter_allowed: FrontmatterAllowed,
) -> Result<TokenStream, Vec<Diag<'psess>>> {
// Skip `#!`, if present.
if let Some(shebang_len) = rustc_lexer::strip_shebang(src) {
@ -56,7 +57,7 @@ pub(crate) fn lex_token_trees<'psess, 'src>(
start_pos = start_pos + BytePos::from_usize(shebang_len);
}
let cursor = Cursor::new(src, FrontmatterAllowed::Yes);
let cursor = Cursor::new(src, frontmatter_allowed);
let mut lexer = Lexer {
psess,
start_pos,

View file

@ -20,6 +20,7 @@ use rustc_ast::tokenstream::TokenStream;
use rustc_ast::{AttrItem, Attribute, MetaItemInner, token};
use rustc_ast_pretty::pprust;
use rustc_errors::{Diag, EmissionGuarantee, FatalError, PResult, pluralize};
use rustc_lexer::FrontmatterAllowed;
use rustc_session::parse::ParseSess;
use rustc_span::source_map::SourceMap;
use rustc_span::{FileName, SourceFile, Span};
@ -146,7 +147,7 @@ fn new_parser_from_source_file(
source_file: Arc<SourceFile>,
) -> Result<Parser<'_>, Vec<Diag<'_>>> {
let end_pos = source_file.end_position();
let stream = source_file_to_stream(psess, source_file, None)?;
let stream = source_file_to_stream(psess, source_file, None, FrontmatterAllowed::Yes)?;
let mut parser = Parser::new(psess, stream, None);
if parser.token == token::Eof {
parser.token.span = Span::new(end_pos, end_pos, parser.token.span.ctxt(), None);
@ -161,7 +162,9 @@ pub fn source_str_to_stream(
override_span: Option<Span>,
) -> Result<TokenStream, Vec<Diag<'_>>> {
let source_file = psess.source_map().new_source_file(name, source);
source_file_to_stream(psess, source_file, override_span)
// used mainly for `proc_macro` and the like, not for our parsing purposes, so don't parse
// frontmatters as frontmatters.
source_file_to_stream(psess, source_file, override_span, FrontmatterAllowed::No)
}
/// Given a source file, produces a sequence of token trees. Returns any buffered errors from
@ -170,6 +173,7 @@ fn source_file_to_stream<'psess>(
psess: &'psess ParseSess,
source_file: Arc<SourceFile>,
override_span: Option<Span>,
frontmatter_allowed: FrontmatterAllowed,
) -> Result<TokenStream, Vec<Diag<'psess>>> {
let src = source_file.src.as_ref().unwrap_or_else(|| {
psess.dcx().bug(format!(
@ -178,7 +182,13 @@ fn source_file_to_stream<'psess>(
));
});
lexer::lex_token_trees(psess, src.as_str(), source_file.start_pos, override_span)
lexer::lex_token_trees(
psess,
src.as_str(),
source_file.start_pos,
override_span,
frontmatter_allowed,
)
}
/// Runs the given subparser `f` on the tokens of the given `attr`'s item.

View file

@ -17,15 +17,25 @@ PROVIDE(__vcodesig_type = 0); /* V5_SIG_TYPE_USER */
PROVIDE(__vcodesig_owner = 2); /* V5_SIG_OWNER_PARTNER */
PROVIDE(__vcodesig_options = 0); /* none (0) */
PROVIDE(__user_ram_start = 0x03800000);
PROVIDE(__user_ram_length = 48M);
PROVIDE(__user_ram_end = __user_ram_start + __user_ram_length); /* 0x8000000 */
__user_ram_start = 0x03800000;
__user_ram_end = 0x08000000;
/* (0x04800000 =) 72 MiB length */
__user_ram_length = __user_ram_end - __user_ram_start;
PROVIDE(__code_signature_length = 0x20);
/*
* VEXos provides a method for pre-loading a "linked file" at a specified
* address in User RAM, conventionally near the end, after the primary
* program binary. We need to be sure not to place any data in that location,
* so we allow the user of this linker script to specify the start address of
* this blob.
*/
PROVIDE(__linked_file_length = 0);
PROVIDE(__linked_file_end = __user_ram_end);
PROVIDE(__linked_file_start = __linked_file_end - __linked_file_length);
PROVIDE(__stack_length = 4M);
PROVIDE(__heap_end = __user_ram_end - __stack_length);
PROVIDE(__user_length = __heap_start - __user_ram_start);
PROVIDE(__stack_top = __linked_file_start);
PROVIDE(__stack_bottom = __linked_file_start - __stack_length);
MEMORY {
USER_RAM (RWX) : ORIGIN = __user_ram_start, LENGTH = __user_ram_length
@ -44,7 +54,7 @@ SECTIONS {
LONG(__vcodesig_options)
FILL(0)
. = __user_ram_start + __code_signature_length;
. = __user_ram_start + 0x20;
} > USER_RAM
/*
@ -125,7 +135,8 @@ SECTIONS {
*/
.heap (NOLOAD) : {
__heap_start = .;
. = __heap_end;
. = __stack_bottom;
__heap_end = .;
} > USER_RAM
.stack (NOLOAD) : ALIGN(8) {

View file

@ -122,7 +122,7 @@ impl Buffer {
/// Remove bytes that have already been read from the buffer.
pub fn backshift(&mut self) {
self.buf.copy_within(self.pos.., 0);
self.buf.copy_within(self.pos..self.filled, 0);
self.filled -= self.pos;
self.pos = 0;
}

View file

@ -39,7 +39,7 @@ use crate::{
};
/// Build a standard library for the given `target` using the given `build_compiler`.
#[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)]
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub struct Std {
pub target: TargetSelection,
/// Compiler that builds the standard library.
@ -949,7 +949,7 @@ pub struct BuiltRustc {
/// so that it can compile build scripts and proc macros when building this `rustc`.
/// - Makes sure that `build_compiler` has a standard library prepared for `target`,
/// so that the built `rustc` can *link to it* and use it at runtime.
#[derive(Debug, PartialOrd, Ord, Clone, PartialEq, Eq, Hash)]
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub struct Rustc {
/// The target on which rustc will run (its host).
pub target: TargetSelection,
@ -1960,7 +1960,7 @@ impl Step for Sysroot {
/// linker wrappers (LLD, LLVM bitcode linker, etc.).
///
/// This will assemble a compiler in `build/$target/stage$stage`.
#[derive(Debug, PartialOrd, Ord, Clone, PartialEq, Eq, Hash)]
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub struct Assemble {
/// The compiler which we will produce in this step. Assemble itself will
/// take care of ensuring that the necessary prerequisites to do so exist,

View file

@ -54,7 +54,7 @@ fn should_build_extended_tool(builder: &Builder<'_>, tool: &str) -> bool {
builder.config.tools.as_ref().is_none_or(|tools| tools.contains(tool))
}
#[derive(Debug, PartialOrd, Ord, Clone, Hash, PartialEq, Eq)]
#[derive(Debug, Clone, Hash, PartialEq, Eq)]
pub struct Docs {
pub host: TargetSelection,
}
@ -91,7 +91,7 @@ impl Step for Docs {
}
}
#[derive(Debug, PartialOrd, Ord, Clone, Hash, PartialEq, Eq)]
#[derive(Debug, Clone, Hash, PartialEq, Eq)]
pub struct JsonDocs {
build_compiler: Compiler,
target: TargetSelection,
@ -354,7 +354,7 @@ fn get_cc_search_dirs(
(bin_path, lib_path)
}
#[derive(Debug, PartialOrd, Ord, Clone, Hash, PartialEq, Eq)]
#[derive(Debug, Clone, Hash, PartialEq, Eq)]
pub struct Mingw {
pub host: TargetSelection,
}
@ -394,7 +394,7 @@ impl Step for Mingw {
}
}
#[derive(Debug, PartialOrd, Ord, Clone, Hash, PartialEq, Eq)]
#[derive(Debug, Clone, Hash, PartialEq, Eq)]
pub struct Rustc {
pub compiler: Compiler,
}
@ -730,7 +730,7 @@ fn copy_target_libs(
}
}
#[derive(Debug, PartialOrd, Ord, Clone, Hash, PartialEq, Eq)]
#[derive(Debug, Clone, Hash, PartialEq, Eq)]
pub struct Std {
pub compiler: Compiler,
pub target: TargetSelection,
@ -785,7 +785,7 @@ impl Step for Std {
/// `rust.download-rustc`.
///
/// (Don't confuse this with [`RustDev`], without the `c`!)
#[derive(Debug, PartialOrd, Ord, Clone, Hash, PartialEq, Eq)]
#[derive(Debug, Clone, Hash, PartialEq, Eq)]
pub struct RustcDev {
pub compiler: Compiler,
pub target: TargetSelection,
@ -1026,7 +1026,7 @@ fn copy_src_dirs(
}
}
#[derive(Debug, PartialOrd, Ord, Clone, Hash, PartialEq, Eq)]
#[derive(Debug, Clone, Hash, PartialEq, Eq)]
pub struct Src;
impl Step for Src {
@ -1087,7 +1087,7 @@ impl Step for Src {
}
}
#[derive(Debug, PartialOrd, Ord, Clone, Hash, PartialEq, Eq)]
#[derive(Debug, Clone, Hash, PartialEq, Eq)]
pub struct PlainSourceTarball;
impl Step for PlainSourceTarball {
@ -1233,7 +1233,7 @@ impl Step for PlainSourceTarball {
}
}
#[derive(Debug, PartialOrd, Ord, Clone, Hash, PartialEq, Eq)]
#[derive(Debug, Clone, Hash, PartialEq, Eq)]
pub struct Cargo {
pub build_compiler: Compiler,
pub target: TargetSelection,
@ -1287,7 +1287,7 @@ impl Step for Cargo {
}
}
#[derive(Debug, PartialOrd, Ord, Clone, Hash, PartialEq, Eq)]
#[derive(Debug, Clone, Hash, PartialEq, Eq)]
pub struct RustAnalyzer {
pub build_compiler: Compiler,
pub target: TargetSelection,
@ -1563,7 +1563,7 @@ impl Step for Rustfmt {
}
}
#[derive(Debug, PartialOrd, Ord, Clone, Hash, PartialEq, Eq)]
#[derive(Debug, Clone, Hash, PartialEq, Eq)]
pub struct Extended {
stage: u32,
host: TargetSelection,
@ -2404,7 +2404,7 @@ impl Step for LlvmTools {
/// Distributes the `llvm-bitcode-linker` tool so that it can be used by a compiler whose host
/// is `target`.
#[derive(Debug, PartialOrd, Ord, Clone, Hash, PartialEq, Eq)]
#[derive(Debug, Clone, Hash, PartialEq, Eq)]
pub struct LlvmBitcodeLinker {
/// The linker will be compiled by this compiler.
pub build_compiler: Compiler,

View file

@ -580,7 +580,7 @@ impl Step for SharedAssets {
}
/// Document the standard library using `build_compiler`.
#[derive(Debug, Clone, Hash, PartialEq, Eq, PartialOrd, Ord)]
#[derive(Debug, Clone, Hash, PartialEq, Eq)]
pub struct Std {
build_compiler: Compiler,
target: TargetSelection,
@ -715,7 +715,7 @@ impl Step for Std {
/// or remote link.
const STD_PUBLIC_CRATES: [&str; 5] = ["core", "alloc", "std", "proc_macro", "test"];
#[derive(Debug, Copy, Clone, Hash, PartialEq, Eq, PartialOrd, Ord)]
#[derive(Debug, Copy, Clone, Hash, PartialEq, Eq)]
pub enum DocumentationFormat {
Html,
Json,
@ -1230,7 +1230,7 @@ fn symlink_dir_force(config: &Config, original: &Path, link: &Path) {
}
/// Builds the Rust compiler book.
#[derive(Ord, PartialOrd, Debug, Clone, Hash, PartialEq, Eq)]
#[derive(Debug, Clone, Hash, PartialEq, Eq)]
pub struct RustcBook {
build_compiler: Compiler,
target: TargetSelection,
@ -1334,7 +1334,7 @@ impl Step for RustcBook {
/// Documents the reference.
/// It has to always be done using a stage 1+ compiler, because it references in-tree
/// compiler/stdlib concepts.
#[derive(Ord, PartialOrd, Debug, Clone, Hash, PartialEq, Eq)]
#[derive(Debug, Clone, Hash, PartialEq, Eq)]
pub struct Reference {
build_compiler: Compiler,
target: TargetSelection,

View file

@ -17,7 +17,7 @@ use crate::core::config::flags::get_completion;
use crate::utils::exec::command;
use crate::{Mode, t};
#[derive(Debug, PartialOrd, Ord, Clone, Hash, PartialEq, Eq)]
#[derive(Debug, Clone, Hash, PartialEq, Eq)]
pub struct BuildManifest;
impl Step for BuildManifest {
@ -56,7 +56,7 @@ impl Step for BuildManifest {
}
}
#[derive(Debug, PartialOrd, Ord, Clone, Hash, PartialEq, Eq)]
#[derive(Debug, Clone, Hash, PartialEq, Eq)]
pub struct BumpStage0;
impl Step for BumpStage0 {
@ -78,7 +78,7 @@ impl Step for BumpStage0 {
}
}
#[derive(Debug, PartialOrd, Ord, Clone, Hash, PartialEq, Eq)]
#[derive(Debug, Clone, Hash, PartialEq, Eq)]
pub struct ReplaceVersionPlaceholder;
impl Step for ReplaceVersionPlaceholder {
@ -169,7 +169,7 @@ impl Step for Miri {
}
}
#[derive(Debug, PartialOrd, Ord, Clone, Hash, PartialEq, Eq)]
#[derive(Debug, Clone, Hash, PartialEq, Eq)]
pub struct CollectLicenseMetadata;
impl Step for CollectLicenseMetadata {
@ -200,7 +200,7 @@ impl Step for CollectLicenseMetadata {
}
}
#[derive(Debug, PartialOrd, Ord, Clone, Hash, PartialEq, Eq)]
#[derive(Debug, Clone, Hash, PartialEq, Eq)]
pub struct GenerateCopyright;
impl Step for GenerateCopyright {
@ -264,7 +264,7 @@ impl Step for GenerateCopyright {
}
}
#[derive(Debug, PartialOrd, Ord, Clone, Hash, PartialEq, Eq)]
#[derive(Debug, Clone, Hash, PartialEq, Eq)]
pub struct GenerateWindowsSys;
impl Step for GenerateWindowsSys {
@ -326,7 +326,7 @@ impl Step for GenerateCompletions {
}
}
#[derive(Debug, PartialOrd, Ord, Clone, Hash, PartialEq, Eq)]
#[derive(Debug, Clone, Hash, PartialEq, Eq)]
pub struct UnicodeTableGenerator;
impl Step for UnicodeTableGenerator {
@ -348,7 +348,7 @@ impl Step for UnicodeTableGenerator {
}
}
#[derive(Debug, PartialOrd, Ord, Clone, Hash, PartialEq, Eq)]
#[derive(Debug, Clone, Hash, PartialEq, Eq)]
pub struct FeaturesStatusDump;
impl Step for FeaturesStatusDump {
@ -408,7 +408,7 @@ impl Step for CyclicStep {
///
/// The coverage-dump tool is an internal detail of coverage tests, so this run
/// step is only needed when testing coverage-dump manually.
#[derive(Debug, PartialOrd, Ord, Clone, Hash, PartialEq, Eq)]
#[derive(Debug, Clone, Hash, PartialEq, Eq)]
pub struct CoverageDump;
impl Step for CoverageDump {

View file

@ -2732,7 +2732,7 @@ fn prepare_cargo_test(
/// FIXME(Zalathar): Try to split this into two separate steps: a user-visible
/// step for testing standard library crates, and an internal step used for both
/// library crates and compiler crates.
#[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)]
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub struct Crate {
pub compiler: Compiler,
pub target: TargetSelection,
@ -3747,7 +3747,7 @@ impl Step for TestFloatParse {
/// Runs the tool `src/tools/collect-license-metadata` in `ONLY_CHECK=1` mode,
/// which verifies that `license-metadata.json` is up-to-date and therefore
/// running the tool normally would not update anything.
#[derive(Debug, PartialOrd, Ord, Clone, Hash, PartialEq, Eq)]
#[derive(Debug, Clone, Hash, PartialEq, Eq)]
pub struct CollectLicenseMetadata;
impl Step for CollectLicenseMetadata {

View file

@ -324,7 +324,7 @@ impl FromStr for LlvmLibunwind {
}
}
#[derive(Debug, Default, Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)]
#[derive(Debug, Default, Copy, Clone, PartialEq, Eq, Hash)]
pub enum SplitDebuginfo {
Packed,
Unpacked,

View file

@ -14,7 +14,7 @@ pub struct TargetSelection {
}
/// Newtype over `Vec<TargetSelection>` so we can implement custom parsing logic
#[derive(Clone, Default, PartialEq, Eq, PartialOrd, Ord, Hash, Debug)]
#[derive(Clone, Default, PartialEq, Eq, Hash, Debug)]
pub struct TargetSelectionList(pub Vec<TargetSelection>);
pub fn target_selection_list(s: &str) -> Result<TargetSelectionList, String> {

View file

@ -279,7 +279,7 @@ pub enum DependencyType {
///
/// These entries currently correspond to the various output directories of the
/// build system, with each mod generating output in a different directory.
#[derive(Debug, Hash, Clone, Copy, PartialEq, Eq, PartialOrd, Ord)]
#[derive(Debug, Hash, Clone, Copy, PartialEq, Eq)]
pub enum Mode {
/// Build the standard library, placing output in the "stageN-std" directory.
Std,
@ -357,7 +357,7 @@ pub enum RemapScheme {
NonCompiler,
}
#[derive(Debug, Hash, Clone, Copy, PartialEq, Eq, PartialOrd, Ord)]
#[derive(Debug, Hash, Clone, Copy, PartialEq, Eq)]
pub enum CLang {
C,
Cxx,

View file

@ -506,4 +506,9 @@ pub const CONFIG_CHANGE_HISTORY: &[ChangeInfo] = &[
severity: ChangeSeverity::Warning,
summary: "It is no longer possible to `x clippy` with stage 0. All clippy commands have to be on stage 1+.",
},
ChangeInfo {
change_id: 145379,
severity: ChangeSeverity::Info,
summary: "Build/check now supports forwarding `--timings` flag to cargo.",
},
];

View file

@ -16,7 +16,7 @@ system.
## Target maintainers
[@Amanieu](https://github.com/Amanieu)
[@lubinglun](https://github.com/lubinglun)
[@cceerczw](https://github.com/cceerczw)
## Requirements

View file

@ -29,8 +29,6 @@ struct S4 {
enum E1 {}
#[derive(From)]
//~^ ERROR the size for values of type `T` cannot be known at compilation time [E0277]
//~| ERROR the size for values of type `T` cannot be known at compilation time [E0277]
struct SUnsizedField<T: ?Sized> {
last: T,
//~^ ERROR the size for values of type `T` cannot be known at compilation time [E0277]

View file

@ -54,45 +54,7 @@ LL | enum E1 {}
= note: `#[derive(From)]` can only be used on structs with exactly one field
error[E0277]: the size for values of type `T` cannot be known at compilation time
--> $DIR/deriving-from-wrong-target.rs:31:10
|
LL | #[derive(From)]
| ^^^^ doesn't have a size known at compile-time
...
LL | struct SUnsizedField<T: ?Sized> {
| - this type parameter needs to be `Sized`
|
note: required by an implicit `Sized` bound in `From`
--> $SRC_DIR/core/src/convert/mod.rs:LL:COL
help: consider removing the `?Sized` bound to make the type parameter `Sized`
|
LL - struct SUnsizedField<T: ?Sized> {
LL + struct SUnsizedField<T> {
|
error[E0277]: the size for values of type `T` cannot be known at compilation time
--> $DIR/deriving-from-wrong-target.rs:31:10
|
LL | #[derive(From)]
| ^^^^ doesn't have a size known at compile-time
...
LL | struct SUnsizedField<T: ?Sized> {
| - this type parameter needs to be `Sized`
|
note: required because it appears within the type `SUnsizedField<T>`
--> $DIR/deriving-from-wrong-target.rs:34:8
|
LL | struct SUnsizedField<T: ?Sized> {
| ^^^^^^^^^^^^^
= note: the return type of a function must have a statically known size
help: consider removing the `?Sized` bound to make the type parameter `Sized`
|
LL - struct SUnsizedField<T: ?Sized> {
LL + struct SUnsizedField<T> {
|
error[E0277]: the size for values of type `T` cannot be known at compilation time
--> $DIR/deriving-from-wrong-target.rs:35:11
--> $DIR/deriving-from-wrong-target.rs:33:11
|
LL | struct SUnsizedField<T: ?Sized> {
| - this type parameter needs to be `Sized`
@ -110,6 +72,6 @@ help: function arguments must have a statically known size, borrowed types alway
LL | last: &T,
| +
error: aborting due to 8 previous errors
error: aborting due to 6 previous errors
For more information about this error, try `rustc --explain E0277`.

View file

@ -3,6 +3,6 @@ use proc_macro::TokenStream;
#[proc_macro]
pub fn check(_: TokenStream) -> TokenStream {
assert!("---\n---".parse::<TokenStream>().unwrap().is_empty());
assert_eq!(6, "---\n---".parse::<TokenStream>().unwrap().into_iter().count());
Default::default()
}

View file

@ -2,11 +2,10 @@
//@ proc-macro: makro.rs
//@ edition: 2021
#![feature(frontmatter)]
makro::check!();
// checks that a proc-macro cannot observe frontmatter tokens.
// checks that a proc-macro doesn't know or parse frontmatters at all and instead treats
// it as normal Rust code.
// see auxiliary/makro.rs for how it is tested.
fn main() {}

View file

@ -1071,7 +1071,7 @@ cc = ["@rust-lang/rustfmt"]
[mentions."compiler/rustc_middle/src/mir/syntax.rs"]
message = "This PR changes MIR"
cc = ["@oli-obk", "@RalfJung", "@JakobDegen", "@davidtwco", "@vakaras"]
cc = ["@oli-obk", "@RalfJung", "@JakobDegen", "@vakaras"]
[mentions."compiler/rustc_error_messages"]
message = "`rustc_error_messages` was changed"
@ -1405,7 +1405,6 @@ arena = [
"@spastorino",
]
mir = [
"@davidtwco",
"@oli-obk",
"@matthewjasper",
"@saethlin",
@ -1592,6 +1591,8 @@ days-threshold = 28
# Prevents mentions in commits to avoid users being spammed
# Documentation at: https://forge.rust-lang.org/triagebot/no-mentions.html
[no-mentions]
# Subtree update authors can't fix it, no point in warning.
exclude-titles = ["subtree update"]
# Allow members to formally register concerns (`@rustbot concern my concern`)
# Documentation at: https://forge.rust-lang.org/triagebot/concern.html