Merge pull request #4345 from rust-lang/rustup-2025-05-23

Automatic Rustup
This commit is contained in:
Ralf Jung 2025-05-23 05:49:57 +00:00 committed by GitHub
commit fa5a5070ab
No known key found for this signature in database
GPG key ID: B5690EEEBB952194
434 changed files with 8716 additions and 5581 deletions

View file

@ -73,6 +73,15 @@ jobs:
needs: [ calculate_matrix ]
runs-on: "${{ matrix.os }}"
timeout-minutes: 360
# The bors environment contains secrets required for elevated workflows (try and auto builds),
# which need to access e.g. S3 and upload artifacts. We want to provide access to that
# environment only on the try/auto branches, which are only accessible to bors.
# This also ensures that PR CI (which doesn't get write access to S3) works, as it cannot
# access the environment.
#
# We only enable the environment for the rust-lang/rust repository, so that rust-lang-ci/rust
# CI works until we migrate off it (since that repository doesn't contain the environment).
environment: ${{ ((github.repository == 'rust-lang/rust' && (github.ref == 'refs/heads/try' || github.ref == 'refs/heads/auto')) && 'bors') || '' }}
env:
CI_JOB_NAME: ${{ matrix.name }}
CI_JOB_DOC_URL: ${{ matrix.doc_url }}

View file

@ -582,13 +582,11 @@ dependencies = [
name = "clippy_dev"
version = "0.0.1"
dependencies = [
"aho-corasick",
"chrono",
"clap",
"indoc",
"itertools",
"opener",
"shell-escape",
"walkdir",
]
@ -3809,6 +3807,7 @@ dependencies = [
"rustc_abi",
"rustc_ast",
"rustc_attr_data_structures",
"rustc_attr_parsing",
"rustc_data_structures",
"rustc_errors",
"rustc_fluent_macro",
@ -4887,12 +4886,6 @@ dependencies = [
"lazy_static",
]
[[package]]
name = "shell-escape"
version = "0.1.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "45bb67a18fa91266cc7807181f62f9178a6873bfad7dc788c42e6430db40184f"
[[package]]
name = "shlex"
version = "1.3.0"

View file

@ -26,3 +26,33 @@ pub fn is_builtin_attr(attr: &impl AttributeExt) -> bool {
/// Returns the crate name specified via a `#![crate_name = "..."]` attribute
/// in `attrs`, if one is present.
pub fn find_crate_name(attrs: &[impl AttributeExt]) -> Option<Symbol> {
    first_attr_value_str_by_name(attrs, sym::crate_name)
}
/// Returns `true` if any `#[doc(...)]` attribute in `attrs` declares `symbol`
/// as a doc alias, in either of the two accepted forms:
/// `#[doc(alias = "name")]` or `#[doc(alias("name", ...))]`.
pub fn is_doc_alias_attrs_contain_symbol<'tcx, T: AttributeExt + 'tcx>(
    attrs: impl Iterator<Item = &'tcx T>,
    symbol: Symbol,
) -> bool {
    attrs.filter(|attr| attr.has_name(sym::doc)).any(|attr| {
        let Some(values) = attr.meta_item_list() else {
            return false;
        };
        values.iter().filter(|v| v.has_name(sym::alias)).any(|v| {
            if let Some(nested) = v.meta_item_list() {
                // #[doc(alias("foo", "bar"))]
                nested.iter().filter_map(|item| item.lit()).any(|lit| lit.symbol == symbol)
            } else {
                // #[doc(alias = "foo")]
                v.meta_item()
                    .and_then(|meta| meta.name_value_literal())
                    .is_some_and(|lit| lit.symbol == symbol)
            }
        })
    })
}

View file

@ -90,7 +90,9 @@ pub mod parser;
mod session_diagnostics;
pub use attributes::cfg::*;
pub use attributes::util::{find_crate_name, is_builtin_attr, parse_version};
pub use attributes::util::{
find_crate_name, is_builtin_attr, is_doc_alias_attrs_contain_symbol, parse_version,
};
pub use context::{AttributeParser, OmitDoc};
rustc_fluent_macro::fluent_messages! { "../messages.ftl" }

View file

@ -17,7 +17,7 @@ index 1e336bf..35e6f54 100644
@@ -2,5 +2,4 @@
// tidy-alphabetical-start
-#![cfg_attr(target_has_atomic = "128", feature(integer_atomics))]
#![cfg_attr(test, feature(cfg_match))]
#![cfg_attr(test, feature(cfg_select))]
#![feature(alloc_layout_extra)]
#![feature(array_chunks)]
diff --git a/coretests/tests/atomic.rs b/coretests/tests/atomic.rs

View file

@ -4,7 +4,7 @@
//! green/native threading. This is just a bare-bones enough solution for
//! librustdoc, it is not production quality at all.
cfg_match! {
cfg_select! {
target_os = "linux" => {
mod linux;
use linux as imp;

View file

@ -19,7 +19,7 @@
#![feature(ascii_char_variants)]
#![feature(assert_matches)]
#![feature(auto_traits)]
#![feature(cfg_match)]
#![feature(cfg_select)]
#![feature(core_intrinsics)]
#![feature(dropck_eyepatch)]
#![feature(extend_one)]

View file

@ -860,7 +860,7 @@ fn get_thread_id() -> u32 {
}
// Memory reporting
cfg_match! {
cfg_select! {
windows => {
pub fn get_resident_set_size() -> Option<usize> {
use windows::{

View file

@ -9,6 +9,7 @@ itertools = "0.12"
rustc_abi = { path = "../rustc_abi" }
rustc_ast = { path = "../rustc_ast" }
rustc_attr_data_structures = { path = "../rustc_attr_data_structures" }
rustc_attr_parsing = { path = "../rustc_attr_parsing" }
rustc_data_structures = { path = "../rustc_data_structures" }
rustc_errors = { path = "../rustc_errors" }
rustc_fluent_macro = { path = "../rustc_fluent_macro" }

View file

@ -1190,9 +1190,23 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
(ty::FnDef(..), ty::FnDef(..)) => {
// Don't reify if the function types have a LUB, i.e., they
// are the same function and their parameters have a LUB.
match self
.commit_if_ok(|_| self.at(cause, self.param_env).lub(prev_ty, new_ty))
{
match self.commit_if_ok(|_| {
// We need to eagerly handle nested obligations due to lazy norm.
if self.next_trait_solver() {
let ocx = ObligationCtxt::new(self);
let value = ocx.lub(cause, self.param_env, prev_ty, new_ty)?;
if ocx.select_where_possible().is_empty() {
Ok(InferOk {
value,
obligations: ocx.into_pending_obligations(),
})
} else {
Err(TypeError::Mismatch)
}
} else {
self.at(cause, self.param_env).lub(prev_ty, new_ty)
}
}) {
// We have a LUB of prev_ty and new_ty, just return it.
Ok(ok) => return Ok(self.register_infer_ok_obligations(ok)),
Err(_) => {

View file

@ -532,14 +532,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
ExprKind::Break(destination, ref expr_opt) => {
self.check_expr_break(destination, expr_opt.as_deref(), expr)
}
ExprKind::Continue(destination) => {
if destination.target_id.is_ok() {
tcx.types.never
} else {
// There was an error; make type-check fail.
Ty::new_misc_error(tcx)
}
}
ExprKind::Continue(destination) => self.check_expr_continue(destination, expr),
ExprKind::Ret(ref expr_opt) => self.check_expr_return(expr_opt.as_deref(), expr),
ExprKind::Become(call) => self.check_expr_become(call, expr),
ExprKind::Let(let_expr) => self.check_expr_let(let_expr, expr.hir_id),
@ -989,6 +982,31 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
}
}
/// Type-checks a `continue` expression.
///
/// A well-formed `continue` pointing at a loop has type `!`; every other
/// outcome yields an error type so that type-check fails.
fn check_expr_continue(
    &self,
    destination: hir::Destination,
    expr: &'tcx hir::Expr<'tcx>,
) -> Ty<'tcx> {
    let Ok(target_id) = destination.target_id else {
        // There was an error; make type-check fail.
        return Ty::new_misc_error(self.tcx);
    };
    match self.tcx.hir_node(target_id) {
        hir::Node::Expr(hir::Expr { kind: ExprKind::Loop(..), .. }) => self.tcx.types.never,
        _ => {
            // Liveness linting assumes `continue`s all point to loops. We'll report an error
            // in `check_mod_loops`, but make sure we don't run liveness (#113379, #121623).
            let guar = self.dcx().span_delayed_bug(
                expr.span,
                "found `continue` not pointing to loop, but no error reported",
            );
            Ty::new_error(self.tcx, guar)
        }
    }
}
fn check_expr_return(
&self,
expr_opt: Option<&'tcx hir::Expr<'tcx>>,

View file

@ -2,6 +2,7 @@
#![allow(rustc::diagnostic_outside_of_impl)]
#![allow(rustc::untranslatable_diagnostic)]
#![feature(array_windows)]
#![feature(assert_matches)]
#![feature(box_patterns)]
#![feature(if_let_guard)]
#![feature(iter_intersperse)]

View file

@ -1,7 +1,9 @@
use std::assert_matches::debug_assert_matches;
use std::cell::{Cell, RefCell};
use std::cmp::max;
use std::ops::Deref;
use rustc_attr_parsing::is_doc_alias_attrs_contain_symbol;
use rustc_data_structures::fx::FxHashSet;
use rustc_data_structures::sso::SsoHashSet;
use rustc_errors::Applicability;
@ -15,7 +17,7 @@ use rustc_infer::traits::ObligationCauseCode;
use rustc_middle::middle::stability;
use rustc_middle::query::Providers;
use rustc_middle::ty::elaborate::supertrait_def_ids;
use rustc_middle::ty::fast_reject::{TreatParams, simplify_type};
use rustc_middle::ty::fast_reject::{DeepRejectCtxt, TreatParams, simplify_type};
use rustc_middle::ty::{
self, AssocItem, AssocItemContainer, GenericArgs, GenericArgsRef, GenericParamDefKind,
ParamEnvAnd, Ty, TyCtxt, TypeVisitableExt, Upcast,
@ -806,8 +808,8 @@ impl<'a, 'tcx> ProbeContext<'a, 'tcx> {
);
}
}
ty::Param(p) => {
self.assemble_inherent_candidates_from_param(p);
ty::Param(_) => {
self.assemble_inherent_candidates_from_param(raw_self_ty);
}
ty::Bool
| ty::Char
@ -908,18 +910,16 @@ impl<'a, 'tcx> ProbeContext<'a, 'tcx> {
}
#[instrument(level = "debug", skip(self))]
fn assemble_inherent_candidates_from_param(&mut self, param_ty: ty::ParamTy) {
fn assemble_inherent_candidates_from_param(&mut self, param_ty: Ty<'tcx>) {
debug_assert_matches!(param_ty.kind(), ty::Param(_));
let tcx = self.tcx;
let bounds = self.param_env.caller_bounds().iter().filter_map(|predicate| {
let bound_predicate = predicate.kind();
match bound_predicate.skip_binder() {
ty::ClauseKind::Trait(trait_predicate) => {
match *trait_predicate.trait_ref.self_ty().kind() {
ty::Param(p) if p == param_ty => {
Some(bound_predicate.rebind(trait_predicate.trait_ref))
}
_ => None,
}
}
ty::ClauseKind::Trait(trait_predicate) => DeepRejectCtxt::relate_rigid_rigid(tcx)
.types_may_unify(param_ty, trait_predicate.trait_ref.self_ty())
.then(|| bound_predicate.rebind(trait_predicate.trait_ref)),
ty::ClauseKind::RegionOutlives(_)
| ty::ClauseKind::TypeOutlives(_)
| ty::ClauseKind::Projection(_)
@ -2333,10 +2333,13 @@ impl<'a, 'tcx> ProbeContext<'a, 'tcx> {
};
let hir_id = self.fcx.tcx.local_def_id_to_hir_id(local_def_id);
let attrs = self.fcx.tcx.hir_attrs(hir_id);
if is_doc_alias_attrs_contain_symbol(attrs.into_iter(), method.name) {
return true;
}
for attr in attrs {
if attr.has_name(sym::doc) {
// do nothing
} else if attr.has_name(sym::rustc_confusables) {
if attr.has_name(sym::rustc_confusables) {
let Some(confusables) = attr.meta_item_list() else {
continue;
};
@ -2348,33 +2351,6 @@ impl<'a, 'tcx> ProbeContext<'a, 'tcx> {
return true;
}
}
continue;
} else {
continue;
};
let Some(values) = attr.meta_item_list() else {
continue;
};
for v in values {
if !v.has_name(sym::alias) {
continue;
}
if let Some(nested) = v.meta_item_list() {
// #[doc(alias("foo", "bar"))]
for n in nested {
if let Some(lit) = n.lit()
&& method.name == lit.symbol
{
return true;
}
}
} else if let Some(meta) = v.meta_item()
&& let Some(lit) = meta.name_value_literal()
&& method.name == lit.symbol
{
// #[doc(alias = "foo")]
return true;
}
}
}
false

View file

@ -45,7 +45,7 @@ use crate::interface::Compiler;
use crate::{errors, limits, proc_macro_decls, util};
pub fn parse<'a>(sess: &'a Session) -> ast::Crate {
let krate = sess
let mut krate = sess
.time("parse_crate", || {
let mut parser = unwrap_or_emit_fatal(match &sess.io.input {
Input::File(file) => new_parser_from_file(&sess.psess, file, None),
@ -64,6 +64,12 @@ pub fn parse<'a>(sess: &'a Session) -> ast::Crate {
input_stats::print_ast_stats(&krate, "PRE EXPANSION AST STATS", "ast-stats-1");
}
rustc_builtin_macros::cmdline_attrs::inject(
&mut krate,
&sess.psess,
&sess.opts.unstable_opts.crate_attr,
);
krate
}
@ -805,17 +811,11 @@ pub static DEFAULT_QUERY_PROVIDERS: LazyLock<Providers> = LazyLock::new(|| {
pub fn create_and_enter_global_ctxt<T, F: for<'tcx> FnOnce(TyCtxt<'tcx>) -> T>(
compiler: &Compiler,
mut krate: rustc_ast::Crate,
krate: rustc_ast::Crate,
f: F,
) -> T {
let sess = &compiler.sess;
rustc_builtin_macros::cmdline_attrs::inject(
&mut krate,
&sess.psess,
&sess.opts.unstable_opts.crate_attr,
);
let pre_configured_attrs = rustc_expand::config::pre_configure_attrs(sess, &krate.attrs);
let crate_name = get_crate_name(sess, &pre_configured_attrs);

View file

@ -13,6 +13,8 @@ lint_ambiguous_negative_literals = `-` has lower precedence than method calls, w
lint_ambiguous_wide_pointer_comparisons = ambiguous wide pointer comparison, the comparison includes metadata which may not be expected
.addr_metadata_suggestion = use explicit `std::ptr::eq` method to compare metadata and addresses
.addr_suggestion = use `std::ptr::addr_eq` or untyped pointers to only compare their addresses
.cast_suggestion = use untyped pointers to only compare their addresses
.expect_suggestion = or expect the lint to compare the pointers metadata and addresses
lint_associated_const_elided_lifetime = {$elided ->
[true] `&` without an explicit lifetime name cannot be used here

View file

@ -1782,13 +1782,20 @@ pub(crate) enum InvalidNanComparisonsSuggestion {
#[derive(LintDiagnostic)]
pub(crate) enum AmbiguousWidePointerComparisons<'a> {
#[diag(lint_ambiguous_wide_pointer_comparisons)]
Spanful {
SpanfulEq {
#[subdiagnostic]
addr_suggestion: AmbiguousWidePointerComparisonsAddrSuggestion<'a>,
#[subdiagnostic]
addr_metadata_suggestion: Option<AmbiguousWidePointerComparisonsAddrMetadataSuggestion<'a>>,
},
#[diag(lint_ambiguous_wide_pointer_comparisons)]
SpanfulCmp {
#[subdiagnostic]
cast_suggestion: AmbiguousWidePointerComparisonsCastSuggestion<'a>,
#[subdiagnostic]
expect_suggestion: AmbiguousWidePointerComparisonsExpectSuggestion<'a>,
},
#[diag(lint_ambiguous_wide_pointer_comparisons)]
#[help(lint_addr_metadata_suggestion)]
#[help(lint_addr_suggestion)]
Spanless,
@ -1816,48 +1823,67 @@ pub(crate) struct AmbiguousWidePointerComparisonsAddrMetadataSuggestion<'a> {
}
#[derive(Subdiagnostic)]
pub(crate) enum AmbiguousWidePointerComparisonsAddrSuggestion<'a> {
#[multipart_suggestion(
lint_addr_suggestion,
style = "verbose",
// FIXME(#53934): make machine-applicable again
applicability = "maybe-incorrect"
#[multipart_suggestion(
lint_addr_suggestion,
style = "verbose",
// FIXME(#53934): make machine-applicable again
applicability = "maybe-incorrect"
)]
pub(crate) struct AmbiguousWidePointerComparisonsAddrSuggestion<'a> {
pub(crate) ne: &'a str,
pub(crate) deref_left: &'a str,
pub(crate) deref_right: &'a str,
pub(crate) l_modifiers: &'a str,
pub(crate) r_modifiers: &'a str,
#[suggestion_part(code = "{ne}std::ptr::addr_eq({deref_left}")]
pub(crate) left: Span,
#[suggestion_part(code = "{l_modifiers}, {deref_right}")]
pub(crate) middle: Span,
#[suggestion_part(code = "{r_modifiers})")]
pub(crate) right: Span,
}
#[derive(Subdiagnostic)]
#[multipart_suggestion(
lint_cast_suggestion,
style = "verbose",
// FIXME(#53934): make machine-applicable again
applicability = "maybe-incorrect"
)]
pub(crate) struct AmbiguousWidePointerComparisonsCastSuggestion<'a> {
pub(crate) deref_left: &'a str,
pub(crate) deref_right: &'a str,
pub(crate) paren_left: &'a str,
pub(crate) paren_right: &'a str,
pub(crate) l_modifiers: &'a str,
pub(crate) r_modifiers: &'a str,
#[suggestion_part(code = "({deref_left}")]
pub(crate) left_before: Option<Span>,
#[suggestion_part(code = "{l_modifiers}{paren_left}.cast::<()>()")]
pub(crate) left_after: Span,
#[suggestion_part(code = "({deref_right}")]
pub(crate) right_before: Option<Span>,
#[suggestion_part(code = "{r_modifiers}{paren_right}.cast::<()>()")]
pub(crate) right_after: Span,
}
#[derive(Subdiagnostic)]
#[multipart_suggestion(
lint_expect_suggestion,
style = "verbose",
// FIXME(#53934): make machine-applicable again
applicability = "maybe-incorrect"
)]
pub(crate) struct AmbiguousWidePointerComparisonsExpectSuggestion<'a> {
pub(crate) paren_left: &'a str,
pub(crate) paren_right: &'a str,
// FIXME(#127436): Adjust once resolved
#[suggestion_part(
code = r#"{{ #[expect(ambiguous_wide_pointer_comparisons, reason = "...")] {paren_left}"#
)]
AddrEq {
ne: &'a str,
deref_left: &'a str,
deref_right: &'a str,
l_modifiers: &'a str,
r_modifiers: &'a str,
#[suggestion_part(code = "{ne}std::ptr::addr_eq({deref_left}")]
left: Span,
#[suggestion_part(code = "{l_modifiers}, {deref_right}")]
middle: Span,
#[suggestion_part(code = "{r_modifiers})")]
right: Span,
},
#[multipart_suggestion(
lint_addr_suggestion,
style = "verbose",
// FIXME(#53934): make machine-applicable again
applicability = "maybe-incorrect"
)]
Cast {
deref_left: &'a str,
deref_right: &'a str,
paren_left: &'a str,
paren_right: &'a str,
l_modifiers: &'a str,
r_modifiers: &'a str,
#[suggestion_part(code = "({deref_left}")]
left_before: Option<Span>,
#[suggestion_part(code = "{l_modifiers}{paren_left}.cast::<()>()")]
left_after: Span,
#[suggestion_part(code = "({deref_right}")]
right_before: Option<Span>,
#[suggestion_part(code = "{r_modifiers}{paren_right}.cast::<()>()")]
right_after: Span,
},
pub(crate) before: Span,
#[suggestion_part(code = "{paren_right} }}")]
pub(crate) after: Span,
}
#[derive(LintDiagnostic)]

View file

@ -24,7 +24,8 @@ mod improper_ctypes;
use crate::lints::{
AmbiguousWidePointerComparisons, AmbiguousWidePointerComparisonsAddrMetadataSuggestion,
AmbiguousWidePointerComparisonsAddrSuggestion, AtomicOrderingFence, AtomicOrderingLoad,
AmbiguousWidePointerComparisonsAddrSuggestion, AmbiguousWidePointerComparisonsCastSuggestion,
AmbiguousWidePointerComparisonsExpectSuggestion, AtomicOrderingFence, AtomicOrderingLoad,
AtomicOrderingStore, ImproperCTypes, InvalidAtomicOrderingDiag, InvalidNanComparisons,
InvalidNanComparisonsSuggestion, UnpredictableFunctionPointerComparisons,
UnpredictableFunctionPointerComparisonsSuggestion, UnusedComparisons, UsesPowerAlignment,
@ -362,6 +363,7 @@ fn lint_wide_pointer<'tcx>(
let ne = if cmpop == ComparisonOp::BinOp(hir::BinOpKind::Ne) { "!" } else { "" };
let is_eq_ne = matches!(cmpop, ComparisonOp::BinOp(hir::BinOpKind::Eq | hir::BinOpKind::Ne));
let is_dyn_comparison = l_inner_ty_is_dyn && r_inner_ty_is_dyn;
let via_method_call = matches!(&e.kind, ExprKind::MethodCall(..) | ExprKind::Call(..));
let left = e.span.shrink_to_lo().until(l_span.shrink_to_lo());
let middle = l_span.shrink_to_hi().until(r_span.shrink_to_lo());
@ -376,9 +378,21 @@ fn lint_wide_pointer<'tcx>(
cx.emit_span_lint(
AMBIGUOUS_WIDE_POINTER_COMPARISONS,
e.span,
AmbiguousWidePointerComparisons::Spanful {
addr_metadata_suggestion: (is_eq_ne && !is_dyn_comparison).then(|| {
AmbiguousWidePointerComparisonsAddrMetadataSuggestion {
if is_eq_ne {
AmbiguousWidePointerComparisons::SpanfulEq {
addr_metadata_suggestion: (!is_dyn_comparison).then(|| {
AmbiguousWidePointerComparisonsAddrMetadataSuggestion {
ne,
deref_left,
deref_right,
l_modifiers,
r_modifiers,
left,
middle,
right,
}
}),
addr_suggestion: AmbiguousWidePointerComparisonsAddrSuggestion {
ne,
deref_left,
deref_right,
@ -387,21 +401,11 @@ fn lint_wide_pointer<'tcx>(
left,
middle,
right,
}
}),
addr_suggestion: if is_eq_ne {
AmbiguousWidePointerComparisonsAddrSuggestion::AddrEq {
ne,
deref_left,
deref_right,
l_modifiers,
r_modifiers,
left,
middle,
right,
}
} else {
AmbiguousWidePointerComparisonsAddrSuggestion::Cast {
},
}
} else {
AmbiguousWidePointerComparisons::SpanfulCmp {
cast_suggestion: AmbiguousWidePointerComparisonsCastSuggestion {
deref_left,
deref_right,
l_modifiers,
@ -412,8 +416,14 @@ fn lint_wide_pointer<'tcx>(
left_after: l_span.shrink_to_hi(),
right_before: (r_ty_refs != 0).then_some(r_span.shrink_to_lo()),
right_after: r_span.shrink_to_hi(),
}
},
},
expect_suggestion: AmbiguousWidePointerComparisonsExpectSuggestion {
paren_left: if via_method_call { "" } else { "(" },
paren_right: if via_method_call { "" } else { ")" },
before: e.span.shrink_to_lo(),
after: e.span.shrink_to_hi(),
},
}
},
);
}

View file

@ -51,6 +51,7 @@ impl Parse for Query {
let key = Pat::parse_single(&arg_content)?;
arg_content.parse::<Token![:]>()?;
let arg = arg_content.parse()?;
let _ = arg_content.parse::<Option<Token![,]>>()?;
let result = input.parse()?;
// Parse the query modifiers

View file

@ -1882,9 +1882,9 @@ impl<'tcx> Ty<'tcx> {
// Needs normalization or revealing to determine, so no is the safe answer.
ty::Alias(..) => false,
ty::Param(..) | ty::Infer(..) | ty::Error(..) => false,
ty::Param(..) | ty::Placeholder(..) | ty::Infer(..) | ty::Error(..) => false,
ty::Bound(..) | ty::Placeholder(..) => {
ty::Bound(..) => {
bug!("`is_trivially_pure_clone_copy` applied to unexpected type: {:?}", self);
}
}

View file

@ -66,8 +66,7 @@ pub fn build_mir<'tcx>(tcx: TyCtxt<'tcx>, def: LocalDefId) -> Body<'tcx> {
}
};
// this must run before MIR dump, because
// "not all control paths return a value" is reported here.
// Checking liveness after building the THIR ensures there were no typeck errors.
//
// maybe move the check to a MIR pass?
tcx.ensure_ok().check_liveness(def);
@ -451,10 +450,6 @@ fn construct_fn<'tcx>(
let span = tcx.def_span(fn_def);
let fn_id = tcx.local_def_id_to_hir_id(fn_def);
// The representation of thir for `-Zunpretty=thir-tree` relies on
// the entry expression being the last element of `thir.exprs`.
assert_eq!(expr.as_usize(), thir.exprs.len() - 1);
// Figure out what primary body this item has.
let body = tcx.hir_body_owned_by(fn_def);
let span_with_body = tcx.hir_span_with_body(fn_id);

View file

@ -27,8 +27,8 @@ pub(crate) fn thir_body(
if let Some(reported) = cx.typeck_results.tainted_by_errors {
return Err(reported);
}
let expr = cx.mirror_expr(body.value);
// Lower the params before the body's expression so errors from params are shown first.
let owner_id = tcx.local_def_id_to_hir_id(owner_def);
if let Some(fn_decl) = tcx.hir_fn_decl_by_hir_id(owner_id) {
let closure_env_param = cx.closure_env_param(owner_def, owner_id);
@ -48,6 +48,7 @@ pub(crate) fn thir_body(
}
}
let expr = cx.mirror_expr(body.value);
Ok((tcx.alloc_steal_thir(cx.thir), expr))
}

View file

@ -8,10 +8,10 @@ use rustc_span::def_id::LocalDefId;
/// Create a THIR tree for debugging.
pub fn thir_tree(tcx: TyCtxt<'_>, owner_def: LocalDefId) -> String {
match super::cx::thir_body(tcx, owner_def) {
Ok((thir, _)) => {
Ok((thir, expr)) => {
let thir = thir.steal();
let mut printer = ThirPrinter::new(&thir);
printer.print();
printer.print(expr);
printer.into_buffer()
}
Err(_) => "error".into(),
@ -58,7 +58,7 @@ impl<'a, 'tcx> ThirPrinter<'a, 'tcx> {
}
}
fn print(&mut self) {
fn print(&mut self, body_expr: ExprId) {
print_indented!(self, "params: [", 0);
for param in self.thir.params.iter() {
self.print_param(param, 1);
@ -66,8 +66,7 @@ impl<'a, 'tcx> ThirPrinter<'a, 'tcx> {
print_indented!(self, "]", 0);
print_indented!(self, "body:", 0);
let expr = ExprId::from_usize(self.thir.exprs.len() - 1);
self.print_expr(expr, 1);
self.print_expr(body_expr, 1);
}
fn into_buffer(self) -> String {

View file

@ -9,6 +9,7 @@ use crate::errors::UnnecessaryTransmute as Error;
/// Check for transmutes that overlap with stdlib methods.
/// For example, transmuting `[u8; 4]` to `u32`.
/// We chose not to lint u8 -> bool transmutes, see #140431
pub(super) struct CheckUnnecessaryTransmutes;
impl<'tcx> crate::MirLint<'tcx> for CheckUnnecessaryTransmutes {
@ -98,8 +99,6 @@ impl<'a, 'tcx> UnnecessaryTransmuteChecker<'a, 'tcx> {
(Uint(_), Float(ty)) => err(format!("{}::from_bits({arg})", ty.name_str())),
// bool → { x8 }
(Bool, Int(..) | Uint(..)) => err(format!("({arg}) as {}", fn_sig.output())),
// u8 → bool
(Uint(_), Bool) => err(format!("({arg} == 1)")),
_ => return None,
})
}

View file

@ -251,7 +251,29 @@ where
span_bug!(span, "invalid `AsyncDrop` impl_source: {:?}", impl_source);
}
};
let drop_fn_def_id = tcx.associated_item_def_ids(drop_trait)[0];
// impl_item_refs may be empty if drop fn is not implemented in 'impl AsyncDrop for ...'
// (#140974).
// Such code will report error, so just generate sync drop here and return
let Some(drop_fn_def_id) =
tcx.associated_item_def_ids(drop_trait).into_iter().nth(0).copied()
else {
tcx.dcx().span_delayed_bug(
self.elaborator.body().span,
"AsyncDrop type without correct `async fn drop(...)`.",
);
self.elaborator.patch().patch_terminator(
pin_obj_bb,
TerminatorKind::Drop {
place,
target: succ,
unwind: unwind.into_action(),
replace: false,
drop: None,
async_fut: None,
},
);
return pin_obj_bb;
};
let drop_fn = Ty::new_fn_def(tcx, drop_fn_def_id, trait_args);
let sig = drop_fn.fn_sig(tcx);
let sig = tcx.instantiate_bound_regions_with_erased(sig);
@ -318,15 +340,20 @@ where
bug!();
};
let obj_ptr_ty = Ty::new_mut_ptr(tcx, drop_ty);
let obj_ptr_place = Place::from(self.new_temp(obj_ptr_ty));
let unwrap_ty = adt_def.non_enum_variant().fields[FieldIdx::ZERO].ty(tcx, adt_args);
let addr = Rvalue::RawPtr(
RawPtrKind::Mut,
pin_obj_place.project_deeper(
&[ProjectionElem::Field(FieldIdx::ZERO, unwrap_ty), ProjectionElem::Deref],
tcx,
),
);
let obj_ref_place = Place::from(self.new_temp(unwrap_ty));
call_statements.push(self.assign(
obj_ref_place,
Rvalue::Use(Operand::Copy(tcx.mk_place_field(
pin_obj_place,
FieldIdx::ZERO,
unwrap_ty,
))),
));
let obj_ptr_place = Place::from(self.new_temp(obj_ptr_ty));
let addr = Rvalue::RawPtr(RawPtrKind::Mut, tcx.mk_place_deref(obj_ref_place));
call_statements.push(self.assign(obj_ptr_place, addr));
obj_ptr_place
};

View file

@ -3,6 +3,8 @@ use std::ops::Deref;
use rustc_type_ir::solve::{Certainty, Goal, NoSolution};
use rustc_type_ir::{self as ty, InferCtxtLike, Interner, TypeFoldable};
use crate::solve::HasChanged;
pub trait SolverDelegate: Deref<Target = Self::Infcx> + Sized {
type Infcx: InferCtxtLike<Interner = Self::Interner>;
type Interner: Interner;
@ -17,6 +19,12 @@ pub trait SolverDelegate: Deref<Target = Self::Infcx> + Sized {
where
V: TypeFoldable<Self::Interner>;
fn compute_goal_fast_path(
&self,
goal: Goal<Self::Interner, <Self::Interner as Interner>::Predicate>,
span: <Self::Interner as Interner>::Span,
) -> Option<HasChanged>;
fn fresh_var_for_kind_with_span(
&self,
arg: <Self::Interner as Interner>::GenericArg,

View file

@ -12,6 +12,7 @@
use std::iter;
use rustc_index::IndexVec;
use rustc_type_ir::data_structures::HashSet;
use rustc_type_ir::inherent::*;
use rustc_type_ir::relate::solver_relating::RelateExt;
use rustc_type_ir::{
@ -158,10 +159,12 @@ where
self.compute_external_query_constraints(certainty, normalization_nested_goals);
let (var_values, mut external_constraints) = (self.var_values, external_constraints)
.fold_with(&mut EagerResolver::new(self.delegate));
// Remove any trivial region constraints once we've resolved regions
external_constraints
.region_constraints
.retain(|outlives| outlives.0.as_region().is_none_or(|re| re != outlives.1));
// Remove any trivial or duplicated region constraints once we've resolved regions
let mut unique = HashSet::default();
external_constraints.region_constraints.retain(|outlives| {
outlives.0.as_region().is_none_or(|re| re != outlives.1) && unique.insert(*outlives)
});
let canonical = Canonicalizer::canonicalize_response(
self.delegate,

View file

@ -603,6 +603,14 @@ where
// If this loop did not result in any progress, what's our final certainty.
let mut unchanged_certainty = Some(Certainty::Yes);
for (source, goal) in mem::take(&mut self.nested_goals) {
if let Some(has_changed) = self.delegate.compute_goal_fast_path(goal, self.origin_span)
{
if matches!(has_changed, HasChanged::Yes) {
unchanged_certainty = None;
}
continue;
}
// We treat normalizes-to goals specially here. In each iteration we take the
// RHS of the projection, replace it with a fresh inference variable, and only
// after evaluating that goal do we equate the fresh inference variable with the

View file

@ -9,7 +9,7 @@ use rustc_type_ir::{
self as ty, Interner, Movability, TraitPredicate, TypeVisitableExt as _, TypingMode,
Upcast as _, elaborate,
};
use tracing::{instrument, trace};
use tracing::{debug, instrument, trace};
use crate::delegate::SolverDelegate;
use crate::solve::assembly::structural_traits::{self, AsyncCallableRelevantTypes};
@ -17,7 +17,7 @@ use crate::solve::assembly::{self, AllowInferenceConstraints, AssembleCandidates
use crate::solve::inspect::ProbeKind;
use crate::solve::{
BuiltinImplSource, CandidateSource, Certainty, EvalCtxt, Goal, GoalSource, MaybeCause,
NoSolution, ParamEnvSource, QueryResult,
NoSolution, ParamEnvSource, QueryResult, has_only_region_constraints,
};
impl<D, I> assembly::GoalKind<D> for TraitPredicate<I>
@ -1253,6 +1253,45 @@ where
D: SolverDelegate<Interner = I>,
I: Interner,
{
/// FIXME(#57893): For backwards compatibility with the old trait solver
/// implementation, we need to handle overlap between builtin and user-written
/// impls for trait objects.
///
/// This overlap is unsound in general and something which we intend to fix
/// separately. To avoid blocking the stabilization of the trait solver, we add
/// this hack to avoid breakage in cases which are *mostly fine*™. Importantly,
/// this preference is strictly weaker than the old behavior.
///
/// We only prefer builtin over user-written impls if there are no inference
/// constraints. Importantly, we also only prefer the builtin impls for trait
/// goals, and not during normalization. This means the only case where this
/// special-case results in exploitable unsoundness should be lifetime
/// dependent user-written impls.
pub(super) fn unsound_prefer_builtin_dyn_impl(&mut self, candidates: &mut Vec<Candidate<I>>) {
    // This hack is never applied during coherence checking.
    match self.typing_mode() {
        TypingMode::Coherence => return,
        TypingMode::Analysis { .. }
        | TypingMode::Borrowck { .. }
        | TypingMode::PostBorrowckAnalysis { .. }
        | TypingMode::PostAnalysis => {}
    }

    // Look for a builtin object candidate; only the first such candidate
    // decides whether the preference applies.
    let object_candidate = candidates.iter().find(|c| {
        matches!(c.source, CandidateSource::BuiltinImpl(BuiltinImplSource::Object(_)))
    });
    if object_candidate.is_some_and(|c| has_only_region_constraints(c.result)) {
        // Drop all user-written impl candidates in favor of the builtin one.
        candidates.retain(|c| match c.source {
            CandidateSource::Impl(_) => {
                debug!(?c, "unsoundly dropping impl in favor of builtin dyn-candidate");
                false
            }
            _ => true,
        });
    }
}
#[instrument(level = "debug", skip(self), ret)]
pub(super) fn merge_trait_candidates(
&mut self,
@ -1313,6 +1352,7 @@ where
}
self.filter_specialized_impls(AllowInferenceConstraints::No, &mut candidates);
self.unsound_prefer_builtin_dyn_impl(&mut candidates);
// If there are *only* global where bounds, then make sure to return that this
// is still reported as being proven-via the param-env so that rigid projections

View file

@ -122,7 +122,6 @@ enum LiveNodeKind {
VarDefNode(Span, HirId),
ClosureNode,
ExitNode,
ErrNode,
}
fn live_node_kind_to_string(lnk: LiveNodeKind, tcx: TyCtxt<'_>) -> String {
@ -133,7 +132,6 @@ fn live_node_kind_to_string(lnk: LiveNodeKind, tcx: TyCtxt<'_>) -> String {
VarDefNode(s, _) => format!("Var def node [{}]", sm.span_to_diagnostic_string(s)),
ClosureNode => "Closure node".to_owned(),
ExitNode => "Exit node".to_owned(),
ErrNode => "Error node".to_owned(),
}
}
@ -492,6 +490,9 @@ struct Liveness<'a, 'tcx> {
impl<'a, 'tcx> Liveness<'a, 'tcx> {
fn new(ir: &'a mut IrMaps<'tcx>, body_owner: LocalDefId) -> Liveness<'a, 'tcx> {
let typeck_results = ir.tcx.typeck(body_owner);
// Liveness linting runs after building the THIR. We make several assumptions based on
// typeck succeeding, e.g. that breaks and continues are well-formed.
assert!(typeck_results.tainted_by_errors.is_none());
// FIXME(#132279): we're in a body here.
let typing_env = ty::TypingEnv::non_body_analysis(ir.tcx, body_owner);
let closure_min_captures = typeck_results.closure_min_captures.get(&body_owner);
@ -976,8 +977,9 @@ impl<'a, 'tcx> Liveness<'a, 'tcx> {
// Now that we know the label we're going to,
// look it up in the continue loop nodes table
self.cont_ln.get(&sc).cloned().unwrap_or_else(|| {
self.ir.tcx.dcx().span_delayed_bug(expr.span, "continue to unknown label");
self.ir.add_live_node(ErrNode)
// Liveness linting happens after building the THIR. Bad labels should already
// have been caught.
span_bug!(expr.span, "continue to unknown label");
})
}

View file

@ -3901,7 +3901,9 @@ impl<'a, 'ast, 'ra: 'ast, 'tcx> LateResolutionVisitor<'a, 'ast, 'ra, 'tcx> {
// We walk the pattern before declaring the pattern's inner bindings,
// so that we avoid resolving a literal expression to a binding defined
// by the pattern.
visit::walk_pat(self, pat);
// NB: `Self::visit_pat` must be used rather than `visit::walk_pat` to avoid resolving guard
// patterns' guard expressions multiple times (#141265).
self.visit_pat(pat);
self.resolve_pattern_inner(pat, pat_src, bindings);
// This has to happen *after* we determine which pat_idents are variants:
self.check_consistent_bindings(pat);

View file

@ -11,6 +11,7 @@ use rustc_ast::{
Item, ItemKind, MethodCall, NodeId, Path, PathSegment, Ty, TyKind,
};
use rustc_ast_pretty::pprust::where_bound_predicate_to_string;
use rustc_attr_parsing::is_doc_alias_attrs_contain_symbol;
use rustc_data_structures::fx::{FxHashSet, FxIndexSet};
use rustc_errors::codes::*;
use rustc_errors::{
@ -39,7 +40,7 @@ use crate::late::{
};
use crate::ty::fast_reject::SimplifiedType;
use crate::{
Module, ModuleKind, ModuleOrUniformRoot, PathResult, PathSource, Segment, errors,
Module, ModuleKind, ModuleOrUniformRoot, PathResult, PathSource, Resolver, Segment, errors,
path_names_to_string,
};
@ -477,6 +478,19 @@ impl<'ast, 'ra: 'ast, 'tcx> LateResolutionVisitor<'_, 'ast, 'ra, 'tcx> {
return (err, Vec::new());
}
if let Some((did, item)) = self.lookup_doc_alias_name(path, source.namespace()) {
let item_name = item.name;
let suggestion_name = self.r.tcx.item_name(did);
err.span_suggestion(
item.span,
format!("`{suggestion_name}` has a name defined in the doc alias attribute as `{item_name}`"),
suggestion_name,
Applicability::MaybeIncorrect
);
return (err, Vec::new());
};
let (found, suggested_candidates, mut candidates) = self.try_lookup_name_relaxed(
&mut err,
source,
@ -751,12 +765,24 @@ impl<'ast, 'ra: 'ast, 'tcx> LateResolutionVisitor<'_, 'ast, 'ra, 'tcx> {
match candidate {
AssocSuggestion::Field(field_span) => {
if self_is_available {
err.span_suggestion_verbose(
span.shrink_to_lo(),
"you might have meant to use the available field",
format!("{pre}self."),
Applicability::MachineApplicable,
);
let source_map = self.r.tcx.sess.source_map();
// check if the field is used in a format string, such as `"{x}"`
let field_is_format_named_arg = source_map
.span_to_source(span, |s, start, _| {
Ok(s.get(start - 1..start) == Some("{"))
});
if let Ok(true) = field_is_format_named_arg {
err.help(
format!("you might have meant to use the available field in a format string: `\"{{}}\", self.{}`", segment.ident.name),
);
} else {
err.span_suggestion_verbose(
span.shrink_to_lo(),
"you might have meant to use the available field",
format!("{pre}self."),
Applicability::MaybeIncorrect,
);
}
} else {
err.span_label(field_span, "a field by that name exists in `Self`");
}
@ -852,6 +878,65 @@ impl<'ast, 'ra: 'ast, 'tcx> LateResolutionVisitor<'_, 'ast, 'ra, 'tcx> {
(false, suggested_candidates, candidates)
}
fn lookup_doc_alias_name(&mut self, path: &[Segment], ns: Namespace) -> Option<(DefId, Ident)> {
let find_doc_alias_name = |r: &mut Resolver<'ra, '_>, m: Module<'ra>, item_name: Symbol| {
for resolution in r.resolutions(m).borrow().values() {
let Some(did) =
resolution.borrow().binding.and_then(|binding| binding.res().opt_def_id())
else {
continue;
};
if did.is_local() {
// We don't record the doc alias name in the local crate
// because the people who write doc alias are usually not
// confused by them.
continue;
}
if is_doc_alias_attrs_contain_symbol(r.tcx.get_attrs(did, sym::doc), item_name) {
return Some(did);
}
}
None
};
if path.len() == 1 {
for rib in self.ribs[ns].iter().rev() {
let item = path[0].ident;
if let RibKind::Module(module) = rib.kind
&& let Some(did) = find_doc_alias_name(self.r, module, item.name)
{
return Some((did, item));
}
}
} else {
// Finds to the last resolved module item in the path
// and searches doc aliases within that module.
//
// Example: For the path `a::b::last_resolved::not_exist::c::d`,
// we will try to find any item has doc aliases named `not_exist`
// in `last_resolved` module.
//
// - Use `skip(1)` because the final segment must remain unresolved.
for (idx, seg) in path.iter().enumerate().rev().skip(1) {
let Some(id) = seg.id else {
continue;
};
let Some(res) = self.r.partial_res_map.get(&id) else {
continue;
};
if let Res::Def(DefKind::Mod, module) = res.expect_full_res()
&& let Some(module) = self.r.get_module(module)
&& let item = path[idx + 1].ident
&& let Some(did) = find_doc_alias_name(self.r, module, item.name)
{
return Some((did, item));
}
break;
}
}
None
}
fn suggest_trait_and_bounds(
&mut self,
err: &mut Diag<'_>,

View file

@ -29,7 +29,7 @@ pub(crate) fn analyze_source_file(src: &str) -> (Vec<RelativeBytePos>, Vec<Multi
(lines, multi_byte_chars)
}
cfg_match! {
cfg_select! {
any(target_arch = "x86", target_arch = "x86_64") => {
fn analyze_source_file_dispatch(
src: &str,

View file

@ -20,7 +20,7 @@
#![doc(html_root_url = "https://doc.rust-lang.org/nightly/nightly-rustc/")]
#![doc(rust_logo)]
#![feature(array_windows)]
#![feature(cfg_match)]
#![feature(cfg_select)]
#![feature(core_io_borrowed_buf)]
#![feature(hash_set_entry)]
#![feature(if_let_guard)]

View file

@ -2592,7 +2592,8 @@ impl Symbol {
/// (`token_to_string`, `Ident::to_string`), except that symbols don't keep the rawness flag
/// or edition, so we have to guess the rawness using the global edition.
pub fn to_ident_string(self) -> String {
Ident::with_dummy_span(self).to_string()
// Avoid creating an empty identifier, because that asserts in debug builds.
if self == kw::Empty { String::new() } else { Ident::with_dummy_span(self).to_string() }
}
}

View file

@ -2124,16 +2124,19 @@ impl<'a, 'tcx> TypeErrCtxt<'a, 'tcx> {
accessed through a specific `impl`",
self.tcx.def_kind_descr(assoc_item.as_def_kind(), item_def_id)
));
err.span_suggestion(
span,
"use the fully qualified path to an implementation",
format!(
"<Type as {}>::{}",
self.tcx.def_path_str(trait_ref),
assoc_item.name()
),
Applicability::HasPlaceholders,
);
if !assoc_item.is_impl_trait_in_trait() {
err.span_suggestion(
span,
"use the fully qualified path to an implementation",
format!(
"<Type as {}>::{}",
self.tcx.def_path_str(trait_ref),
assoc_item.name()
),
Applicability::HasPlaceholders,
);
}
}
}
}

View file

@ -1,19 +1,21 @@
use std::ops::Deref;
use rustc_data_structures::fx::FxHashSet;
use rustc_hir::LangItem;
use rustc_hir::def_id::{CRATE_DEF_ID, DefId};
use rustc_infer::infer::canonical::query_response::make_query_region_constraints;
use rustc_infer::infer::canonical::{
Canonical, CanonicalExt as _, CanonicalQueryInput, CanonicalVarInfo, CanonicalVarValues,
};
use rustc_infer::infer::{InferCtxt, RegionVariableOrigin, TyCtxtInferExt};
use rustc_infer::infer::{InferCtxt, RegionVariableOrigin, SubregionOrigin, TyCtxtInferExt};
use rustc_infer::traits::solve::Goal;
use rustc_middle::traits::query::NoSolution;
use rustc_middle::traits::solve::Certainty;
use rustc_middle::ty::{self, Ty, TyCtxt, TypeFoldable, TypeVisitableExt as _, TypingMode};
use rustc_next_trait_solver::solve::HasChanged;
use rustc_span::{DUMMY_SP, ErrorGuaranteed, Span};
use crate::traits::{EvaluateConstErr, specialization_graph};
use crate::traits::{EvaluateConstErr, ObligationCause, specialization_graph};
#[repr(transparent)]
pub struct SolverDelegate<'tcx>(InferCtxt<'tcx>);
@ -55,6 +57,52 @@ impl<'tcx> rustc_next_trait_solver::delegate::SolverDelegate for SolverDelegate<
(SolverDelegate(infcx), value, vars)
}
fn compute_goal_fast_path(
&self,
goal: Goal<'tcx, ty::Predicate<'tcx>>,
span: Span,
) -> Option<HasChanged> {
let pred = goal.predicate.kind();
match pred.no_bound_vars()? {
ty::PredicateKind::DynCompatible(def_id) if self.0.tcx.is_dyn_compatible(def_id) => {
Some(HasChanged::No)
}
ty::PredicateKind::Clause(ty::ClauseKind::RegionOutlives(outlives)) => {
self.0.sub_regions(
SubregionOrigin::RelateRegionParamBound(span, None),
outlives.1,
outlives.0,
);
Some(HasChanged::No)
}
ty::PredicateKind::Clause(ty::ClauseKind::TypeOutlives(outlives)) => {
self.0.register_region_obligation_with_cause(
outlives.0,
outlives.1,
&ObligationCause::dummy_with_span(span),
);
Some(HasChanged::No)
}
ty::PredicateKind::Clause(ty::ClauseKind::Trait(trait_pred)) => {
match self.0.tcx.as_lang_item(trait_pred.def_id()) {
Some(LangItem::Sized)
if trait_pred.self_ty().is_trivially_sized(self.0.tcx) =>
{
Some(HasChanged::No)
}
Some(LangItem::Copy | LangItem::Clone)
if trait_pred.self_ty().is_trivially_pure_clone_copy() =>
{
Some(HasChanged::No)
}
_ => None,
}
}
_ => None,
}
}
fn fresh_var_for_kind_with_span(
&self,
arg: ty::GenericArg<'tcx>,

View file

@ -12,6 +12,7 @@ use rustc_infer::traits::{
use rustc_middle::ty::{
self, DelayedSet, Ty, TyCtxt, TypeSuperVisitable, TypeVisitable, TypeVisitor, TypingMode,
};
use rustc_next_trait_solver::delegate::SolverDelegate as _;
use rustc_next_trait_solver::solve::{GenerateProofTree, HasChanged, SolverDelegateEvalExt as _};
use rustc_span::Span;
use tracing::instrument;
@ -172,7 +173,15 @@ where
}
let goal = obligation.as_goal();
let result = <&SolverDelegate<'tcx>>::from(infcx)
let delegate = <&SolverDelegate<'tcx>>::from(infcx);
if let Some(fast_path_has_changed) =
delegate.compute_goal_fast_path(goal, obligation.cause.span)
{
has_changed |= matches!(fast_path_has_changed, HasChanged::Yes);
continue;
}
let result = delegate
.evaluate_root_goal(goal, GenerateProofTree::No, obligation.cause.span)
.0;
self.inspect_evaluated_obligation(infcx, &obligation, &result);

View file

@ -188,6 +188,20 @@ where
.map(|infer_ok| self.register_infer_ok_obligations(infer_ok))
}
/// Computes the least-upper-bound, or mutual supertype, of two values.
pub fn lub<T: ToTrace<'tcx>>(
&self,
cause: &ObligationCause<'tcx>,
param_env: ty::ParamEnv<'tcx>,
expected: T,
actual: T,
) -> Result<T, TypeError<'tcx>> {
self.infcx
.at(cause, param_env)
.lub(expected, actual)
.map(|infer_ok| self.register_infer_ok_obligations(infer_ok))
}
#[must_use]
pub fn select_where_possible(&self) -> Vec<E> {
self.engine.borrow_mut().select_where_possible(self.infcx)

View file

@ -340,7 +340,7 @@ pub fn normalize_param_env_or_error<'tcx>(
let mut predicates: Vec<_> = util::elaborate(
tcx,
unnormalized_env.caller_bounds().into_iter().map(|predicate| {
if tcx.features().generic_const_exprs() {
if tcx.features().generic_const_exprs() || tcx.next_trait_solver_globally() {
return predicate;
}
@ -405,8 +405,6 @@ pub fn normalize_param_env_or_error<'tcx>(
// compatibility. Eventually when lazy norm is implemented this can just be removed.
// We do not normalize types here as there is no backwards compatibility requirement
// for us to do so.
//
// FIXME(-Znext-solver): remove this hack since we have deferred projection equality
predicate.fold_with(&mut ConstNormalizer(tcx))
}),
)

View file

@ -299,12 +299,21 @@ impl<'a, 'b, 'tcx> AssocTypeNormalizer<'a, 'b, 'tcx> {
);
}
// We don't replace bound vars in the generic arguments of the free alias with
// placeholders. This doesn't cause any issues as instantiating parameters with
// bound variables is special-cased to rewrite the debruijn index to be higher
// whenever we fold through a binder.
//
// However, we do replace any escaping bound vars in the resulting goals with
// placeholders as the trait solver does not expect to encounter escaping bound
// vars in obligations.
//
// FIXME(lazy_type_alias): Check how much this actually matters for perf before
// stabilization. This is a bit weird and generally not how we handle binders in
// the compiler so ideally we'd do the same boundvar->placeholder->boundvar dance
// that other kinds of normalization do.
let infcx = self.selcx.infcx;
self.obligations.extend(
// FIXME(BoxyUwU):
// FIXME(lazy_type_alias):
// It seems suspicious to instantiate the predicates with arguments that might be bound vars,
// we might wind up instantiating one of these bound vars underneath a hrtb.
infcx.tcx.predicates_of(free.def_id).instantiate_own(infcx.tcx, free.args).map(
|(mut predicate, span)| {
if free.has_escaping_bound_vars() {

View file

@ -65,71 +65,92 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> {
let def_id = obligation.predicate.def_id();
let tcx = self.tcx();
if tcx.is_lang_item(def_id, LangItem::Copy) {
debug!(obligation_self_ty = ?obligation.predicate.skip_binder().self_ty());
let lang_item = tcx.as_lang_item(def_id);
match lang_item {
Some(LangItem::Copy | LangItem::Clone) => {
debug!(obligation_self_ty = ?obligation.predicate.skip_binder().self_ty());
// User-defined copy impls are permitted, but only for
// structs and enums.
self.assemble_candidates_from_impls(obligation, &mut candidates);
// User-defined copy impls are permitted, but only for
// structs and enums.
self.assemble_candidates_from_impls(obligation, &mut candidates);
// For other types, we'll use the builtin rules.
let copy_conditions = self.copy_clone_conditions(obligation);
self.assemble_builtin_bound_candidates(copy_conditions, &mut candidates);
} else if tcx.is_lang_item(def_id, LangItem::DiscriminantKind) {
// `DiscriminantKind` is automatically implemented for every type.
candidates.vec.push(BuiltinCandidate { has_nested: false });
} else if tcx.is_lang_item(def_id, LangItem::PointeeTrait) {
// `Pointee` is automatically implemented for every type.
candidates.vec.push(BuiltinCandidate { has_nested: false });
} else if tcx.is_lang_item(def_id, LangItem::Sized) {
self.assemble_builtin_sized_candidate(obligation, &mut candidates);
} else if tcx.is_lang_item(def_id, LangItem::Unsize) {
self.assemble_candidates_for_unsizing(obligation, &mut candidates);
} else if tcx.is_lang_item(def_id, LangItem::Destruct) {
self.assemble_const_destruct_candidates(obligation, &mut candidates);
} else if tcx.is_lang_item(def_id, LangItem::TransmuteTrait) {
// User-defined transmutability impls are permitted.
self.assemble_candidates_from_impls(obligation, &mut candidates);
self.assemble_candidates_for_transmutability(obligation, &mut candidates);
} else if tcx.is_lang_item(def_id, LangItem::Tuple) {
self.assemble_candidate_for_tuple(obligation, &mut candidates);
} else if tcx.is_lang_item(def_id, LangItem::FnPtrTrait) {
self.assemble_candidates_for_fn_ptr_trait(obligation, &mut candidates);
} else if tcx.is_lang_item(def_id, LangItem::BikeshedGuaranteedNoDrop) {
self.assemble_candidates_for_bikeshed_guaranteed_no_drop_trait(
obligation,
&mut candidates,
);
} else {
if tcx.is_lang_item(def_id, LangItem::Clone) {
// Same builtin conditions as `Copy`, i.e., every type which has builtin support
// for `Copy` also has builtin support for `Clone`, and tuples/arrays of `Clone`
// types have builtin support for `Clone`.
let clone_conditions = self.copy_clone_conditions(obligation);
self.assemble_builtin_bound_candidates(clone_conditions, &mut candidates);
// For other types, we'll use the builtin rules.
let copy_conditions = self.copy_clone_conditions(obligation);
self.assemble_builtin_bound_candidates(copy_conditions, &mut candidates);
}
if tcx.is_lang_item(def_id, LangItem::Coroutine) {
self.assemble_coroutine_candidates(obligation, &mut candidates);
} else if tcx.is_lang_item(def_id, LangItem::Future) {
self.assemble_future_candidates(obligation, &mut candidates);
} else if tcx.is_lang_item(def_id, LangItem::Iterator) {
self.assemble_iterator_candidates(obligation, &mut candidates);
} else if tcx.is_lang_item(def_id, LangItem::FusedIterator) {
self.assemble_fused_iterator_candidates(obligation, &mut candidates);
} else if tcx.is_lang_item(def_id, LangItem::AsyncIterator) {
self.assemble_async_iterator_candidates(obligation, &mut candidates);
} else if tcx.is_lang_item(def_id, LangItem::AsyncFnKindHelper) {
self.assemble_async_fn_kind_helper_candidates(obligation, &mut candidates);
Some(LangItem::DiscriminantKind) => {
// `DiscriminantKind` is automatically implemented for every type.
candidates.vec.push(BuiltinCandidate { has_nested: false });
}
Some(LangItem::PointeeTrait) => {
// `Pointee` is automatically implemented for every type.
candidates.vec.push(BuiltinCandidate { has_nested: false });
}
Some(LangItem::Sized) => {
self.assemble_builtin_sized_candidate(obligation, &mut candidates);
}
Some(LangItem::Unsize) => {
self.assemble_candidates_for_unsizing(obligation, &mut candidates);
}
Some(LangItem::Destruct) => {
self.assemble_const_destruct_candidates(obligation, &mut candidates);
}
Some(LangItem::TransmuteTrait) => {
// User-defined transmutability impls are permitted.
self.assemble_candidates_from_impls(obligation, &mut candidates);
self.assemble_candidates_for_transmutability(obligation, &mut candidates);
}
Some(LangItem::Tuple) => {
self.assemble_candidate_for_tuple(obligation, &mut candidates);
}
Some(LangItem::FnPtrTrait) => {
self.assemble_candidates_for_fn_ptr_trait(obligation, &mut candidates);
}
Some(LangItem::BikeshedGuaranteedNoDrop) => {
self.assemble_candidates_for_bikeshed_guaranteed_no_drop_trait(
obligation,
&mut candidates,
);
}
_ => {
// We re-match here for traits that can have both builtin impls and user written impls.
// After the builtin impls we need to also add user written impls, which we do not want to
// do in general because just checking if there are any is expensive.
match lang_item {
Some(LangItem::Coroutine) => {
self.assemble_coroutine_candidates(obligation, &mut candidates);
}
Some(LangItem::Future) => {
self.assemble_future_candidates(obligation, &mut candidates);
}
Some(LangItem::Iterator) => {
self.assemble_iterator_candidates(obligation, &mut candidates);
}
Some(LangItem::FusedIterator) => {
self.assemble_fused_iterator_candidates(obligation, &mut candidates);
}
Some(LangItem::AsyncIterator) => {
self.assemble_async_iterator_candidates(obligation, &mut candidates);
}
Some(LangItem::AsyncFnKindHelper) => {
self.assemble_async_fn_kind_helper_candidates(
obligation,
&mut candidates,
);
}
Some(LangItem::AsyncFn | LangItem::AsyncFnMut | LangItem::AsyncFnOnce) => {
self.assemble_async_closure_candidates(obligation, &mut candidates);
}
Some(LangItem::Fn | LangItem::FnMut | LangItem::FnOnce) => {
self.assemble_closure_candidates(obligation, &mut candidates);
self.assemble_fn_pointer_candidates(obligation, &mut candidates);
}
_ => {}
}
// FIXME: Put these into `else if` blocks above, since they're built-in.
self.assemble_closure_candidates(obligation, &mut candidates);
self.assemble_async_closure_candidates(obligation, &mut candidates);
self.assemble_fn_pointer_candidates(obligation, &mut candidates);
self.assemble_candidates_from_impls(obligation, &mut candidates);
self.assemble_candidates_from_object_ty(obligation, &mut candidates);
self.assemble_candidates_from_impls(obligation, &mut candidates);
self.assemble_candidates_from_object_ty(obligation, &mut candidates);
}
}
self.assemble_candidates_from_projected_tys(obligation, &mut candidates);
@ -360,9 +381,7 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> {
obligation: &PolyTraitObligation<'tcx>,
candidates: &mut SelectionCandidateSet<'tcx>,
) {
let Some(kind) = self.tcx().fn_trait_kind_from_def_id(obligation.predicate.def_id()) else {
return;
};
let kind = self.tcx().fn_trait_kind_from_def_id(obligation.predicate.def_id()).unwrap();
// Okay to skip binder because the args on closure types never
// touch bound regions, they just capture the in-scope
@ -424,11 +443,8 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> {
obligation: &PolyTraitObligation<'tcx>,
candidates: &mut SelectionCandidateSet<'tcx>,
) {
let Some(goal_kind) =
self.tcx().async_fn_trait_kind_from_def_id(obligation.predicate.def_id())
else {
return;
};
let goal_kind =
self.tcx().async_fn_trait_kind_from_def_id(obligation.predicate.def_id()).unwrap();
match *obligation.self_ty().skip_binder().kind() {
ty::CoroutineClosure(_, args) => {
@ -501,11 +517,6 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> {
obligation: &PolyTraitObligation<'tcx>,
candidates: &mut SelectionCandidateSet<'tcx>,
) {
// We provide impl of all fn traits for fn pointers.
if !self.tcx().is_fn_trait(obligation.predicate.def_id()) {
return;
}
// Keep this function in sync with extract_tupled_inputs_and_output_from_callable
// until the old solver (and thus this function) is removed.

View file

@ -251,16 +251,11 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> {
let tcx = self.tcx();
let obligations = if has_nested {
let trait_def = obligation.predicate.def_id();
let conditions = if tcx.is_lang_item(trait_def, LangItem::Sized) {
self.sized_conditions(obligation)
} else if tcx.is_lang_item(trait_def, LangItem::Copy) {
self.copy_clone_conditions(obligation)
} else if tcx.is_lang_item(trait_def, LangItem::Clone) {
self.copy_clone_conditions(obligation)
} else if tcx.is_lang_item(trait_def, LangItem::FusedIterator) {
self.fused_iterator_conditions(obligation)
} else {
bug!("unexpected builtin trait {:?}", trait_def)
let conditions = match tcx.as_lang_item(trait_def) {
Some(LangItem::Sized) => self.sized_conditions(obligation),
Some(LangItem::Copy | LangItem::Clone) => self.copy_clone_conditions(obligation),
Some(LangItem::FusedIterator) => self.fused_iterator_conditions(obligation),
other => bug!("unexpected builtin trait {trait_def:?} ({other:?})"),
};
let BuiltinImplConditions::Where(types) = conditions else {
bug!("obligation {:?} had matched a builtin impl but now doesn't", obligation);

View file

@ -1312,6 +1312,8 @@ impl<T, A: Allocator> VecDeque<T, A> {
///
/// If [`make_contiguous`] was previously called, all elements of the
/// deque will be in the first slice and the second slice will be empty.
/// Otherwise, the exact split point depends on implementation details
/// and is not guaranteed.
///
/// [`make_contiguous`]: VecDeque::make_contiguous
///
@ -1326,12 +1328,18 @@ impl<T, A: Allocator> VecDeque<T, A> {
/// deque.push_back(1);
/// deque.push_back(2);
///
/// assert_eq!(deque.as_slices(), (&[0, 1, 2][..], &[][..]));
/// let expected = [0, 1, 2];
/// let (front, back) = deque.as_slices();
/// assert_eq!(&expected[..front.len()], front);
/// assert_eq!(&expected[front.len()..], back);
///
/// deque.push_front(10);
/// deque.push_front(9);
///
/// assert_eq!(deque.as_slices(), (&[9, 10][..], &[0, 1, 2][..]));
/// let expected = [9, 10, 0, 1, 2];
/// let (front, back) = deque.as_slices();
/// assert_eq!(&expected[..front.len()], front);
/// assert_eq!(&expected[front.len()..], back);
/// ```
#[inline]
#[stable(feature = "deque_extras_15", since = "1.5.0")]
@ -1347,6 +1355,8 @@ impl<T, A: Allocator> VecDeque<T, A> {
///
/// If [`make_contiguous`] was previously called, all elements of the
/// deque will be in the first slice and the second slice will be empty.
/// Otherwise, the exact split point depends on implementation details
/// and is not guaranteed.
///
/// [`make_contiguous`]: VecDeque::make_contiguous
///
@ -1363,9 +1373,22 @@ impl<T, A: Allocator> VecDeque<T, A> {
/// deque.push_front(10);
/// deque.push_front(9);
///
/// deque.as_mut_slices().0[0] = 42;
/// deque.as_mut_slices().1[0] = 24;
/// assert_eq!(deque.as_slices(), (&[42, 10][..], &[24, 1][..]));
/// // Since the split point is not guaranteed, we may need to update
/// // either slice.
/// let mut update_nth = |index: usize, val: u32| {
/// let (front, back) = deque.as_mut_slices();
/// if index > front.len() - 1 {
/// back[index - front.len()] = val;
/// } else {
/// front[index] = val;
/// }
/// };
///
/// update_nth(0, 42);
/// update_nth(2, 24);
///
/// let v: Vec<_> = deque.into();
/// assert_eq!(v, [42, 10, 24, 1]);
/// ```
#[inline]
#[stable(feature = "deque_extras_15", since = "1.5.0")]

View file

@ -69,7 +69,7 @@ use crate::boxed::Box;
use crate::vec::Vec;
impl<T> [T] {
/// Sorts the slice, preserving initial order of equal elements.
/// Sorts the slice in ascending order, preserving initial order of equal elements.
///
/// This sort is stable (i.e., does not reorder equal elements) and *O*(*n* \* log(*n*))
/// worst-case.
@ -137,7 +137,8 @@ impl<T> [T] {
stable_sort(self, T::lt);
}
/// Sorts the slice with a comparison function, preserving initial order of equal elements.
/// Sorts the slice in ascending order with a comparison function, preserving initial order of
/// equal elements.
///
/// This sort is stable (i.e., does not reorder equal elements) and *O*(*n* \* log(*n*))
/// worst-case.
@ -197,7 +198,8 @@ impl<T> [T] {
stable_sort(self, |a, b| compare(a, b) == Less);
}
/// Sorts the slice with a key extraction function, preserving initial order of equal elements.
/// Sorts the slice in ascending order with a key extraction function, preserving initial order
/// of equal elements.
///
/// This sort is stable (i.e., does not reorder equal elements) and *O*(*m* \* *n* \* log(*n*))
/// worst-case, where the key function is *O*(*m*).
@ -252,7 +254,8 @@ impl<T> [T] {
stable_sort(self, |a, b| f(a).lt(&f(b)));
}
/// Sorts the slice with a key extraction function, preserving initial order of equal elements.
/// Sorts the slice in ascending order with a key extraction function, preserving initial order
/// of equal elements.
///
/// This sort is stable (i.e., does not reorder equal elements) and *O*(*m* \* *n* + *n* \*
/// log(*n*)) worst-case, where the key function is *O*(*m*).

View file

@ -320,6 +320,7 @@ impl str {
/// ```
#[cfg(not(no_global_oom_handling))]
#[rustc_allow_incoherent_impl]
#[doc(alias = "replace_first")]
#[must_use = "this returns the replaced string as a new allocation, \
without modifying the original"]
#[stable(feature = "str_replacen", since = "1.16.0")]

View file

@ -28,7 +28,7 @@ pub mod c_str;
issue = "44930",
reason = "the `c_variadic` feature has not been properly tested on all supported platforms"
)]
pub use self::va_list::{VaList, VaListImpl};
pub use self::va_list::{VaArgSafe, VaList, VaListImpl};
#[unstable(
feature = "c_variadic",

View file

@ -35,7 +35,7 @@ type_alias! { "c_float.md", c_float = f32; }
type_alias! { "c_double.md", c_double = f64; }
mod c_char_definition {
crate::cfg_match! {
crate::cfg_select! {
// These are the targets on which c_char is unsigned. Usually the
// signedness is the same for all target_os values on a given architecture
// but there are some exceptions (see isSignedCharDefault() in clang).
@ -133,7 +133,7 @@ mod c_char_definition {
}
mod c_long_definition {
crate::cfg_match! {
crate::cfg_select! {
any(
all(target_pointer_width = "64", not(windows)),
// wasm32 Linux ABI uses 64-bit long
@ -172,7 +172,7 @@ pub type c_ptrdiff_t = isize;
pub type c_ssize_t = isize;
mod c_int_definition {
crate::cfg_match! {
crate::cfg_select! {
any(target_arch = "avr", target_arch = "msp430") => {
pub(super) type c_int = i16;
pub(super) type c_uint = u16;

View file

@ -223,39 +223,57 @@ impl<'a, 'f: 'a> DerefMut for VaList<'a, 'f> {
}
}
// The VaArgSafe trait needs to be used in public interfaces, however, the trait
// itself must not be allowed to be used outside this module. Allowing users to
// implement the trait for a new type (thereby allowing the va_arg intrinsic to
// be used on a new type) is likely to cause undefined behavior.
//
// FIXME(dlrobertson): In order to use the VaArgSafe trait in a public interface
// but also ensure it cannot be used elsewhere, the trait needs to be public
// within a private module. Once RFC 2145 has been implemented look into
// improving this.
mod sealed_trait {
/// Trait which permits the allowed types to be used with [super::VaListImpl::arg].
pub unsafe trait VaArgSafe {}
mod sealed {
pub trait Sealed {}
impl Sealed for i32 {}
impl Sealed for i64 {}
impl Sealed for isize {}
impl Sealed for u32 {}
impl Sealed for u64 {}
impl Sealed for usize {}
impl Sealed for f64 {}
impl<T> Sealed for *mut T {}
impl<T> Sealed for *const T {}
}
macro_rules! impl_va_arg_safe {
($($t:ty),+) => {
$(
unsafe impl sealed_trait::VaArgSafe for $t {}
)+
}
}
/// Trait which permits the allowed types to be used with [`VaListImpl::arg`].
///
/// # Safety
///
/// This trait must only be implemented for types that C passes as varargs without implicit promotion.
///
/// In C varargs, integers smaller than [`c_int`] and floats smaller than [`c_double`]
/// are implicitly promoted to [`c_int`] and [`c_double`] respectively. Implementing this trait for
/// types that are subject to this promotion rule is invalid.
///
/// [`c_int`]: core::ffi::c_int
/// [`c_double`]: core::ffi::c_double
pub unsafe trait VaArgSafe: sealed::Sealed {}
impl_va_arg_safe! {i8, i16, i32, i64, usize}
impl_va_arg_safe! {u8, u16, u32, u64, isize}
impl_va_arg_safe! {f64}
// i8 and i16 are implicitly promoted to c_int in C, and cannot implement `VaArgSafe`.
unsafe impl VaArgSafe for i32 {}
unsafe impl VaArgSafe for i64 {}
unsafe impl VaArgSafe for isize {}
unsafe impl<T> sealed_trait::VaArgSafe for *mut T {}
unsafe impl<T> sealed_trait::VaArgSafe for *const T {}
// u8 and u16 are implicitly promoted to c_int in C, and cannot implement `VaArgSafe`.
unsafe impl VaArgSafe for u32 {}
unsafe impl VaArgSafe for u64 {}
unsafe impl VaArgSafe for usize {}
// f32 is implicitly promoted to c_double in C, and cannot implement `VaArgSafe`.
unsafe impl VaArgSafe for f64 {}
unsafe impl<T> VaArgSafe for *mut T {}
unsafe impl<T> VaArgSafe for *const T {}
impl<'f> VaListImpl<'f> {
/// Advance to the next arg.
#[inline]
pub unsafe fn arg<T: sealed_trait::VaArgSafe>(&mut self) -> T {
pub unsafe fn arg<T: VaArgSafe>(&mut self) -> T {
// SAFETY: the caller must uphold the safety contract for `va_arg`.
unsafe { va_arg(self) }
}
@ -317,4 +335,4 @@ unsafe fn va_copy<'f>(dest: *mut VaListImpl<'f>, src: &VaListImpl<'f>);
/// argument `ap` points to.
#[rustc_intrinsic]
#[rustc_nounwind]
unsafe fn va_arg<T: sealed_trait::VaArgSafe>(ap: &mut VaListImpl<'_>) -> T;
unsafe fn va_arg<T: VaArgSafe>(ap: &mut VaListImpl<'_>) -> T;

View file

@ -100,7 +100,7 @@
#![feature(bigint_helper_methods)]
#![feature(bstr)]
#![feature(bstr_internals)]
#![feature(cfg_match)]
#![feature(cfg_select)]
#![feature(cfg_target_has_reliable_f16_f128)]
#![feature(const_carrying_mul_add)]
#![feature(const_eval_select)]
@ -235,8 +235,8 @@ pub mod autodiff {
#[unstable(feature = "contracts", issue = "128044")]
pub mod contracts;
#[unstable(feature = "cfg_match", issue = "115585")]
pub use crate::macros::cfg_match;
#[unstable(feature = "cfg_select", issue = "115585")]
pub use crate::macros::cfg_select;
#[macro_use]
mod internal_macros;

View file

@ -210,9 +210,9 @@ pub macro assert_matches {
/// # Example
///
/// ```
/// #![feature(cfg_match)]
/// #![feature(cfg_select)]
///
/// cfg_match! {
/// cfg_select! {
/// unix => {
/// fn foo() { /* unix specific functionality */ }
/// }
@ -228,19 +228,19 @@ pub macro assert_matches {
/// If desired, it is possible to return expressions through the use of surrounding braces:
///
/// ```
/// #![feature(cfg_match)]
/// #![feature(cfg_select)]
///
/// let _some_string = cfg_match! {{
/// let _some_string = cfg_select! {{
/// unix => { "With great power comes great electricity bills" }
/// _ => { "Behind every successful diet is an unwatched pizza" }
/// }};
/// ```
#[unstable(feature = "cfg_match", issue = "115585")]
#[rustc_diagnostic_item = "cfg_match"]
#[unstable(feature = "cfg_select", issue = "115585")]
#[rustc_diagnostic_item = "cfg_select"]
#[rustc_macro_transparency = "semitransparent"]
pub macro cfg_match {
pub macro cfg_select {
({ $($tt:tt)* }) => {{
$crate::cfg_match! { $($tt)* }
$crate::cfg_select! { $($tt)* }
}},
(_ => { $($output:tt)* }) => {
$($output)*
@ -250,10 +250,10 @@ pub macro cfg_match {
$($( $rest:tt )+)?
) => {
#[cfg($cfg)]
$crate::cfg_match! { _ => $output }
$crate::cfg_select! { _ => $output }
$(
#[cfg(not($cfg))]
$crate::cfg_match! { $($rest)+ }
$crate::cfg_select! { $($rest)+ }
)?
},
}

View file

@ -14,7 +14,7 @@
use crate::convert::FloatToInt;
use crate::num::FpCategory;
use crate::panic::const_assert;
use crate::{cfg_match, intrinsics, mem};
use crate::{cfg_select, intrinsics, mem};
/// The radix or base of the internal representation of `f32`.
/// Use [`f32::RADIX`] instead.
@ -990,7 +990,7 @@ impl f32 {
#[stable(feature = "num_midpoint", since = "1.85.0")]
#[rustc_const_stable(feature = "num_midpoint", since = "1.85.0")]
pub const fn midpoint(self, other: f32) -> f32 {
cfg_match! {
cfg_select! {
// Allow faster implementation that have known good 64-bit float
// implementations. Falling back to the branchy code on targets that don't
// have 64-bit hardware floats or buggy implementations.

View file

@ -66,6 +66,34 @@ impl<T: ?Sized> *const T {
self as _
}
/// Try to cast to a pointer of another type by checking aligment.
///
/// If the pointer is properly aligned to the target type, it will be
/// cast to the target type. Otherwise, `None` is returned.
///
/// # Examples
///
/// ```rust
/// #![feature(pointer_try_cast_aligned)]
///
/// let aligned: *const u8 = 0x1000 as _;
///
/// // i32 has at most 4-byte alignment, so this will succeed
/// assert!(aligned.try_cast_aligned::<i32>().is_some());
///
/// let unaligned: *const u8 = 0x1001 as _;
///
/// // i32 has at least 2-byte alignment, so this will fail
/// assert!(unaligned.try_cast_aligned::<i32>().is_none());
/// ```
#[unstable(feature = "pointer_try_cast_aligned", issue = "141221")]
#[must_use = "this returns the result of the operation, \
without modifying the original"]
#[inline]
pub fn try_cast_aligned<U>(self) -> Option<*const U> {
if self.is_aligned_to(align_of::<U>()) { Some(self.cast()) } else { None }
}
/// Uses the address value in a new pointer of another type.
///
/// This operation will ignore the address part of its `meta` operand and discard existing

View file

@ -48,6 +48,34 @@ impl<T: ?Sized> *mut T {
self as _
}
/// Try to cast to a pointer of another type by checking aligment.
///
/// If the pointer is properly aligned to the target type, it will be
/// cast to the target type. Otherwise, `None` is returned.
///
/// # Examples
///
/// ```rust
/// #![feature(pointer_try_cast_aligned)]
///
/// let aligned: *mut u8 = 0x1000 as _;
///
/// // i32 has at most 4-byte alignment, so this will succeed
/// assert!(aligned.try_cast_aligned::<i32>().is_some());
///
/// let unaligned: *mut u8 = 0x1001 as _;
///
/// // i32 has at least 2-byte alignment, so this will fail
/// assert!(unaligned.try_cast_aligned::<i32>().is_none());
/// ```
#[unstable(feature = "pointer_try_cast_aligned", issue = "141221")]
#[must_use = "this returns the result of the operation, \
without modifying the original"]
#[inline]
pub fn try_cast_aligned<U>(self) -> Option<*mut U> {
if self.is_aligned_to(align_of::<U>()) { Some(self.cast()) } else { None }
}
/// Uses the address value in a new pointer of another type.
///
/// This operation will ignore the address part of its `meta` operand and discard existing

View file

@ -490,6 +490,35 @@ impl<T: ?Sized> NonNull<T> {
unsafe { NonNull { pointer: self.as_ptr() as *mut U } }
}
/// Try to cast to a pointer of another type by checking aligment.
///
/// If the pointer is properly aligned to the target type, it will be
/// cast to the target type. Otherwise, `None` is returned.
///
/// # Examples
///
/// ```rust
/// #![feature(pointer_try_cast_aligned)]
/// use std::ptr::NonNull;
///
/// let aligned: NonNull<u8> = NonNull::new(0x1000 as _).unwrap();
///
/// // i32 has at most 4-byte alignment, so this will succeed
/// assert!(aligned.try_cast_aligned::<i32>().is_some());
///
/// let unaligned: NonNull<u8> = NonNull::new(0x1001 as _).unwrap();
///
/// // i32 has at least 2-byte alignment, so this will fail
/// assert!(unaligned.try_cast_aligned::<i32>().is_none());
/// ```
#[unstable(feature = "pointer_try_cast_aligned", issue = "141221")]
#[must_use = "this returns the result of the operation, \
without modifying the original"]
#[inline]
pub fn try_cast_aligned<U>(self) -> Option<NonNull<U>> {
if self.is_aligned_to(align_of::<U>()) { Some(self.cast()) } else { None }
}
/// Adds an offset to a pointer.
///
/// `count` is in units of T; e.g., a `count` of 3 represents a pointer

View file

@ -2986,7 +2986,7 @@ impl<T> [T] {
self.binary_search_by(|k| f(k).cmp(b))
}
/// Sorts the slice **without** preserving the initial order of equal elements.
/// Sorts the slice in ascending order **without** preserving the initial order of equal elements.
///
/// This sort is unstable (i.e., may reorder equal elements), in-place (i.e., does not
/// allocate), and *O*(*n* \* log(*n*)) worst-case.
@ -3047,8 +3047,8 @@ impl<T> [T] {
sort::unstable::sort(self, &mut T::lt);
}
/// Sorts the slice with a comparison function, **without** preserving the initial order of
/// equal elements.
/// Sorts the slice in ascending order with a comparison function, **without** preserving the
/// initial order of equal elements.
///
/// This sort is unstable (i.e., may reorder equal elements), in-place (i.e., does not
/// allocate), and *O*(*n* \* log(*n*)) worst-case.
@ -3102,8 +3102,8 @@ impl<T> [T] {
sort::unstable::sort(self, &mut |a, b| compare(a, b) == Ordering::Less);
}
/// Sorts the slice with a key extraction function, **without** preserving the initial order of
/// equal elements.
/// Sorts the slice in ascending order with a key extraction function, **without** preserving
/// the initial order of equal elements.
///
/// This sort is unstable (i.e., may reorder equal elements), in-place (i.e., does not
/// allocate), and *O*(*n* \* log(*n*)) worst-case.

View file

@ -6,7 +6,7 @@
//! for pivot selection. Using this as a fallback ensures O(n) worst case running time with
//! better performance than one would get using heapsort as fallback.
use crate::cfg_match;
use crate::cfg_select;
use crate::mem::{self, SizedTypeProperties};
#[cfg(not(feature = "optimize_for_size"))]
use crate::slice::sort::shared::pivot::choose_pivot;
@ -42,7 +42,7 @@ where
let min_idx = min_index(v, &mut is_less).unwrap();
v.swap(min_idx, index);
} else {
cfg_match! {
cfg_select! {
feature = "optimize_for_size" => {
median_of_medians(v, &mut is_less, index);
}

View file

@ -7,7 +7,7 @@ use crate::mem::{MaybeUninit, SizedTypeProperties};
use crate::slice::sort::shared::smallsort::{
SMALL_SORT_GENERAL_SCRATCH_LEN, StableSmallSortTypeImpl, insertion_sort_shift_left,
};
use crate::{cfg_match, intrinsics};
use crate::{cfg_select, intrinsics};
pub(crate) mod merge;
@ -39,13 +39,13 @@ pub fn sort<T, F: FnMut(&T, &T) -> bool, BufT: BufGuard<T>>(v: &mut [T], is_less
return;
}
cfg_match! {
cfg_select! {
any(feature = "optimize_for_size", target_pointer_width = "16") => {
// Unlike driftsort, mergesort only requires len / 2,
// not len - len / 2.
let alloc_len = len / 2;
cfg_match! {
cfg_select! {
target_pointer_width = "16" => {
let mut heap_buf = BufT::with_capacity(alloc_len);
let scratch = heap_buf.as_uninit_slice_mut();

View file

@ -5,7 +5,7 @@ use crate::mem::SizedTypeProperties;
use crate::slice::sort::shared::find_existing_run;
#[cfg(not(any(feature = "optimize_for_size", target_pointer_width = "16")))]
use crate::slice::sort::shared::smallsort::insertion_sort_shift_left;
use crate::{cfg_match, intrinsics};
use crate::{cfg_select, intrinsics};
pub(crate) mod heapsort;
pub(crate) mod quicksort;
@ -30,7 +30,7 @@ pub fn sort<T, F: FnMut(&T, &T) -> bool>(v: &mut [T], is_less: &mut F) {
return;
}
cfg_match! {
cfg_select! {
any(feature = "optimize_for_size", target_pointer_width = "16") => {
heapsort::heapsort(v, is_less);
}

View file

@ -9,7 +9,7 @@ use crate::slice::sort::shared::pivot::choose_pivot;
use crate::slice::sort::shared::smallsort::UnstableSmallSortTypeImpl;
#[cfg(not(feature = "optimize_for_size"))]
use crate::slice::sort::unstable::heapsort;
use crate::{cfg_match, intrinsics, ptr};
use crate::{cfg_select, intrinsics, ptr};
/// Sorts `v` recursively.
///
@ -142,7 +142,7 @@ const fn inst_partition<T, F: FnMut(&T, &T) -> bool>() -> fn(&mut [T], &T, &mut
if size_of::<T>() <= MAX_BRANCHLESS_PARTITION_SIZE {
// Specialize for types that are relatively cheap to copy, where branchless optimizations
// have large leverage e.g. `u64` and `String`.
cfg_match! {
cfg_select! {
feature = "optimize_for_size" => {
partition_lomuto_branchless_simple::<T, F>
}

View file

@ -1,6 +1,6 @@
// tidy-alphabetical-start
#![cfg_attr(target_has_atomic = "128", feature(integer_atomics))]
#![cfg_attr(test, feature(cfg_match))]
#![cfg_attr(test, feature(cfg_select))]
#![feature(alloc_layout_extra)]
#![feature(array_chunks)]
#![feature(array_ptr_get)]

View file

@ -9,7 +9,7 @@ trait Trait {
struct Struct;
impl Trait for Struct {
cfg_match! {
cfg_select! {
feature = "blah" => {
fn blah(&self) {
unimplemented!();
@ -45,22 +45,22 @@ fn matches_leading_pipe() {
}
#[test]
fn cfg_match_basic() {
cfg_match! {
fn cfg_select_basic() {
cfg_select! {
target_pointer_width = "64" => { fn f0_() -> bool { true }}
}
cfg_match! {
cfg_select! {
unix => { fn f1_() -> bool { true } }
any(target_os = "macos", target_os = "linux") => { fn f1_() -> bool { false }}
}
cfg_match! {
cfg_select! {
target_pointer_width = "32" => { fn f2_() -> bool { false } }
target_pointer_width = "64" => { fn f2_() -> bool { true } }
}
cfg_match! {
cfg_select! {
target_pointer_width = "16" => { fn f3_() -> i32 { 1 } }
_ => { fn f3_() -> i32 { 2 }}
}
@ -81,8 +81,8 @@ fn cfg_match_basic() {
}
#[test]
fn cfg_match_debug_assertions() {
cfg_match! {
fn cfg_select_debug_assertions() {
cfg_select! {
debug_assertions => {
assert!(cfg!(debug_assertions));
assert_eq!(4, 2+2);
@ -96,8 +96,8 @@ fn cfg_match_debug_assertions() {
#[cfg(target_pointer_width = "64")]
#[test]
fn cfg_match_no_duplication_on_64() {
cfg_match! {
fn cfg_select_no_duplication_on_64() {
cfg_select! {
windows => {
fn foo() {}
}
@ -112,8 +112,8 @@ fn cfg_match_no_duplication_on_64() {
}
#[test]
fn cfg_match_options() {
cfg_match! {
fn cfg_select_options() {
cfg_select! {
test => {
use core::option::Option as Option2;
fn works1() -> Option2<u32> { Some(1) }
@ -121,25 +121,25 @@ fn cfg_match_options() {
_ => { fn works1() -> Option<u32> { None } }
}
cfg_match! {
cfg_select! {
feature = "foo" => { fn works2() -> bool { false } }
test => { fn works2() -> bool { true } }
_ => { fn works2() -> bool { false } }
}
cfg_match! {
cfg_select! {
feature = "foo" => { fn works3() -> bool { false } }
_ => { fn works3() -> bool { true } }
}
cfg_match! {
cfg_select! {
test => {
use core::option::Option as Option3;
fn works4() -> Option3<u32> { Some(1) }
}
}
cfg_match! {
cfg_select! {
feature = "foo" => { fn works5() -> bool { false } }
test => { fn works5() -> bool { true } }
}
@ -152,8 +152,8 @@ fn cfg_match_options() {
}
#[test]
fn cfg_match_two_functions() {
cfg_match! {
fn cfg_select_two_functions() {
cfg_select! {
target_pointer_width = "64" => {
fn foo1() {}
fn bar1() {}
@ -177,7 +177,7 @@ fn cfg_match_two_functions() {
}
fn _accepts_expressions() -> i32 {
cfg_match! {
cfg_select! {
unix => { 1 }
_ => { 2 }
}
@ -188,14 +188,14 @@ fn _accepts_expressions() -> i32 {
fn _allows_stmt_expr_attributes() {
let one = 1;
let two = 2;
cfg_match! {
cfg_select! {
unix => { one * two; }
_ => { one + two; }
}
}
fn _expression() {
let _ = cfg_match!({
let _ = cfg_select!({
windows => {
" XP"
}

View file

@ -172,7 +172,7 @@ pub use core::ffi::c_void;
all supported platforms",
issue = "44930"
)]
pub use core::ffi::{VaList, VaListImpl};
pub use core::ffi::{VaArgSafe, VaList, VaListImpl};
#[stable(feature = "core_ffi_c", since = "1.64.0")]
pub use core::ffi::{
c_char, c_double, c_float, c_int, c_long, c_longlong, c_schar, c_short, c_uchar, c_uint,

View file

@ -700,8 +700,8 @@ mod panicking;
#[allow(dead_code, unused_attributes, fuzzy_provenance_casts, unsafe_op_in_unsafe_fn)]
mod backtrace_rs;
#[unstable(feature = "cfg_match", issue = "115585")]
pub use core::cfg_match;
#[unstable(feature = "cfg_select", issue = "115585")]
pub use core::cfg_select;
#[unstable(
feature = "concat_bytes",
issue = "87555",

View file

@ -8,6 +8,7 @@ use cfg_if::cfg_if;
use crate::ffi::OsStr;
use crate::os::unix::io::{AsFd, AsRawFd, BorrowedFd, FromRawFd, IntoRawFd, OwnedFd, RawFd};
use crate::path::Path;
use crate::sealed::Sealed;
use crate::sys_common::{AsInner, AsInnerMut, FromInner, IntoInner};
use crate::{io, process, sys};
@ -197,6 +198,18 @@ pub trait CommandExt: Sealed {
/// ```
#[stable(feature = "process_set_process_group", since = "1.64.0")]
fn process_group(&mut self, pgroup: i32) -> &mut process::Command;
/// Set the root of the child process. This calls `chroot` in the child process before executing
/// the command.
///
/// This happens before changing to the directory specified with
/// [`process::Command::current_dir`], and that directory will be relative to the new root.
///
/// If no directory has been specified with [`process::Command::current_dir`], this will set the
/// directory to `/`, to avoid leaving the current directory outside the chroot. (This is an
/// intentional difference from the underlying `chroot` system call.)
#[unstable(feature = "process_chroot", issue = "141298")]
fn chroot<P: AsRef<Path>>(&mut self, dir: P) -> &mut process::Command;
}
#[stable(feature = "rust1", since = "1.0.0")]
@ -242,6 +255,11 @@ impl CommandExt for process::Command {
self.as_inner_mut().pgroup(pgroup);
self
}
fn chroot<P: AsRef<Path>>(&mut self, dir: P) -> &mut process::Command {
self.as_inner_mut().chroot(dir.as_ref());
self
}
}
/// Unix-specific extensions to [`process::ExitStatus`] and

View file

@ -88,6 +88,7 @@ pub struct Command {
program_kind: ProgramKind,
cwd: Option<CString>,
chroot: Option<CString>,
uid: Option<uid_t>,
gid: Option<gid_t>,
saw_nul: bool,
@ -182,6 +183,7 @@ impl Command {
program_kind,
env: Default::default(),
cwd: None,
chroot: None,
uid: None,
gid: None,
saw_nul,
@ -206,6 +208,7 @@ impl Command {
program_kind,
env: Default::default(),
cwd: None,
chroot: None,
uid: None,
gid: None,
saw_nul,
@ -254,6 +257,12 @@ impl Command {
pub fn pgroup(&mut self, pgroup: pid_t) {
self.pgroup = Some(pgroup);
}
pub fn chroot(&mut self, dir: &Path) {
self.chroot = Some(os2c(dir.as_os_str(), &mut self.saw_nul));
if self.cwd.is_none() {
self.cwd(&OsStr::new("/"));
}
}
#[cfg(target_os = "linux")]
pub fn create_pidfd(&mut self, val: bool) {
@ -326,6 +335,10 @@ impl Command {
pub fn get_pgroup(&self) -> Option<pid_t> {
self.pgroup
}
#[allow(dead_code)]
pub fn get_chroot(&self) -> Option<&CStr> {
self.chroot.as_deref()
}
pub fn get_closures(&mut self) -> &mut Vec<Box<dyn FnMut() -> io::Result<()> + Send + Sync>> {
&mut self.closures

View file

@ -323,6 +323,15 @@ impl Command {
cvt(libc::setuid(u as uid_t))?;
}
}
if let Some(chroot) = self.get_chroot() {
#[cfg(not(target_os = "fuchsia"))]
cvt(libc::chroot(chroot.as_ptr()))?;
#[cfg(target_os = "fuchsia")]
return Err(io::const_error!(
io::ErrorKind::Unsupported,
"chroot not supported by fuchsia"
));
}
if let Some(cwd) = self.get_cwd() {
cvt(libc::chdir(cwd.as_ptr()))?;
}
@ -447,6 +456,7 @@ impl Command {
|| (self.env_saw_path() && !self.program_is_path())
|| !self.get_closures().is_empty()
|| self.get_groups().is_some()
|| self.get_chroot().is_some()
{
return Ok(None);
}

View file

@ -27,6 +27,12 @@ impl Command {
"nul byte found in provided data",
));
}
if self.get_chroot().is_some() {
return Err(io::const_error!(
ErrorKind::Unsupported,
"chroot not supported by vxworks",
));
}
let (ours, theirs) = self.setup_io(default, needs_stdin)?;
let mut p = Process { pid: 0, status: None };

@ -1 +1 @@
Subproject commit 1dfaa4db2479753a46a3e90f2c3c89d89d0b21f1
Subproject commit b6e2249e388f520627544812649b77b0944e1a2e

View file

@ -89,9 +89,9 @@ dependencies = [
[[package]]
name = "cc"
version = "1.2.17"
version = "1.2.23"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1fcb57c740ae1daf453ae85f16e37396f672b039e00d9d866e07ddb24e328e3a"
checksum = "5f4ac86a9e5bc1e2b3449ab9d7d3a6a405e3d1bb28d7b9be8614f55846ae3766"
dependencies = [
"shlex",
]

View file

@ -32,7 +32,7 @@ test = false
# Most of the time updating these dependencies requires modifications to the
# bootstrap codebase(e.g., https://github.com/rust-lang/rust/issues/124565);
# otherwise, some targets will fail. That's why these dependencies are explicitly pinned.
cc = "=1.2.17"
cc = "=1.2.23"
cmake = "=0.1.54"
build_helper = { path = "../build_helper" }

View file

@ -1,3 +1,4 @@
use std::env::consts::EXE_EXTENSION;
use std::fmt::{Display, Formatter};
use crate::core::build_steps::compile::{Std, Sysroot};
@ -160,7 +161,10 @@ Consider setting `rust.debuginfo-level = 1` in `bootstrap.toml`."#);
}
let sysroot = builder.ensure(Sysroot::new(compiler));
let rustc = sysroot.join("bin/rustc");
let mut rustc = sysroot.clone();
rustc.push("bin");
rustc.push("rustc");
rustc.set_extension(EXE_EXTENSION);
let rustc_perf_dir = builder.build.tempdir().join("rustc-perf");
let results_dir = rustc_perf_dir.join("results");

View file

@ -22,43 +22,13 @@
//! everything.
use std::collections::HashSet;
use std::iter;
use std::path::{Path, PathBuf};
use std::{env, iter};
use crate::core::config::TargetSelection;
use crate::utils::exec::{BootstrapCommand, command};
use crate::{Build, CLang, GitRepo};
/// Finds archiver tool for the given target if possible.
/// FIXME(onur-ozkan): This logic should be replaced by calling into the `cc` crate.
fn cc2ar(cc: &Path, target: TargetSelection, default_ar: PathBuf) -> Option<PathBuf> {
if let Some(ar) = env::var_os(format!("AR_{}", target.triple.replace('-', "_"))) {
Some(PathBuf::from(ar))
} else if let Some(ar) = env::var_os("AR") {
Some(PathBuf::from(ar))
} else if target.is_msvc() {
None
} else if target.contains("musl") || target.contains("openbsd") {
Some(PathBuf::from("ar"))
} else if target.contains("vxworks") {
Some(PathBuf::from("wr-ar"))
} else if target.contains("-nto-") {
if target.starts_with("i586") {
Some(PathBuf::from("ntox86-ar"))
} else if target.starts_with("aarch64") {
Some(PathBuf::from("ntoaarch64-ar"))
} else if target.starts_with("x86_64") {
Some(PathBuf::from("ntox86_64-ar"))
} else {
panic!("Unknown architecture, cannot determine archiver for Neutrino QNX");
}
} else if target.contains("android") || target.contains("-wasi") {
Some(cc.parent().unwrap().join(PathBuf::from("llvm-ar")))
} else {
Some(default_ar)
}
}
/// Creates and configures a new [`cc::Build`] instance for the given target.
fn new_cc_build(build: &Build, target: TargetSelection) -> cc::Build {
let mut cfg = cc::Build::new();
@ -140,7 +110,7 @@ pub fn find_target(build: &Build, target: TargetSelection) {
let ar = if let ar @ Some(..) = config.and_then(|c| c.ar.clone()) {
ar
} else {
cc2ar(compiler.path(), target, PathBuf::from(cfg.get_archiver().get_program()))
cfg.try_get_archiver().map(|c| PathBuf::from(c.get_program())).ok()
};
build.cc.borrow_mut().insert(target, compiler.clone());

View file

@ -5,119 +5,6 @@ use super::*;
use crate::core::config::{Target, TargetSelection};
use crate::{Build, Config, Flags};
#[test]
fn test_cc2ar_env_specific() {
let triple = "x86_64-unknown-linux-gnu";
let key = "AR_x86_64_unknown_linux_gnu";
// SAFETY: bootstrap tests run on a single thread
unsafe { env::set_var(key, "custom-ar") };
let target = TargetSelection::from_user(triple);
let cc = Path::new("/usr/bin/clang");
let default_ar = PathBuf::from("default-ar");
let result = cc2ar(cc, target, default_ar);
// SAFETY: bootstrap tests run on a single thread
unsafe { env::remove_var(key) };
assert_eq!(result, Some(PathBuf::from("custom-ar")));
}
#[test]
fn test_cc2ar_musl() {
let triple = "x86_64-unknown-linux-musl";
// SAFETY: bootstrap tests run on a single thread
unsafe { env::remove_var("AR_x86_64_unknown_linux_musl") };
// SAFETY: bootstrap tests run on a single thread
unsafe { env::remove_var("AR") };
let target = TargetSelection::from_user(triple);
let cc = Path::new("/usr/bin/clang");
let default_ar = PathBuf::from("default-ar");
let result = cc2ar(cc, target, default_ar);
assert_eq!(result, Some(PathBuf::from("ar")));
}
#[test]
fn test_cc2ar_openbsd() {
let triple = "x86_64-unknown-openbsd";
// SAFETY: bootstrap tests run on a single thread
unsafe { env::remove_var("AR_x86_64_unknown_openbsd") };
// SAFETY: bootstrap tests run on a single thread
unsafe { env::remove_var("AR") };
let target = TargetSelection::from_user(triple);
let cc = Path::new("/usr/bin/cc");
let default_ar = PathBuf::from("default-ar");
let result = cc2ar(cc, target, default_ar);
assert_eq!(result, Some(PathBuf::from("ar")));
}
#[test]
fn test_cc2ar_vxworks() {
let triple = "armv7-wrs-vxworks";
// SAFETY: bootstrap tests run on a single thread
unsafe { env::remove_var("AR_armv7_wrs_vxworks") };
// SAFETY: bootstrap tests run on a single thread
unsafe { env::remove_var("AR") };
let target = TargetSelection::from_user(triple);
let cc = Path::new("/usr/bin/clang");
let default_ar = PathBuf::from("default-ar");
let result = cc2ar(cc, target, default_ar);
assert_eq!(result, Some(PathBuf::from("wr-ar")));
}
#[test]
fn test_cc2ar_nto_i586() {
let triple = "i586-unknown-nto-something";
// SAFETY: bootstrap tests run on a single thread
unsafe { env::remove_var("AR_i586_unknown_nto_something") };
// SAFETY: bootstrap tests run on a single thread
unsafe { env::remove_var("AR") };
let target = TargetSelection::from_user(triple);
let cc = Path::new("/usr/bin/clang");
let default_ar = PathBuf::from("default-ar");
let result = cc2ar(cc, target, default_ar);
assert_eq!(result, Some(PathBuf::from("ntox86-ar")));
}
#[test]
fn test_cc2ar_nto_aarch64() {
let triple = "aarch64-unknown-nto-something";
// SAFETY: bootstrap tests run on a single thread
unsafe { env::remove_var("AR_aarch64_unknown_nto_something") };
// SAFETY: bootstrap tests run on a single thread
unsafe { env::remove_var("AR") };
let target = TargetSelection::from_user(triple);
let cc = Path::new("/usr/bin/clang");
let default_ar = PathBuf::from("default-ar");
let result = cc2ar(cc, target, default_ar);
assert_eq!(result, Some(PathBuf::from("ntoaarch64-ar")));
}
#[test]
fn test_cc2ar_nto_x86_64() {
let triple = "x86_64-unknown-nto-something";
// SAFETY: bootstrap tests run on a single thread
unsafe { env::remove_var("AR_x86_64_unknown_nto_something") };
// SAFETY: bootstrap tests run on a single thread
unsafe { env::remove_var("AR") };
let target = TargetSelection::from_user(triple);
let cc = Path::new("/usr/bin/clang");
let default_ar = PathBuf::from("default-ar");
let result = cc2ar(cc, target, default_ar);
assert_eq!(result, Some(PathBuf::from("ntox86_64-ar")));
}
#[test]
#[should_panic(expected = "Unknown architecture, cannot determine archiver for Neutrino QNX")]
fn test_cc2ar_nto_unknown() {
let triple = "powerpc-unknown-nto-something";
// SAFETY: bootstrap tests run on a single thread
unsafe { env::remove_var("AR_powerpc_unknown_nto_something") };
// SAFETY: bootstrap tests run on a single thread
unsafe { env::remove_var("AR") };
let target = TargetSelection::from_user(triple);
let cc = Path::new("/usr/bin/clang");
let default_ar = PathBuf::from("default-ar");
let _ = cc2ar(cc, target, default_ar);
}
#[test]
fn test_ndk_compiler_c() {
let ndk_path = PathBuf::from("/ndk");

View file

@ -28,6 +28,7 @@ RUN /scripts/android-sdk.sh
ENV PATH=$PATH:/android/sdk/emulator
ENV PATH=$PATH:/android/sdk/tools
ENV PATH=$PATH:/android/sdk/platform-tools
ENV PATH=$PATH:/android/ndk/toolchains/llvm/prebuilt/linux-x86_64/bin
ENV TARGETS=arm-linux-androideabi

View file

@ -22,6 +22,8 @@ ENV RUST_CONFIGURE_ARGS \
--android-ndk=/android/ndk/ \
--disable-docs
ENV PATH=$PATH:/android/ndk/toolchains/llvm/prebuilt/linux-x86_64/bin
ENV SCRIPT python3 ../x.py dist --host='' --target $TARGETS
COPY scripts/sccache.sh /scripts/

View file

@ -0,0 +1,35 @@
FROM ghcr.io/rust-lang/ubuntu:22.04
COPY scripts/cross-apt-packages.sh /scripts/
RUN sh /scripts/cross-apt-packages.sh
COPY scripts/crosstool-ng.sh /scripts/
RUN sh /scripts/crosstool-ng.sh
WORKDIR /build
COPY scripts/rustbuild-setup.sh /scripts/
RUN sh /scripts/rustbuild-setup.sh
WORKDIR /tmp
COPY scripts/crosstool-ng-build.sh /scripts/
COPY host-x86_64/dist-arm-linux-gnueabi/arm-linux-gnueabi.defconfig /tmp/crosstool.defconfig
RUN /scripts/crosstool-ng-build.sh
COPY scripts/sccache.sh /scripts/
RUN sh /scripts/sccache.sh
ENV PATH=$PATH:/x-tools/arm-unknown-linux-gnueabi/bin
ENV CC_arm_unknown_linux_gnueabi=arm-unknown-linux-gnueabi-gcc \
AR_arm_unknown_linux_gnueabi=arm-unknown-linux-gnueabi-ar \
CXX_arm_unknown_linux_gnueabi=arm-unknown-linux-gnueabi-g++
ENV HOSTS=arm-unknown-linux-gnueabi
ENV RUST_CONFIGURE_ARGS \
--enable-full-tools \
--disable-docs \
--enable-sanitizers \
--enable-profiler
ENV SCRIPT python3 ../x.py dist --host $HOSTS --target $HOSTS

View file

@ -19,19 +19,13 @@ RUN sh /scripts/rustbuild-setup.sh
WORKDIR /tmp
COPY scripts/crosstool-ng-build.sh /scripts/
COPY host-x86_64/dist-arm-linux/arm-linux-gnueabi.defconfig /tmp/crosstool.defconfig
COPY host-x86_64/dist-arm-linux-musl/arm-linux-musl.defconfig /tmp/crosstool.defconfig
RUN /scripts/crosstool-ng-build.sh
COPY scripts/sccache.sh /scripts/
RUN sh /scripts/sccache.sh
ENV PATH=$PATH:/x-tools/arm-unknown-linux-gnueabi/bin
ENV CC_arm_unknown_linux_gnueabi=arm-unknown-linux-gnueabi-gcc \
AR_arm_unknown_linux_gnueabi=arm-unknown-linux-gnueabi-ar \
CXX_arm_unknown_linux_gnueabi=arm-unknown-linux-gnueabi-g++
ENV HOSTS=arm-unknown-linux-gnueabi,aarch64-unknown-linux-musl
ENV HOSTS=aarch64-unknown-linux-musl
ENV RUST_CONFIGURE_ARGS \
--enable-full-tools \

View file

@ -0,0 +1,13 @@
CT_CONFIG_VERSION="4"
CT_PREFIX_DIR="/x-tools/${CT_TARGET}"
CT_USE_MIRROR=y
CT_MIRROR_BASE_URL="https://ci-mirrors.rust-lang.org/rustc"
CT_ARCH_ARM=y
CT_ARCH_ARCH="armv6"
CT_ARCH_FLOAT_SW=y
CT_KERNEL_LINUX=y
CT_LINUX_V_3_2=y
CT_BINUTILS_V_2_32=y
CT_GLIBC_V_2_17=y
CT_GCC_V_8=y
CT_CC_LANG_CXX=y

View file

@ -0,0 +1,41 @@
FROM ubuntu:22.04
COPY scripts/cross-apt-packages.sh /scripts/
RUN sh /scripts/cross-apt-packages.sh
COPY scripts/crosstool-ng.sh /scripts/
RUN sh /scripts/crosstool-ng.sh
COPY scripts/rustbuild-setup.sh /scripts/
RUN sh /scripts/rustbuild-setup.sh
WORKDIR /tmp
COPY scripts/crosstool-ng-build.sh /scripts/
COPY host-x86_64/dist-powerpc64le-linux-gnu/powerpc64le-unknown-linux-gnu.defconfig /tmp/crosstool.defconfig
RUN /scripts/crosstool-ng-build.sh
WORKDIR /build
RUN apt-get install -y --no-install-recommends rpm2cpio cpio
COPY scripts/shared.sh scripts/build-powerpc64le-toolchain.sh /build/
RUN ./build-powerpc64le-toolchain.sh
COPY scripts/sccache.sh /scripts/
RUN sh /scripts/sccache.sh
ENV \
AR_powerpc64le_unknown_linux_gnu=powerpc64le-linux-gnu-ar \
CC_powerpc64le_unknown_linux_gnu=powerpc64le-linux-gnu-gcc \
CXX_powerpc64le_unknown_linux_gnu=powerpc64le-linux-gnu-g++
ENV HOSTS=powerpc64le-unknown-linux-gnu
ENV RUST_CONFIGURE_ARGS \
--enable-extended \
--enable-full-tools \
--enable-profiler \
--enable-sanitizers \
--disable-docs
ENV SCRIPT python3 ../x.py dist --host $HOSTS --target $HOSTS

View file

@ -0,0 +1,14 @@
CT_CONFIG_VERSION="4"
CT_EXPERIMENTAL=y
CT_PREFIX_DIR="/x-tools/${CT_TARGET}"
CT_USE_MIRROR=y
CT_MIRROR_BASE_URL="https://ci-mirrors.rust-lang.org/rustc"
CT_ARCH_POWERPC=y
CT_ARCH_LE=y
CT_ARCH_64=y
# CT_DEMULTILIB is not set
CT_ARCH_ARCH="powerpc64le"
CT_KERNEL_LINUX=y
CT_LINUX_V_4_19=y
CT_CC_LANG_CXX=y
CT_GETTEXT_NEEDED=y

View file

@ -12,13 +12,13 @@ RUN sh /scripts/rustbuild-setup.sh
WORKDIR /tmp
COPY scripts/crosstool-ng-build.sh /scripts/
COPY host-x86_64/dist-powerpc64le-linux/powerpc64le-unknown-linux-musl.defconfig /tmp/crosstool.defconfig
COPY host-x86_64/dist-powerpc64le-linux-musl/powerpc64le-unknown-linux-musl.defconfig /tmp/crosstool.defconfig
RUN /scripts/crosstool-ng-build.sh
WORKDIR /build
RUN apt-get install -y --no-install-recommends rpm2cpio cpio
COPY scripts/shared.sh host-x86_64/dist-powerpc64le-linux/build-powerpc64le-toolchain.sh /build/
COPY scripts/shared.sh scripts/build-powerpc64le-toolchain.sh /build/
RUN ./build-powerpc64le-toolchain.sh
COPY scripts/sccache.sh /scripts/
@ -27,14 +27,11 @@ RUN sh /scripts/sccache.sh
ENV PATH=$PATH:/x-tools/powerpc64le-unknown-linux-musl/bin
ENV \
AR_powerpc64le_unknown_linux_gnu=powerpc64le-linux-gnu-ar \
CC_powerpc64le_unknown_linux_gnu=powerpc64le-linux-gnu-gcc \
CXX_powerpc64le_unknown_linux_gnu=powerpc64le-linux-gnu-g++ \
AR_powerpc64le_unknown_linux_musl=powerpc64le-unknown-linux-musl-ar \
CC_powerpc64le_unknown_linux_musl=powerpc64le-unknown-linux-musl-gcc \
CXX_powerpc64le_unknown_linux_musl=powerpc64le-unknown-linux-musl-g++
ENV HOSTS=powerpc64le-unknown-linux-gnu,powerpc64le-unknown-linux-musl
ENV HOSTS=powerpc64le-unknown-linux-musl
ENV RUST_CONFIGURE_ARGS \
--enable-extended \

View file

@ -97,9 +97,8 @@ if [ -f "$docker_dir/$image/Dockerfile" ]; then
docker --version
REGISTRY=ghcr.io
# Hardcode username to reuse cache between auto and pr jobs
# FIXME: should be changed after move from rust-lang-ci
REGISTRY_USERNAME=rust-lang-ci
# Default to `rust-lang` to allow reusing the cache for local builds
REGISTRY_USERNAME=${GITHUB_REPOSITORY_OWNER:-rust-lang}
# Tag used to push the final Docker image, so that it can be pulled by e.g. rustup
IMAGE_TAG=${REGISTRY}/${REGISTRY_USERNAME}/rust-ci:${cksum}
# Tag used to cache the Docker build

View file

@ -10,11 +10,6 @@ runners:
free_disk: true
<<: *base-job
# Large runner used mainly for its bigger disk capacity
- &job-linux-4c-largedisk
os: ubuntu-24.04-4core-16gb
<<: *base-job
- &job-linux-8c
os: ubuntu-24.04-8core-32gb
<<: *base-job
@ -167,8 +162,11 @@ auto:
- name: dist-android
<<: *job-linux-4c
- name: dist-arm-linux
<<: *job-linux-8c-codebuild
- name: dist-arm-linux-gnueabi
<<: *job-linux-4c
- name: dist-arm-linux-musl
<<: *job-linux-4c
- name: dist-armhf-linux
<<: *job-linux-4c
@ -203,8 +201,11 @@ auto:
- name: dist-powerpc64-linux
<<: *job-linux-4c
- name: dist-powerpc64le-linux
<<: *job-linux-4c-largedisk
- name: dist-powerpc64le-linux-gnu
<<: *job-linux-4c
- name: dist-powerpc64le-linux-musl
<<: *job-linux-4c
- name: dist-riscv64-linux
<<: *job-linux-4c

@ -1 +1 @@
Subproject commit 1b1bb49babd65c732468cfa515b0c009bd1d26bc
Subproject commit aa6ce337c0adf7a63e33960d184270f2a45ab9ef

@ -1 +1 @@
Subproject commit acd0231ebc74849f6a8907b5e646ce86721aad76
Subproject commit 118fd1f1f0854f50e3ae1fe4b64862aad23009ca

View file

@ -12,7 +12,7 @@ use std::sync::atomic::{AtomicUsize, Ordering};
use std::sync::{Arc, Mutex};
use std::{panic, str};
pub(crate) use make::DocTestBuilder;
pub(crate) use make::{BuildDocTestBuilder, DocTestBuilder};
pub(crate) use markdown::test as test_markdown;
use rustc_data_structures::fx::{FxHashMap, FxIndexMap, FxIndexSet};
use rustc_errors::emitter::HumanReadableErrorType;
@ -23,9 +23,9 @@ use rustc_hir::def_id::LOCAL_CRATE;
use rustc_interface::interface;
use rustc_session::config::{self, CrateType, ErrorOutputType, Input};
use rustc_session::lint;
use rustc_span::FileName;
use rustc_span::edition::Edition;
use rustc_span::symbol::sym;
use rustc_span::{FileName, Span};
use rustc_target::spec::{Target, TargetTuple};
use tempfile::{Builder as TempFileBuilder, TempDir};
use tracing::debug;
@ -197,7 +197,7 @@ pub(crate) fn run(dcx: DiagCtxtHandle<'_>, input: Input, options: RustdocOptions
}
} else {
let mut collector = CreateRunnableDocTests::new(options, opts);
tests.into_iter().for_each(|t| collector.add_test(t));
tests.into_iter().for_each(|t| collector.add_test(t, Some(compiler.sess.dcx())));
Ok(Some(collector))
}
@ -847,6 +847,7 @@ pub(crate) struct ScrapedDocTest {
langstr: LangString,
text: String,
name: String,
span: Span,
}
impl ScrapedDocTest {
@ -856,6 +857,7 @@ impl ScrapedDocTest {
logical_path: Vec<String>,
langstr: LangString,
text: String,
span: Span,
) -> Self {
let mut item_path = logical_path.join("::");
item_path.retain(|c| c != ' ');
@ -865,7 +867,7 @@ impl ScrapedDocTest {
let name =
format!("{} - {item_path}(line {line})", filename.prefer_remapped_unconditionaly());
Self { filename, line, langstr, text, name }
Self { filename, line, langstr, text, name, span }
}
fn edition(&self, opts: &RustdocOptions) -> Edition {
self.langstr.edition.unwrap_or(opts.edition)
@ -921,7 +923,7 @@ impl CreateRunnableDocTests {
}
}
fn add_test(&mut self, scraped_test: ScrapedDocTest) {
fn add_test(&mut self, scraped_test: ScrapedDocTest, dcx: Option<DiagCtxtHandle<'_>>) {
// For example `module/file.rs` would become `module_file_rs`
let file = scraped_test
.filename
@ -945,14 +947,14 @@ impl CreateRunnableDocTests {
);
let edition = scraped_test.edition(&self.rustdoc_options);
let doctest = DocTestBuilder::new(
&scraped_test.text,
Some(&self.opts.crate_name),
edition,
self.can_merge_doctests,
Some(test_id),
Some(&scraped_test.langstr),
);
let doctest = BuildDocTestBuilder::new(&scraped_test.text)
.crate_name(&self.opts.crate_name)
.edition(edition)
.can_merge_doctests(self.can_merge_doctests)
.test_id(test_id)
.lang_str(&scraped_test.langstr)
.span(scraped_test.span)
.build(dcx);
let is_standalone = !doctest.can_be_merged
|| scraped_test.langstr.compile_fail
|| scraped_test.langstr.test_harness

View file

@ -5,7 +5,7 @@
use serde::Serialize;
use super::{DocTestBuilder, ScrapedDocTest};
use super::{BuildDocTestBuilder, ScrapedDocTest};
use crate::config::Options as RustdocOptions;
use crate::html::markdown;
@ -35,16 +35,13 @@ impl ExtractedDocTests {
) {
let edition = scraped_test.edition(options);
let ScrapedDocTest { filename, line, langstr, text, name } = scraped_test;
let ScrapedDocTest { filename, line, langstr, text, name, .. } = scraped_test;
let doctest = DocTestBuilder::new(
&text,
Some(&opts.crate_name),
edition,
false,
None,
Some(&langstr),
);
let doctest = BuildDocTestBuilder::new(&text)
.crate_name(&opts.crate_name)
.edition(edition)
.lang_str(&langstr)
.build(None);
let (full_test_code, size) = doctest.generate_unique_doctest(
&text,
langstr.test_harness,

View file

@ -8,14 +8,14 @@ use std::sync::Arc;
use rustc_ast::token::{Delimiter, TokenKind};
use rustc_ast::tokenstream::TokenTree;
use rustc_ast::{self as ast, AttrStyle, HasAttrs, StmtKind};
use rustc_errors::ColorConfig;
use rustc_errors::emitter::stderr_destination;
use rustc_errors::{ColorConfig, DiagCtxtHandle};
use rustc_parse::new_parser_from_source_str;
use rustc_session::parse::ParseSess;
use rustc_span::edition::Edition;
use rustc_span::edition::{DEFAULT_EDITION, Edition};
use rustc_span::source_map::SourceMap;
use rustc_span::symbol::sym;
use rustc_span::{FileName, kw};
use rustc_span::{DUMMY_SP, FileName, Span, kw};
use tracing::debug;
use super::GlobalTestOptions;
@ -35,6 +35,138 @@ struct ParseSourceInfo {
maybe_crate_attrs: String,
}
/// Builder type for `DocTestBuilder`.
pub(crate) struct BuildDocTestBuilder<'a> {
source: &'a str,
crate_name: Option<&'a str>,
edition: Edition,
can_merge_doctests: bool,
// If `test_id` is `None`, it means we're generating code for a code example "run" link.
test_id: Option<String>,
lang_str: Option<&'a LangString>,
span: Span,
}
impl<'a> BuildDocTestBuilder<'a> {
pub(crate) fn new(source: &'a str) -> Self {
Self {
source,
crate_name: None,
edition: DEFAULT_EDITION,
can_merge_doctests: false,
test_id: None,
lang_str: None,
span: DUMMY_SP,
}
}
#[inline]
pub(crate) fn crate_name(mut self, crate_name: &'a str) -> Self {
self.crate_name = Some(crate_name);
self
}
#[inline]
pub(crate) fn can_merge_doctests(mut self, can_merge_doctests: bool) -> Self {
self.can_merge_doctests = can_merge_doctests;
self
}
#[inline]
pub(crate) fn test_id(mut self, test_id: String) -> Self {
self.test_id = Some(test_id);
self
}
#[inline]
pub(crate) fn lang_str(mut self, lang_str: &'a LangString) -> Self {
self.lang_str = Some(lang_str);
self
}
#[inline]
pub(crate) fn span(mut self, span: Span) -> Self {
self.span = span;
self
}
#[inline]
pub(crate) fn edition(mut self, edition: Edition) -> Self {
self.edition = edition;
self
}
pub(crate) fn build(self, dcx: Option<DiagCtxtHandle<'_>>) -> DocTestBuilder {
let BuildDocTestBuilder {
source,
crate_name,
edition,
can_merge_doctests,
// If `test_id` is `None`, it means we're generating code for a code example "run" link.
test_id,
lang_str,
span,
} = self;
let can_merge_doctests = can_merge_doctests
&& lang_str.is_some_and(|lang_str| {
!lang_str.compile_fail && !lang_str.test_harness && !lang_str.standalone_crate
});
let result = rustc_driver::catch_fatal_errors(|| {
rustc_span::create_session_if_not_set_then(edition, |_| {
parse_source(source, &crate_name, dcx, span)
})
});
let Ok(Ok(ParseSourceInfo {
has_main_fn,
already_has_extern_crate,
supports_color,
has_global_allocator,
has_macro_def,
everything_else,
crates,
crate_attrs,
maybe_crate_attrs,
})) = result
else {
// If the AST returned an error, we don't want this doctest to be merged with the
// others.
return DocTestBuilder::invalid(
String::new(),
String::new(),
String::new(),
source.to_string(),
test_id,
);
};
debug!("crate_attrs:\n{crate_attrs}{maybe_crate_attrs}");
debug!("crates:\n{crates}");
debug!("after:\n{everything_else}");
// If it contains `#[feature]` or `#[no_std]`, we don't want it to be merged either.
let can_be_merged = can_merge_doctests
&& !has_global_allocator
&& crate_attrs.is_empty()
// If this is a merged doctest and a defined macro uses `$crate`, then the path will
// not work, so better not put it into merged doctests.
&& !(has_macro_def && everything_else.contains("$crate"));
DocTestBuilder {
supports_color,
has_main_fn,
crate_attrs,
maybe_crate_attrs,
crates,
everything_else,
already_has_extern_crate,
test_id,
invalid_ast: false,
can_be_merged,
}
}
}
/// This struct contains information about the doctest itself which is then used to generate
/// doctest source code appropriately.
pub(crate) struct DocTestBuilder {
@ -53,74 +185,6 @@ pub(crate) struct DocTestBuilder {
}
impl DocTestBuilder {
pub(crate) fn new(
source: &str,
crate_name: Option<&str>,
edition: Edition,
can_merge_doctests: bool,
// If `test_id` is `None`, it means we're generating code for a code example "run" link.
test_id: Option<String>,
lang_str: Option<&LangString>,
) -> Self {
let can_merge_doctests = can_merge_doctests
&& lang_str.is_some_and(|lang_str| {
!lang_str.compile_fail && !lang_str.test_harness && !lang_str.standalone_crate
});
let result = rustc_driver::catch_fatal_errors(|| {
rustc_span::create_session_if_not_set_then(edition, |_| {
parse_source(source, &crate_name)
})
});
let Ok(Ok(ParseSourceInfo {
has_main_fn,
already_has_extern_crate,
supports_color,
has_global_allocator,
has_macro_def,
everything_else,
crates,
crate_attrs,
maybe_crate_attrs,
})) = result
else {
// If the AST returned an error, we don't want this doctest to be merged with the
// others.
return Self::invalid(
String::new(),
String::new(),
String::new(),
source.to_string(),
test_id,
);
};
debug!("crate_attrs:\n{crate_attrs}{maybe_crate_attrs}");
debug!("crates:\n{crates}");
debug!("after:\n{everything_else}");
// If it contains `#[feature]` or `#[no_std]`, we don't want it to be merged either.
let can_be_merged = can_merge_doctests
&& !has_global_allocator
&& crate_attrs.is_empty()
// If this is a merged doctest and a defined macro uses `$crate`, then the path will
// not work, so better not put it into merged doctests.
&& !(has_macro_def && everything_else.contains("$crate"));
Self {
supports_color,
has_main_fn,
crate_attrs,
maybe_crate_attrs,
crates,
everything_else,
already_has_extern_crate,
test_id,
invalid_ast: false,
can_be_merged,
}
}
fn invalid(
crate_attrs: String,
maybe_crate_attrs: String,
@ -289,7 +353,12 @@ fn reset_error_count(psess: &ParseSess) {
const DOCTEST_CODE_WRAPPER: &str = "fn f(){";
fn parse_source(source: &str, crate_name: &Option<&str>) -> Result<ParseSourceInfo, ()> {
fn parse_source(
source: &str,
crate_name: &Option<&str>,
parent_dcx: Option<DiagCtxtHandle<'_>>,
span: Span,
) -> Result<ParseSourceInfo, ()> {
use rustc_errors::DiagCtxt;
use rustc_errors::emitter::{Emitter, HumanEmitter};
use rustc_span::source_map::FilePathMapping;
@ -466,8 +535,17 @@ fn parse_source(source: &str, crate_name: &Option<&str>) -> Result<ParseSourceIn
}
}
if has_non_items {
// FIXME: if `info.has_main_fn` is `true`, emit a warning here to mention that
// this code will not be called.
if info.has_main_fn
&& let Some(dcx) = parent_dcx
&& !span.is_dummy()
{
dcx.span_warn(
span,
"the `main` function of this doctest won't be run as it contains \
expressions at the top level, meaning that the whole doctest code will be \
wrapped in a function",
);
}
info.has_main_fn = false;
}
Ok(info)

View file

@ -4,7 +4,7 @@ use std::fs::read_to_string;
use std::sync::{Arc, Mutex};
use rustc_session::config::Input;
use rustc_span::FileName;
use rustc_span::{DUMMY_SP, FileName};
use tempfile::tempdir;
use super::{
@ -24,7 +24,14 @@ impl DocTestVisitor for MdCollector {
let filename = self.filename.clone();
// First line of Markdown is line 1.
let line = 1 + rel_line.offset();
self.tests.push(ScrapedDocTest::new(filename, line, self.cur_path.clone(), config, test));
self.tests.push(ScrapedDocTest::new(
filename,
line,
self.cur_path.clone(),
config,
test,
DUMMY_SP,
));
}
fn visit_header(&mut self, name: &str, level: u32) {
@ -107,7 +114,7 @@ pub(crate) fn test(input: &Input, options: Options) -> Result<(), String> {
find_testable_code(&input_str, &mut md_collector, codes, None);
let mut collector = CreateRunnableDocTests::new(options.clone(), opts);
md_collector.tests.into_iter().for_each(|t| collector.add_test(t));
md_collector.tests.into_iter().for_each(|t| collector.add_test(t, None));
let CreateRunnableDocTests { opts, rustdoc_options, standalone_tests, mergeable_tests, .. } =
collector;
crate::doctest::run_tests(

View file

@ -1,5 +1,6 @@
//! Doctest functionality used only for doctests in `.rs` source files.
use std::cell::Cell;
use std::env;
use std::sync::Arc;
@ -47,13 +48,33 @@ impl RustCollector {
impl DocTestVisitor for RustCollector {
fn visit_test(&mut self, test: String, config: LangString, rel_line: MdRelLine) {
let line = self.get_base_line() + rel_line.offset();
let base_line = self.get_base_line();
let line = base_line + rel_line.offset();
let count = Cell::new(base_line);
let span = if line > base_line {
match self.source_map.span_extend_while(self.position, |c| {
if c == '\n' {
let count_v = count.get();
count.set(count_v + 1);
if count_v >= line {
return false;
}
}
true
}) {
Ok(sp) => self.source_map.span_extend_to_line(sp.shrink_to_hi()),
_ => self.position,
}
} else {
self.position
};
self.tests.push(ScrapedDocTest::new(
self.get_filename(),
line,
self.cur_path.clone(),
config,
test,
span,
));
}

View file

@ -1,8 +1,6 @@
use std::path::PathBuf;
use rustc_span::edition::DEFAULT_EDITION;
use super::{DocTestBuilder, GlobalTestOptions};
use super::{BuildDocTestBuilder, GlobalTestOptions};
fn make_test(
test_code: &str,
@ -11,14 +9,14 @@ fn make_test(
opts: &GlobalTestOptions,
test_id: Option<&str>,
) -> (String, usize) {
let doctest = DocTestBuilder::new(
test_code,
crate_name,
DEFAULT_EDITION,
false,
test_id.map(|s| s.to_string()),
None,
);
let mut builder = BuildDocTestBuilder::new(test_code);
if let Some(crate_name) = crate_name {
builder = builder.crate_name(crate_name);
}
if let Some(test_id) = test_id {
builder = builder.test_id(test_id.to_string());
}
let doctest = builder.build(None);
let (code, line_offset) =
doctest.generate_unique_doctest(test_code, dont_insert_main, opts, crate_name);
(code, line_offset)

View file

@ -303,7 +303,11 @@ impl<'a, I: Iterator<Item = Event<'a>>> Iterator for CodeBlocks<'_, 'a, I> {
attrs: vec![],
args_file: PathBuf::new(),
};
let doctest = doctest::DocTestBuilder::new(&test, krate, edition, false, None, None);
let mut builder = doctest::BuildDocTestBuilder::new(&test).edition(edition);
if let Some(krate) = krate {
builder = builder.crate_name(krate);
}
let doctest = builder.build(None);
let (test, _) = doctest.generate_unique_doctest(&test, false, &opts, krate);
let channel = if test.contains("#![feature(") { "&amp;version=nightly" } else { "" };

View file

@ -1179,8 +1179,10 @@ function preLoadCss(cssUrl) {
onEachLazy(document.querySelectorAll(".toggle > summary:not(.hideme)"), el => {
// @ts-expect-error
// Clicking on the summary's contents should not collapse it,
// but links within should still fire.
el.addEventListener("click", e => {
if (e.target.tagName !== "SUMMARY" && e.target.tagName !== "A") {
if (!e.target.matches("summary, a, a *")) {
e.preventDefault();
}
});

View file

@ -180,19 +180,13 @@ pub struct Item {
///
/// Does not include `#[deprecated]` attributes: see the [`Self::deprecation`] field instead.
///
/// Some attributes appear in pretty-printed Rust form, regardless of their formatting
/// Attributes appear in pretty-printed Rust form, regardless of their formatting
/// in the original source code. For example:
/// - `#[non_exhaustive]` and `#[must_use]` are represented as themselves.
/// - `#[no_mangle]` and `#[export_name]` are also represented as themselves.
/// - `#[repr(C)]` and other reprs also appear as themselves,
/// though potentially with a different order: e.g. `repr(i8, C)` may become `repr(C, i8)`.
/// Multiple repr attributes on the same item may be combined into an equivalent single attr.
///
/// Other attributes may appear debug-printed. For example:
/// - `#[inline]` becomes something similar to `#[attr="Inline(Hint)"]`.
///
/// As an internal implementation detail subject to change, this debug-printing format
/// is currently equivalent to the HIR pretty-printing of parsed attributes.
pub attrs: Vec<String>,
/// Information about the items deprecation, if present.
pub deprecation: Option<Deprecation>,

View file

@ -6440,6 +6440,7 @@ Released 2018-09-13
[`used_underscore_items`]: https://rust-lang.github.io/rust-clippy/master/index.html#used_underscore_items
[`useless_asref`]: https://rust-lang.github.io/rust-clippy/master/index.html#useless_asref
[`useless_attribute`]: https://rust-lang.github.io/rust-clippy/master/index.html#useless_attribute
[`useless_concat`]: https://rust-lang.github.io/rust-clippy/master/index.html#useless_concat
[`useless_conversion`]: https://rust-lang.github.io/rust-clippy/master/index.html#useless_conversion
[`useless_format`]: https://rust-lang.github.io/rust-clippy/master/index.html#useless_format
[`useless_let_if_seq`]: https://rust-lang.github.io/rust-clippy/master/index.html#useless_let_if_seq

View file

@ -51,7 +51,7 @@ Clippy team directly by mentioning them in the issue or over on [Zulip]. All
currently active team members can be found
[here](https://github.com/rust-lang/rust-clippy/blob/master/triagebot.toml#L18)
Some issues are easier than others. The [`good-first-issue`] label can be used to find the easy
Some issues are easier than others. The [`good first issue`] label can be used to find the easy
issues. You can use `@rustbot claim` to assign the issue to yourself.
There are also some abandoned PRs, marked with [`S-inactive-closed`].
@ -70,7 +70,7 @@ To figure out how this syntax structure is encoded in the AST, it is recommended
Usually the lint will end up to be a nested series of matches and ifs, [like so][deep-nesting].
But we can make it nest-less by using [let chains], [like this][nest-less].
[`E-medium`] issues are generally pretty easy too, though it's recommended you work on an [`good-first-issue`]
[`E-medium`] issues are generally pretty easy too, though it's recommended you work on an [`good first issue`]
first. Sometimes they are only somewhat involved code wise, but not difficult per-se.
Note that [`E-medium`] issues may require some knowledge of Clippy internals or some
debugging to find the actual problem behind the issue.
@ -79,7 +79,7 @@ debugging to find the actual problem behind the issue.
lot of methods that are useful, though one of the most useful would be `expr_ty` (gives the type of
an AST expression).
[`good-first-issue`]: https://github.com/rust-lang/rust-clippy/labels/good-first-issue
[`good first issue`]: https://github.com/rust-lang/rust-clippy/labels/good%20first%20issue
[`S-inactive-closed`]: https://github.com/rust-lang/rust-clippy/pulls?q=is%3Aclosed+label%3AS-inactive-closed
[`T-AST`]: https://github.com/rust-lang/rust-clippy/labels/T-AST
[`T-middle`]: https://github.com/rust-lang/rust-clippy/labels/T-middle

View file

@ -1,8 +1,6 @@
[package]
name = "clippy"
# begin autogenerated version
version = "0.1.89"
# end autogenerated version
description = "A bunch of helpful lints to avoid common pitfalls in Rust"
repository = "https://github.com/rust-lang/rust-clippy"
readme = "README.md"

Some files were not shown because too many files have changed in this diff Show more