Auto merge of #151701 - JonathanBrouwer:rollup-PvhvBqX, r=JonathanBrouwer

Rollup of 7 pull requests

Successful merges:

 - rust-lang/rust#151290 (Recover from struct literals with placeholder or empty path)
 - rust-lang/rust#148187 (Remove uses of `&mut CmResolver`)
 - rust-lang/rust#151368 (Rustdoc performance improvements)
 - rust-lang/rust#151374 (some more rustc_borrowck cleanups)
 - rust-lang/rust#151536 (Fix sanitizer target builds on CI)
 - rust-lang/rust#151626 (Remove `Deref<Target = TyCtxt>` from `QueryCtxt`)
 - rust-lang/rust#151661 (Suggest changing `iter`/`into_iter` when the other was meant)
This commit is contained in:
bors 2026-01-26 20:33:36 +00:00
commit b3cda168c8
30 changed files with 467 additions and 109 deletions

View file

@ -343,8 +343,15 @@ impl<'a, 'tcx> TypeChecker<'a, 'tcx> {
return;
}
// FIXME: Ideally MIR types are normalized, but this is not always true.
let mir_ty = self.normalize(mir_ty, Locations::All(span));
// This is a hack. `body.local_decls` are not necessarily normalized in the old
// solver due to not deeply normalizing in writeback. So we must re-normalize here.
//
// I am not sure of a test case where this actually matters. There is a similar
// hack in `equate_inputs_and_outputs` which does have associated test cases.
let mir_ty = match self.infcx.next_trait_solver() {
true => mir_ty,
false => self.normalize(mir_ty, Locations::All(span)),
};
let cause = ObligationCause::dummy_with_span(span);
let param_env = self.infcx.param_env;
@ -353,6 +360,10 @@ impl<'a, 'tcx> TypeChecker<'a, 'tcx> {
ConstraintCategory::Boring,
type_op::custom::CustomTypeOp::new(
|ocx| {
// The `AscribeUserType` query would normally emit a wf
// obligation for the unnormalized user_ty here. This is
// where the "incorrectly skips the WF checks we normally do"
// happens
let user_ty = ocx.normalize(&cause, param_env, user_ty);
ocx.eq(&cause, param_env, user_ty, mir_ty)?;
Ok(())

View file

@ -126,6 +126,31 @@ impl<'a, 'tcx> TypeChecker<'a, 'tcx> {
);
}
// FIXME(BoxyUwU): This should probably be part of a larger borrowck dev-guide chapter
//
/// Enforce that the types of the locals corresponding to the inputs and output of
/// the body are equal to those of the (normalized) signature.
///
/// This is necessary for two reasons:
/// - Locals in the MIR all start out with `'erased` regions and then are replaced
/// with unconstrained nll vars. If we have a function returning `&'a u32` then
/// the local `_0: &'?10 u32` needs to have its region var equated with the nll
/// var representing `'a`. i.e. borrow check must uphold that `'?10 = 'a`.
/// - When computing the normalized signature we may introduce new unconstrained nll
/// vars due to higher ranked where clauses ([#136547]). We then wind up with implied
/// bounds involving these vars.
///
/// For this reason it is important that we equate with the *normalized* signature
/// which was produced when computing implied bounds. If we do not do so then we will
/// wind up with implied bounds on nll vars which cannot actually be used as the nll
/// var never gets related to anything.
///
/// For 'closure-like' bodies this function effectively relates the *inferred* signature
/// of the closure against the locals corresponding to the closure's inputs/output. It *does
/// not* relate the user provided types for the signature to the locals, this is handled
/// separately by: [`TypeChecker::check_signature_annotation`].
///
/// [#136547]: <https://www.github.com/rust-lang/rust/issues/136547>
#[instrument(skip(self), level = "debug")]
pub(super) fn equate_inputs_and_outputs(&mut self, normalized_inputs_and_output: &[Ty<'tcx>]) {
let (&normalized_output_ty, normalized_input_tys) =
@ -173,38 +198,44 @@ impl<'a, 'tcx> TypeChecker<'a, 'tcx> {
);
}
// Return types are a bit more complex. They may contain opaque `impl Trait` types.
let mir_output_ty = self.body.local_decls[RETURN_PLACE].ty;
// Equate expected output ty with the type of the RETURN_PLACE in MIR
let mir_output_ty = self.body.return_ty();
let output_span = self.body.local_decls[RETURN_PLACE].source_info.span;
self.equate_normalized_input_or_output(normalized_output_ty, mir_output_ty, output_span);
}
#[instrument(skip(self), level = "debug")]
fn equate_normalized_input_or_output(&mut self, a: Ty<'tcx>, b: Ty<'tcx>, span: Span) {
if self.infcx.next_trait_solver() {
return self
.eq_types(a, b, Locations::All(span), ConstraintCategory::BoringNoLocation)
.unwrap_or_else(|terr| {
span_mirbug!(
self,
Location::START,
"equate_normalized_input_or_output: `{a:?}=={b:?}` failed with `{terr:?}`",
);
});
}
// This is a hack. `body.local_decls` are not necessarily normalized in the old
// solver due to not deeply normalizing in writeback. So we must re-normalize here.
//
// However, in most cases normalizing is unnecessary so we only do so if it may be
// necessary for type equality to hold. This leads to some (very minor) performance
// wins.
if let Err(_) =
self.eq_types(a, b, Locations::All(span), ConstraintCategory::BoringNoLocation)
{
// FIXME(jackh726): This is a hack. It's somewhat like
// `rustc_traits::normalize_after_erasing_regions`. Ideally, we'd
// like to normalize *before* inserting into `local_decls`, but
// doing so ends up causing some other trouble.
let b = self.normalize(b, Locations::All(span));
// Note: if we have to introduce new placeholders during normalization above, then we
// won't have added those universes to the universe info, which we would want in
// `relate_tys`.
if let Err(terr) =
self.eq_types(a, b, Locations::All(span), ConstraintCategory::BoringNoLocation)
{
span_mirbug!(
self,
Location::START,
"equate_normalized_input_or_output: `{:?}=={:?}` failed with `{:?}`",
a,
b,
terr
);
}
}
self.eq_types(a, b, Locations::All(span), ConstraintCategory::BoringNoLocation)
.unwrap_or_else(|terr| {
span_mirbug!(
self,
Location::START,
"equate_normalized_input_or_output: `{a:?}=={b:?}` failed with `{terr:?}`",
);
});
};
}
}

View file

@ -123,16 +123,19 @@ pub(crate) fn type_check<'tcx>(
known_type_outlives_obligations,
} = free_region_relations::create(infcx, universal_regions, &mut constraints);
let pre_obligations = infcx.take_registered_region_obligations();
assert!(
pre_obligations.is_empty(),
"there should be no incoming region obligations = {pre_obligations:#?}",
);
let pre_assumptions = infcx.take_registered_region_assumptions();
assert!(
pre_assumptions.is_empty(),
"there should be no incoming region assumptions = {pre_assumptions:#?}",
);
{
// Scope these variables so it's clear they're not used later
let pre_obligations = infcx.take_registered_region_obligations();
assert!(
pre_obligations.is_empty(),
"there should be no incoming region obligations = {pre_obligations:#?}",
);
let pre_assumptions = infcx.take_registered_region_assumptions();
assert!(
pre_assumptions.is_empty(),
"there should be no incoming region assumptions = {pre_assumptions:#?}",
);
}
debug!(?normalized_inputs_and_output);

View file

@ -609,9 +609,8 @@ impl<'tcx> TyCtxt<'tcx> {
/// have the same `DefKind`.
///
/// Note that closures have a `DefId`, but the closure *expression* also has a
/// `HirId` that is located within the context where the closure appears (and, sadly,
/// a corresponding `NodeId`, since those are not yet phased out). The parent of
/// the closure's `DefId` will also be the context where it appears.
/// `HirId` that is located within the context where the closure appears. The
/// parent of the closure's `DefId` will also be the context where it appears.
pub fn is_closure_like(self, def_id: DefId) -> bool {
    // Closure-like bodies are exactly those whose `DefKind` is `Closure`;
    // see the doc comment above for how the closure's `DefId` relates to
    // the context in which the closure expression appears.
    matches!(self.def_kind(def_id), DefKind::Closure)
}

View file

@ -822,9 +822,19 @@ parse_struct_literal_body_without_path =
struct literal body without path
.suggestion = you might have forgotten to add the struct literal inside the block
parse_struct_literal_body_without_path_late =
struct literal body without path
.label = struct name missing for struct literal
.suggestion = add the correct type
parse_struct_literal_not_allowed_here = struct literals are not allowed here
.suggestion = surround the struct literal with parentheses
parse_struct_literal_placeholder_path =
placeholder `_` is not allowed for the path in struct literals
.label = not allowed in struct literals
.suggestion = replace it with the correct type
parse_suffixed_literal_in_attribute = suffixed literals are not allowed in attributes
.help = instead of using a suffixed literal (`1u8`, `1.0f32`, etc.), use an unsuffixed version (`1`, `1.0`, etc.)

View file

@ -3684,3 +3684,22 @@ pub(crate) struct ImplReuseInherentImpl {
#[primary_span]
pub span: Span,
}
/// Parser diagnostic: a placeholder `_` was used as the path of a struct
/// literal, e.g. `_ { x: 10 }`. Emitted during recovery; suggests replacing
/// the `_` with the intended type (see the
/// `parse_struct_literal_placeholder_path` Fluent entry).
#[derive(Diagnostic)]
#[diag(parse_struct_literal_placeholder_path)]
pub(crate) struct StructLiteralPlaceholderPath {
    // Span of the `_` token: carries the "not allowed in struct literals"
    // label and the verbose `/* Type */` replacement suggestion.
    #[primary_span]
    #[label]
    #[suggestion(applicability = "has-placeholders", code = "/* Type */", style = "verbose")]
    pub span: Span,
}
/// Parser diagnostic: a struct literal body with no path at all,
/// e.g. `{ x: 10 }`, recovered late in expression parsing. Suggests
/// inserting the missing type before the body (see the
/// `parse_struct_literal_body_without_path_late` Fluent entry).
#[derive(Diagnostic)]
#[diag(parse_struct_literal_body_without_path_late)]
pub(crate) struct StructLiteralWithoutPathLate {
    // Span of the whole literal body; labeled
    // "struct name missing for struct literal".
    #[primary_span]
    #[label]
    pub span: Span,
    // Insertion point just before the body; the suggestion inserts
    // `/* Type */ ` (note the trailing space) at this position.
    #[suggestion(applicability = "has-placeholders", code = "/* Type */ ", style = "verbose")]
    pub suggestion_span: Span,
}

View file

@ -1468,6 +1468,9 @@ impl<'a> Parser<'a> {
} else if this.check(exp!(OpenParen)) {
this.parse_expr_tuple_parens(restrictions)
} else if this.check(exp!(OpenBrace)) {
if let Some(expr) = this.maybe_recover_bad_struct_literal_path(false)? {
return Ok(expr);
}
this.parse_expr_block(None, lo, BlockCheckMode::Default)
} else if this.check(exp!(Or)) || this.check(exp!(OrOr)) {
this.parse_expr_closure().map_err(|mut err| {
@ -1542,6 +1545,9 @@ impl<'a> Parser<'a> {
} else if this.check_keyword(exp!(Let)) {
this.parse_expr_let(restrictions)
} else if this.eat_keyword(exp!(Underscore)) {
if let Some(expr) = this.maybe_recover_bad_struct_literal_path(true)? {
return Ok(expr);
}
Ok(this.mk_expr(this.prev_token.span, ExprKind::Underscore))
} else if this.token_uninterpolated_span().at_least_rust_2018() {
// `Span::at_least_rust_2018()` is somewhat expensive; don't get it repeatedly.
@ -3698,6 +3704,45 @@ impl<'a> Parser<'a> {
}
}
/// Recovers from a struct literal whose path is missing or is the
/// placeholder `_`: either `{ field: .. }` (no path at all) or
/// `_ { field: .. }`.
///
/// `is_underscore_entry_point` is `true` when the caller has already eaten
/// an `_` token (so `prev_token` is that `_`), and `false` when we are
/// sitting on a bare `{` with no path.
///
/// Returns `Ok(Some(expr))` with an error expression when recovery was
/// performed (an error has been emitted), and `Ok(None)` when this does not
/// look like a struct literal and normal parsing should proceed.
fn maybe_recover_bad_struct_literal_path(
    &mut self,
    is_underscore_entry_point: bool,
) -> PResult<'a, Option<Box<Expr>>> {
    // Only attempt recovery when recovery is allowed, the next token is `{`,
    // struct literals are permitted in this position, and the braces
    // plausibly contain struct-literal fields.
    if self.may_recover()
        && self.check_noexpect(&token::OpenBrace)
        && (!self.restrictions.contains(Restrictions::NO_STRUCT_LITERAL)
            && self.is_likely_struct_lit())
    {
        // For `_ { .. }` point the diagnostic at the `_` itself; otherwise
        // point at the empty position just before the `{`.
        let span = if is_underscore_entry_point {
            self.prev_token.span
        } else {
            self.token.span.shrink_to_lo()
        };
        self.bump(); // {
        // Parse the body as a struct literal using a dummy `_` path so the
        // fields are still consumed and checked.
        let expr = self.parse_expr_struct(
            None,
            Path::from_ident(Ident::new(kw::Underscore, span)),
            false,
        )?;
        let guar = if is_underscore_entry_point {
            self.dcx().create_err(errors::StructLiteralPlaceholderPath { span }).emit()
        } else {
            self.dcx()
                .create_err(errors::StructLiteralWithoutPathLate {
                    span: expr.span,
                    suggestion_span: expr.span.shrink_to_lo(),
                })
                .emit()
        };
        Ok(Some(self.mk_expr_err(expr.span, guar)))
    } else {
        Ok(None)
    }
}
pub(super) fn parse_struct_fields(
&mut self,
pth: ast::Path,

View file

@ -35,6 +35,8 @@ use rustc_span::def_id::LOCAL_CRATE;
use crate::QueryConfigRestored;
/// Implements [`QueryContext`] for use by [`rustc_query_system`], since that
/// crate does not have direct access to [`TyCtxt`].
#[derive(Copy, Clone)]
pub struct QueryCtxt<'tcx> {
pub tcx: TyCtxt<'tcx>,
@ -47,15 +49,6 @@ impl<'tcx> QueryCtxt<'tcx> {
}
}
impl<'tcx> std::ops::Deref for QueryCtxt<'tcx> {
type Target = TyCtxt<'tcx>;
#[inline]
fn deref(&self) -> &Self::Target {
&self.tcx
}
}
impl<'tcx> HasDepContext for QueryCtxt<'tcx> {
type Deps = rustc_middle::dep_graph::DepsType;
type DepContext = TyCtxt<'tcx>;
@ -69,14 +62,16 @@ impl<'tcx> HasDepContext for QueryCtxt<'tcx> {
impl QueryContext for QueryCtxt<'_> {
#[inline]
fn jobserver_proxy(&self) -> &Proxy {
&*self.jobserver_proxy
&self.tcx.jobserver_proxy
}
#[inline]
fn next_job_id(self) -> QueryJobId {
QueryJobId(
NonZero::new(self.query_system.jobs.fetch_add(1, std::sync::atomic::Ordering::Relaxed))
.unwrap(),
NonZero::new(
self.tcx.query_system.jobs.fetch_add(1, std::sync::atomic::Ordering::Relaxed),
)
.unwrap(),
)
}
@ -113,7 +108,8 @@ impl QueryContext for QueryCtxt<'_> {
self,
prev_dep_node_index: SerializedDepNodeIndex,
) -> Option<QuerySideEffect> {
self.query_system
self.tcx
.query_system
.on_disk_cache
.as_ref()
.and_then(|c| c.load_side_effect(self.tcx, prev_dep_node_index))
@ -122,7 +118,7 @@ impl QueryContext for QueryCtxt<'_> {
#[inline(never)]
#[cold]
fn store_side_effect(self, dep_node_index: DepNodeIndex, side_effect: QuerySideEffect) {
if let Some(c) = self.query_system.on_disk_cache.as_ref() {
if let Some(c) = self.tcx.query_system.on_disk_cache.as_ref() {
c.store_side_effect(dep_node_index, side_effect)
}
}
@ -140,7 +136,9 @@ impl QueryContext for QueryCtxt<'_> {
// as `self`, so we use `with_related_context` to relate the 'tcx lifetimes
// when accessing the `ImplicitCtxt`.
tls::with_related_context(self.tcx, move |current_icx| {
if depth_limit && !self.recursion_limit().value_within_limit(current_icx.query_depth) {
if depth_limit
&& !self.tcx.recursion_limit().value_within_limit(current_icx.query_depth)
{
self.depth_limit_error(token);
}
@ -161,16 +159,16 @@ impl QueryContext for QueryCtxt<'_> {
let query_map = self.collect_active_jobs(true).expect("failed to collect active queries");
let (info, depth) = job.find_dep_kind_root(query_map);
let suggested_limit = match self.recursion_limit() {
let suggested_limit = match self.tcx.recursion_limit() {
Limit(0) => Limit(2),
limit => limit * 2,
};
self.sess.dcx().emit_fatal(QueryOverflow {
self.tcx.sess.dcx().emit_fatal(QueryOverflow {
span: info.job.span,
note: QueryOverflowNote { desc: info.query.description, depth },
suggested_limit,
crate_name: self.crate_name(LOCAL_CRATE),
crate_name: self.tcx.crate_name(LOCAL_CRATE),
});
}
}
@ -367,7 +365,7 @@ pub(crate) fn encode_query_results<'a, 'tcx, Q>(
Q: super::QueryConfigRestored<'tcx>,
Q::RestoredValue: Encodable<CacheEncoder<'a, 'tcx>>,
{
let _timer = qcx.profiler().generic_activity_with_arg("encode_query_results_for", query.name());
let _timer = qcx.tcx.prof.generic_activity_with_arg("encode_query_results_for", query.name());
assert!(query.query_state(qcx).all_inactive());
let cache = query.query_cache(qcx);
@ -389,8 +387,7 @@ pub(crate) fn query_key_hash_verify<'tcx>(
query: impl QueryConfig<QueryCtxt<'tcx>>,
qcx: QueryCtxt<'tcx>,
) {
let _timer =
qcx.profiler().generic_activity_with_arg("query_key_hash_verify_for", query.name());
let _timer = qcx.tcx.prof.generic_activity_with_arg("query_key_hash_verify_for", query.name());
let mut map = UnordMap::default();

View file

@ -58,7 +58,7 @@ impl<'ra, 'tcx> Resolver<'ra, 'tcx> {
orig_ctxt: Span,
derive_fallback_lint_id: Option<NodeId>,
mut visitor: impl FnMut(
&mut CmResolver<'r, 'ra, 'tcx>,
CmResolver<'_, 'ra, 'tcx>,
Scope<'ra>,
UsePrelude,
Span,
@ -165,7 +165,7 @@ impl<'ra, 'tcx> Resolver<'ra, 'tcx> {
if visit {
let use_prelude = if use_prelude { UsePrelude::Yes } else { UsePrelude::No };
if let ControlFlow::Break(break_result) =
visitor(&mut self, scope, use_prelude, ctxt)
visitor(self.reborrow(), scope, use_prelude, ctxt)
{
return Some(break_result);
}
@ -438,7 +438,7 @@ impl<'ra, 'tcx> Resolver<'ra, 'tcx> {
parent_scope,
orig_ident.span,
derive_fallback_lint_id,
|this, scope, use_prelude, ctxt| {
|mut this, scope, use_prelude, ctxt| {
let ident = Ident::new(orig_ident.name, ctxt);
// The passed `ctxt` is already normalized, so avoid expensive double normalization.
let ident = Macros20NormalizedIdent(ident);

View file

@ -893,7 +893,7 @@ impl<'ra, 'tcx> Resolver<'ra, 'tcx> {
};
let mut indeterminate_count = 0;
self.per_ns_cm(|this, ns| {
self.per_ns_cm(|mut this, ns| {
if !type_ns_only || ns == TypeNS {
if bindings[ns].get() != PendingDecl::Pending {
return;

View file

@ -1831,13 +1831,13 @@ impl<'ra, 'tcx> Resolver<'ra, 'tcx> {
f(self, MacroNS);
}
fn per_ns_cm<'r, F: FnMut(&mut CmResolver<'r, 'ra, 'tcx>, Namespace)>(
fn per_ns_cm<'r, F: FnMut(CmResolver<'_, 'ra, 'tcx>, Namespace)>(
mut self: CmResolver<'r, 'ra, 'tcx>,
mut f: F,
) {
f(&mut self, TypeNS);
f(&mut self, ValueNS);
f(&mut self, MacroNS);
f(self.reborrow(), TypeNS);
f(self.reborrow(), ValueNS);
f(self, MacroNS);
}
fn is_builtin_macro(&self, res: Res) -> bool {
@ -1902,7 +1902,7 @@ impl<'ra, 'tcx> Resolver<'ra, 'tcx> {
}
let scope_set = ScopeSet::All(TypeNS);
self.cm().visit_scopes(scope_set, parent_scope, ctxt, None, |this, scope, _, _| {
self.cm().visit_scopes(scope_set, parent_scope, ctxt, None, |mut this, scope, _, _| {
match scope {
Scope::ModuleNonGlobs(module, _) => {
this.get_mut().traits_in_module(module, assoc_item, &mut found_traits);

View file

@ -270,6 +270,7 @@ symbols! {
Into,
IntoFuture,
IntoIterator,
IntoIteratorItem,
IoBufRead,
IoLines,
IoRead,

View file

@ -4390,6 +4390,7 @@ impl<'a, 'tcx> TypeErrCtxt<'a, 'tcx> {
param_env: ty::ParamEnv<'tcx>,
path_segment: &hir::PathSegment<'_>,
args: &[hir::Expr<'_>],
prev_ty: Ty<'_>,
err: &mut Diag<'_, G>,
) {
let tcx = self.tcx;
@ -4403,6 +4404,47 @@ impl<'a, 'tcx> TypeErrCtxt<'a, 'tcx> {
let TypeError::Sorts(expected_found) = diff else {
continue;
};
if tcx.is_diagnostic_item(sym::IntoIteratorItem, *def_id)
&& path_segment.ident.name == sym::iter
&& self.can_eq(
param_env,
Ty::new_ref(
tcx,
tcx.lifetimes.re_erased,
expected_found.found,
ty::Mutability::Not,
),
*ty,
)
&& let [] = args
{
// Used `.iter()` when `.into_iter()` was likely meant.
err.span_suggestion_verbose(
path_segment.ident.span,
format!("consider consuming the `{prev_ty}` to construct the `Iterator`"),
"into_iter".to_string(),
Applicability::MachineApplicable,
);
}
if tcx.is_diagnostic_item(sym::IntoIteratorItem, *def_id)
&& path_segment.ident.name == sym::into_iter
&& self.can_eq(
param_env,
expected_found.found,
Ty::new_ref(tcx, tcx.lifetimes.re_erased, *ty, ty::Mutability::Not),
)
&& let [] = args
{
// Used `.into_iter()` when `.iter()` was likely meant.
err.span_suggestion_verbose(
path_segment.ident.span,
format!(
"consider not consuming the `{prev_ty}` to construct the `Iterator`"
),
"iter".to_string(),
Applicability::MachineApplicable,
);
}
if tcx.is_diagnostic_item(sym::IteratorItem, *def_id)
&& path_segment.ident.name == sym::map
&& self.can_eq(param_env, expected_found.found, *ty)
@ -4515,6 +4557,9 @@ impl<'a, 'tcx> TypeErrCtxt<'a, 'tcx> {
expr = rcvr_expr;
let assocs_in_this_method =
self.probe_assoc_types_at_expr(&type_diffs, span, prev_ty, expr.hir_id, param_env);
prev_ty = self.resolve_vars_if_possible(
typeck_results.expr_ty_adjusted_opt(expr).unwrap_or(Ty::new_misc_error(tcx)),
);
self.look_for_iterator_item_mistakes(
&assocs_in_this_method,
typeck_results,
@ -4522,12 +4567,10 @@ impl<'a, 'tcx> TypeErrCtxt<'a, 'tcx> {
param_env,
path_segment,
args,
prev_ty,
err,
);
assocs.push(assocs_in_this_method);
prev_ty = self.resolve_vars_if_possible(
typeck_results.expr_ty_adjusted_opt(expr).unwrap_or(Ty::new_misc_error(tcx)),
);
if let hir::ExprKind::Path(hir::QPath::Resolved(None, path)) = expr.kind
&& let hir::Path { res: Res::Local(hir_id), .. } = path

View file

@ -281,6 +281,7 @@ pub trait FromIterator<A>: Sized {
#[stable(feature = "rust1", since = "1.0.0")]
pub trait IntoIterator {
/// The type of the elements being iterated over.
#[rustc_diagnostic_item = "IntoIteratorItem"]
#[stable(feature = "rust1", since = "1.0.0")]
type Item;

View file

@ -113,7 +113,6 @@ ENV TARGETS=$TARGETS,wasm32-wasip1
ENV TARGETS=$TARGETS,wasm32-wasip1-threads
ENV TARGETS=$TARGETS,wasm32-wasip2
ENV TARGETS=$TARGETS,wasm32v1-none
ENV TARGETS=$TARGETS,x86_64-unknown-linux-gnuasan
ENV TARGETS=$TARGETS,x86_64-unknown-linux-gnux32
ENV TARGETS=$TARGETS,x86_64-fortanix-unknown-sgx
ENV TARGETS=$TARGETS,nvptx64-nvidia-cuda
@ -126,6 +125,8 @@ ENV TARGETS=$TARGETS,i686-unknown-uefi
ENV TARGETS=$TARGETS,x86_64-unknown-uefi
ENV TARGETS=$TARGETS,riscv64gc-unknown-linux-musl
ENV TARGETS_SANITIZERS=x86_64-unknown-linux-gnuasan
# As per https://bugs.launchpad.net/ubuntu/+source/gcc-defaults/+bug/1300211
# we need asm in the search path for gcc-9 (for gnux32) but not in the search path of the
# cross compilers.
@ -139,4 +140,4 @@ ENV RUST_CONFIGURE_ARGS --enable-extended --enable-lld --enable-llvm-bitcode-lin
--musl-root-armv7=/musl-armv7 \
--musl-root-riscv64gc=/musl-riscv64gc
ENV SCRIPT python3 ../x.py dist --host='' --target $TARGETS
ENV SCRIPT python3 ../x.py dist --host='' --target $TARGETS && python3 ../x.py dist --host='' --set build.sanitizers=true --target $TARGETS_SANITIZERS

View file

@ -26,7 +26,6 @@ use std::str::FromStr;
use std::{fmt, fs};
use indexmap::IndexMap;
use regex::Regex;
use rustc_ast::join_path_syms;
use rustc_data_structures::flock;
use rustc_data_structures::fx::{FxHashSet, FxIndexMap, FxIndexSet};
@ -376,12 +375,15 @@ fn hack_get_external_crate_names(
};
// this is only run once so it's fine not to cache it
// !dot_matches_new_line: all crates on same line. greedy: match last bracket
let regex = Regex::new(r"\[.*\]").unwrap();
let Some(content) = regex.find(&content) else {
return Err(Error::new("could not find crates list in crates.js", path));
};
let content: Vec<String> = try_err!(serde_json::from_str(content.as_str()), &path);
Ok(content)
if let Some(start) = content.find('[')
&& let Some(end) = content[start..].find(']')
{
let content: Vec<String> =
try_err!(serde_json::from_str(&content[start..=start + end]), &path);
Ok(content)
} else {
Err(Error::new("could not find crates list in crates.js", path))
}
}
#[derive(Serialize, Deserialize, Clone, Default, Debug)]
@ -504,33 +506,35 @@ impl Hierarchy {
fn add_path(self: &Rc<Self>, path: &Path) {
let mut h = Rc::clone(self);
let mut elems = path
let mut components = path
.components()
.filter_map(|s| match s {
Component::Normal(s) => Some(s.to_owned()),
Component::ParentDir => Some(OsString::from("..")),
_ => None,
})
.filter(|component| matches!(component, Component::Normal(_) | Component::ParentDir))
.peekable();
loop {
let cur_elem = elems.next().expect("empty file path");
if cur_elem == ".." {
if let Some(parent) = h.parent.upgrade() {
assert!(components.peek().is_some(), "empty file path");
while let Some(component) = components.next() {
match component {
Component::Normal(s) => {
if components.peek().is_none() {
h.elems.borrow_mut().insert(s.to_owned());
break;
}
h = {
let mut children = h.children.borrow_mut();
if let Some(existing) = children.get(s) {
Rc::clone(existing)
} else {
let new_node = Rc::new(Self::with_parent(s.to_owned(), &h));
children.insert(s.to_owned(), Rc::clone(&new_node));
new_node
}
};
}
Component::ParentDir if let Some(parent) = h.parent.upgrade() => {
h = parent;
}
continue;
}
if elems.peek().is_none() {
h.elems.borrow_mut().insert(cur_elem);
break;
} else {
let entry = Rc::clone(
h.children
.borrow_mut()
.entry(cur_elem.clone())
.or_insert_with(|| Rc::new(Self::with_parent(cur_elem, &h))),
);
h = entry;
_ => {}
}
}
}

View file

@ -1,4 +1,7 @@
//@ build-pass
//@ revisions: current next
//@ ignore-compare-mode-next-solver (explicit revisions)
//@[next] compile-flags: -Znext-solver
// Ensures that we don't regress on "implementation is not general enough" when
// normalizing under binders. Unlike `normalization-generality.rs`, this also produces

View file

@ -1,4 +1,7 @@
//@ build-pass
//@ revisions: current next
//@ ignore-compare-mode-next-solver (explicit revisions)
//@[next] compile-flags: -Znext-solver
// Ensures that we don't regress on "implementation is not general enough" when
// normalizing under binders.

View file

@ -1,4 +1,7 @@
//@ check-pass
//@ revisions: current next
//@ ignore-compare-mode-next-solver (explicit revisions)
//@[next] compile-flags: -Znext-solver
trait Yokeable<'a> {
type Output: 'a;

View file

@ -1,4 +1,7 @@
//@ check-pass
//@ revisions: current next
//@ ignore-compare-mode-next-solver (explicit revisions)
//@[next] compile-flags: -Znext-solver
#![allow(unused)]

View file

@ -1,4 +1,7 @@
//@check-pass
//@ revisions: current next
//@ ignore-compare-mode-next-solver (explicit revisions)
//@[next] compile-flags: -Znext-solver
trait Yokeable<'a>: 'static {
type Output: 'a;

View file

@ -0,0 +1,10 @@
//@ run-rustfix
//@ edition:2021
// Suggest using the right `IntoIterator` method. #68095
fn main() {
let _a = [0, 1, 2].iter().chain([3, 4, 5].iter()); //~ ERROR E0271
let _b = [0, 1, 2].into_iter().chain([3, 4, 5].into_iter()); //~ ERROR E0271
// These don't have appropriate suggestions yet.
// let c = [0, 1, 2].iter().chain([3, 4, 5]);
// let d = [0, 1, 2].iter().chain(vec![3, 4, 5]);
}

View file

@ -0,0 +1,10 @@
//@ run-rustfix
//@ edition:2021
// Suggest using the right `IntoIterator` method. #68095
fn main() {
let _a = [0, 1, 2].iter().chain([3, 4, 5].into_iter()); //~ ERROR E0271
let _b = [0, 1, 2].into_iter().chain([3, 4, 5].iter()); //~ ERROR E0271
// These don't have appropriate suggestions yet.
// let c = [0, 1, 2].iter().chain([3, 4, 5]);
// let d = [0, 1, 2].iter().chain(vec![3, 4, 5]);
}

View file

@ -0,0 +1,48 @@
error[E0271]: type mismatch resolving `<IntoIter<{integer}, 3> as IntoIterator>::Item == &{integer}`
--> $DIR/into_iter-when-iter-was-intended.rs:5:37
|
LL | let _a = [0, 1, 2].iter().chain([3, 4, 5].into_iter());
| ----- ^^^^^^^^^^^^^^^^^^^^^ expected `&{integer}`, found integer
| |
| required by a bound introduced by this call
|
note: the method call chain might not have had the expected associated types
--> $DIR/into_iter-when-iter-was-intended.rs:5:47
|
LL | let _a = [0, 1, 2].iter().chain([3, 4, 5].into_iter());
| --------- ^^^^^^^^^^^ `IntoIterator::Item` is `{integer}` here
| |
| this expression has type `[{integer}; 3]`
note: required by a bound in `std::iter::Iterator::chain`
--> $SRC_DIR/core/src/iter/traits/iterator.rs:LL:COL
help: consider not consuming the `[{integer}; 3]` to construct the `Iterator`
|
LL - let _a = [0, 1, 2].iter().chain([3, 4, 5].into_iter());
LL + let _a = [0, 1, 2].iter().chain([3, 4, 5].iter());
|
error[E0271]: type mismatch resolving `<Iter<'_, {integer}> as IntoIterator>::Item == {integer}`
--> $DIR/into_iter-when-iter-was-intended.rs:6:42
|
LL | let _b = [0, 1, 2].into_iter().chain([3, 4, 5].iter());
| ----- ^^^^^^^^^^^^^^^^ expected integer, found `&{integer}`
| |
| required by a bound introduced by this call
|
note: the method call chain might not have had the expected associated types
--> $DIR/into_iter-when-iter-was-intended.rs:6:52
|
LL | let _b = [0, 1, 2].into_iter().chain([3, 4, 5].iter());
| --------- ^^^^^^ `IntoIterator::Item` is `&{integer}` here
| |
| this expression has type `[{integer}; 3]`
note: required by a bound in `std::iter::Iterator::chain`
--> $SRC_DIR/core/src/iter/traits/iterator.rs:LL:COL
help: consider consuming the `&[{integer}]` to construct the `Iterator`
|
LL | let _b = [0, 1, 2].into_iter().chain([3, 4, 5].into_iter());
| +++++
error: aborting due to 2 previous errors
For more information about this error, try `rustc --explain E0271`.

View file

@ -1,4 +1,7 @@
//@check-pass
//@ revisions: current next
//@ ignore-compare-mode-next-solver (explicit revisions)
//@[next] compile-flags: -Znext-solver
use higher_kinded_types::*;
mod higher_kinded_types {

View file

@ -21,14 +21,12 @@ LL | let x = {
| _____________^
LL | | val: (),
LL | | };
| |_____^
| |_____^ struct name missing for struct literal
|
help: you might have forgotten to add the struct literal inside the block
|
LL ~ let x = { SomeStruct {
LL | val: (),
LL ~ } };
help: add the correct type
|
LL | let x = /* Type */ {
| ++++++++++
error[E0308]: mismatched types
--> $DIR/bare-struct-body.rs:11:14

View file

@ -0,0 +1,14 @@
fn main() {
let _ = {foo: (), bar: {} }; //~ ERROR struct literal body without path
//~| NOTE struct name missing for struct literal
//~| HELP add the correct type
let _ = _ {foo: (), bar: {} }; //~ ERROR placeholder `_` is not allowed for the path in struct literals
//~| NOTE not allowed in struct literals
//~| HELP replace it with the correct type
let _ = {foo: ()}; //~ ERROR struct literal body without path
//~| NOTE struct name missing for struct literal
//~| HELP add the correct type
let _ = _ {foo: ()}; //~ ERROR placeholder `_` is not allowed for the path in struct literals
//~| NOTE not allowed in struct literals
//~| HELP replace it with the correct type
}

View file

@ -0,0 +1,48 @@
error: struct literal body without path
--> $DIR/struct-lit-placeholder-or-empty-path.rs:2:13
|
LL | let _ = {foo: (), bar: {} };
| ^^^^^^^^^^^^^^^^^^^ struct name missing for struct literal
|
help: add the correct type
|
LL | let _ = /* Type */ {foo: (), bar: {} };
| ++++++++++
error: placeholder `_` is not allowed for the path in struct literals
--> $DIR/struct-lit-placeholder-or-empty-path.rs:5:13
|
LL | let _ = _ {foo: (), bar: {} };
| ^ not allowed in struct literals
|
help: replace it with the correct type
|
LL - let _ = _ {foo: (), bar: {} };
LL + let _ = /* Type */ {foo: (), bar: {} };
|
error: struct literal body without path
--> $DIR/struct-lit-placeholder-or-empty-path.rs:8:13
|
LL | let _ = {foo: ()};
| ^^^^^^^^^ struct name missing for struct literal
|
help: add the correct type
|
LL | let _ = /* Type */ {foo: ()};
| ++++++++++
error: placeholder `_` is not allowed for the path in struct literals
--> $DIR/struct-lit-placeholder-or-empty-path.rs:11:13
|
LL | let _ = _ {foo: ()};
| ^ not allowed in struct literals
|
help: replace it with the correct type
|
LL - let _ = _ {foo: ()};
LL + let _ = /* Type */ {foo: ()};
|
error: aborting due to 4 previous errors

View file

@ -0,0 +1,21 @@
// Regression test for issue #98282.
mod blah {
pub struct Stuff { x: i32 }
pub fn do_stuff(_: Stuff) {}
}
fn main() {
blah::do_stuff(_ { x: 10 });
//~^ ERROR placeholder `_` is not allowed for the path in struct literals
//~| NOTE not allowed in struct literals
//~| HELP replace it with the correct type
}
#[cfg(FALSE)]
fn disabled() {
blah::do_stuff(_ { x: 10 });
//~^ ERROR placeholder `_` is not allowed for the path in struct literals
//~| NOTE not allowed in struct literals
//~| HELP replace it with the correct type
}

View file

@ -0,0 +1,26 @@
error: placeholder `_` is not allowed for the path in struct literals
--> $DIR/struct-lit-placeholder-path.rs:9:20
|
LL | blah::do_stuff(_ { x: 10 });
| ^ not allowed in struct literals
|
help: replace it with the correct type
|
LL - blah::do_stuff(_ { x: 10 });
LL + blah::do_stuff(/* Type */ { x: 10 });
|
error: placeholder `_` is not allowed for the path in struct literals
--> $DIR/struct-lit-placeholder-path.rs:17:20
|
LL | blah::do_stuff(_ { x: 10 });
| ^ not allowed in struct literals
|
help: replace it with the correct type
|
LL - blah::do_stuff(_ { x: 10 });
LL + blah::do_stuff(/* Type */ { x: 10 });
|
error: aborting due to 2 previous errors