Auto merge of #152321 - Zalathar:rollup-ezxwhfJ, r=Zalathar

Rollup of 5 pull requests

Successful merges:

 - rust-lang/rust#150443 (Support long diff conflict markers)
 - rust-lang/rust#151887 (Remove some unnecessary `try`-related type annotations)
 - rust-lang/rust#152037 (Suppress unused_mut lint if mutation fails due to borrowck error)
 - rust-lang/rust#152067 (Weaken `assert_dep_node_not_yet_allocated_in_current_session` for multiple threads)
 - rust-lang/rust#151227 (Document `-Zcache-proc-macros`)
This commit is contained in:
bors 2026-02-08 08:12:38 +00:00
commit be4794c78b
21 changed files with 193 additions and 67 deletions

View file

@ -1205,6 +1205,17 @@ impl<'a, 'tcx> MirBorrowckCtxt<'a, '_, 'tcx> {
"access_place: suppressing error place_span=`{:?}` kind=`{:?}`",
place_span, kind
);
// If the place is being mutated, then mark it as such anyway in order to suppress the
// `unused_mut` lint, which is likely incorrect once the access place error has been
// resolved.
if rw == ReadOrWrite::Write(WriteKind::Mutate)
&& let Ok(root_place) =
self.is_mutable(place_span.0.as_ref(), is_local_mutation_allowed)
{
self.add_used_mut(root_place, state);
}
return;
}

View file

@ -231,13 +231,13 @@ pub(super) fn dump_nll_mir<'tcx>(
dumper.dump_mir(body);
// Also dump the region constraint graph as a graphviz file.
let _: io::Result<()> = try {
let _ = try {
let mut file = dumper.create_dump_file("regioncx.all.dot", body)?;
regioncx.dump_graphviz_raw_constraints(tcx, &mut file)?;
};
// Also dump the region constraint SCC graph as a graphviz file.
let _: io::Result<()> = try {
let _ = try {
let mut file = dumper.create_dump_file("regioncx.scc.dot", body)?;
regioncx.dump_graphviz_scc_constraints(tcx, &mut file)?;
};

View file

@ -58,7 +58,7 @@ pub(crate) fn dump_polonius_mir<'tcx>(
let dumper = dumper.set_extra_data(extra_data).set_options(options);
let _: io::Result<()> = try {
let _ = try {
let mut file = dumper.create_dump_file("html", body)?;
emit_polonius_dump(
&dumper,

View file

@ -2,7 +2,7 @@ use std::ffi::{OsStr, OsString};
use std::fs::{self, File};
use std::io::prelude::*;
use std::path::{Path, PathBuf};
use std::{env, io, iter, mem, str};
use std::{env, iter, mem, str};
use find_msvc_tools;
use rustc_hir::attrs::WindowsSubsystemKind;
@ -809,7 +809,7 @@ impl<'a> Linker for GccLinker<'a> {
if self.sess.target.is_like_darwin {
// Write a plain, newline-separated list of symbols
let res: io::Result<()> = try {
let res = try {
let mut f = File::create_buffered(&path)?;
for (sym, _) in symbols {
debug!(" _{sym}");
@ -821,7 +821,7 @@ impl<'a> Linker for GccLinker<'a> {
}
self.link_arg("-exported_symbols_list").link_arg(path);
} else if self.sess.target.is_like_windows {
let res: io::Result<()> = try {
let res = try {
let mut f = File::create_buffered(&path)?;
// .def file similar to MSVC one but without LIBRARY section
@ -845,7 +845,7 @@ impl<'a> Linker for GccLinker<'a> {
self.link_arg("--export").link_arg(sym);
}
} else if crate_type == CrateType::Executable && !self.sess.target.is_like_solaris {
let res: io::Result<()> = try {
let res = try {
let mut f = File::create_buffered(&path)?;
writeln!(f, "{{")?;
for (sym, _) in symbols {
@ -860,7 +860,7 @@ impl<'a> Linker for GccLinker<'a> {
self.link_arg("--dynamic-list").link_arg(path);
} else {
// Write an LD version script
let res: io::Result<()> = try {
let res = try {
let mut f = File::create_buffered(&path)?;
writeln!(f, "{{")?;
if !symbols.is_empty() {
@ -1139,7 +1139,7 @@ impl<'a> Linker for MsvcLinker<'a> {
}
let path = tmpdir.join("lib.def");
let res: io::Result<()> = try {
let res = try {
let mut f = File::create_buffered(&path)?;
// Start off with the standard module name header and then go
@ -1735,7 +1735,7 @@ impl<'a> Linker for AixLinker<'a> {
symbols: &[(String, SymbolExportKind)],
) {
let path = tmpdir.join("list.exp");
let res: io::Result<()> = try {
let res = try {
let mut f = File::create_buffered(&path)?;
// FIXME: use llvm-nm to generate export list.
for (symbol, _) in symbols {
@ -2135,7 +2135,7 @@ impl<'a> Linker for BpfLinker<'a> {
symbols: &[(String, SymbolExportKind)],
) {
let path = tmpdir.join("symbols");
let res: io::Result<()> = try {
let res = try {
let mut f = File::create_buffered(&path)?;
for (sym, _) in symbols {
writeln!(f, "{sym}")?;

View file

@ -1111,7 +1111,7 @@ pub fn determine_cgu_reuse<'tcx>(tcx: TyCtxt<'tcx>, cgu: &CodegenUnit<'tcx>) ->
// know that later). If we are not doing LTO, there is only one optimized
// version of each module, so we re-use that.
let dep_node = cgu.codegen_dep_node(tcx);
tcx.dep_graph.assert_dep_node_not_yet_allocated_in_current_session(&dep_node, || {
tcx.dep_graph.assert_dep_node_not_yet_allocated_in_current_session(tcx.sess, &dep_node, || {
format!(
"CompileCodegenUnit dep-node for CGU `{}` already exists before marking.",
cgu.name()

View file

@ -503,7 +503,7 @@ fn show_md_content_with_pager(content: &str, color: ColorConfig) {
};
// Try to print via the pager, pretty output if possible.
let pager_res: Option<()> = try {
let pager_res = try {
let mut pager = cmd.stdin(Stdio::piped()).spawn().ok()?;
let pager_stdin = pager.stdin.as_mut()?;

View file

@ -7,7 +7,7 @@ use rustc_ast::tokenstream::{self, DelimSpacing, Spacing, TokenStream};
use rustc_ast::util::literal::escape_byte_str_symbol;
use rustc_ast_pretty::pprust;
use rustc_data_structures::fx::FxHashMap;
use rustc_errors::{Diag, ErrorGuaranteed, MultiSpan, PResult};
use rustc_errors::{Diag, ErrorGuaranteed, MultiSpan};
use rustc_parse::lexer::{StripTokens, nfc_normalize};
use rustc_parse::parser::Parser;
use rustc_parse::{exp, new_parser_from_source_str, source_str_to_stream, unwrap_or_emit_fatal};
@ -591,7 +591,7 @@ impl server::Server for Rustc<'_, '_> {
fn ts_expand_expr(&mut self, stream: &Self::TokenStream) -> Result<Self::TokenStream, ()> {
// Parse the expression from our tokenstream.
let expr: PResult<'_, _> = try {
let expr = try {
let mut p = Parser::new(self.psess(), stream.clone(), Some("proc_macro expand expr"));
let expr = p.parse_expr()?;
if p.token != token::Eof {

View file

@ -1844,7 +1844,7 @@ fn compare_synthetic_generics<'tcx>(
// The case where the impl method uses `impl Trait` but the trait method uses
// explicit generics
err.span_label(impl_span, "expected generic parameter, found `impl Trait`");
let _: Option<_> = try {
try {
// try taking the name from the trait impl
// FIXME: this is obviously suboptimal since the name can already be used
// as another generic argument
@ -1881,7 +1881,7 @@ fn compare_synthetic_generics<'tcx>(
// The case where the trait method uses `impl Trait`, but the impl method uses
// explicit generics.
err.span_label(impl_span, "expected `impl Trait`, found generic parameter");
let _: Option<_> = try {
try {
let impl_m = impl_m.def_id.as_local()?;
let impl_m = tcx.hir_expect_impl_item(impl_m);
let (sig, _) = impl_m.expect_fn();

View file

@ -582,7 +582,7 @@ fn write_out_deps(tcx: TyCtxt<'_>, outputs: &OutputFilenames, out_filenames: &[P
let deps_output = outputs.path(OutputType::DepInfo);
let deps_filename = deps_output.as_path();
let result: io::Result<()> = try {
let result = try {
// Build a list of files used to compile the output and
// write Makefile-compatible dependency rules
let mut files: IndexMap<String, (u64, Option<SourceFileHash>)> = sess

View file

@ -746,24 +746,23 @@ fn pat_ty_is_known_nonnull<'tcx>(
typing_env: ty::TypingEnv<'tcx>,
pat: ty::Pattern<'tcx>,
) -> bool {
Option::unwrap_or_default(
try {
match *pat {
ty::PatternKind::Range { start, end } => {
let start = start.try_to_value()?.try_to_bits(tcx, typing_env)?;
let end = end.try_to_value()?.try_to_bits(tcx, typing_env)?;
try {
match *pat {
ty::PatternKind::Range { start, end } => {
let start = start.try_to_value()?.try_to_bits(tcx, typing_env)?;
let end = end.try_to_value()?.try_to_bits(tcx, typing_env)?;
// This also works for negative numbers, as we just need
// to ensure we aren't wrapping over zero.
start > 0 && end >= start
}
ty::PatternKind::NotNull => true,
ty::PatternKind::Or(patterns) => {
patterns.iter().all(|pat| pat_ty_is_known_nonnull(tcx, typing_env, pat))
}
// This also works for negative numbers, as we just need
// to ensure we aren't wrapping over zero.
start > 0 && end >= start
}
},
)
ty::PatternKind::NotNull => true,
ty::PatternKind::Or(patterns) => {
patterns.iter().all(|pat| pat_ty_is_known_nonnull(tcx, typing_env, pat))
}
}
}
.unwrap_or_default()
}
/// Given a non-null scalar (or transparent) type `ty`, return the nullable version of that type.

View file

@ -157,13 +157,13 @@ impl<'dis, 'de, 'tcx> MirDumper<'dis, 'de, 'tcx> {
/// - `foo & nll | bar & typeck` == match if `foo` and `nll` both appear in the name
/// or `typeck` and `bar` both appear in the name.
pub fn dump_mir(&self, body: &Body<'tcx>) {
let _: io::Result<()> = try {
let _ = try {
let mut file = self.create_dump_file("mir", body)?;
self.dump_mir_to_writer(body, &mut file)?;
};
if self.tcx().sess.opts.unstable_opts.dump_mir_graphviz {
let _: io::Result<()> = try {
let _ = try {
let mut file = self.create_dump_file("dot", body)?;
write_mir_fn_graphviz(self.tcx(), body, false, &mut file)?;
};

View file

@ -85,7 +85,7 @@ pub(super) fn build_custom_mir<'tcx>(
block_map: FxHashMap::default(),
};
let res: PResult<_> = try {
let res = try {
pctxt.parse_args(params)?;
pctxt.parse_body(expr)?;
};

View file

@ -3023,27 +3023,53 @@ impl<'a> Parser<'a> {
long_kind: &TokenKind,
short_kind: &TokenKind,
) -> bool {
(0..3).all(|i| self.look_ahead(i, |tok| tok == long_kind))
&& self.look_ahead(3, |tok| tok == short_kind)
if long_kind == short_kind {
// For conflict marker chars like `%` and `\`.
(0..7).all(|i| self.look_ahead(i, |tok| tok == long_kind))
} else {
// For conflict marker chars like `<` and `|`.
(0..3).all(|i| self.look_ahead(i, |tok| tok == long_kind))
&& self.look_ahead(3, |tok| tok == short_kind || tok == long_kind)
}
}
fn conflict_marker(&mut self, long_kind: &TokenKind, short_kind: &TokenKind) -> Option<Span> {
fn conflict_marker(
&mut self,
long_kind: &TokenKind,
short_kind: &TokenKind,
expected: Option<usize>,
) -> Option<(Span, usize)> {
if self.is_vcs_conflict_marker(long_kind, short_kind) {
let lo = self.token.span;
for _ in 0..4 {
self.bump();
if self.psess.source_map().span_to_margin(lo) != Some(0) {
return None;
}
return Some(lo.to(self.prev_token.span));
let mut len = 0;
while self.token.kind == *long_kind || self.token.kind == *short_kind {
if self.token.kind.break_two_token_op(1).is_some() {
len += 2;
} else {
len += 1;
}
self.bump();
if expected == Some(len) {
break;
}
}
if expected.is_some() && expected != Some(len) {
return None;
}
return Some((lo.to(self.prev_token.span), len));
}
None
}
pub(super) fn recover_vcs_conflict_marker(&mut self) {
// <<<<<<<
let Some(start) = self.conflict_marker(&TokenKind::Shl, &TokenKind::Lt) else {
let Some((start, len)) = self.conflict_marker(&TokenKind::Shl, &TokenKind::Lt, None) else {
return;
};
let mut spans = Vec::with_capacity(3);
let mut spans = Vec::with_capacity(2);
spans.push(start);
// |||||||
let mut middlediff3 = None;
@ -3055,13 +3081,19 @@ impl<'a> Parser<'a> {
if self.token == TokenKind::Eof {
break;
}
if let Some(span) = self.conflict_marker(&TokenKind::OrOr, &TokenKind::Or) {
if let Some((span, _)) =
self.conflict_marker(&TokenKind::OrOr, &TokenKind::Or, Some(len))
{
middlediff3 = Some(span);
}
if let Some(span) = self.conflict_marker(&TokenKind::EqEq, &TokenKind::Eq) {
if let Some((span, _)) =
self.conflict_marker(&TokenKind::EqEq, &TokenKind::Eq, Some(len))
{
middle = Some(span);
}
if let Some(span) = self.conflict_marker(&TokenKind::Shr, &TokenKind::Gt) {
if let Some((span, _)) =
self.conflict_marker(&TokenKind::Shr, &TokenKind::Gt, Some(len))
{
spans.push(span);
end = Some(span);
break;

View file

@ -1157,14 +1157,14 @@ impl<'tcx> TypePrivacyVisitor<'tcx> {
let typeck_results = self
.maybe_typeck_results
.unwrap_or_else(|| span_bug!(span, "`hir::Expr` or `hir::Pat` outside of a body"));
let result: ControlFlow<()> = try {
try {
self.visit(typeck_results.node_type(id))?;
self.visit(typeck_results.node_args(id))?;
if let Some(adjustments) = typeck_results.adjustments().get(id) {
adjustments.iter().try_for_each(|adjustment| self.visit(adjustment.target))?;
}
};
result.is_break()
}
.is_break()
}
fn check_def_id(&self, def_id: DefId, kind: &str, descr: &dyn fmt::Display) -> bool {

View file

@ -336,13 +336,17 @@ impl<D: Deps> DepGraphData<D> {
// in `DepGraph::try_mark_green()`.
// 2. Two distinct query keys get mapped to the same `DepNode`
// (see for example #48923).
self.assert_dep_node_not_yet_allocated_in_current_session(&key, || {
format!(
"forcing query with already existing `DepNode`\n\
self.assert_dep_node_not_yet_allocated_in_current_session(
cx.dep_context().sess(),
&key,
|| {
format!(
"forcing query with already existing `DepNode`\n\
- query-key: {arg:?}\n\
- dep-node: {key:?}"
)
});
)
},
);
let with_deps = |task_deps| D::with_deps(task_deps, || task(cx, arg));
let (result, edges) = if cx.dep_context().is_eval_always(key.kind) {
@ -627,12 +631,20 @@ impl<D: Deps> DepGraph<D> {
impl<D: Deps> DepGraphData<D> {
fn assert_dep_node_not_yet_allocated_in_current_session<S: std::fmt::Display>(
&self,
sess: &Session,
dep_node: &DepNode,
msg: impl FnOnce() -> S,
) {
if let Some(prev_index) = self.previous.node_to_index_opt(dep_node) {
let current = self.colors.get(prev_index);
assert_matches!(current, DepNodeColor::Unknown, "{}", msg())
let color = self.colors.get(prev_index);
let ok = match color {
DepNodeColor::Unknown => true,
DepNodeColor::Red => false,
DepNodeColor::Green(..) => sess.threads() > 1, // Other threads may mark this green
};
if !ok {
panic!("{}", msg())
}
} else if let Some(nodes_in_current_session) = &self.current.nodes_in_current_session {
outline(|| {
let seen = nodes_in_current_session.lock().contains_key(dep_node);
@ -1035,11 +1047,12 @@ impl<D: Deps> DepGraph<D> {
pub fn assert_dep_node_not_yet_allocated_in_current_session<S: std::fmt::Display>(
&self,
sess: &Session,
dep_node: &DepNode,
msg: impl FnOnce() -> S,
) {
if let Some(data) = &self.data {
data.assert_dep_node_not_yet_allocated_in_current_session(dep_node, msg)
data.assert_dep_node_not_yet_allocated_in_current_session(sess, dep_node, msg)
}
}

View file

@ -1023,7 +1023,7 @@ impl<'a, 'tcx> FindInferSourceVisitor<'a, 'tcx> {
hir::ExprKind::MethodCall(segment, ..) => {
if let Some(def_id) = self.typeck_results.type_dependent_def_id(expr.hir_id) {
let generics = tcx.generics_of(def_id);
let insertable: Option<_> = try {
let insertable = try {
if generics.has_impl_trait() {
None?
}
@ -1061,7 +1061,7 @@ impl<'a, 'tcx> FindInferSourceVisitor<'a, 'tcx> {
//
// FIXME: We deal with that one separately for now,
// would be good to remove this special case.
let last_segment_using_path_data: Option<_> = try {
let last_segment_using_path_data = try {
let generics_def_id = tcx.res_generics_def_id(path.res)?;
let generics = tcx.generics_of(generics_def_id);
if generics.has_impl_trait() {
@ -1117,19 +1117,18 @@ impl<'a, 'tcx> FindInferSourceVisitor<'a, 'tcx> {
};
let generics = tcx.generics_of(def_id);
let segment: Option<_> = try {
if !segment.infer_args || generics.has_impl_trait() {
do yeet ();
}
let segment = if !segment.infer_args || generics.has_impl_trait() {
None
} else {
let span = tcx.hir_span(segment.hir_id);
let insert_span = segment.ident.span.shrink_to_hi().with_hi(span.hi());
InsertableGenericArgs {
Some(InsertableGenericArgs {
insert_span,
args,
generics_def_id: def_id,
def_id,
have_turbofish: false,
}
})
};
let parent_def_id = generics.parent.unwrap();

View file

@ -0,0 +1,9 @@
## `cache-proc-macros`
The tracking issue for this feature is: [#151364]
[#151364]: https://github.com/rust-lang/rust/issues/151364
------------------------
This option instructs `rustc` to cache (derive) proc-macro invocations using the incremental compilation system. Note that the compiler does not currently check whether a proc-macro is actually "cacheable"; if you use this flag when compiling a crate that uses non-pure proc-macros, stale expansions may be compiled.

View file

@ -0,0 +1,19 @@
//! Do not fire unused_mut lint when mutation of the bound variable fails due to a borrow-checking
//! error.
//!
//! Regression test for https://github.com/rust-lang/rust/issues/152024
//@ compile-flags: -W unused_mut
struct Thing;
impl Drop for Thing {
fn drop(&mut self) {}
}
fn main() {
let mut t;
let mut b = None;
loop {
t = Thing; //~ ERROR cannot assign to `t` because it is borrowed
b.insert(&t);
}
}

View file

@ -0,0 +1,13 @@
error[E0506]: cannot assign to `t` because it is borrowed
--> $DIR/mut-used-despite-borrowck-error.rs:16:9
|
LL | t = Thing;
| ^ `t` is assigned to here but it was already borrowed
LL | b.insert(&t);
| - -- `t` is borrowed here
| |
| borrow later used here
error: aborting due to 1 previous error
For more information about this error, try `rustc --explain E0506`.

View file

@ -0,0 +1,11 @@
enum E {
Foo {
<<<<<<<<< HEAD //~ ERROR encountered diff marker
x: u8,
|||||||
z: (),
=========
y: i8,
>>>>>>>>> branch
}
}

View file

@ -0,0 +1,20 @@
error: encountered diff marker
--> $DIR/long-conflict-markers.rs:3:1
|
LL | <<<<<<<<< HEAD
| ^^^^^^^^^ between this marker and `=======` is the code that you are merging into
...
LL | =========
| --------- between this marker and `>>>>>>>` is the incoming code
LL | y: i8,
LL | >>>>>>>>> branch
| ^^^^^^^^^ this marker concludes the conflict region
|
= note: conflict markers indicate that a merge was started but could not be completed due to merge conflicts
to resolve a conflict, keep only the code you want and then delete the lines containing conflict markers
= help: if you are in a merge, the top section is the code you already had checked out and the bottom section is the new code
if you are in a rebase, the top section is the code being rebased onto and the bottom section is the code you had checked out which is being rebased
= note: for an explanation on these markers from the `git` documentation, visit <https://git-scm.com/book/en/v2/Git-Tools-Advanced-Merging#_checking_out_conflicts>
error: aborting due to 1 previous error