Auto merge of #149397 - matthiaskrgr:rollup-go79y6a, r=matthiaskrgr

Rollup of 8 pull requests

Successful merges:

 - rust-lang/rust#147071 (constify from_fn, try_from_fn, try_map, map)
 - rust-lang/rust#148930 (tweak editor configs)
 - rust-lang/rust#149320 (-Znext-solver: normalize expected function input types when fudging)
 - rust-lang/rust#149363 (Port the `#![windows_subsystem]` attribute to the new attribute system)
 - rust-lang/rust#149378 (make run-make tests use 2024 edition by default)
 - rust-lang/rust#149381 (Add `impl TrustedLen` on `BTree{Map,Set}` iterators)
 - rust-lang/rust#149388 (remove session+blob decoder construction)
 - rust-lang/rust#149390 (`rust-analyzer` subtree update)

r? `@ghost`
`@rustbot` modify labels: rollup
This commit is contained in:
bors 2025-11-27 19:24:40 +00:00
commit c86564c412
113 changed files with 2599 additions and 693 deletions

View file

@ -1,3 +1,5 @@
use rustc_hir::attrs::WindowsSubsystemKind;
use super::prelude::*;
pub(crate) struct CrateNameParser;
@ -142,3 +144,34 @@ impl<S: Stage> NoArgsAttributeParser<S> for RustcCoherenceIsCoreParser {
const ALLOWED_TARGETS: AllowedTargets = AllowedTargets::CrateLevel;
const CREATE: fn(Span) -> AttributeKind = AttributeKind::RustcCoherenceIsCore;
}
/// Parser for the crate-level `#![windows_subsystem]` attribute.
pub(crate) struct WindowsSubsystemParser;

impl<S: Stage> SingleAttributeParser<S> for WindowsSubsystemParser {
    const PATH: &[Symbol] = &[sym::windows_subsystem];
    const ON_DUPLICATE: OnDuplicate<S> = OnDuplicate::WarnButFutureError;
    const ATTRIBUTE_ORDER: AttributeOrder = AttributeOrder::KeepOutermost;
    const ALLOWED_TARGETS: AllowedTargets = AllowedTargets::CrateLevel;
    const TEMPLATE: AttributeTemplate = template!(NameValueStr: ["windows", "console"], "https://doc.rust-lang.org/reference/runtime.html#the-windows_subsystem-attribute");

    fn convert(cx: &mut AcceptContext<'_, '_, S>, args: &ArgParser<'_>) -> Option<AttributeKind> {
        // The attribute must have the `#![windows_subsystem = "..."]` name-value form.
        let nv = match args.name_value() {
            Some(nv) => nv,
            None => {
                cx.expected_name_value(
                    args.span().unwrap_or(cx.inner_span),
                    Some(sym::windows_subsystem),
                );
                return None;
            }
        };
        // Only the two known subsystem names are accepted; anything else
        // (including a non-string value) is reported as an error.
        match nv.value_as_str() {
            Some(sym::console) => {
                Some(AttributeKind::WindowsSubsystem(WindowsSubsystemKind::Console, cx.attr_span))
            }
            Some(sym::windows) => {
                Some(AttributeKind::WindowsSubsystem(WindowsSubsystemKind::Windows, cx.attr_span))
            }
            _ => {
                cx.expected_specific_argument_strings(nv.value_span, &[sym::console, sym::windows]);
                None
            }
        }
    }
}

View file

@ -28,6 +28,7 @@ use crate::attributes::confusables::ConfusablesParser;
use crate::attributes::crate_level::{
CrateNameParser, MoveSizeLimitParser, NoCoreParser, NoStdParser, PatternComplexityLimitParser,
RecursionLimitParser, RustcCoherenceIsCoreParser, TypeLengthLimitParser,
WindowsSubsystemParser,
};
use crate::attributes::debugger::DebuggerViualizerParser;
use crate::attributes::deprecation::DeprecationParser;
@ -211,6 +212,7 @@ attribute_parsers!(
Single<SkipDuringMethodDispatchParser>,
Single<TransparencyParser>,
Single<TypeLengthLimitParser>,
Single<WindowsSubsystemParser>,
Single<WithoutArgs<AllowIncoherentImplParser>>,
Single<WithoutArgs<AllowInternalUnsafeParser>>,
Single<WithoutArgs<AsPtrParser>>,

View file

@ -169,8 +169,6 @@ codegen_ssa_invalid_monomorphization_unsupported_symbol = invalid monomorphizati
codegen_ssa_invalid_monomorphization_unsupported_symbol_of_size = invalid monomorphization of `{$name}` intrinsic: unsupported {$symbol} from `{$in_ty}` with element `{$in_elem}` of size `{$size}` to `{$ret_ty}`
codegen_ssa_invalid_windows_subsystem = invalid windows subsystem `{$subsystem}`, only `windows` and `console` are allowed
codegen_ssa_ld64_unimplemented_modifier = `as-needed` modifier not implemented yet for ld64
codegen_ssa_lib_def_write_failure = failed to write lib.def file: {$error}

View file

@ -2558,7 +2558,7 @@ fn add_order_independent_options(
&& sess.target.is_like_windows
&& let Some(s) = &codegen_results.crate_info.windows_subsystem
{
cmd.subsystem(s);
cmd.windows_subsystem(*s);
}
// Try to strip as much out of the generated object by removing unused

View file

@ -5,6 +5,7 @@ use std::path::{Path, PathBuf};
use std::{env, io, iter, mem, str};
use find_msvc_tools;
use rustc_hir::attrs::WindowsSubsystemKind;
use rustc_hir::def_id::{CrateNum, LOCAL_CRATE};
use rustc_metadata::{
find_native_static_library, try_find_native_dynamic_library, try_find_native_static_library,
@ -345,7 +346,7 @@ pub(crate) trait Linker {
crate_type: CrateType,
symbols: &[(String, SymbolExportKind)],
);
fn subsystem(&mut self, subsystem: &str);
fn windows_subsystem(&mut self, subsystem: WindowsSubsystemKind);
fn linker_plugin_lto(&mut self);
fn add_eh_frame_header(&mut self) {}
fn add_no_exec(&mut self) {}
@ -884,8 +885,8 @@ impl<'a> Linker for GccLinker<'a> {
}
}
fn subsystem(&mut self, subsystem: &str) {
self.link_args(&["--subsystem", subsystem]);
fn windows_subsystem(&mut self, subsystem: WindowsSubsystemKind) {
self.link_args(&["--subsystem", subsystem.as_str()]);
}
fn reset_per_library_state(&mut self) {
@ -1159,9 +1160,8 @@ impl<'a> Linker for MsvcLinker<'a> {
self.link_arg(&arg);
}
fn subsystem(&mut self, subsystem: &str) {
// Note that previous passes of the compiler validated this subsystem,
// so we just blindly pass it to the linker.
fn windows_subsystem(&mut self, subsystem: WindowsSubsystemKind) {
let subsystem = subsystem.as_str();
self.link_arg(&format!("/SUBSYSTEM:{subsystem}"));
// Windows has two subsystems we're interested in right now, the console
@ -1307,7 +1307,7 @@ impl<'a> Linker for EmLinker<'a> {
self.cc_arg(arg);
}
fn subsystem(&mut self, _subsystem: &str) {
fn windows_subsystem(&mut self, _subsystem: WindowsSubsystemKind) {
// noop
}
@ -1444,7 +1444,7 @@ impl<'a> Linker for WasmLd<'a> {
}
}
fn subsystem(&mut self, _subsystem: &str) {}
fn windows_subsystem(&mut self, _subsystem: WindowsSubsystemKind) {}
fn linker_plugin_lto(&mut self) {
match self.sess.opts.cg.linker_plugin_lto {
@ -1566,7 +1566,8 @@ impl<'a> Linker for L4Bender<'a> {
self.sess.dcx().emit_warn(errors::L4BenderExportingSymbolsUnimplemented);
}
fn subsystem(&mut self, subsystem: &str) {
fn windows_subsystem(&mut self, subsystem: WindowsSubsystemKind) {
let subsystem = subsystem.as_str();
self.link_arg(&format!("--subsystem {subsystem}"));
}
@ -1735,7 +1736,7 @@ impl<'a> Linker for AixLinker<'a> {
self.link_arg(format!("-bE:{}", path.to_str().unwrap()));
}
fn subsystem(&mut self, _subsystem: &str) {}
fn windows_subsystem(&mut self, _subsystem: WindowsSubsystemKind) {}
fn reset_per_library_state(&mut self) {
self.hint_dynamic();
@ -1969,7 +1970,7 @@ impl<'a> Linker for PtxLinker<'a> {
) {
}
fn subsystem(&mut self, _subsystem: &str) {}
fn windows_subsystem(&mut self, _subsystem: WindowsSubsystemKind) {}
fn linker_plugin_lto(&mut self) {}
}
@ -2050,7 +2051,7 @@ impl<'a> Linker for LlbcLinker<'a> {
}
}
fn subsystem(&mut self, _subsystem: &str) {}
fn windows_subsystem(&mut self, _subsystem: WindowsSubsystemKind) {}
fn linker_plugin_lto(&mut self) {}
}
@ -2134,7 +2135,7 @@ impl<'a> Linker for BpfLinker<'a> {
}
}
fn subsystem(&mut self, _subsystem: &str) {}
fn windows_subsystem(&mut self, _subsystem: WindowsSubsystemKind) {}
fn linker_plugin_lto(&mut self) {}
}

View file

@ -5,7 +5,6 @@ use std::time::{Duration, Instant};
use itertools::Itertools;
use rustc_abi::FIRST_VARIANT;
use rustc_ast as ast;
use rustc_ast::expand::allocator::{
ALLOC_ERROR_HANDLER, ALLOCATOR_METHODS, AllocatorKind, AllocatorMethod, AllocatorTy,
};
@ -13,10 +12,10 @@ use rustc_data_structures::fx::{FxHashMap, FxIndexSet};
use rustc_data_structures::profiling::{get_resident_set_size, print_time_passes_entry};
use rustc_data_structures::sync::{IntoDynSyncSend, par_map};
use rustc_data_structures::unord::UnordMap;
use rustc_hir::attrs::{DebuggerVisualizerType, OptimizeAttr};
use rustc_hir::def_id::{DefId, LOCAL_CRATE};
use rustc_hir::attrs::{AttributeKind, DebuggerVisualizerType, OptimizeAttr};
use rustc_hir::def_id::{CRATE_DEF_ID, DefId, LOCAL_CRATE};
use rustc_hir::lang_items::LangItem;
use rustc_hir::{ItemId, Target};
use rustc_hir::{ItemId, Target, find_attr};
use rustc_middle::middle::codegen_fn_attrs::CodegenFnAttrs;
use rustc_middle::middle::debugger_visualizer::DebuggerVisualizerFile;
use rustc_middle::middle::dependency_format::Dependencies;
@ -31,7 +30,7 @@ use rustc_middle::ty::{self, Instance, Ty, TyCtxt};
use rustc_middle::{bug, span_bug};
use rustc_session::Session;
use rustc_session::config::{self, CrateType, EntryFnType};
use rustc_span::{DUMMY_SP, Symbol, sym};
use rustc_span::{DUMMY_SP, Symbol};
use rustc_symbol_mangling::mangle_internal_symbol;
use rustc_target::spec::{Arch, Os};
use rustc_trait_selection::infer::{BoundRegionConversionTime, TyCtxtInferExt};
@ -896,15 +895,7 @@ impl CrateInfo {
let linked_symbols =
crate_types.iter().map(|&c| (c, crate::back::linker::linked_symbols(tcx, c))).collect();
let local_crate_name = tcx.crate_name(LOCAL_CRATE);
let crate_attrs = tcx.hir_attrs(rustc_hir::CRATE_HIR_ID);
let subsystem =
ast::attr::first_attr_value_str_by_name(crate_attrs, sym::windows_subsystem);
let windows_subsystem = subsystem.map(|subsystem| {
if subsystem != sym::windows && subsystem != sym::console {
tcx.dcx().emit_fatal(errors::InvalidWindowsSubsystem { subsystem });
}
subsystem.to_string()
});
let windows_subsystem = find_attr!(tcx.get_all_attrs(CRATE_DEF_ID), AttributeKind::WindowsSubsystem(kind, _) => *kind);
// This list is used when generating the command line to pass through to
// system linker. The linker expects undefined symbols on the left of the

View file

@ -749,12 +749,6 @@ pub(crate) struct MultipleMainFunctions {
pub span: Span,
}
#[derive(Diagnostic)]
#[diag(codegen_ssa_invalid_windows_subsystem)]
pub(crate) struct InvalidWindowsSubsystem {
pub subsystem: Symbol,
}
#[derive(Diagnostic)]
#[diag(codegen_ssa_shuffle_indices_evaluation)]
pub(crate) struct ShuffleIndicesEvaluation {

View file

@ -24,7 +24,7 @@ use std::sync::Arc;
use rustc_data_structures::fx::{FxHashSet, FxIndexMap};
use rustc_data_structures::unord::UnordMap;
use rustc_hir::CRATE_HIR_ID;
use rustc_hir::attrs::{CfgEntry, NativeLibKind};
use rustc_hir::attrs::{CfgEntry, NativeLibKind, WindowsSubsystemKind};
use rustc_hir::def_id::CrateNum;
use rustc_macros::{Decodable, Encodable, HashStable};
use rustc_metadata::EncodedMetadata;
@ -225,7 +225,7 @@ pub struct CrateInfo {
pub used_crate_source: UnordMap<CrateNum, Arc<CrateSource>>,
pub used_crates: Vec<CrateNum>,
pub dependency_formats: Arc<Dependencies>,
pub windows_subsystem: Option<String>,
pub windows_subsystem: Option<WindowsSubsystemKind>,
pub natvis_debugger_visualizers: BTreeSet<DebuggerVisualizerFile>,
pub lint_levels: CodegenLintLevels,
pub metadata_symbol: String,

View file

@ -404,6 +404,22 @@ pub enum RtsanSetting {
Caller,
}
/// The Windows subsystem selected by the crate-level
/// `#![windows_subsystem = "..."]` attribute.
#[derive(Eq, PartialEq, Debug, Copy, Clone)]
#[derive(Encodable, Decodable, HashStable_Generic, PrintAttribute)]
pub enum WindowsSubsystemKind {
    /// `#![windows_subsystem = "console"]`
    Console,
    /// `#![windows_subsystem = "windows"]`
    Windows,
}
impl WindowsSubsystemKind {
pub fn as_str(&self) -> &'static str {
match self {
WindowsSubsystemKind::Console => "console",
WindowsSubsystemKind::Windows => "windows",
}
}
}
/// Represents parsed *built-in* inert attributes.
///
/// ## Overview
@ -759,5 +775,8 @@ pub enum AttributeKind {
/// Represents `#[used]`
Used { used_by: UsedBy, span: Span },
/// Represents `#[windows_subsystem]`.
WindowsSubsystem(WindowsSubsystemKind, Span),
// tidy-alphabetical-end
}

View file

@ -106,6 +106,7 @@ impl AttributeKind {
UnsafeSpecializationMarker(..) => No,
UnstableFeatureBound(..) => No,
Used { .. } => No,
WindowsSubsystem(..) => No,
// tidy-alphabetical-end
}
}

View file

@ -243,6 +243,11 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
let expected_input_tys: Option<Vec<_>> = expectation
.only_has_type(self)
.and_then(|expected_output| {
// FIXME(#149379): This operation results in expected input
// types which are potentially not well-formed or for whom the
// function where-bounds don't actually hold. This results
// in weird bugs when later treating these expectations as if
// they were actually correct.
self.fudge_inference_if_ok(|| {
let ocx = ObligationCtxt::new(self);
@ -252,6 +257,39 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
// No argument expectations are produced if unification fails.
let origin = self.misc(call_span);
ocx.sup(&origin, self.param_env, expected_output, formal_output)?;
let formal_input_tys_ns;
let formal_input_tys = if self.next_trait_solver() {
// In the new solver, the normalizations are done lazily.
// Because of this, if we encounter unnormalized alias types inside this
// fudge scope, we might lose the relationships between them and other vars
// when fudging inference variables created here.
// So, we utilize generalization to normalize aliases by adding a new
// inference var and equating it with the type we want to pull out of the
// fudge scope.
formal_input_tys_ns = formal_input_tys
.iter()
.map(|&ty| {
// If we replace a (unresolved) inference var with a new inference
// var, it will be eventually resolved to itself and this will
// weaken type inferences as the new inference var will be fudged
// out and lose all relationships with other vars while the former
// will not be fudged.
if ty.is_ty_var() {
return ty;
}
let generalized_ty = self.next_ty_var(call_span);
ocx.eq(&origin, self.param_env, ty, generalized_ty).unwrap();
generalized_ty
})
.collect_vec();
formal_input_tys_ns.as_slice()
} else {
formal_input_tys
};
if !ocx.try_evaluate_obligations().is_empty() {
return Err(TypeError::Mismatch);
}

View file

@ -683,7 +683,6 @@ impl CStore {
};
let crate_metadata = CrateMetadata::new(
tcx.sess,
self,
metadata,
crate_root,

View file

@ -211,19 +211,6 @@ impl<'a, 'tcx> Metadata<'a, 'tcx> for &'a MetadataBlob {
}
}
impl<'a, 'tcx> Metadata<'a, 'tcx> for (&'a MetadataBlob, &'tcx Session) {
#[inline]
fn blob(self) -> &'a MetadataBlob {
self.0
}
#[inline]
fn sess(self) -> Option<&'tcx Session> {
let (_, sess) = self;
Some(sess)
}
}
impl<'a, 'tcx> Metadata<'a, 'tcx> for CrateMetadataRef<'a> {
#[inline]
fn blob(self) -> &'a MetadataBlob {
@ -1862,7 +1849,6 @@ impl<'a> CrateMetadataRef<'a> {
impl CrateMetadata {
pub(crate) fn new(
sess: &Session,
cstore: &CStore,
blob: MetadataBlob,
root: CrateRoot,
@ -1876,7 +1862,7 @@ impl CrateMetadata {
) -> CrateMetadata {
let trait_impls = root
.impls
.decode((&blob, sess))
.decode(&blob)
.map(|trait_impls| (trait_impls.trait_id, trait_impls.impls))
.collect();
let alloc_decoding_state =

View file

@ -287,7 +287,8 @@ impl<'tcx> CheckAttrVisitor<'tcx> {
| AttributeKind::DebuggerVisualizer(..)
| AttributeKind::RustcMain
| AttributeKind::RustcPassIndirectlyInNonRusticAbis(..)
| AttributeKind::PinV2(..),
| AttributeKind::PinV2(..)
| AttributeKind::WindowsSubsystem(..)
) => { /* do nothing */ }
Attribute::Unparsed(attr_item) => {
style = Some(attr_item.style);
@ -361,7 +362,6 @@ impl<'tcx> CheckAttrVisitor<'tcx> {
// need to be fixed
| sym::cfi_encoding // FIXME(cfi_encoding)
| sym::instruction_set // broken on stable!!!
| sym::windows_subsystem // broken on stable!!!
| sym::patchable_function_entry // FIXME(patchable_function_entry)
| sym::deprecated_safe // FIXME(deprecated_safe)
// internal

View file

@ -3,7 +3,7 @@ use core::cmp::Ordering;
use core::error::Error;
use core::fmt::{self, Debug};
use core::hash::{Hash, Hasher};
use core::iter::FusedIterator;
use core::iter::{FusedIterator, TrustedLen};
use core::marker::PhantomData;
use core::mem::{self, ManuallyDrop};
use core::ops::{Bound, Index, RangeBounds};
@ -1624,6 +1624,9 @@ impl<K, V> ExactSizeIterator for Iter<'_, K, V> {
}
}
#[unstable(feature = "trusted_len", issue = "37572")]
// SAFETY: this iterator reports its exact remaining length (it implements
// `ExactSizeIterator` with a precise `len`), so its `size_hint` upholds the
// exactness contract `TrustedLen` requires.
unsafe impl<K, V> TrustedLen for Iter<'_, K, V> {}
#[stable(feature = "rust1", since = "1.0.0")]
impl<K, V> Clone for Iter<'_, K, V> {
fn clone(&self) -> Self {
@ -1696,6 +1699,9 @@ impl<K, V> ExactSizeIterator for IterMut<'_, K, V> {
}
}
#[unstable(feature = "trusted_len", issue = "37572")]
unsafe impl<K, V> TrustedLen for IterMut<'_, K, V> {}
#[stable(feature = "fused", since = "1.26.0")]
impl<K, V> FusedIterator for IterMut<'_, K, V> {}
@ -1817,6 +1823,9 @@ impl<K, V, A: Allocator + Clone> ExactSizeIterator for IntoIter<K, V, A> {
}
}
#[unstable(feature = "trusted_len", issue = "37572")]
unsafe impl<K, V, A: Allocator + Clone> TrustedLen for IntoIter<K, V, A> {}
#[stable(feature = "fused", since = "1.26.0")]
impl<K, V, A: Allocator + Clone> FusedIterator for IntoIter<K, V, A> {}
@ -1865,6 +1874,9 @@ impl<K, V> ExactSizeIterator for Keys<'_, K, V> {
}
}
#[unstable(feature = "trusted_len", issue = "37572")]
unsafe impl<K, V> TrustedLen for Keys<'_, K, V> {}
#[stable(feature = "fused", since = "1.26.0")]
impl<K, V> FusedIterator for Keys<'_, K, V> {}
@ -1920,6 +1932,9 @@ impl<K, V> ExactSizeIterator for Values<'_, K, V> {
}
}
#[unstable(feature = "trusted_len", issue = "37572")]
unsafe impl<K, V> TrustedLen for Values<'_, K, V> {}
#[stable(feature = "fused", since = "1.26.0")]
impl<K, V> FusedIterator for Values<'_, K, V> {}
@ -2160,6 +2175,9 @@ impl<K, V> ExactSizeIterator for ValuesMut<'_, K, V> {
}
}
#[unstable(feature = "trusted_len", issue = "37572")]
unsafe impl<K, V> TrustedLen for ValuesMut<'_, K, V> {}
#[stable(feature = "fused", since = "1.26.0")]
impl<K, V> FusedIterator for ValuesMut<'_, K, V> {}
@ -2222,6 +2240,9 @@ impl<K, V, A: Allocator + Clone> ExactSizeIterator for IntoKeys<K, V, A> {
}
}
#[unstable(feature = "trusted_len", issue = "37572")]
unsafe impl<K, V, A: Allocator + Clone> TrustedLen for IntoKeys<K, V, A> {}
#[stable(feature = "map_into_keys_values", since = "1.54.0")]
impl<K, V, A: Allocator + Clone> FusedIterator for IntoKeys<K, V, A> {}
@ -2273,6 +2294,9 @@ impl<K, V, A: Allocator + Clone> ExactSizeIterator for IntoValues<K, V, A> {
}
}
#[unstable(feature = "trusted_len", issue = "37572")]
unsafe impl<K, V, A: Allocator + Clone> TrustedLen for IntoValues<K, V, A> {}
#[stable(feature = "map_into_keys_values", since = "1.54.0")]
impl<K, V, A: Allocator + Clone> FusedIterator for IntoValues<K, V, A> {}

View file

@ -3,7 +3,7 @@ use core::cmp::Ordering::{self, Equal, Greater, Less};
use core::cmp::{max, min};
use core::fmt::{self, Debug};
use core::hash::{Hash, Hasher};
use core::iter::{FusedIterator, Peekable};
use core::iter::{FusedIterator, Peekable, TrustedLen};
use core::mem::ManuallyDrop;
use core::ops::{BitAnd, BitOr, BitXor, Bound, RangeBounds, Sub};
@ -1753,6 +1753,7 @@ impl<T> Clone for Iter<'_, T> {
Iter { iter: self.iter.clone() }
}
}
#[stable(feature = "rust1", since = "1.0.0")]
impl<'a, T> Iterator for Iter<'a, T> {
type Item = &'a T;
@ -1783,12 +1784,14 @@ impl<'a, T> Iterator for Iter<'a, T> {
self.next_back()
}
}
#[stable(feature = "rust1", since = "1.0.0")]
impl<'a, T> DoubleEndedIterator for Iter<'a, T> {
fn next_back(&mut self) -> Option<&'a T> {
self.iter.next_back()
}
}
#[stable(feature = "rust1", since = "1.0.0")]
impl<T> ExactSizeIterator for Iter<'_, T> {
fn len(&self) -> usize {
@ -1796,6 +1799,9 @@ impl<T> ExactSizeIterator for Iter<'_, T> {
}
}
#[unstable(feature = "trusted_len", issue = "37572")]
// SAFETY: this iterator reports its exact remaining length (it implements
// `ExactSizeIterator` with a precise `len`), so its `size_hint` upholds the
// exactness contract `TrustedLen` requires.
unsafe impl<T> TrustedLen for Iter<'_, T> {}
#[stable(feature = "fused", since = "1.26.0")]
impl<T> FusedIterator for Iter<'_, T> {}
@ -1832,6 +1838,7 @@ impl<T, A: Allocator + Clone> DoubleEndedIterator for IntoIter<T, A> {
self.iter.next_back().map(|(k, _)| k)
}
}
#[stable(feature = "rust1", since = "1.0.0")]
impl<T, A: Allocator + Clone> ExactSizeIterator for IntoIter<T, A> {
fn len(&self) -> usize {
@ -1839,6 +1846,9 @@ impl<T, A: Allocator + Clone> ExactSizeIterator for IntoIter<T, A> {
}
}
#[unstable(feature = "trusted_len", issue = "37572")]
unsafe impl<T, A: Allocator + Clone> TrustedLen for IntoIter<T, A> {}
#[stable(feature = "fused", since = "1.26.0")]
impl<T, A: Allocator + Clone> FusedIterator for IntoIter<T, A> {}

View file

@ -1,76 +1,108 @@
use crate::iter::{TrustedLen, UncheckedIterator};
use crate::mem::ManuallyDrop;
use crate::ptr::drop_in_place;
use crate::slice;
use crate::marker::{Destruct, PhantomData};
use crate::mem::{ManuallyDrop, SizedTypeProperties, conjure_zst};
use crate::ptr::{NonNull, drop_in_place, from_raw_parts_mut, null_mut};
/// A situationally-optimized version of `array.into_iter().for_each(func)`.
///
/// [`crate::array::IntoIter`]s are great when you need an owned iterator, but
/// storing the entire array *inside* the iterator like that can sometimes
/// pessimize code. Notable, it can be more bytes than you really want to move
/// around, and because the array accesses index into it SRoA has a harder time
/// optimizing away the type than it does iterators that just hold a couple pointers.
///
/// Thus this function exists, which gives a way to get *moved* access to the
/// elements of an array using a small iterator -- no bigger than a slice iterator.
///
/// The function-taking-a-closure structure makes it safe, as it keeps callers
/// from looking at already-dropped elements.
pub(crate) fn drain_array_with<T, R, const N: usize>(
array: [T; N],
func: impl for<'a> FnOnce(Drain<'a, T>) -> R,
) -> R {
let mut array = ManuallyDrop::new(array);
// SAFETY: Now that the local won't drop it, it's ok to construct the `Drain` which will.
let drain = Drain(array.iter_mut());
func(drain)
impl<'l, 'f, T, U, const N: usize, F: FnMut(T) -> U> Drain<'l, 'f, T, N, F> {
/// This function returns a function that lets you index the given array in const.
/// As implemented it can optimize better than iterators, and can be constified.
/// It acts like a sort of guard (owns the array) and iterator combined, which can be implemented
/// as it is a struct that implements const fn;
/// in that regard it is somewhat similar to an array::Iter implementing `UncheckedIterator`.
/// The only method you're really allowed to call is `next()`,
/// anything else is more or less UB, hence this function being unsafe.
/// Moved elements will not be dropped.
/// This will also not actually store the array.
///
/// SAFETY: must only be called `N` times. Thou shalt not drop the array either.
// FIXME(const-hack): this is a hack for `let guard = Guard(array); |i| f(guard[i])`.
#[rustc_const_unstable(feature = "array_try_map", issue = "79711")]
pub(super) const unsafe fn new(array: &'l mut ManuallyDrop<[T; N]>, f: &'f mut F) -> Self {
// dont drop the array, transfers "ownership" to Self
let ptr: NonNull<T> = NonNull::from_mut(array).cast();
// SAFETY:
// Adding `slice.len()` to the starting pointer gives a pointer
// at the end of `slice`. `end` will never be dereferenced, only checked
// for direct pointer equality with `ptr` to check if the drainer is done.
unsafe {
let end = if T::IS_ZST { null_mut() } else { ptr.as_ptr().add(N) };
Self { ptr, end, f, l: PhantomData }
}
}
}
/// See [`drain_array_with`] -- this is `pub(crate)` only so it's allowed to be
/// mentioned in the signature of that method. (Otherwise it hits `E0446`.)
// INVARIANT: It's ok to drop the remainder of the inner iterator.
pub(crate) struct Drain<'a, T>(slice::IterMut<'a, T>);
/// See [`Drain::new`]; this is our fake iterator.
#[rustc_const_unstable(feature = "array_try_map", issue = "79711")]
#[unstable(feature = "array_try_map", issue = "79711")]
pub(super) struct Drain<'l, 'f, T, const N: usize, F> {
// FIXME(const-hack): This is essentially a slice::IterMut<'static>, replace when possible.
/// The pointer to the next element to return, or the past-the-end location
/// if the drainer is empty.
///
/// This address will be used for all ZST elements, never changed.
/// As we "own" this array, we dont need to store any lifetime.
ptr: NonNull<T>,
/// For non-ZSTs, the non-null pointer to the past-the-end element.
/// For ZSTs, this is null.
end: *mut T,
impl<T> Drop for Drain<'_, T> {
f: &'f mut F,
l: PhantomData<&'l mut [T; N]>,
}
#[rustc_const_unstable(feature = "array_try_map", issue = "79711")]
#[unstable(feature = "array_try_map", issue = "79711")]
impl<T, U, const N: usize, F> const FnOnce<(usize,)> for &mut Drain<'_, '_, T, N, F>
where
F: [const] FnMut(T) -> U,
{
type Output = U;
/// This implementation is useless.
extern "rust-call" fn call_once(mut self, args: (usize,)) -> Self::Output {
self.call_mut(args)
}
}
#[rustc_const_unstable(feature = "array_try_map", issue = "79711")]
#[unstable(feature = "array_try_map", issue = "79711")]
impl<T, U, const N: usize, F> const FnMut<(usize,)> for &mut Drain<'_, '_, T, N, F>
where
F: [const] FnMut(T) -> U,
{
// FIXME(const-hack): ideally this would be an unsafe fn `next()`, and to use it you would instead `|_| unsafe { drain.next() }`.
extern "rust-call" fn call_mut(
&mut self,
(_ /* ignore argument */,): (usize,),
) -> Self::Output {
if T::IS_ZST {
// its UB to call this more than N times, so returning more ZSTs is valid.
// SAFETY: its a ZST? we conjur.
(self.f)(unsafe { conjure_zst::<T>() })
} else {
// increment before moving; if `f` panics, we drop the rest.
let p = self.ptr;
// SAFETY: caller guarantees never called more than N times (see `Drain::new`)
self.ptr = unsafe { self.ptr.add(1) };
// SAFETY: we are allowed to move this.
(self.f)(unsafe { p.read() })
}
}
}
#[rustc_const_unstable(feature = "array_try_map", issue = "79711")]
#[unstable(feature = "array_try_map", issue = "79711")]
impl<T: [const] Destruct, const N: usize, F> const Drop for Drain<'_, '_, T, N, F> {
fn drop(&mut self) {
// SAFETY: By the type invariant, we're allowed to drop all these.
unsafe { drop_in_place(self.0.as_mut_slice()) }
}
}
impl<T> Iterator for Drain<'_, T> {
type Item = T;
#[inline]
fn next(&mut self) -> Option<T> {
let p: *const T = self.0.next()?;
// SAFETY: The iterator was already advanced, so we won't drop this later.
Some(unsafe { p.read() })
}
#[inline]
fn size_hint(&self) -> (usize, Option<usize>) {
let n = self.len();
(n, Some(n))
}
}
impl<T> ExactSizeIterator for Drain<'_, T> {
#[inline]
fn len(&self) -> usize {
self.0.len()
}
}
// SAFETY: This is a 1:1 wrapper for a slice iterator, which is also `TrustedLen`.
unsafe impl<T> TrustedLen for Drain<'_, T> {}
impl<T> UncheckedIterator for Drain<'_, T> {
unsafe fn next_unchecked(&mut self) -> T {
// SAFETY: `Drain` is 1:1 with the inner iterator, so if the caller promised
// that there's an element left, the inner iterator has one too.
let p: *const T = unsafe { self.0.next_unchecked() };
// SAFETY: The iterator was already advanced, so we won't drop this later.
unsafe { p.read() }
if !T::IS_ZST {
// SAFETY: we cant read more than N elements
let slice = unsafe {
from_raw_parts_mut::<[T]>(
self.ptr.as_ptr(),
// SAFETY: `start <= end`
self.end.offset_from_unsigned(self.ptr.as_ptr()),
)
};
// SAFETY: By the type invariant, we're allowed to drop all these. (we own it, after all)
unsafe { drop_in_place(slice) }
}
}
}

View file

@ -12,7 +12,8 @@ use crate::error::Error;
use crate::hash::{self, Hash};
use crate::intrinsics::transmute_unchecked;
use crate::iter::{UncheckedIterator, repeat_n};
use crate::mem::{self, MaybeUninit};
use crate::marker::Destruct;
use crate::mem::{self, ManuallyDrop, MaybeUninit};
use crate::ops::{
ChangeOutputType, ControlFlow, FromResidual, Index, IndexMut, NeverShortCircuit, Residual, Try,
};
@ -25,7 +26,6 @@ mod drain;
mod equality;
mod iter;
pub(crate) use drain::drain_array_with;
#[stable(feature = "array_value_iter", since = "1.51.0")]
pub use iter::IntoIter;
@ -105,9 +105,10 @@ pub fn repeat<T: Clone, const N: usize>(val: T) -> [T; N] {
/// ```
#[inline]
#[stable(feature = "array_from_fn", since = "1.63.0")]
pub fn from_fn<T, const N: usize, F>(f: F) -> [T; N]
#[rustc_const_unstable(feature = "const_array", issue = "147606")]
pub const fn from_fn<T: [const] Destruct, const N: usize, F>(f: F) -> [T; N]
where
F: FnMut(usize) -> T,
F: [const] FnMut(usize) -> T + [const] Destruct,
{
try_from_fn(NeverShortCircuit::wrap_mut_1(f)).0
}
@ -143,11 +144,11 @@ where
/// ```
#[inline]
#[unstable(feature = "array_try_from_fn", issue = "89379")]
pub fn try_from_fn<R, const N: usize, F>(cb: F) -> ChangeOutputType<R, [R::Output; N]>
#[rustc_const_unstable(feature = "array_try_from_fn", issue = "89379")]
pub const fn try_from_fn<R, const N: usize, F>(cb: F) -> ChangeOutputType<R, [R::Output; N]>
where
F: FnMut(usize) -> R,
R: Try,
R::Residual: Residual<[R::Output; N]>,
R: [const] Try<Residual: [const] Residual<[R::Output; N]>, Output: [const] Destruct>,
F: [const] FnMut(usize) -> R + [const] Destruct,
{
let mut array = [const { MaybeUninit::uninit() }; N];
match try_from_fn_erased(&mut array, cb) {
@ -549,9 +550,12 @@ impl<T, const N: usize> [T; N] {
/// ```
#[must_use]
#[stable(feature = "array_map", since = "1.55.0")]
pub fn map<F, U>(self, f: F) -> [U; N]
#[rustc_const_unstable(feature = "const_array", issue = "147606")]
pub const fn map<F, U>(self, f: F) -> [U; N]
where
F: FnMut(T) -> U,
F: [const] FnMut(T) -> U + [const] Destruct,
U: [const] Destruct,
T: [const] Destruct,
{
self.try_map(NeverShortCircuit::wrap_mut_1(f)).0
}
@ -587,11 +591,19 @@ impl<T, const N: usize> [T; N] {
/// assert_eq!(c, Some(a));
/// ```
#[unstable(feature = "array_try_map", issue = "79711")]
pub fn try_map<R>(self, f: impl FnMut(T) -> R) -> ChangeOutputType<R, [R::Output; N]>
#[rustc_const_unstable(feature = "array_try_map", issue = "79711")]
pub const fn try_map<R>(
self,
mut f: impl [const] FnMut(T) -> R + [const] Destruct,
) -> ChangeOutputType<R, [R::Output; N]>
where
R: Try<Residual: Residual<[R::Output; N]>>,
R: [const] Try<Residual: [const] Residual<[R::Output; N]>, Output: [const] Destruct>,
T: [const] Destruct,
{
drain_array_with(self, |iter| try_from_trusted_iterator(iter.map(f)))
let mut me = ManuallyDrop::new(self);
// SAFETY: try_from_fn calls `f` N times.
let mut f = unsafe { drain::Drain::new(&mut me, &mut f) };
try_from_fn(&mut f)
}
/// Returns a slice containing the entire array. Equivalent to `&s[..]`.
@ -885,13 +897,11 @@ where
/// not optimizing away. So if you give it a shot, make sure to watch what
/// happens in the codegen tests.
#[inline]
fn try_from_fn_erased<T, R>(
buffer: &mut [MaybeUninit<T>],
mut generator: impl FnMut(usize) -> R,
) -> ControlFlow<R::Residual>
where
R: Try<Output = T>,
{
#[rustc_const_unstable(feature = "array_try_from_fn", issue = "89379")]
const fn try_from_fn_erased<R: [const] Try<Output: [const] Destruct>>(
buffer: &mut [MaybeUninit<R::Output>],
mut generator: impl [const] FnMut(usize) -> R + [const] Destruct,
) -> ControlFlow<R::Residual> {
let mut guard = Guard { array_mut: buffer, initialized: 0 };
while guard.initialized < guard.array_mut.len() {
@ -930,7 +940,8 @@ impl<T> Guard<'_, T> {
///
/// No more than N elements must be initialized.
#[inline]
pub(crate) unsafe fn push_unchecked(&mut self, item: T) {
#[rustc_const_unstable(feature = "array_try_from_fn", issue = "89379")]
pub(crate) const unsafe fn push_unchecked(&mut self, item: T) {
// SAFETY: If `initialized` was correct before and the caller does not
// invoke this method more than N times then writes will be in-bounds
// and slots will not be initialized more than once.
@ -941,11 +952,11 @@ impl<T> Guard<'_, T> {
}
}
impl<T> Drop for Guard<'_, T> {
#[rustc_const_unstable(feature = "array_try_from_fn", issue = "89379")]
impl<T: [const] Destruct> const Drop for Guard<'_, T> {
#[inline]
fn drop(&mut self) {
debug_assert!(self.initialized <= self.array_mut.len());
// SAFETY: this slice will contain only initialized objects.
unsafe {
self.array_mut.get_unchecked_mut(..self.initialized).assume_init_drop();

View file

@ -1,3 +1,4 @@
use crate::marker::{Destruct, PhantomData};
use crate::ops::ControlFlow;
/// The `?` operator and `try {}` blocks.
@ -363,6 +364,7 @@ where
pub const trait Residual<O>: Sized {
/// The "return" type of this meta-function.
#[unstable(feature = "try_trait_v2_residual", issue = "91285")]
// FIXME: ought to be implied
type TryType: [const] Try<Output = O, Residual = Self>;
}
@ -396,6 +398,25 @@ pub(crate) type ChangeOutputType<T: Try<Residual: Residual<V>>, V> =
/// Not currently planned to be exposed publicly, so just `pub(crate)`.
#[repr(transparent)]
pub(crate) struct NeverShortCircuit<T>(pub T);
// FIXME(const-hack): replace with `|a| NeverShortCircuit(f(a))` when const closures added.
pub(crate) struct Wrapped<T, A, F: FnMut(A) -> T> {
f: F,
p: PhantomData<(T, A)>,
}
#[rustc_const_unstable(feature = "const_never_short_circuit", issue = "none")]
impl<T, A, F: [const] FnMut(A) -> T + [const] Destruct> const FnOnce<(A,)> for Wrapped<T, A, F> {
type Output = NeverShortCircuit<T>;
extern "rust-call" fn call_once(mut self, args: (A,)) -> Self::Output {
self.call_mut(args)
}
}
#[rustc_const_unstable(feature = "const_never_short_circuit", issue = "none")]
impl<T, A, F: [const] FnMut(A) -> T> const FnMut<(A,)> for Wrapped<T, A, F> {
extern "rust-call" fn call_mut(&mut self, (args,): (A,)) -> Self::Output {
NeverShortCircuit((self.f)(args))
}
}
impl<T> NeverShortCircuit<T> {
/// Wraps a unary function to produce one that wraps the output into a `NeverShortCircuit`.
@ -403,10 +424,11 @@ impl<T> NeverShortCircuit<T> {
/// This is useful for implementing infallible functions in terms of the `try_` ones,
/// without accidentally capturing extra generic parameters in a closure.
#[inline]
pub(crate) fn wrap_mut_1<A>(
mut f: impl FnMut(A) -> T,
) -> impl FnMut(A) -> NeverShortCircuit<T> {
move |a| NeverShortCircuit(f(a))
pub(crate) const fn wrap_mut_1<A, F>(f: F) -> Wrapped<T, A, F>
where
F: [const] FnMut(A) -> T,
{
Wrapped { f, p: PhantomData }
}
#[inline]
@ -417,7 +439,8 @@ impl<T> NeverShortCircuit<T> {
pub(crate) enum NeverShortCircuitResidual {}
impl<T> Try for NeverShortCircuit<T> {
#[rustc_const_unstable(feature = "const_never_short_circuit", issue = "none")]
impl<T> const Try for NeverShortCircuit<T> {
type Output = T;
type Residual = NeverShortCircuitResidual;
@ -431,15 +454,15 @@ impl<T> Try for NeverShortCircuit<T> {
NeverShortCircuit(x)
}
}
impl<T> FromResidual for NeverShortCircuit<T> {
#[rustc_const_unstable(feature = "const_never_short_circuit", issue = "none")]
impl<T> const FromResidual for NeverShortCircuit<T> {
#[inline]
fn from_residual(never: NeverShortCircuitResidual) -> Self {
match never {}
}
}
impl<T> Residual<T> for NeverShortCircuitResidual {
#[rustc_const_unstable(feature = "const_never_short_circuit", issue = "none")]
impl<T: [const] Destruct> const Residual<T> for NeverShortCircuitResidual {
type TryType = NeverShortCircuit<T>;
}

View file

@ -724,3 +724,20 @@ fn array_eq() {
let not_true = [0u8] == [].as_slice();
assert!(!not_true);
}
#[test]
fn const_array_ops() {
const fn doubler(x: usize) -> usize {
x * 2
}
const fn maybe_doubler(x: usize) -> Option<usize> {
x.checked_mul(2)
}
assert_eq!(const { std::array::from_fn::<_, 5, _>(doubler) }, [0, 2, 4, 6, 8]);
assert_eq!(const { [5, 6, 1, 2].map(doubler) }, [10, 12, 2, 4]);
assert_eq!(const { [1, usize::MAX, 2, 8].try_map(maybe_doubler) }, None);
assert_eq!(const { std::array::try_from_fn::<_, 5, _>(maybe_doubler) }, Some([0, 2, 4, 6, 8]));
#[derive(Debug, PartialEq)]
struct Zst;
assert_eq!([(); 10].try_map(|()| Some(Zst)), Some([const { Zst }; 10]));
}

View file

@ -4,6 +4,7 @@
#![feature(alloc_layout_extra)]
#![feature(array_ptr_get)]
#![feature(array_try_from_fn)]
#![feature(array_try_map)]
#![feature(array_windows)]
#![feature(ascii_char)]
#![feature(ascii_char_variants)]
@ -16,6 +17,7 @@
#![feature(char_internals)]
#![feature(char_max_len)]
#![feature(clone_to_uninit)]
#![feature(const_array)]
#![feature(const_cell_traits)]
#![feature(const_cmp)]
#![feature(const_convert)]

View file

@ -597,6 +597,7 @@ Select which editor you would like to set up [default: None]: ";
"080955765db84bb6cbf178879f489c4e2369397626a6ecb3debedb94a9d0b3ce",
"f501475c6654187091c924ae26187fa5791d74d4a8ab3fb61fbbe4c0275aade1",
"54bc48fe1996177f5eef86d7231b33978e6d8b737cb0a899e622b7e975c95308",
"08d30e455ceec6e01d9bcef8b9449f2ddd14d278ca8627cdad90e02d9f44e938",
],
EditorKind::Helix => &[
"2d3069b8cf1b977e5d4023965eb6199597755e6c96c185ed5f2854f98b83d233",
@ -605,6 +606,7 @@ Select which editor you would like to set up [default: None]: ";
"198c195ed0c070d15907b279b8b4ea96198ca71b939f5376454f3d636ab54da5",
"1c43ead340b20792b91d02b08494ee68708e7e09f56b6766629b4b72079208f1",
"eec09a09452682060afd23dd5d3536ccac5615b3cdbf427366446901215fb9f6",
"cb653043852d9d5ff4a5be56407b859ff9928be055ad3f307eb309aad04765e6",
],
EditorKind::Vim | EditorKind::VsCode => &[
"ea67e259dedf60d4429b6c349a564ffcd1563cf41c920a856d1f5b16b4701ac8",
@ -622,6 +624,7 @@ Select which editor you would like to set up [default: None]: ";
"701b73751efd7abd6487f2c79348dab698af7ac4427b79fa3d2087c867144b12",
"a61df796c0c007cb6512127330564e49e57d558dec715703916a928b072a1054",
"02a49ac2d31f00ef6e4531c44e00dac51cea895112e480553f1ba060b3942a47",
"0aa4748848de0d1cb7ece92a0123c8897fef6de2f58aff8fda1426f098b7a798",
],
EditorKind::Zed => &[
"bbce727c269d1bd0c98afef4d612eb4ce27aea3c3a8968c5f10b31affbc40b6c",
@ -630,6 +633,7 @@ Select which editor you would like to set up [default: None]: ";
"4fadd4c87389a601a27db0d3d74a142fa3a2e656ae78982e934dbe24bee32ad6",
"f0bb3d23ab1a49175ab0ef5c4071af95bb03d01d460776cdb716d91333443382",
"5ef83292111d9a8bb63b6afc3abf42d0bc78fe24985f0d2e039e73258b5dab8f",
"74420c13094b530a986b37c4f1d23cb58c0e8e2295f5858ded129fb1574e66f9",
],
}
}

View file

@ -1,8 +1,7 @@
((rustic-mode
.((eglot-workspace-configuration
. (:rust-analyzer
( :check ( :invocationLocation "root"
:invocationStrategy "once"
( :check ( :invocationStrategy "once"
:overrideCommand ["python3"
"x.py"
"check"

View file

@ -2,10 +2,13 @@
# so that r-a's checks don't block user `x` commands and vice-verse.
# R-a's build directory is located in `build-rust-analyzer`.
#
# To build rustfmt and proc macro server for r-a run the following command:
# To download rustfmt and proc macro server for r-a run the following command
# (proc macro server is downloaded automatically with pretty much any command,
# this specific one also downloads rustfmt):
# ```
# x b proc-macro-srv-cli rustfmt --stage 0 --build-dir build-rust-analyzer
# x fmt --check
# ```
# (if that doesn't work -- do `x clean` first)
[language-server.rust-analyzer.config]
linkedProjects = [
@ -18,7 +21,6 @@ linkedProjects = [
]
[language-server.rust-analyzer.config.check]
invocationLocation = "root"
invocationStrategy = "once"
overrideCommand = [
"python3",
@ -31,12 +33,12 @@ overrideCommand = [
[language-server.rust-analyzer.config.rustfmt]
overrideCommand = [
"build-rust-analyzer/host/rustfmt/bin/rustfmt",
"build/host/rustfmt/bin/rustfmt",
"--edition=2024"
]
[language-server.rust-analyzer.config.procMacro]
server = "build-rust-analyzer/host/stage0/libexec/rust-analyzer-proc-macro-srv"
server = "build/host/stage0/libexec/rust-analyzer-proc-macro-srv"
enable = true
[language-server.rust-analyzer.config.rustc]
@ -50,14 +52,20 @@ RUSTC_BOOTSTRAP = "1"
[language-server.rust-analyzer.config.cargo.buildScripts]
enable = true
invocationLocation = "root"
invocationStrategy = "once"
overrideCommand = [
"python3",
"x.py",
"check",
"--json-output",
"--compile-time-deps",
"--build-dir",
"build-rust-analyzer",
"--compile-time-deps",
]
[language-server.rust-analyzer.environment]
RUSTUP_TOOLCHAIN = "nightly"
[[language]]
name = "rust"
file-types = ["rs", "fixed", "pp", "mir"]

View file

@ -1,14 +1,5 @@
{
"git.detectSubmodulesLimit": 20,
"rust-analyzer.check.invocationStrategy": "once",
"rust-analyzer.check.overrideCommand": [
"python3",
"x.py",
"check",
"--build-dir",
"build-rust-analyzer",
"--json-output"
],
"rust-analyzer.linkedProjects": [
"Cargo.toml",
"compiler/rustc_codegen_cranelift/Cargo.toml",
@ -17,28 +8,37 @@
"src/bootstrap/Cargo.toml",
"src/tools/rust-analyzer/Cargo.toml"
],
"rust-analyzer.check.invocationStrategy": "once",
"rust-analyzer.check.overrideCommand": [
"python3",
"x.py",
"check",
"--json-output",
"--build-dir",
"build-rust-analyzer"
],
"rust-analyzer.rustfmt.overrideCommand": [
"${workspaceFolder}/build-rust-analyzer/host/rustfmt/bin/rustfmt",
"${workspaceFolder}/build/host/rustfmt/bin/rustfmt",
"--edition=2024"
],
"rust-analyzer.procMacro.server": "${workspaceFolder}/build-rust-analyzer/host/stage0/libexec/rust-analyzer-proc-macro-srv",
"rust-analyzer.procMacro.server": "${workspaceFolder}/build/host/stage0/libexec/rust-analyzer-proc-macro-srv",
"rust-analyzer.procMacro.enable": true,
"rust-analyzer.rustc.source": "./Cargo.toml",
"rust-analyzer.cargo.sysrootSrc": "./library",
"rust-analyzer.cargo.extraEnv": {
"RUSTC_BOOTSTRAP": "1"
},
"rust-analyzer.cargo.buildScripts.enable": true,
"rust-analyzer.cargo.buildScripts.invocationStrategy": "once",
"rust-analyzer.cargo.buildScripts.overrideCommand": [
"python3",
"x.py",
"check",
"--build-dir",
"build-rust-analyzer",
"--json-output",
"--compile-time-deps"
"--build-dir",
"build-rust-analyzer",
],
"rust-analyzer.cargo.sysrootSrc": "./library",
"rust-analyzer.rustc.source": "./Cargo.toml",
"rust-analyzer.cargo.extraEnv": {
"RUSTC_BOOTSTRAP": "1"
},
"rust-analyzer.server.extraEnv": {
"RUSTUP_TOOLCHAIN": "nightly"
},

View file

@ -2,28 +2,15 @@
"lsp": {
"rust-analyzer": {
"initialization_options": {
"cargo": {
"buildScripts": {
"enable": true,
"invocationLocation": "root",
"invocationStrategy": "once",
"overrideCommand": [
"python3",
"x.py",
"check",
"--build-dir",
"build-rust-analyzer",
"--compile-time-deps",
"--json-output"
]
},
"extraEnv": {
"RUSTC_BOOTSTRAP": "1"
},
"sysrootSrc": "./library"
},
"linkedProjects": [
"Cargo.toml",
"compiler/rustc_codegen_cranelift/Cargo.toml",
"compiler/rustc_codegen_gcc/Cargo.toml",
"library/Cargo.toml",
"src/bootstrap/Cargo.toml",
"src/tools/rust-analyzer/Cargo.toml"
],
"check": {
"invocationLocation": "root",
"invocationStrategy": "once",
"overrideCommand": [
"python3",
@ -34,26 +21,37 @@
"build-rust-analyzer"
]
},
"linkedProjects": [
"Cargo.toml",
"compiler/rustc_codegen_cranelift/Cargo.toml",
"compiler/rustc_codegen_gcc/Cargo.toml",
"library/Cargo.toml",
"src/bootstrap/Cargo.toml",
"src/tools/rust-analyzer/Cargo.toml"
],
"rustfmt": {
"overrideCommand": [
"build/host/rustfmt/bin/rustfmt",
"--edition=2024"
]
},
"procMacro": {
"enable": true,
"server": "build-rust-analyzer/host/stage0/libexec/rust-analyzer-proc-macro-srv"
"server": "build/host/stage0/libexec/rust-analyzer-proc-macro-srv"
},
"rustc": {
"source": "./Cargo.toml"
},
"rustfmt": {
"overrideCommand": [
"build-rust-analyzer/host/rustfmt/bin/rustfmt",
"--edition=2024"
]
"cargo": {
"sysrootSrc": "./library",
"extraEnv": {
"RUSTC_BOOTSTRAP": "1"
},
"buildScripts": {
"enable": true,
"invocationStrategy": "once",
"overrideCommand": [
"python3",
"x.py",
"check",
"--json-output"
"--compile-time-deps",
"--build-dir",
"build-rust-analyzer",
]
}
},
"server": {
"extraEnv": {

View file

@ -125,7 +125,7 @@ impl TestCx<'_> {
// `extern run_make_support;`.
.arg("--extern")
.arg(format!("run_make_support={}", &support_lib_path))
.arg("--edition=2021")
.arg("--edition=2024")
.arg(&self.testpaths.file.join("rmake.rs"))
.arg("-Cprefer-dynamic");

View file

@ -560,6 +560,18 @@ version = "1.15.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "48c757948c5ede0e46177b7add2e67155f70e33c07fea8284df6576da70b3719"
[[package]]
name = "embedded-io"
version = "0.4.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ef1a6892d9eef45c8fa6b9e0086428a2cca8491aca8f787c534a3d6d0bcb3ced"
[[package]]
name = "embedded-io"
version = "0.6.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "edd0f118536f44f5ccd48bcb8b111bdc3de888b58c74639dfb034a357d0f206d"
[[package]]
name = "ena"
version = "0.14.3"
@ -1456,6 +1468,7 @@ name = "mbe"
version = "0.0.0"
dependencies = [
"arrayvec",
"bitflags 2.9.4",
"cov-mark",
"expect-test",
"intern",
@ -1785,6 +1798,8 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6764c3b5dd454e283a30e6dfe78e9b31096d9e32036b5d1eaac7a6119ccb9a24"
dependencies = [
"cobs",
"embedded-io 0.4.0",
"embedded-io 0.6.1",
"heapless",
"serde",
]
@ -1820,8 +1835,10 @@ dependencies = [
"indexmap",
"intern",
"paths",
"postcard",
"proc-macro-srv",
"rustc-hash 2.1.1",
"semver",
"serde",
"serde_derive",
"serde_json",

View file

@ -127,6 +127,7 @@ object = { version = "0.36.7", default-features = false, features = [
"macho",
"pe",
] }
postcard = {version = "1.1.3", features = ["alloc"]}
process-wrap = { version = "8.2.1", features = ["std"] }
pulldown-cmark-to-cmark = "10.0.4"
pulldown-cmark = { version = "0.9.6", default-features = false }

View file

@ -338,7 +338,7 @@ fn macro_def(db: &dyn DefDatabase, id: MacroId) -> MacroDefId {
let kind = |expander, file_id, m| {
let in_file = InFile::new(file_id, m);
match expander {
MacroExpander::Declarative => MacroDefKind::Declarative(in_file),
MacroExpander::Declarative { styles } => MacroDefKind::Declarative(in_file, styles),
MacroExpander::BuiltIn(it) => MacroDefKind::BuiltIn(in_file, it),
MacroExpander::BuiltInAttr(it) => MacroDefKind::BuiltInAttr(in_file, it),
MacroExpander::BuiltInDerive(it) => MacroDefKind::BuiltInDerive(in_file, it),

View file

@ -195,12 +195,16 @@ impl TypeRef {
TypeRef::Tuple(ThinVec::new())
}
pub fn walk(this: TypeRefId, map: &ExpressionStore, f: &mut impl FnMut(&TypeRef)) {
pub fn walk(this: TypeRefId, map: &ExpressionStore, f: &mut impl FnMut(TypeRefId, &TypeRef)) {
go(this, f, map);
fn go(type_ref: TypeRefId, f: &mut impl FnMut(&TypeRef), map: &ExpressionStore) {
let type_ref = &map[type_ref];
f(type_ref);
fn go(
type_ref_id: TypeRefId,
f: &mut impl FnMut(TypeRefId, &TypeRef),
map: &ExpressionStore,
) {
let type_ref = &map[type_ref_id];
f(type_ref_id, type_ref);
match type_ref {
TypeRef::Fn(fn_) => {
fn_.params.iter().for_each(|&(_, param_type)| go(param_type, f, map))
@ -224,7 +228,7 @@ impl TypeRef {
};
}
fn go_path(path: &Path, f: &mut impl FnMut(&TypeRef), map: &ExpressionStore) {
fn go_path(path: &Path, f: &mut impl FnMut(TypeRefId, &TypeRef), map: &ExpressionStore) {
if let Some(type_ref) = path.type_anchor() {
go(type_ref, f, map);
}

View file

@ -17,9 +17,8 @@ use thin_vec::ThinVec;
use crate::{
AdtId, BuiltinType, ConstId, ExternBlockId, ExternCrateId, FxIndexMap, HasModule, ImplId,
LocalModuleId, Lookup, MacroId, ModuleDefId, ModuleId, TraitId, UseId,
LocalModuleId, Lookup, MacroCallStyles, MacroId, ModuleDefId, ModuleId, TraitId, UseId,
db::DefDatabase,
nameres::MacroSubNs,
per_ns::{Item, MacrosItem, PerNs, TypesItem, ValuesItem},
visibility::Visibility,
};
@ -740,11 +739,15 @@ impl ItemScope {
let mut entries: Vec<_> = self.resolutions().collect();
entries.sort_by_key(|(name, _)| name.clone());
let print_macro_sub_ns =
|buf: &mut String, macro_id: MacroId| match MacroSubNs::from_id(db, macro_id) {
MacroSubNs::Bang => buf.push('!'),
MacroSubNs::Attr => buf.push('#'),
};
let print_macro_sub_ns = |buf: &mut String, macro_id: MacroId| {
let styles = crate::nameres::macro_styles_from_id(db, macro_id);
if styles.contains(MacroCallStyles::FN_LIKE) {
buf.push('!');
}
if styles.contains(MacroCallStyles::ATTR) || styles.contains(MacroCallStyles::DERIVE) {
buf.push('#');
}
};
for (name, def) in entries {
let display_name: &dyn fmt::Display = match &name {

View file

@ -61,8 +61,8 @@ use std::hash::{Hash, Hasher};
use base_db::{Crate, impl_intern_key};
use hir_expand::{
AstId, ExpandResult, ExpandTo, HirFileId, InFile, MacroCallId, MacroCallKind, MacroDefId,
MacroDefKind,
AstId, ExpandResult, ExpandTo, HirFileId, InFile, MacroCallId, MacroCallKind, MacroCallStyles,
MacroDefId, MacroDefKind,
builtin::{BuiltinAttrExpander, BuiltinDeriveExpander, BuiltinFnLikeExpander, EagerExpander},
db::ExpandDatabase,
eager::expand_eager_macro_input,
@ -403,7 +403,7 @@ bitflags::bitflags! {
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub enum MacroExpander {
Declarative,
Declarative { styles: MacroCallStyles },
BuiltIn(BuiltinFnLikeExpander),
BuiltInAttr(BuiltinAttrExpander),
BuiltInDerive(BuiltinDeriveExpander),

View file

@ -77,7 +77,7 @@ use tt::TextRange;
use crate::{
AstId, BlockId, BlockLoc, CrateRootModuleId, ExternCrateId, FunctionId, FxIndexMap,
LocalModuleId, Lookup, MacroExpander, MacroId, ModuleId, ProcMacroId, UseId,
LocalModuleId, Lookup, MacroCallStyles, MacroExpander, MacroId, ModuleId, ProcMacroId, UseId,
db::DefDatabase,
item_scope::{BuiltinShadowMode, ItemScope},
item_tree::TreeId,
@ -813,26 +813,25 @@ pub enum MacroSubNs {
Attr,
}
impl MacroSubNs {
pub(crate) fn from_id(db: &dyn DefDatabase, macro_id: MacroId) -> Self {
let expander = match macro_id {
MacroId::Macro2Id(it) => it.lookup(db).expander,
MacroId::MacroRulesId(it) => it.lookup(db).expander,
MacroId::ProcMacroId(it) => {
return match it.lookup(db).kind {
ProcMacroKind::CustomDerive | ProcMacroKind::Attr => Self::Attr,
ProcMacroKind::Bang => Self::Bang,
};
}
};
// Eager macros aren't *guaranteed* to be bang macros, but they *are* all bang macros currently.
match expander {
MacroExpander::Declarative
| MacroExpander::BuiltIn(_)
| MacroExpander::BuiltInEager(_) => Self::Bang,
MacroExpander::BuiltInAttr(_) | MacroExpander::BuiltInDerive(_) => Self::Attr,
pub(crate) fn macro_styles_from_id(db: &dyn DefDatabase, macro_id: MacroId) -> MacroCallStyles {
let expander = match macro_id {
MacroId::Macro2Id(it) => it.lookup(db).expander,
MacroId::MacroRulesId(it) => it.lookup(db).expander,
MacroId::ProcMacroId(it) => {
return match it.lookup(db).kind {
ProcMacroKind::CustomDerive => MacroCallStyles::DERIVE,
ProcMacroKind::Bang => MacroCallStyles::FN_LIKE,
ProcMacroKind::Attr => MacroCallStyles::ATTR,
};
}
};
match expander {
MacroExpander::Declarative { styles } => styles,
// Eager macros aren't *guaranteed* to be bang macros, but they *are* all bang macros currently.
MacroExpander::BuiltIn(_) | MacroExpander::BuiltInEager(_) => MacroCallStyles::FN_LIKE,
MacroExpander::BuiltInAttr(_) => MacroCallStyles::ATTR,
MacroExpander::BuiltInDerive(_) => MacroCallStyles::DERIVE,
}
}
@ -842,9 +841,19 @@ impl MacroSubNs {
/// We ignore resolutions from one sub-namespace when searching names in scope for another.
///
/// [rustc]: https://github.com/rust-lang/rust/blob/1.69.0/compiler/rustc_resolve/src/macros.rs#L75
fn sub_namespace_match(candidate: Option<MacroSubNs>, expected: Option<MacroSubNs>) -> bool {
match (candidate, expected) {
(Some(candidate), Some(expected)) => candidate == expected,
_ => true,
fn sub_namespace_match(
db: &dyn DefDatabase,
macro_id: MacroId,
expected: Option<MacroSubNs>,
) -> bool {
let candidate = macro_styles_from_id(db, macro_id);
match expected {
Some(MacroSubNs::Bang) => candidate.contains(MacroCallStyles::FN_LIKE),
Some(MacroSubNs::Attr) => {
candidate.contains(MacroCallStyles::ATTR) || candidate.contains(MacroCallStyles::DERIVE)
}
// If we aren't expecting a specific sub-namespace
// (e.g. in `use` declarations), match any macro.
None => true,
}
}

View file

@ -2300,7 +2300,10 @@ impl ModCollector<'_, '_> {
}
} else {
// Case 2: normal `macro_rules!` macro
MacroExpander::Declarative
let id = InFile::new(self.file_id(), ast_id);
let decl_expander = self.def_collector.db.decl_macro_expander(krate, id.upcast());
let styles = decl_expander.mac.rule_styles();
MacroExpander::Declarative { styles }
};
let allow_internal_unsafe = attrs.by_key(sym::allow_internal_unsafe).exists();
@ -2369,7 +2372,10 @@ impl ModCollector<'_, '_> {
}
} else {
// Case 2: normal `macro`
MacroExpander::Declarative
let id = InFile::new(self.file_id(), ast_id);
let decl_expander = self.def_collector.db.decl_macro_expander(krate, id.upcast());
let styles = decl_expander.mac.rule_styles();
MacroExpander::Declarative { styles }
};
let allow_internal_unsafe = attrs.by_key(sym::allow_internal_unsafe).exists();
@ -2429,12 +2435,7 @@ impl ModCollector<'_, '_> {
})
.or_else(|| def_map[self.module_id].scope.get(name).take_macros())
.or_else(|| Some(def_map.macro_use_prelude.get(name).copied()?.0))
.filter(|&id| {
sub_namespace_match(
Some(MacroSubNs::from_id(db, id)),
Some(MacroSubNs::Bang),
)
})
.filter(|&id| sub_namespace_match(db, id, Some(MacroSubNs::Bang)))
.map(|it| self.def_collector.db.macro_def(it))
})
},

View file

@ -85,10 +85,7 @@ impl PerNs {
db: &dyn DefDatabase,
expected: Option<MacroSubNs>,
) -> Self {
self.macros = self.macros.filter(|def| {
let this = MacroSubNs::from_id(db, def.def);
sub_namespace_match(Some(this), expected)
});
self.macros = self.macros.filter(|def| sub_namespace_match(db, def.def, expected));
self
}
@ -668,9 +665,7 @@ impl DefMap {
// FIXME: shadowing
.and_then(|it| it.last())
.copied()
.filter(|&id| {
sub_namespace_match(Some(MacroSubNs::from_id(db, id)), expected_macro_subns)
})
.filter(|&id| sub_namespace_match(db, id, expected_macro_subns))
.map_or_else(PerNs::none, |m| PerNs::macros(m, Visibility::Public, None));
let from_scope = self[module].scope.get(name).filter_macro(db, expected_macro_subns);
let from_builtin = match self.block {

View file

@ -222,6 +222,7 @@ pub struct S {}
"ast_id_map_shim",
"parse_shim",
"real_span_map_shim",
"decl_macro_expander_shim",
"file_item_tree_query",
"ast_id_map_shim",
"parse_shim",
@ -235,7 +236,6 @@ pub struct S {}
"ast_id_map_shim",
"parse_macro_expansion_shim",
"macro_arg_shim",
"decl_macro_expander_shim",
]
"#]],
expect![[r#"
@ -404,6 +404,7 @@ pub struct S {}
"ast_id_map_shim",
"parse_shim",
"real_span_map_shim",
"decl_macro_expander_shim",
"file_item_tree_query",
"ast_id_map_shim",
"parse_shim",
@ -423,7 +424,6 @@ pub struct S {}
"ast_id_map_shim",
"parse_macro_expansion_shim",
"macro_arg_shim",
"decl_macro_expander_shim",
"crate_local_def_map",
"proc_macros_for_crate_shim",
"file_item_tree_query",
@ -446,9 +446,9 @@ pub struct S {}
"file_item_tree_query",
"real_span_map_shim",
"macro_arg_shim",
"macro_arg_shim",
"decl_macro_expander_shim",
"macro_arg_shim",
"macro_arg_shim",
]
"#]],
);
@ -520,6 +520,7 @@ m!(Z);
"ast_id_map_shim",
"parse_shim",
"real_span_map_shim",
"decl_macro_expander_shim",
"file_item_tree_query",
"ast_id_map_shim",
"parse_shim",
@ -533,7 +534,6 @@ m!(Z);
"ast_id_map_shim",
"parse_macro_expansion_shim",
"macro_arg_shim",
"decl_macro_expander_shim",
"file_item_tree_query",
"ast_id_map_shim",
"parse_macro_expansion_shim",

View file

@ -1651,3 +1651,132 @@ pub mod prelude {
"#]],
);
}
#[test]
fn macro_rules_mixed_style() {
check(
r#"
macro_rules! foo {
() => {};
attr() () => {};
derive() () => {};
}
use foo;
"#,
expect![[r#"
crate
- foo : macro!# (import)
- (legacy) foo : macro!#
"#]],
);
}
#[test]
fn macro_2_mixed_style() {
check(
r#"
macro foo {
() => {};
attr() () => {};
derive() () => {};
}
use foo;
"#,
expect![[r#"
crate
- foo : macro!#
"#]],
);
}
#[test]
fn macro_rules_attr() {
check(
r#"
macro_rules! my_attr {
attr() ($($tt:tt)*) => { fn attr_fn() {} }
}
#[my_attr]
enum MyEnum {}
"#,
expect![[r#"
crate
- attr_fn : value
- (legacy) my_attr : macro#
"#]],
);
}
#[test]
fn macro_2_attr() {
check(
r#"
macro my_attr {
attr() ($($tt:tt)*) => { fn attr_fn() {} }
}
#[my_attr]
enum MyEnum {}
"#,
expect![[r#"
crate
- attr_fn : value
- my_attr : macro#
"#]],
);
}
#[test]
fn macro_rules_derive() {
check(
r#"
//- minicore: derive
macro_rules! MyDerive {
derive() ($($tt:tt)*) => { fn derived_fn() {} }
}
#[derive(MyDerive)]
enum MyEnum {}
"#,
expect![[r#"
crate
- MyEnum : type
- derived_fn : value
- (legacy) MyDerive : macro#
"#]],
);
}
#[test]
fn macro_2_derive() {
check(
r#"
//- minicore: derive
macro MyDerive {
derive() ($($tt:tt)*) => { fn derived_fn() {} }
}
#[derive(MyDerive)]
enum MyEnum {}
"#,
expect![[r#"
crate
- MyDerive : macro#
- MyEnum : type
- derived_fn : value
"#]],
);
}

View file

@ -297,9 +297,9 @@ pub fn expand_speculative(
MacroDefKind::BuiltInAttr(_, it) if it.is_derive() => {
pseudo_derive_attr_expansion(&tt, attr_arg.as_ref()?, span)
}
MacroDefKind::Declarative(it) => {
db.decl_macro_expander(loc.krate, it).expand_unhygienic(tt, span, loc.def.edition)
}
MacroDefKind::Declarative(it, _) => db
.decl_macro_expander(loc.krate, it)
.expand_unhygienic(tt, loc.kind.call_style(), span, loc.def.edition),
MacroDefKind::BuiltIn(_, it) => {
it.expand(db, actual_macro_call, &tt, span).map_err(Into::into)
}
@ -585,7 +585,7 @@ fn attr_source(invoc_attr_index: AttrId, node: &ast::Item) -> Option<ast::Attr>
impl TokenExpander {
fn macro_expander(db: &dyn ExpandDatabase, id: MacroDefId) -> TokenExpander {
match id.kind {
MacroDefKind::Declarative(ast_id) => {
MacroDefKind::Declarative(ast_id, _) => {
TokenExpander::DeclarativeMacro(db.decl_macro_expander(id.krate, ast_id))
}
MacroDefKind::BuiltIn(_, expander) => TokenExpander::BuiltIn(expander),
@ -618,48 +618,46 @@ fn macro_expand(
db.macro_arg_considering_derives(macro_call_id, &loc.kind);
let arg = &*macro_arg;
let res =
match loc.def.kind {
MacroDefKind::Declarative(id) => db
.decl_macro_expander(loc.def.krate, id)
.expand(db, arg.clone(), macro_call_id, span),
MacroDefKind::BuiltIn(_, it) => {
it.expand(db, macro_call_id, arg, span).map_err(Into::into).zip_val(None)
}
MacroDefKind::BuiltInDerive(_, it) => {
it.expand(db, macro_call_id, arg, span).map_err(Into::into).zip_val(None)
}
MacroDefKind::BuiltInEager(_, it) => {
// This might look a bit odd, but we do not expand the inputs to eager macros here.
// Eager macros inputs are expanded, well, eagerly when we collect the macro calls.
// That kind of expansion uses the ast id map of an eager macros input though which goes through
// the HirFileId machinery. As eager macro inputs are assigned a macro file id that query
// will end up going through here again, whereas we want to just want to inspect the raw input.
// As such we just return the input subtree here.
let eager = match &loc.kind {
MacroCallKind::FnLike { eager: None, .. } => {
return ExpandResult::ok(CowArc::Arc(macro_arg.clone()))
.zip_val(None);
}
MacroCallKind::FnLike { eager: Some(eager), .. } => Some(&**eager),
_ => None,
};
let mut res = it.expand(db, macro_call_id, arg, span).map_err(Into::into);
if let Some(EagerCallInfo { error, .. }) = eager {
// FIXME: We should report both errors!
res.err = error.clone().or(res.err);
let res = match loc.def.kind {
MacroDefKind::Declarative(id, _) => db
.decl_macro_expander(loc.def.krate, id)
.expand(db, arg.clone(), macro_call_id, span),
MacroDefKind::BuiltIn(_, it) => {
it.expand(db, macro_call_id, arg, span).map_err(Into::into).zip_val(None)
}
MacroDefKind::BuiltInDerive(_, it) => {
it.expand(db, macro_call_id, arg, span).map_err(Into::into).zip_val(None)
}
MacroDefKind::BuiltInEager(_, it) => {
// This might look a bit odd, but we do not expand the inputs to eager macros here.
// Eager macros inputs are expanded, well, eagerly when we collect the macro calls.
// That kind of expansion uses the ast id map of an eager macros input though which goes through
// the HirFileId machinery. As eager macro inputs are assigned a macro file id that query
// will end up going through here again, whereas we want to just want to inspect the raw input.
// As such we just return the input subtree here.
let eager = match &loc.kind {
MacroCallKind::FnLike { eager: None, .. } => {
return ExpandResult::ok(CowArc::Arc(macro_arg.clone())).zip_val(None);
}
res.zip_val(None)
MacroCallKind::FnLike { eager: Some(eager), .. } => Some(&**eager),
_ => None,
};
let mut res = it.expand(db, macro_call_id, arg, span).map_err(Into::into);
if let Some(EagerCallInfo { error, .. }) = eager {
// FIXME: We should report both errors!
res.err = error.clone().or(res.err);
}
MacroDefKind::BuiltInAttr(_, it) => {
let mut res = it.expand(db, macro_call_id, arg, span);
fixup::reverse_fixups(&mut res.value, &undo_info);
res.zip_val(None)
}
MacroDefKind::ProcMacro(_, _, _) => unreachable!(),
};
res.zip_val(None)
}
MacroDefKind::BuiltInAttr(_, it) => {
let mut res = it.expand(db, macro_call_id, arg, span);
fixup::reverse_fixups(&mut res.value, &undo_info);
res.zip_val(None)
}
MacroDefKind::ProcMacro(_, _, _) => unreachable!(),
};
(ExpandResult { value: res.value, err: res.err }, span)
}
};

View file

@ -10,6 +10,7 @@ use triomphe::Arc;
use crate::{
AstId, ExpandError, ExpandErrorKind, ExpandResult, HirFileId, Lookup, MacroCallId,
MacroCallStyle,
attrs::RawAttrs,
db::ExpandDatabase,
hygiene::{Transparency, apply_mark},
@ -46,6 +47,7 @@ impl DeclarativeMacroExpander {
s.ctx =
apply_mark(db, s.ctx, call_id.into(), self.transparency, self.edition)
},
loc.kind.call_style(),
span,
loc.def.edition,
)
@ -56,6 +58,7 @@ impl DeclarativeMacroExpander {
pub fn expand_unhygienic(
&self,
tt: tt::TopSubtree,
call_style: MacroCallStyle,
call_site: Span,
def_site_edition: Edition,
) -> ExpandResult<tt::TopSubtree> {
@ -66,7 +69,7 @@ impl DeclarativeMacroExpander {
),
None => self
.mac
.expand(&tt, |_| (), call_site, def_site_edition)
.expand(&tt, |_| (), call_style, call_site, def_site_edition)
.map(TupleExt::head)
.map_err(Into::into),
}

View file

@ -238,7 +238,7 @@ fn eager_macro_recur(
None => ExpandResult { value: None, err },
}
}
MacroDefKind::Declarative(_)
MacroDefKind::Declarative(..)
| MacroDefKind::BuiltIn(..)
| MacroDefKind::BuiltInAttr(..)
| MacroDefKind::BuiltInDerive(..)

View file

@ -61,7 +61,7 @@ pub use crate::{
};
pub use base_db::EditionedFileId;
pub use mbe::{DeclarativeMacro, ValueResult};
pub use mbe::{DeclarativeMacro, MacroCallStyle, MacroCallStyles, ValueResult};
pub mod tt {
pub use span::Span;
@ -266,7 +266,7 @@ pub struct MacroDefId {
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub enum MacroDefKind {
Declarative(AstId<ast::Macro>),
Declarative(AstId<ast::Macro>, MacroCallStyles),
BuiltIn(AstId<ast::Macro>, BuiltinFnLikeExpander),
BuiltInAttr(AstId<ast::Macro>, BuiltinAttrExpander),
BuiltInDerive(AstId<ast::Macro>, BuiltinDeriveExpander),
@ -340,6 +340,16 @@ pub enum MacroCallKind {
},
}
impl MacroCallKind {
pub(crate) fn call_style(&self) -> MacroCallStyle {
match self {
MacroCallKind::FnLike { .. } => MacroCallStyle::FnLike,
MacroCallKind::Derive { .. } => MacroCallStyle::Derive,
MacroCallKind::Attr { .. } => MacroCallStyle::Attr,
}
}
}
impl HirFileId {
pub fn edition(self, db: &dyn ExpandDatabase) -> Edition {
match self {
@ -511,7 +521,7 @@ impl MacroDefId {
pub fn definition_range(&self, db: &dyn ExpandDatabase) -> InFile<TextRange> {
match self.kind {
MacroDefKind::Declarative(id)
MacroDefKind::Declarative(id, _)
| MacroDefKind::BuiltIn(id, _)
| MacroDefKind::BuiltInAttr(id, _)
| MacroDefKind::BuiltInDerive(id, _)
@ -527,7 +537,7 @@ impl MacroDefId {
pub fn ast_id(&self) -> Either<AstId<ast::Macro>, AstId<ast::Fn>> {
match self.kind {
MacroDefKind::ProcMacro(id, ..) => Either::Right(id),
MacroDefKind::Declarative(id)
MacroDefKind::Declarative(id, _)
| MacroDefKind::BuiltIn(id, _)
| MacroDefKind::BuiltInAttr(id, _)
| MacroDefKind::BuiltInDerive(id, _)
@ -540,18 +550,22 @@ impl MacroDefId {
}
pub fn is_attribute(&self) -> bool {
matches!(
self.kind,
MacroDefKind::BuiltInAttr(..) | MacroDefKind::ProcMacro(_, _, ProcMacroKind::Attr)
)
match self.kind {
MacroDefKind::BuiltInAttr(..) | MacroDefKind::ProcMacro(_, _, ProcMacroKind::Attr) => {
true
}
MacroDefKind::Declarative(_, styles) => styles.contains(MacroCallStyles::ATTR),
_ => false,
}
}
pub fn is_derive(&self) -> bool {
matches!(
self.kind,
match self.kind {
MacroDefKind::BuiltInDerive(..)
| MacroDefKind::ProcMacro(_, _, ProcMacroKind::CustomDerive)
)
| MacroDefKind::ProcMacro(_, _, ProcMacroKind::CustomDerive) => true,
MacroDefKind::Declarative(_, styles) => styles.contains(MacroCallStyles::DERIVE),
_ => false,
}
}
pub fn is_fn_like(&self) -> bool {

View file

@ -41,7 +41,7 @@ use hir_def::{
layout::Integer,
resolver::{HasResolver, ResolveValueResult, Resolver, TypeNs, ValueNs},
signatures::{ConstSignature, StaticSignature},
type_ref::{ConstRef, LifetimeRefId, TypeRefId},
type_ref::{ConstRef, LifetimeRefId, TypeRef, TypeRefId},
};
use hir_expand::{mod_path::ModPath, name::Name};
use indexmap::IndexSet;
@ -60,6 +60,7 @@ use triomphe::Arc;
use crate::{
ImplTraitId, IncorrectGenericsLenKind, PathLoweringDiagnostic, TargetFeatures,
collect_type_inference_vars,
db::{HirDatabase, InternedClosureId, InternedOpaqueTyId},
infer::{
coerce::{CoerceMany, DynamicCoerceMany},
@ -497,6 +498,7 @@ pub struct InferenceResult<'db> {
/// unresolved or missing subpatterns or subpatterns of mismatched types.
pub(crate) type_of_pat: ArenaMap<PatId, Ty<'db>>,
pub(crate) type_of_binding: ArenaMap<BindingId, Ty<'db>>,
pub(crate) type_of_type_placeholder: ArenaMap<TypeRefId, Ty<'db>>,
pub(crate) type_of_opaque: FxHashMap<InternedOpaqueTyId, Ty<'db>>,
pub(crate) type_mismatches: FxHashMap<ExprOrPatId, TypeMismatch<'db>>,
/// Whether there are any type-mismatching errors in the result.
@ -542,6 +544,7 @@ impl<'db> InferenceResult<'db> {
type_of_expr: Default::default(),
type_of_pat: Default::default(),
type_of_binding: Default::default(),
type_of_type_placeholder: Default::default(),
type_of_opaque: Default::default(),
type_mismatches: Default::default(),
has_errors: Default::default(),
@ -606,6 +609,12 @@ impl<'db> InferenceResult<'db> {
_ => None,
})
}
pub fn placeholder_types(&self) -> impl Iterator<Item = (TypeRefId, &Ty<'db>)> {
self.type_of_type_placeholder.iter()
}
pub fn type_of_type_placeholder(&self, type_ref: TypeRefId) -> Option<Ty<'db>> {
self.type_of_type_placeholder.get(type_ref).copied()
}
pub fn closure_info(&self, closure: InternedClosureId) -> &(Vec<CapturedItem<'db>>, FnTrait) {
self.closure_info.get(&closure).unwrap()
}
@ -1014,6 +1023,7 @@ impl<'body, 'db> InferenceContext<'body, 'db> {
type_of_expr,
type_of_pat,
type_of_binding,
type_of_type_placeholder,
type_of_opaque,
type_mismatches,
has_errors,
@ -1046,6 +1056,11 @@ impl<'body, 'db> InferenceContext<'body, 'db> {
*has_errors = *has_errors || ty.references_non_lt_error();
}
type_of_binding.shrink_to_fit();
for ty in type_of_type_placeholder.values_mut() {
*ty = table.resolve_completely(*ty);
*has_errors = *has_errors || ty.references_non_lt_error();
}
type_of_type_placeholder.shrink_to_fit();
type_of_opaque.shrink_to_fit();
*has_errors |= !type_mismatches.is_empty();
@ -1285,6 +1300,10 @@ impl<'body, 'db> InferenceContext<'body, 'db> {
self.result.type_of_pat.insert(pat, ty);
}
fn write_type_placeholder_ty(&mut self, type_ref: TypeRefId, ty: Ty<'db>) {
self.result.type_of_type_placeholder.insert(type_ref, ty);
}
fn write_binding_ty(&mut self, id: BindingId, ty: Ty<'db>) {
self.result.type_of_binding.insert(id, ty);
}
@ -1333,7 +1352,27 @@ impl<'body, 'db> InferenceContext<'body, 'db> {
) -> Ty<'db> {
let ty = self
.with_ty_lowering(store, type_source, lifetime_elision, |ctx| ctx.lower_ty(type_ref));
self.process_user_written_ty(ty)
let ty = self.process_user_written_ty(ty);
// Record the association from placeholders' TypeRefId to type variables.
// We only record them if their number matches. This assumes TypeRef::walk and TypeVisitable process the items in the same order.
let type_variables = collect_type_inference_vars(&ty);
let mut placeholder_ids = vec![];
TypeRef::walk(type_ref, store, &mut |type_ref_id, type_ref| {
if matches!(type_ref, TypeRef::Placeholder) {
placeholder_ids.push(type_ref_id);
}
});
if placeholder_ids.len() == type_variables.len() {
for (placeholder_id, type_variable) in
placeholder_ids.into_iter().zip(type_variables.into_iter())
{
self.write_type_placeholder_ty(placeholder_id, type_variable);
}
}
ty
}
pub(crate) fn make_body_ty(&mut self, type_ref: TypeRefId) -> Ty<'db> {

View file

@ -569,6 +569,35 @@ where
Vec::from_iter(collector.params)
}
/// Visitor that gathers every type inference variable (`?T`) encountered,
/// in visitation order.
struct TypeInferenceVarCollector<'db> {
    type_inference_vars: Vec<Ty<'db>>,
}

impl<'db> rustc_type_ir::TypeVisitor<DbInterner<'db>> for TypeInferenceVarCollector<'db> {
    type Result = ();

    fn visit_ty(&mut self, ty: Ty<'db>) -> Self::Result {
        use crate::rustc_type_ir::Flags;
        if ty.is_ty_var() {
            // The variable itself is what we are collecting; nothing nested to visit.
            self.type_inference_vars.push(ty);
            return;
        }
        // Fast path: only descend into component types (e.g. generic arguments)
        // when the precomputed flags say an inference variable may occur inside.
        if ty.flags().intersects(rustc_type_ir::TypeFlags::HAS_TY_INFER) {
            ty.super_visit_with(self);
        }
    }
}
/// Collects all type inference variables occurring in `value`, in the order
/// the type visitor encounters them.
pub fn collect_type_inference_vars<'db, T>(value: &T) -> Vec<Ty<'db>>
where
    T: ?Sized + rustc_type_ir::TypeVisitable<DbInterner<'db>>,
{
    let mut collector = TypeInferenceVarCollector { type_inference_vars: Vec::new() };
    value.visit_with(&mut collector);
    collector.type_inference_vars
}
pub fn known_const_to_ast<'db>(
konst: Const<'db>,
db: &'db dyn HirDatabase,

View file

@ -23,6 +23,7 @@ use hir_def::{
item_scope::ItemScope,
nameres::DefMap,
src::HasSource,
type_ref::TypeRefId,
};
use hir_expand::{FileRange, InFile, db::ExpandDatabase};
use itertools::Itertools;
@ -219,6 +220,24 @@ fn check_impl(
}
}
}
for (type_ref, ty) in inference_result.placeholder_types() {
let node = match type_node(&body_source_map, type_ref, &db) {
Some(value) => value,
None => continue,
};
let range = node.as_ref().original_file_range_rooted(&db);
if let Some(expected) = types.remove(&range) {
let actual = salsa::attach(&db, || {
if display_source {
ty.display_source_code(&db, def.module(&db), true).unwrap()
} else {
ty.display_test(&db, display_target).to_string()
}
});
assert_eq!(actual, expected, "type annotation differs at {:#?}", range.range);
}
}
}
let mut buf = String::new();
@ -275,6 +294,20 @@ fn pat_node(
})
}
/// Maps a `TypeRefId` back to its syntax node via the body source map.
///
/// Returns `None` for synthetic types that have no corresponding source
/// (e.g. types introduced during lowering).
fn type_node(
    body_source_map: &BodySourceMap,
    type_ref: TypeRefId,
    db: &TestDB,
) -> Option<InFile<SyntaxNode>> {
    let src_ptr = body_source_map.type_syntax(type_ref).ok()?;
    let root = db.parse_or_expand(src_ptr.file_id);
    Some(src_ptr.map(|ptr| ptr.to_node(&root).syntax().clone()))
}
fn infer(#[rust_analyzer::rust_fixture] ra_fixture: &str) -> String {
infer_with_mismatches(ra_fixture, false)
}

View file

@ -246,3 +246,22 @@ fn test() {
"#,
);
}
#[test]
// Checks that `_` placeholders written in explicit type ascriptions are
// resolved to the concrete types inference produced for them.
fn type_placeholder_type() {
check_types_source_code(
r#"
struct S<T>(T);
fn test() {
let f: S<_> = S(3);
//^ i32
let f: [_; _] = [4_u32, 5, 6];
//^ u32
let f: (_, _, _) = (1_u32, 1_i32, false);
//^ u32
//^ i32
//^ bool
}
"#,
);
}

View file

@ -3184,7 +3184,7 @@ impl Macro {
pub fn kind(&self, db: &dyn HirDatabase) -> MacroKind {
match self.id {
MacroId::Macro2Id(it) => match it.lookup(db).expander {
MacroExpander::Declarative => MacroKind::Declarative,
MacroExpander::Declarative { .. } => MacroKind::Declarative,
MacroExpander::BuiltIn(_) | MacroExpander::BuiltInEager(_) => {
MacroKind::DeclarativeBuiltIn
}
@ -3192,7 +3192,7 @@ impl Macro {
MacroExpander::BuiltInDerive(_) => MacroKind::DeriveBuiltIn,
},
MacroId::MacroRulesId(it) => match it.lookup(db).expander {
MacroExpander::Declarative => MacroKind::Declarative,
MacroExpander::Declarative { .. } => MacroKind::Declarative,
MacroExpander::BuiltIn(_) | MacroExpander::BuiltInEager(_) => {
MacroKind::DeclarativeBuiltIn
}

View file

@ -21,7 +21,7 @@ use hir_def::{
lang_item::LangItem,
nameres::MacroSubNs,
resolver::{HasResolver, Resolver, TypeNs, ValueNs, resolver_for_scope},
type_ref::{Mutability, TypeRefId},
type_ref::{Mutability, TypeRef, TypeRefId},
};
use hir_expand::{
HirFileId, InFile,
@ -267,8 +267,11 @@ impl<'db> SourceAnalyzer<'db> {
db: &'db dyn HirDatabase,
ty: &ast::Type,
) -> Option<Type<'db>> {
let interner = DbInterner::new_with(db, None, None);
let type_ref = self.type_id(ty)?;
let ty = TyLoweringContext::new(
let mut ty = TyLoweringContext::new(
db,
&self.resolver,
self.store()?,
@ -279,6 +282,31 @@ impl<'db> SourceAnalyzer<'db> {
LifetimeElisionKind::Infer,
)
.lower_ty(type_ref);
// Try and substitute unknown types using InferenceResult
if let Some(infer) = self.infer()
&& let Some(store) = self.store()
{
let mut inferred_types = vec![];
TypeRef::walk(type_ref, store, &mut |type_ref_id, type_ref| {
if matches!(type_ref, TypeRef::Placeholder) {
inferred_types.push(infer.type_of_type_placeholder(type_ref_id));
}
});
let mut inferred_types = inferred_types.into_iter();
let substituted_ty = hir_ty::next_solver::fold::fold_tys(interner, ty, |ty| {
if ty.is_ty_error() { inferred_types.next().flatten().unwrap_or(ty) } else { ty }
});
// Only used the result if the placeholder and unknown type counts matched
let success =
inferred_types.next().is_none() && !substituted_ty.references_non_lt_error();
if success {
ty = substituted_ty;
}
}
Some(Type::new_with_resolver(db, &self.resolver, ty))
}

View file

@ -2500,6 +2500,40 @@ impl dep::Foo for Bar {
${0:todo!()}
}
}
"#,
);
}
#[test]
// Regression test: generating missing impl members for `impl Trait for ()`
// must preserve the trait method's `where` clause (including the
// associated-type bound) in the generated stub.
fn regression_test_for_when_impl_for_unit() {
check_assist(
add_missing_impl_members,
r#"
trait Test {
fn f<B>()
where
B: IntoIterator,
<B as IntoIterator>::Item: Copy;
}
impl Test for () {
$0
}
"#,
r#"
trait Test {
fn f<B>()
where
B: IntoIterator,
<B as IntoIterator>::Item: Copy;
}
impl Test for () {
fn f<B>()
where
B: IntoIterator,
<B as IntoIterator>::Item: Copy {
${0:todo!()}
}
}
"#,
);
}

View file

@ -1,4 +1,5 @@
use either::Either;
use hir::HirDisplay;
use ide_db::syntax_helpers::node_ext::walk_ty;
use syntax::{
ast::{self, AstNode, HasGenericArgs, HasGenericParams, HasName, edit::IndentLevel, make},
@ -39,6 +40,15 @@ pub(crate) fn extract_type_alias(acc: &mut Assists, ctx: &AssistContext<'_>) ->
);
let target = ty.syntax().text_range();
let resolved_ty = ctx.sema.resolve_type(&ty)?;
let resolved_ty = if !resolved_ty.contains_unknown() {
let module = ctx.sema.scope(ty.syntax())?.module();
let resolved_ty = resolved_ty.display_source_code(ctx.db(), module.into(), false).ok()?;
make::ty(&resolved_ty)
} else {
ty.clone()
};
acc.add(
AssistId::refactor_extract("extract_type_alias"),
"Extract type as type alias",
@ -72,7 +82,7 @@ pub(crate) fn extract_type_alias(acc: &mut Assists, ctx: &AssistContext<'_>) ->
// Insert new alias
let ty_alias =
make::ty_alias(None, "Type", generic_params, None, None, Some((ty, None)))
make::ty_alias(None, "Type", generic_params, None, None, Some((resolved_ty, None)))
.clone_for_update();
if let Some(cap) = ctx.config.snippet_cap
@ -391,4 +401,50 @@ where
"#,
);
}
#[test]
fn inferred_generic_type_parameter() {
check_assist(
extract_type_alias,
r#"
struct Wrap<T>(T);
fn main() {
let wrap: $0Wrap<_>$0 = Wrap::<_>(3i32);
}
"#,
r#"
struct Wrap<T>(T);
type $0Type = Wrap<i32>;
fn main() {
let wrap: Type = Wrap::<_>(3i32);
}
"#,
)
}
#[test]
fn inferred_type() {
check_assist(
extract_type_alias,
r#"
struct Wrap<T>(T);
fn main() {
let wrap: Wrap<$0_$0> = Wrap::<_>(3i32);
}
"#,
r#"
struct Wrap<T>(T);
type $0Type = i32;
fn main() {
let wrap: Wrap<Type> = Wrap::<_>(3i32);
}
"#,
)
}
}

View file

@ -1,13 +1,12 @@
use std::slice;
use ide_db::assists::GroupLabel;
use itertools::Itertools;
use stdx::to_lower_snake_case;
use syntax::ast::HasVisibility;
use syntax::ast::{self, AstNode, HasName};
use crate::{
AssistContext, AssistId, Assists,
utils::{add_method_to_adt, find_struct_impl},
utils::{add_method_to_adt, find_struct_impl, is_selected},
};
// Assist: generate_enum_is_method
@ -41,20 +40,21 @@ use crate::{
// ```
pub(crate) fn generate_enum_is_method(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> {
let variant = ctx.find_node_at_offset::<ast::Variant>()?;
let variant_name = variant.name()?;
let parent_enum = ast::Adt::Enum(variant.parent_enum());
let pattern_suffix = match variant.kind() {
ast::StructKind::Record(_) => " { .. }",
ast::StructKind::Tuple(_) => "(..)",
ast::StructKind::Unit => "",
};
let variants = variant
.parent_enum()
.variant_list()?
.variants()
.filter(|it| is_selected(it, ctx.selection_trimmed(), true))
.collect::<Vec<_>>();
let methods = variants.iter().map(Method::new).collect::<Option<Vec<_>>>()?;
let enum_name = parent_enum.name()?;
let enum_lowercase_name = to_lower_snake_case(&enum_name.to_string()).replace('_', " ");
let fn_name = format!("is_{}", &to_lower_snake_case(&variant_name.text()));
let fn_names = methods.iter().map(|it| it.fn_name.clone()).collect::<Vec<_>>();
stdx::never!(variants.is_empty());
// Return early if we've found an existing new fn
let impl_def = find_struct_impl(ctx, &parent_enum, slice::from_ref(&fn_name))?;
let impl_def = find_struct_impl(ctx, &parent_enum, &fn_names)?;
let target = variant.syntax().text_range();
acc.add_group(
@ -64,21 +64,47 @@ pub(crate) fn generate_enum_is_method(acc: &mut Assists, ctx: &AssistContext<'_>
target,
|builder| {
let vis = parent_enum.visibility().map_or(String::new(), |v| format!("{v} "));
let method = format!(
" /// Returns `true` if the {enum_lowercase_name} is [`{variant_name}`].
let method = methods
.iter()
.map(|Method { pattern_suffix, fn_name, variant_name }| {
format!(
" \
/// Returns `true` if the {enum_lowercase_name} is [`{variant_name}`].
///
/// [`{variant_name}`]: {enum_name}::{variant_name}
#[must_use]
{vis}fn {fn_name}(&self) -> bool {{
matches!(self, Self::{variant_name}{pattern_suffix})
}}",
);
)
})
.join("\n\n");
add_method_to_adt(builder, &parent_enum, impl_def, &method);
},
)
}
/// Everything needed to render one generated `is_*` predicate method.
struct Method {
    pattern_suffix: &'static str,
    fn_name: String,
    variant_name: ast::Name,
}

impl Method {
    /// Builds the method description for a single enum variant.
    /// Returns `None` when the variant has no name.
    fn new(variant: &ast::Variant) -> Option<Self> {
        let variant_name = variant.name()?;
        let fn_name = format!("is_{}", to_lower_snake_case(&variant_name.text()));
        // Pattern tail matching the variant's shape inside `matches!`.
        let pattern_suffix = match variant.kind() {
            ast::StructKind::Record(_) => " { .. }",
            ast::StructKind::Tuple(_) => "(..)",
            ast::StructKind::Unit => "",
        };
        Some(Self { pattern_suffix, fn_name, variant_name })
    }
}
#[cfg(test)]
mod tests {
use crate::tests::{check_assist, check_assist_not_applicable};
@ -113,6 +139,42 @@ impl Variant {
);
}
#[test]
fn test_generate_enum_is_from_multiple_variant() {
check_assist(
generate_enum_is_method,
r#"
enum Variant {
Undefined,
$0Minor,
M$0ajor,
}"#,
r#"enum Variant {
Undefined,
Minor,
Major,
}
impl Variant {
/// Returns `true` if the variant is [`Minor`].
///
/// [`Minor`]: Variant::Minor
#[must_use]
fn is_minor(&self) -> bool {
matches!(self, Self::Minor)
}
/// Returns `true` if the variant is [`Major`].
///
/// [`Major`]: Variant::Major
#[must_use]
fn is_major(&self) -> bool {
matches!(self, Self::Major)
}
}"#,
);
}
#[test]
fn test_generate_enum_is_already_implemented() {
check_assist_not_applicable(

View file

@ -1,5 +1,3 @@
use std::slice;
use ide_db::assists::GroupLabel;
use itertools::Itertools;
use stdx::to_lower_snake_case;
@ -8,7 +6,7 @@ use syntax::ast::{self, AstNode, HasName};
use crate::{
AssistContext, AssistId, Assists,
utils::{add_method_to_adt, find_struct_impl},
utils::{add_method_to_adt, find_struct_impl, is_selected},
};
// Assist: generate_enum_try_into_method
@ -128,29 +126,22 @@ fn generate_enum_projection_method(
} = props;
let variant = ctx.find_node_at_offset::<ast::Variant>()?;
let variant_name = variant.name()?;
let parent_enum = ast::Adt::Enum(variant.parent_enum());
let (pattern_suffix, field_type, bound_name) = match variant.kind() {
ast::StructKind::Record(record) => {
let (field,) = record.fields().collect_tuple()?;
let name = field.name()?.to_string();
let ty = field.ty()?;
let pattern_suffix = format!(" {{ {name} }}");
(pattern_suffix, ty, name)
}
ast::StructKind::Tuple(tuple) => {
let (field,) = tuple.fields().collect_tuple()?;
let ty = field.ty()?;
("(v)".to_owned(), ty, "v".to_owned())
}
ast::StructKind::Unit => return None,
};
let fn_name = format!("{fn_name_prefix}_{}", &to_lower_snake_case(&variant_name.text()));
let variants = variant
.parent_enum()
.variant_list()?
.variants()
.filter(|it| is_selected(it, ctx.selection_trimmed(), true))
.collect::<Vec<_>>();
let methods = variants
.iter()
.map(|variant| Method::new(variant, fn_name_prefix))
.collect::<Option<Vec<_>>>()?;
let fn_names = methods.iter().map(|it| it.fn_name.clone()).collect::<Vec<_>>();
stdx::never!(variants.is_empty());
// Return early if we've found an existing new fn
let impl_def = find_struct_impl(ctx, &parent_enum, slice::from_ref(&fn_name))?;
let impl_def = find_struct_impl(ctx, &parent_enum, &fn_names)?;
let target = variant.syntax().text_range();
acc.add_group(
@ -161,29 +152,66 @@ fn generate_enum_projection_method(
|builder| {
let vis = parent_enum.visibility().map_or(String::new(), |v| format!("{v} "));
let field_type_syntax = field_type.syntax();
let must_use = if ctx.config.assist_emit_must_use { "#[must_use]\n " } else { "" };
let must_use = if ctx.config.assist_emit_must_use {
"#[must_use]\n "
} else {
""
};
let method = format!(
" {must_use}{vis}fn {fn_name}({self_param}) -> {return_prefix}{field_type_syntax}{return_suffix} {{
let method = methods
.iter()
.map(|Method { pattern_suffix, field_type, bound_name, fn_name, variant_name }| {
format!(
" \
{must_use}{vis}fn {fn_name}({self_param}) -> {return_prefix}{field_type}{return_suffix} {{
if let Self::{variant_name}{pattern_suffix} = self {{
{happy_case}({bound_name})
}} else {{
{sad_case}
}}
}}"
);
)
})
.join("\n\n");
add_method_to_adt(builder, &parent_enum, impl_def, &method);
},
)
}
/// Everything needed to render one generated projection method
/// (`try_into_*` / `as_*` / `into_*`).
struct Method {
    pattern_suffix: String,
    field_type: ast::Type,
    bound_name: String,
    fn_name: String,
    variant_name: ast::Name,
}

impl Method {
    /// Builds the method description for a single enum variant.
    ///
    /// Only single-field record and tuple variants are supported; unit
    /// variants (and multi-field ones, via `collect_tuple`) yield `None`.
    fn new(variant: &ast::Variant, fn_name_prefix: &str) -> Option<Self> {
        let variant_name = variant.name()?;
        let fn_name = format!("{fn_name_prefix}_{}", to_lower_snake_case(&variant_name.text()));
        let (pattern_suffix, field_type, bound_name) = match variant.kind() {
            ast::StructKind::Record(record) => {
                let (field,) = record.fields().collect_tuple()?;
                let name = field.name()?.to_string();
                (format!(" {{ {name} }}"), field.ty()?, name)
            }
            ast::StructKind::Tuple(tuple) => {
                let (field,) = tuple.fields().collect_tuple()?;
                ("(v)".to_owned(), field.ty()?, "v".to_owned())
            }
            ast::StructKind::Unit => return None,
        };
        Some(Self { pattern_suffix, field_type, bound_name, fn_name, variant_name })
    }
}
#[cfg(test)]
mod tests {
use crate::tests::{check_assist, check_assist_not_applicable};
@ -216,6 +244,42 @@ impl Value {
);
}
#[test]
// Selecting several variants at once ($0..$0 spans Number and Text) should
// generate one `try_into_*` method per selected variant in a single impl.
fn test_generate_enum_multiple_try_into_tuple_variant() {
check_assist(
generate_enum_try_into_method,
r#"
enum Value {
Unit(()),
$0Number(i32),
Text(String)$0,
}"#,
r#"enum Value {
Unit(()),
Number(i32),
Text(String),
}
impl Value {
fn try_into_number(self) -> Result<i32, Self> {
if let Self::Number(v) = self {
Ok(v)
} else {
Err(self)
}
}
fn try_into_text(self) -> Result<String, Self> {
if let Self::Text(v) = self {
Ok(v)
} else {
Err(self)
}
}
}"#,
);
}
#[test]
fn test_generate_enum_try_into_already_implemented() {
check_assist_not_applicable(
@ -323,6 +387,42 @@ impl Value {
);
}
#[test]
// Selecting several variants at once should generate one `as_*` accessor per
// selected variant in a single impl.
fn test_generate_enum_as_multiple_tuple_variant() {
check_assist(
generate_enum_as_method,
r#"
enum Value {
Unit(()),
$0Number(i32),
Text(String)$0,
}"#,
r#"enum Value {
Unit(()),
Number(i32),
Text(String),
}
impl Value {
fn as_number(&self) -> Option<&i32> {
if let Self::Number(v) = self {
Some(v)
} else {
None
}
}
fn as_text(&self) -> Option<&String> {
if let Self::Text(v) = self {
Some(v)
} else {
None
}
}
}"#,
);
}
#[test]
fn test_generate_enum_as_record_variant() {
check_assist(

View file

@ -59,9 +59,14 @@ pub(crate) fn complete_cfg(acc: &mut Completions, ctx: &CompletionContext<'_>) {
.into_iter()
.map(|x| match x {
hir::CfgAtom::Flag(key) => (key.as_str(), "".into()),
hir::CfgAtom::KeyValue { key, .. } => {
(key.as_str(), SmolStr::from_iter([key.as_str(), " = $0"]))
}
hir::CfgAtom::KeyValue { key, .. } => (
key.as_str(),
if ctx.config.snippet_cap.is_some() {
SmolStr::from_iter([key.as_str(), " = $0"])
} else {
SmolStr::default()
},
),
})
.chain(CFG_CONDITION.iter().map(|&(k, snip)| (k, SmolStr::new_static(snip))))
.unique_by(|&(s, _)| s)

View file

@ -91,9 +91,9 @@ pub(crate) fn complete_dot(
// its return type, so we instead check for `<&Self as IntoIterator>::IntoIter`.
// Does <&receiver_ty as IntoIterator>::IntoIter` exist? Assume `iter` is valid
let iter = receiver_ty
.strip_references()
.add_reference(hir::Mutability::Shared)
.into_iterator_iter(ctx.db)
.autoderef(ctx.db)
.map(|ty| ty.strip_references().add_reference(hir::Mutability::Shared))
.find_map(|ty| ty.into_iterator_iter(ctx.db))
.map(|ty| (ty, SmolStr::new_static("iter()")));
// Does <receiver_ty as IntoIterator>::IntoIter` exist?
let into_iter = || {
@ -1466,6 +1466,40 @@ fn foo() {
me into_iter().nth() (as Iterator) fn(&mut self, usize) -> Option<<Self as Iterator>::Item>
"#]],
);
check_no_kw(
r#"
//- minicore: iterator, deref
struct Foo;
impl Foo { fn iter(&self) -> Iter { Iter } }
impl IntoIterator for &Foo {
type Item = ();
type IntoIter = Iter;
fn into_iter(self) -> Self::IntoIter { Iter }
}
struct Ref;
impl core::ops::Deref for Ref {
type Target = Foo;
fn deref(&self) -> &Self::Target { &Foo }
}
struct Iter;
impl Iterator for Iter {
type Item = ();
fn next(&mut self) -> Option<Self::Item> { None }
}
fn foo() {
Ref.$0
}
"#,
expect![[r#"
me deref() (use core::ops::Deref) fn(&self) -> &<Self as Deref>::Target
me into_iter() (as IntoIterator) fn(self) -> <Self as IntoIterator>::IntoIter
me iter() fn(&self) -> Iter
me iter().by_ref() (as Iterator) fn(&mut self) -> &mut Self
me iter().into_iter() (as IntoIterator) fn(self) -> <Self as IntoIterator>::IntoIter
me iter().next() (as Iterator) fn(&mut self) -> Option<<Self as Iterator>::Item>
me iter().nth() (as Iterator) fn(&mut self, usize) -> Option<<Self as Iterator>::Item>
"#]],
);
}
#[test]

View file

@ -101,6 +101,7 @@ pub(crate) fn complete_pattern(
hir::ModuleDef::Const(..) => refutable,
hir::ModuleDef::Module(..) => true,
hir::ModuleDef::Macro(mac) => mac.is_fn_like(ctx.db),
hir::ModuleDef::TypeAlias(_) => true,
_ => false,
},
hir::ScopeDef::ImplSelfType(impl_) => match impl_.self_ty(ctx.db).as_adt() {

View file

@ -657,7 +657,7 @@ fn expected_type_and_name<'db>(
cov_mark::hit!(expected_type_fn_param);
ActiveParameter::at_token(
sema,
token.clone(),
token.clone(),
).map(|ap| {
let name = ap.ident().map(NameOrNameRef::Name);
(Some(ap.ty), name)
@ -1635,7 +1635,7 @@ fn classify_name_ref<'db>(
&& let Some(t) = top.first_token()
&& let Some(prev) =
t.prev_token().and_then(|t| syntax::algo::skip_trivia_token(t, Direction::Prev))
&& ![T![;], T!['}'], T!['{']].contains(&prev.kind())
&& ![T![;], T!['}'], T!['{'], T![']']].contains(&prev.kind())
{
// This was inferred to be an item position path, but it seems
// to be part of some other broken node which leaked into an item

View file

@ -90,6 +90,20 @@ fn bar(x: u32) {}
"#,
expect![[r#"ty: u32, name: x"#]],
);
check_expected_type_and_name(
r#"
fn foo() { bar(, $0); }
fn bar(x: u32, y: i32) {}
"#,
expect![[r#"ty: i32, name: y"#]],
);
check_expected_type_and_name(
r#"
fn foo() { bar(, c$0); }
fn bar(x: u32, y: i32) {}
"#,
expect![[r#"ty: i32, name: y"#]],
);
}
#[test]

View file

@ -105,6 +105,40 @@ fn in_item_list_after_attr() {
)
}
#[test]
// Completing right after a crate-level inner attribute (`#![attr]`) should
// still offer the full set of item-position completions.
fn in_item_list_after_inner_attr() {
check_with_base_items(
r#"#![attr] $0"#,
expect![[r#"
ma makro!() macro_rules! makro
md module
kw async
kw const
kw crate::
kw enum
kw extern
kw fn
kw impl
kw impl for
kw mod
kw pub
kw pub(crate)
kw pub(super)
kw self::
kw static
kw struct
kw trait
kw type
kw union
kw unsafe
kw use
sn macro_rules
sn tfn (Test function)
sn tmod (Test module)
"#]],
)
}
#[test]
fn in_qualified_path() {
check_with_base_items(

View file

@ -821,6 +821,37 @@ fn f(x: EnumAlias<u8>) {
);
}
#[test]
// Matching on a value typed via a type alias should still complete the
// underlying enum's variant patterns, and the alias itself is offered too.
fn through_alias_it_self() {
check(
r#"
enum Enum<T> {
Unit,
Tuple(T),
}
type EnumAlias<T> = Enum<T>;
fn f(x: EnumAlias<u8>) {
match x {
$0 => (),
_ => (),
}
}
"#,
expect![[r#"
en Enum
ta EnumAlias
bn Enum::Tuple() Enum::Tuple($1)$0
bn Enum::Unit Enum::Unit$0
kw mut
kw ref
"#]],
);
}
#[test]
fn pat_no_unstable_item_on_stable() {
check(

View file

@ -5,7 +5,7 @@ use hir::{InFile, Semantics, Type};
use parser::T;
use span::TextSize;
use syntax::{
AstNode, NodeOrToken, SyntaxToken,
AstNode, NodeOrToken, SyntaxKind, SyntaxNode, SyntaxToken,
ast::{self, AstChildren, HasArgList, HasAttrs, HasName},
match_ast,
};
@ -102,8 +102,7 @@ pub fn callable_for_node<'db>(
arg_list
.syntax()
.children_with_tokens()
.filter_map(NodeOrToken::into_token)
.filter(|t| t.kind() == T![,])
.filter_map(into_comma)
.take_while(|t| t.text_range().start() <= offset)
.count()
});
@ -162,8 +161,7 @@ pub fn generic_def_for_node(
let active_param = generic_arg_list
.syntax()
.children_with_tokens()
.filter_map(NodeOrToken::into_token)
.filter(|t| t.kind() == T![,])
.filter_map(into_comma)
.take_while(|t| t.text_range().start() <= token.text_range().start())
.count();
@ -174,3 +172,12 @@ pub fn generic_def_for_node(
Some((def, active_param, first_arg_is_non_lifetime, variant))
}
fn into_comma(it: NodeOrToken<SyntaxNode, SyntaxToken>) -> Option<SyntaxToken> {
let token = match it {
NodeOrToken::Token(it) => it,
NodeOrToken::Node(node) if node.kind() == SyntaxKind::ERROR => node.first_token()?,
NodeOrToken::Node(_) => return None,
};
(token.kind() == T![,]).then_some(token)
}

View file

@ -356,7 +356,7 @@
loc: DeclarationLocation {
hir_file_id: MacroFile(
MacroCallId(
Id(3800),
Id(3c00),
),
),
ptr: SyntaxNodePtr {
@ -694,7 +694,7 @@
Macro {
id: MacroRulesId(
MacroRulesId(
3401,
3801,
),
),
},
@ -796,7 +796,7 @@
Macro {
id: MacroRulesId(
MacroRulesId(
3400,
3800,
),
),
},
@ -862,7 +862,7 @@
Macro {
id: MacroRulesId(
MacroRulesId(
3401,
3801,
),
),
},

View file

@ -40,6 +40,7 @@ mod implicit_static;
mod implied_dyn_trait;
mod lifetime;
mod param_name;
mod placeholders;
mod ra_fixture;
mod range_exclusive;
@ -291,6 +292,10 @@ fn hints(
implied_dyn_trait::hints(hints, famous_defs, config, Either::Right(dyn_));
Some(())
},
ast::Type::InferType(placeholder) => {
placeholders::type_hints(hints, famous_defs, config, display_target, placeholder);
Some(())
},
_ => Some(()),
},
ast::GenericParamList(it) => bounds::hints(hints, famous_defs, config, it),

View file

@ -0,0 +1,76 @@
//! Implementation of type placeholder inlay hints:
//! ```no_run
//! let a: Vec<_> = vec![4];
//! //^ = i32
//! ```
use hir::DisplayTarget;
use ide_db::famous_defs::FamousDefs;
use syntax::{
AstNode,
ast::{InferType, Type},
};
use crate::{InlayHint, InlayHintPosition, InlayHintsConfig, InlayKind, inlay_hints::label_of_ty};
/// Emits an inlay hint (`= <ty>`) after a `_` placeholder type, showing the
/// type that inference resolved the placeholder to.
///
/// Returns `None` (emitting nothing) when type hints are disabled or the
/// placeholder's type cannot be resolved/rendered.
pub(super) fn type_hints(
    acc: &mut Vec<InlayHint>,
    famous_defs @ FamousDefs(sema, _): &FamousDefs<'_, '_>,
    config: &InlayHintsConfig<'_>,
    display_target: DisplayTarget,
    placeholder: InferType,
) -> Option<()> {
    if !config.type_hints {
        return None;
    }
    // Take the range before `placeholder` is consumed below.
    let range = placeholder.syntax().text_range();
    let ty = sema.resolve_type(&Type::InferType(placeholder))?;
    let mut label = label_of_ty(famous_defs, config, &ty, display_target)?;
    label.prepend_str("= ");
    let hint = InlayHint {
        range,
        kind: InlayKind::Type,
        label,
        text_edit: None,
        position: InlayHintPosition::After,
        pad_left: true,
        pad_right: false,
        resolve_parent: None,
    };
    acc.push(hint);
    Some(())
}
#[cfg(test)]
mod tests {
use crate::{
InlayHintsConfig,
inlay_hints::tests::{DISABLED_CONFIG, check_with_config},
};
// Runs the fixture with only `type_hints` enabled so that the assertions
// exercise placeholder hints in isolation.
#[track_caller]
fn check_type_infer(#[rust_analyzer::rust_fixture] ra_fixture: &str) {
check_with_config(InlayHintsConfig { type_hints: true, ..DISABLED_CONFIG }, ra_fixture);
}
#[test]
// Each `_` in the ascription gets its own `= <ty>` hint, including the
// element type and the length-inferred array type.
fn inferred_types() {
check_type_infer(
r#"
struct S<T>(T);
fn foo() {
let t: (_, _, [_; _]) = (1_u32, S(2), [false] as _);
//^ = u32
//^ = S<i32>
//^ = bool
//^ = [bool; 1]
}
"#,
);
}
}

View file

@ -127,6 +127,7 @@ define_symbols! {
as_str,
asm,
assert,
attr,
attributes,
begin_panic,
bench,

View file

@ -96,12 +96,13 @@ pub fn load_workspace_into_db(
tracing::debug!(?load_config, "LoadCargoConfig");
let proc_macro_server = match &load_config.with_proc_macro_server {
ProcMacroServerChoice::Sysroot => ws.find_sysroot_proc_macro_srv().map(|it| {
it.and_then(|it| ProcMacroClient::spawn(&it, extra_env).map_err(Into::into)).map_err(
|e| ProcMacroLoadingError::ProcMacroSrvError(e.to_string().into_boxed_str()),
)
it.and_then(|it| {
ProcMacroClient::spawn(&it, extra_env, ws.toolchain.as_ref()).map_err(Into::into)
})
.map_err(|e| ProcMacroLoadingError::ProcMacroSrvError(e.to_string().into_boxed_str()))
}),
ProcMacroServerChoice::Explicit(path) => {
Some(ProcMacroClient::spawn(path, extra_env).map_err(|e| {
Some(ProcMacroClient::spawn(path, extra_env, ws.toolchain.as_ref()).map_err(|e| {
ProcMacroLoadingError::ProcMacroSrvError(e.to_string().into_boxed_str())
}))
}

View file

@ -18,6 +18,7 @@ rustc-hash.workspace = true
smallvec.workspace = true
arrayvec.workspace = true
ra-ap-rustc_lexer.workspace = true
bitflags.workspace = true
# local deps
parser.workspace = true

View file

@ -16,7 +16,7 @@ use syntax_bridge::{
use test_utils::{bench, bench_fixture, skip_slow_tests};
use crate::{
DeclarativeMacro,
DeclarativeMacro, MacroCallStyle,
parser::{MetaVarKind, Op, RepeatKind, Separator},
};
@ -52,7 +52,8 @@ fn benchmark_expand_macro_rules() {
invocations
.into_iter()
.map(|(id, tt)| {
let res = rules[&id].expand(&tt, |_| (), DUMMY, Edition::CURRENT);
let res =
rules[&id].expand(&tt, |_| (), MacroCallStyle::FnLike, DUMMY, Edition::CURRENT);
assert!(res.err.is_none());
res.value.0.0.len()
})
@ -123,7 +124,11 @@ fn invocation_fixtures(
}
let subtree = builder.build();
if it.expand(&subtree, |_| (), DUMMY, Edition::CURRENT).err.is_none() {
if it
.expand(&subtree, |_| (), MacroCallStyle::FnLike, DUMMY, Edition::CURRENT)
.err
.is_none()
{
res.push((name.clone(), subtree));
break;
}

View file

@ -9,17 +9,26 @@ use intern::Symbol;
use rustc_hash::FxHashMap;
use span::{Edition, Span};
use crate::{ExpandError, ExpandErrorKind, ExpandResult, MatchedArmIndex, parser::MetaVarKind};
use crate::{
ExpandError, ExpandErrorKind, ExpandResult, MacroCallStyle, MatchedArmIndex,
parser::MetaVarKind,
};
pub(crate) fn expand_rules(
rules: &[crate::Rule],
input: &tt::TopSubtree<Span>,
marker: impl Fn(&mut Span) + Copy,
call_style: MacroCallStyle,
call_site: Span,
def_site_edition: Edition,
) -> ExpandResult<(tt::TopSubtree<Span>, MatchedArmIndex)> {
let mut match_: Option<(matcher::Match<'_>, &crate::Rule, usize)> = None;
for (idx, rule) in rules.iter().enumerate() {
// Skip any rules that aren't relevant to the call style (fn-like/attr/derive).
if call_style != rule.style {
continue;
}
let new_match = matcher::match_(&rule.lhs, input, def_site_edition);
if new_match.err.is_none() {

View file

@ -14,6 +14,7 @@ extern crate ra_ap_rustc_lexer as rustc_lexer;
extern crate rustc_lexer;
mod expander;
mod macro_call_style;
mod parser;
#[cfg(test)]
@ -29,6 +30,7 @@ use tt::iter::TtIter;
use std::fmt;
use std::sync::Arc;
pub use crate::macro_call_style::{MacroCallStyle, MacroCallStyles};
use crate::parser::{MetaTemplate, MetaVarKind, Op};
pub use tt::{Delimiter, DelimiterKind, Punct};
@ -137,6 +139,8 @@ pub struct DeclarativeMacro {
#[derive(Clone, Debug, PartialEq, Eq)]
struct Rule {
/// Is this a normal fn-like rule, an `attr()` rule, or a `derive()` rule?
style: MacroCallStyle,
lhs: MetaTemplate,
rhs: MetaTemplate,
}
@ -195,13 +199,18 @@ impl DeclarativeMacro {
let mut err = None;
if let Some(args) = args {
// The presence of an argument list means that this macro uses the
// "simple" syntax, where the body is the RHS of a single rule.
cov_mark::hit!(parse_macro_def_simple);
let rule = (|| {
let lhs = MetaTemplate::parse_pattern(ctx_edition, args.iter())?;
let rhs = MetaTemplate::parse_template(ctx_edition, body.iter())?;
Ok(crate::Rule { lhs, rhs })
// In the "simple" syntax, there is apparently no way to specify
// that the single rule is an attribute or derive rule, so it
// must be a function-like rule.
Ok(crate::Rule { style: MacroCallStyle::FnLike, lhs, rhs })
})();
match rule {
@ -209,6 +218,8 @@ impl DeclarativeMacro {
Err(e) => err = Some(Box::new(e)),
}
} else {
// There was no top-level argument list, so this macro uses the
// list-of-rules syntax, similar to `macro_rules!`.
cov_mark::hit!(parse_macro_def_rules);
let mut src = body.iter();
while !src.is_empty() {
@ -249,14 +260,28 @@ impl DeclarativeMacro {
self.rules.len()
}
pub fn rule_styles(&self) -> MacroCallStyles {
if self.rules.is_empty() {
// No rules could be parsed, so fall back to assuming that this
// is intended to be a function-like macro.
MacroCallStyles::FN_LIKE
} else {
self.rules
.iter()
.map(|rule| MacroCallStyles::from(rule.style))
.fold(MacroCallStyles::empty(), |a, b| a | b)
}
}
pub fn expand(
&self,
tt: &tt::TopSubtree<Span>,
marker: impl Fn(&mut Span) + Copy,
call_style: MacroCallStyle,
call_site: Span,
def_site_edition: Edition,
) -> ExpandResult<(tt::TopSubtree<Span>, MatchedArmIndex)> {
expander::expand_rules(&self.rules, tt, marker, call_site, def_site_edition)
expander::expand_rules(&self.rules, tt, marker, call_style, call_site, def_site_edition)
}
}
@ -265,6 +290,9 @@ impl Rule {
edition: impl Copy + Fn(SyntaxContext) -> Edition,
src: &mut TtIter<'_, Span>,
) -> Result<Self, ParseError> {
// Parse an optional `attr()` or `derive()` prefix before the LHS pattern.
let style = parser::parse_rule_style(src)?;
let (_, lhs) =
src.expect_subtree().map_err(|()| ParseError::expected("expected subtree"))?;
src.expect_char('=').map_err(|()| ParseError::expected("expected `=`"))?;
@ -275,7 +303,7 @@ impl Rule {
let lhs = MetaTemplate::parse_pattern(edition, lhs)?;
let rhs = MetaTemplate::parse_template(edition, rhs)?;
Ok(crate::Rule { lhs, rhs })
Ok(crate::Rule { style, lhs, rhs })
}
}

View file

@ -0,0 +1,32 @@
//! Types representing the three basic "styles" of macro calls in Rust source:
//! - Function-like macros ("bang macros"), e.g. `foo!(...)`
//! - Attribute macros, e.g. `#[foo]`
//! - Derive macros, e.g. `#[derive(Foo)]`
/// One of the three basic "styles" in which a macro can be invoked.
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub enum MacroCallStyle {
    /// Function-like ("bang") invocation, e.g. `foo!(...)`.
    FnLike,
    /// Attribute invocation, e.g. `#[foo]`.
    Attr,
    /// Derive invocation, e.g. `#[derive(Foo)]`.
    Derive,
}
bitflags::bitflags! {
    /// A set of `MacroCallStyle` values, allowing macros to indicate that
    /// they support more than one style.
    #[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
    pub struct MacroCallStyles: u8 {
        /// See [`MacroCallStyle::FnLike`].
        const FN_LIKE = (1 << 0);
        /// See [`MacroCallStyle::Attr`].
        const ATTR = (1 << 1);
        /// See [`MacroCallStyle::Derive`].
        const DERIVE = (1 << 2);
    }
}
impl From<MacroCallStyle> for MacroCallStyles {
fn from(kind: MacroCallStyle) -> Self {
match kind {
MacroCallStyle::FnLike => Self::FN_LIKE,
MacroCallStyle::Attr => Self::ATTR,
MacroCallStyle::Derive => Self::DERIVE,
}
}
}

View file

@ -11,7 +11,34 @@ use tt::{
iter::{TtElement, TtIter},
};
use crate::ParseError;
use crate::{MacroCallStyle, ParseError};
/// Parses the optional style prefix of a macro 2.0 rule: a leading
/// `attr(...)` marks an attribute rule, `derive(...)` a derive rule, and no
/// prefix means an ordinary function-like rule.
///
/// On success, `src` has been advanced past the prefix (if any) and is
/// positioned at the rule's LHS pattern. Errors if `attr`/`derive` is not
/// followed by a parenthesized subtree.
pub(crate) fn parse_rule_style(src: &mut TtIter<'_, Span>) -> Result<MacroCallStyle, ParseError> {
    // Skip an optional `unsafe`. This is only actually allowed for `attr`
    // rules, but we'll let rustc worry about that.
    if let Some(TtElement::Leaf(tt::Leaf::Ident(ident))) = src.peek()
        && ident.sym == sym::unsafe_
    {
        src.next().expect("already peeked");
    }
    let kind = match src.peek() {
        Some(TtElement::Leaf(tt::Leaf::Ident(ident))) if ident.sym == sym::attr => {
            src.next().expect("already peeked");
            // FIXME: Add support for `attr(..)` rules with attribute arguments,
            // which would be inside these parens. For now the subtree is only
            // required to be present, then discarded.
            src.expect_subtree().map_err(|_| ParseError::expected("expected `()`"))?;
            MacroCallStyle::Attr
        }
        Some(TtElement::Leaf(tt::Leaf::Ident(ident))) if ident.sym == sym::derive => {
            src.next().expect("already peeked");
            // The parens after `derive` are required but carry no payload here.
            src.expect_subtree().map_err(|_| ParseError::expected("expected `()`"))?;
            MacroCallStyle::Derive
        }
        // Anything else (including a bare subtree) starts a fn-like rule.
        _ => MacroCallStyle::FnLike,
    };
    Ok(kind)
}
/// Consider
///

View file

@ -51,6 +51,7 @@ fn check_(
let res = mac.expand(
&arg_tt,
|_| (),
crate::MacroCallStyle::FnLike,
Span {
range: TextRange::up_to(TextSize::of(arg)),
anchor: call_anchor,

View file

@ -29,6 +29,8 @@ proc-macro-srv = {workspace = true, optional = true}
span = { path = "../span", version = "0.0.0", default-features = false}
intern.workspace = true
postcard.workspace = true
semver.workspace = true
[features]
sysroot-abi = ["proc-macro-srv", "proc-macro-srv/sysroot-abi"]

View file

@ -0,0 +1,12 @@
//! Protocol codec
use std::io;
use serde::de::DeserializeOwned;
use crate::framing::Framing;
/// Serialization half of a wire protocol: converts messages to and from the
/// framed buffer type supplied by the [`Framing`] super-trait.
pub trait Codec: Framing {
    /// Serializes `msg` into a fresh protocol buffer.
    fn encode<T: serde::Serialize>(msg: &T) -> io::Result<Self::Buf>;
    /// Deserializes a `T` out of `buf`.
    ///
    /// Takes `&mut` because some codecs decode the buffer in place
    /// (e.g. COBS de-stuffing in the postcard protocol).
    fn decode<T: DeserializeOwned>(buf: &mut Self::Buf) -> io::Result<T>;
}

View file

@ -0,0 +1,14 @@
//! Protocol framing
use std::io::{self, BufRead, Write};
/// Transport framing half of a wire protocol: moves whole message buffers
/// across a byte stream, one frame at a time.
pub trait Framing {
    /// Buffer a single framed message is staged in (e.g. `String` for
    /// newline-delimited JSON, `Vec<u8>` for binary frames).
    type Buf: Default;

    /// Reads one frame from `inp` into `buf`; `Ok(None)` signals that the
    /// stream has ended.
    fn read<'a, R: BufRead>(
        inp: &mut R,
        buf: &'a mut Self::Buf,
    ) -> io::Result<Option<&'a mut Self::Buf>>;

    /// Writes the framed message in `buf` to `out`, including any delimiter.
    fn write<W: Write>(out: &mut W, buf: &Self::Buf) -> io::Result<()>;
}

View file

@ -2,6 +2,7 @@
pub mod json;
pub mod msg;
pub mod postcard;
use std::{
io::{BufRead, Write},
@ -13,13 +14,15 @@ use span::Span;
use crate::{
ProcMacro, ProcMacroKind, ServerError,
codec::Codec,
legacy_protocol::{
json::{read_json, write_json},
json::JsonProtocol,
msg::{
ExpandMacro, ExpandMacroData, ExpnGlobals, FlatTree, Message, Request, Response,
ServerConfig, SpanDataIndexMap, deserialize_span_data_index_map,
flat::serialize_span_data_index_map,
},
postcard::PostcardProtocol,
},
process::ProcMacroServerProcess,
version,
@ -151,21 +154,25 @@ fn send_task(srv: &ProcMacroServerProcess, req: Request) -> Result<Response, Ser
return Err(server_error.clone());
}
srv.send_task(send_request, req)
if srv.use_postcard() {
srv.send_task(send_request::<PostcardProtocol>, req)
} else {
srv.send_task(send_request::<JsonProtocol>, req)
}
}
/// Sends a request to the server and reads the response.
fn send_request(
fn send_request<P: Codec>(
mut writer: &mut dyn Write,
mut reader: &mut dyn BufRead,
req: Request,
buf: &mut String,
buf: &mut P::Buf,
) -> Result<Option<Response>, ServerError> {
req.write(write_json, &mut writer).map_err(|err| ServerError {
req.write::<_, P>(&mut writer).map_err(|err| ServerError {
message: "failed to write request".into(),
io: Some(Arc::new(err)),
})?;
let res = Response::read(read_json, &mut reader, buf).map_err(|err| ServerError {
let res = Response::read::<_, P>(&mut reader, buf).map_err(|err| ServerError {
message: "failed to read response".into(),
io: Some(Arc::new(err)),
})?;

View file

@ -1,36 +1,58 @@
//! Protocol functions for json.
use std::io::{self, BufRead, Write};
/// Reads a JSON message from the input stream.
pub fn read_json<'a>(
inp: &mut impl BufRead,
buf: &'a mut String,
) -> io::Result<Option<&'a String>> {
loop {
buf.clear();
use serde::{Serialize, de::DeserializeOwned};
inp.read_line(buf)?;
buf.pop(); // Remove trailing '\n'
use crate::{codec::Codec, framing::Framing};
if buf.is_empty() {
return Ok(None);
pub struct JsonProtocol;
impl Framing for JsonProtocol {
type Buf = String;
fn read<'a, R: BufRead>(
inp: &mut R,
buf: &'a mut String,
) -> io::Result<Option<&'a mut String>> {
loop {
buf.clear();
inp.read_line(buf)?;
buf.pop(); // Remove trailing '\n'
if buf.is_empty() {
return Ok(None);
}
// Some ill behaved macro try to use stdout for debugging
// We ignore it here
if !buf.starts_with('{') {
tracing::error!("proc-macro tried to print : {}", buf);
continue;
}
return Ok(Some(buf));
}
}
// Some ill behaved macro try to use stdout for debugging
// We ignore it here
if !buf.starts_with('{') {
tracing::error!("proc-macro tried to print : {}", buf);
continue;
}
return Ok(Some(buf));
fn write<W: Write>(out: &mut W, buf: &String) -> io::Result<()> {
tracing::debug!("> {}", buf);
out.write_all(buf.as_bytes())?;
out.write_all(b"\n")?;
out.flush()
}
}
/// Writes a JSON message to the output stream.
pub fn write_json(out: &mut impl Write, msg: &str) -> io::Result<()> {
tracing::debug!("> {}", msg);
out.write_all(msg.as_bytes())?;
out.write_all(b"\n")?;
out.flush()
impl Codec for JsonProtocol {
    /// Serializes `msg` as a single-line JSON string.
    fn encode<T: Serialize>(msg: &T) -> io::Result<String> {
        Ok(serde_json::to_string(msg)?)
    }

    /// Deserializes a `T` from the JSON text in `buf`.
    fn decode<T: DeserializeOwned>(buf: &mut String) -> io::Result<T> {
        let mut deserializer = serde_json::Deserializer::from_str(buf);
        // Some proc macros generate very deep syntax trees, so serde's
        // default recursion limit has to be disabled here.
        deserializer.disable_recursion_limit();
        Ok(T::deserialize(&mut deserializer)?)
    }
}

View file

@ -8,7 +8,7 @@ use paths::Utf8PathBuf;
use serde::de::DeserializeOwned;
use serde_derive::{Deserialize, Serialize};
use crate::ProcMacroKind;
use crate::{ProcMacroKind, codec::Codec};
/// Represents requests sent from the client to the proc-macro-srv.
#[derive(Debug, Serialize, Deserialize)]
@ -149,39 +149,21 @@ impl ExpnGlobals {
}
pub trait Message: serde::Serialize + DeserializeOwned {
fn read<R: BufRead>(
from_proto: ProtocolRead<R>,
inp: &mut R,
buf: &mut String,
) -> io::Result<Option<Self>> {
Ok(match from_proto(inp, buf)? {
fn read<R: BufRead, C: Codec>(inp: &mut R, buf: &mut C::Buf) -> io::Result<Option<Self>> {
Ok(match C::read(inp, buf)? {
None => None,
Some(text) => {
let mut deserializer = serde_json::Deserializer::from_str(text);
// Note that some proc-macro generate very deep syntax tree
// We have to disable the current limit of serde here
deserializer.disable_recursion_limit();
Some(Self::deserialize(&mut deserializer)?)
}
Some(buf) => Some(C::decode(buf)?),
})
}
fn write<W: Write>(self, to_proto: ProtocolWrite<W>, out: &mut W) -> io::Result<()> {
let text = serde_json::to_string(&self)?;
to_proto(out, &text)
fn write<W: Write, C: Codec>(self, out: &mut W) -> io::Result<()> {
let value = C::encode(&self)?;
C::write(out, &value)
}
}
impl Message for Request {}
impl Message for Response {}
/// Type alias for a function that reads protocol messages from a buffered input stream.
#[allow(type_alias_bounds)]
type ProtocolRead<R: BufRead> =
for<'i, 'buf> fn(inp: &'i mut R, buf: &'buf mut String) -> io::Result<Option<&'buf String>>;
/// Type alias for a function that writes protocol messages to an output stream.
#[allow(type_alias_bounds)]
type ProtocolWrite<W: Write> = for<'o, 'msg> fn(out: &'o mut W, msg: &'msg str) -> io::Result<()>;
#[cfg(test)]
mod tests {
use intern::{Symbol, sym};

View file

@ -303,6 +303,7 @@ impl FlatTree {
pub fn to_tokenstream_unresolved<T: SpanTransformer<Table = ()>>(
self,
version: u32,
span_join: impl Fn(T::Span, T::Span) -> T::Span,
) -> proc_macro_srv::TokenStream<T::Span> {
Reader::<T> {
subtree: if version >= ENCODE_CLOSE_SPAN_VERSION {
@ -326,13 +327,14 @@ impl FlatTree {
span_data_table: &(),
version,
}
.read_tokenstream()
.read_tokenstream(span_join)
}
pub fn to_tokenstream_resolved(
self,
version: u32,
span_data_table: &SpanDataIndexMap,
span_join: impl Fn(Span, Span) -> Span,
) -> proc_macro_srv::TokenStream<Span> {
Reader::<Span> {
subtree: if version >= ENCODE_CLOSE_SPAN_VERSION {
@ -356,7 +358,7 @@ impl FlatTree {
span_data_table,
version,
}
.read_tokenstream()
.read_tokenstream(span_join)
}
}
@ -842,7 +844,10 @@ impl<T: SpanTransformer> Reader<'_, T> {
#[cfg(feature = "sysroot-abi")]
impl<T: SpanTransformer> Reader<'_, T> {
pub(crate) fn read_tokenstream(self) -> proc_macro_srv::TokenStream<T::Span> {
pub(crate) fn read_tokenstream(
self,
span_join: impl Fn(T::Span, T::Span) -> T::Span,
) -> proc_macro_srv::TokenStream<T::Span> {
let mut res: Vec<Option<proc_macro_srv::Group<T::Span>>> = vec![None; self.subtree.len()];
let read_span = |id| T::span_for_token_id(self.span_data_table, id);
for i in (0..self.subtree.len()).rev() {
@ -935,6 +940,8 @@ impl<T: SpanTransformer> Reader<'_, T> {
}
})
.collect::<Vec<_>>();
let open = read_span(repr.open);
let close = read_span(repr.close);
let g = proc_macro_srv::Group {
delimiter: match repr.kind {
tt::DelimiterKind::Parenthesis => proc_macro_srv::Delimiter::Parenthesis,
@ -944,10 +951,10 @@ impl<T: SpanTransformer> Reader<'_, T> {
},
stream: if stream.is_empty() { None } else { Some(TokenStream::new(stream)) },
span: proc_macro_srv::DelimSpan {
open: read_span(repr.open),
close: read_span(repr.close),
// FIXME
entire: read_span(repr.close),
open,
close,
// FIXME: The protocol does not yet encode entire spans ...
entire: span_join(open, close),
},
};
res[i] = Some(g);

View file

@ -0,0 +1,40 @@
//! Postcard encode and decode implementations.
use std::io::{self, BufRead, Write};
use serde::{Serialize, de::DeserializeOwned};
use crate::{codec::Codec, framing::Framing};
pub struct PostcardProtocol;
impl Framing for PostcardProtocol {
    // Binary frames; the encoder produces COBS data (see the `Codec` impl
    // below), so a `0` byte can act as the frame delimiter.
    type Buf = Vec<u8>;

    fn read<'a, R: BufRead>(
        inp: &mut R,
        buf: &'a mut Vec<u8>,
    ) -> io::Result<Option<&'a mut Vec<u8>>> {
        buf.clear();
        // Read up to and including the `0` frame terminator; zero bytes read
        // means the stream is at EOF.
        let n = inp.read_until(0, buf)?;
        if n == 0 {
            return Ok(None);
        }
        Ok(Some(buf))
    }

    fn write<W: Write>(out: &mut W, buf: &Vec<u8>) -> io::Result<()> {
        // No delimiter is appended here — the COBS-encoded buffer is
        // presumably already terminated by its `0` sentinel (TODO confirm
        // against `postcard::to_allocvec_cobs`).
        out.write_all(buf)?;
        out.flush()
    }
}
impl Codec for PostcardProtocol {
    /// Serializes `msg` with postcard into a COBS-framed byte vector.
    fn encode<T: Serialize>(msg: &T) -> io::Result<Vec<u8>> {
        postcard::to_allocvec_cobs(msg).map_err(|e| io::Error::new(io::ErrorKind::InvalidData, e))
    }

    /// Decodes a COBS-framed postcard message. Decoding mutates `buf` in
    /// place, which is why the trait takes `&mut Self::Buf`.
    fn decode<T: DeserializeOwned>(buf: &mut Self::Buf) -> io::Result<T> {
        postcard::from_bytes_cobs(buf).map_err(|e| io::Error::new(io::ErrorKind::InvalidData, e))
    }
}

View file

@ -12,13 +12,17 @@
)]
#![allow(internal_features)]
mod codec;
mod framing;
pub mod legacy_protocol;
mod process;
use paths::{AbsPath, AbsPathBuf};
use semver::Version;
use span::{ErasedFileAstId, FIXUP_ERASED_FILE_AST_ID_MARKER, Span};
use std::{fmt, io, sync::Arc, time::SystemTime};
pub use crate::codec::Codec;
use crate::process::ProcMacroServerProcess;
/// The versions of the server protocol
@ -122,8 +126,9 @@ impl ProcMacroClient {
env: impl IntoIterator<
Item = (impl AsRef<std::ffi::OsStr>, &'a Option<impl 'a + AsRef<std::ffi::OsStr>>),
> + Clone,
version: Option<&Version>,
) -> io::Result<ProcMacroClient> {
let process = ProcMacroServerProcess::run(process_path, env)?;
let process = ProcMacroServerProcess::run(process_path, env, version)?;
Ok(ProcMacroClient { process: Arc::new(process), path: process_path.to_owned() })
}

View file

@ -8,6 +8,7 @@ use std::{
};
use paths::AbsPath;
use semver::Version;
use stdx::JodChild;
use crate::{
@ -28,9 +29,10 @@ pub(crate) struct ProcMacroServerProcess {
exited: OnceLock<AssertUnwindSafe<ServerError>>,
}
#[derive(Debug)]
enum Protocol {
#[derive(Debug, Clone)]
pub(crate) enum Protocol {
LegacyJson { mode: SpanMode },
LegacyPostcard { mode: SpanMode },
}
/// Maintains the state of the proc-macro server process.
@ -48,50 +50,76 @@ impl ProcMacroServerProcess {
env: impl IntoIterator<
Item = (impl AsRef<std::ffi::OsStr>, &'a Option<impl 'a + AsRef<std::ffi::OsStr>>),
> + Clone,
version: Option<&Version>,
) -> io::Result<ProcMacroServerProcess> {
let create_srv = || {
let mut process = Process::run(process_path, env.clone())?;
let (stdin, stdout) = process.stdio().expect("couldn't access child stdio");
const VERSION: Version = Version::new(1, 93, 0);
// we do `>` for nightly as this started working in the middle of the 1.93 nightly release, so we dont want to break on half of the nightlies
let has_working_format_flag = version.map_or(false, |v| {
if v.pre.as_str() == "nightly" { *v > VERSION } else { *v >= VERSION }
});
io::Result::Ok(ProcMacroServerProcess {
state: Mutex::new(ProcessSrvState { process, stdin, stdout }),
version: 0,
protocol: Protocol::LegacyJson { mode: SpanMode::Id },
exited: OnceLock::new(),
})
let formats: &[_] = if has_working_format_flag {
&[
(Some("postcard-legacy"), Protocol::LegacyPostcard { mode: SpanMode::Id }),
(Some("json-legacy"), Protocol::LegacyJson { mode: SpanMode::Id }),
]
} else {
&[(None, Protocol::LegacyJson { mode: SpanMode::Id })]
};
let mut srv = create_srv()?;
tracing::info!("sending proc-macro server version check");
match srv.version_check() {
Ok(v) if v > version::CURRENT_API_VERSION => {
#[allow(clippy::disallowed_methods)]
let process_version = Command::new(process_path)
.arg("--version")
.output()
.map(|output| String::from_utf8_lossy(&output.stdout).trim().to_owned())
.unwrap_or_else(|_| "unknown version".to_owned());
Err(io::Error::other(format!(
"Your installed proc-macro server is too new for your rust-analyzer. API version: {}, server version: {process_version}. \
let mut err = None;
for &(format, ref protocol) in formats {
let create_srv = || {
let mut process = Process::run(process_path, env.clone(), format)?;
let (stdin, stdout) = process.stdio().expect("couldn't access child stdio");
io::Result::Ok(ProcMacroServerProcess {
state: Mutex::new(ProcessSrvState { process, stdin, stdout }),
version: 0,
protocol: protocol.clone(),
exited: OnceLock::new(),
})
};
let mut srv = create_srv()?;
tracing::info!("sending proc-macro server version check");
match srv.version_check() {
Ok(v) if v > version::CURRENT_API_VERSION => {
#[allow(clippy::disallowed_methods)]
let process_version = Command::new(process_path)
.arg("--version")
.output()
.map(|output| String::from_utf8_lossy(&output.stdout).trim().to_owned())
.unwrap_or_else(|_| "unknown version".to_owned());
err = Some(io::Error::other(format!(
"Your installed proc-macro server is too new for your rust-analyzer. API version: {}, server version: {process_version}. \
This will prevent proc-macro expansion from working. Please consider updating your rust-analyzer to ensure compatibility with your current toolchain.",
version::CURRENT_API_VERSION
)))
}
Ok(v) => {
tracing::info!("Proc-macro server version: {v}");
srv.version = v;
if srv.version >= version::RUST_ANALYZER_SPAN_SUPPORT
&& let Ok(mode) = srv.enable_rust_analyzer_spans()
{
srv.protocol = Protocol::LegacyJson { mode };
version::CURRENT_API_VERSION
)));
}
Ok(v) => {
tracing::info!("Proc-macro server version: {v}");
srv.version = v;
if srv.version >= version::RUST_ANALYZER_SPAN_SUPPORT
&& let Ok(new_mode) = srv.enable_rust_analyzer_spans()
{
match &mut srv.protocol {
Protocol::LegacyJson { mode } | Protocol::LegacyPostcard { mode } => {
*mode = new_mode
}
}
}
tracing::info!("Proc-macro server protocol: {:?}", srv.protocol);
return Ok(srv);
}
Err(e) => {
tracing::info!(%e, "proc-macro version check failed");
err = Some(io::Error::other(format!(
"proc-macro server version check failed: {e}"
)))
}
tracing::info!("Proc-macro server protocol: {:?}", srv.protocol);
Ok(srv)
}
Err(e) => {
tracing::info!(%e, "proc-macro version check failed");
Err(io::Error::other(format!("proc-macro server version check failed: {e}")))
}
}
Err(err.unwrap())
}
/// Returns the server error if the process has exited.
@ -99,6 +127,10 @@ impl ProcMacroServerProcess {
self.exited.get().map(|it| &it.0)
}
/// Whether the server was spawned speaking the binary postcard protocol
/// rather than the JSON one.
pub(crate) fn use_postcard(&self) -> bool {
    matches!(self.protocol, Protocol::LegacyPostcard { .. })
}
/// Retrieves the API version of the proc-macro server.
pub(crate) fn version(&self) -> u32 {
self.version
@ -108,6 +140,7 @@ impl ProcMacroServerProcess {
/// Whether the server exchanges rust-analyzer's native span representation
/// (as opposed to span ids).
pub(crate) fn rust_analyzer_spans(&self) -> bool {
    // Both wire formats carry the same `SpanMode`; match them with one
    // or-pattern (as done when the mode is set after the version check)
    // instead of duplicating the arm per protocol.
    match self.protocol {
        Protocol::LegacyJson { mode } | Protocol::LegacyPostcard { mode } => {
            mode == SpanMode::RustAnalyzer
        }
    }
}
@ -115,6 +148,7 @@ impl ProcMacroServerProcess {
/// Queries the running proc-macro server for its API version.
fn version_check(&self) -> Result<u32, ServerError> {
    // The version-check request is identical under both legacy wire
    // formats, so one or-pattern arm covers them without duplication.
    match self.protocol {
        Protocol::LegacyJson { .. } | Protocol::LegacyPostcard { .. } => {
            legacy_protocol::version_check(self)
        }
    }
}
@ -122,6 +156,7 @@ impl ProcMacroServerProcess {
/// Asks the server to switch to rust-analyzer span mode, returning the mode
/// it actually selected.
fn enable_rust_analyzer_spans(&self) -> Result<SpanMode, ServerError> {
    // Same request under both legacy wire formats; a single or-pattern arm
    // avoids the duplicated dispatch.
    match self.protocol {
        Protocol::LegacyJson { .. } | Protocol::LegacyPostcard { .. } => {
            legacy_protocol::enable_rust_analyzer_spans(self)
        }
    }
}
@ -132,21 +167,25 @@ impl ProcMacroServerProcess {
) -> Result<Result<Vec<(String, ProcMacroKind)>, String>, ServerError> {
match self.protocol {
Protocol::LegacyJson { .. } => legacy_protocol::find_proc_macros(self, dylib_path),
Protocol::LegacyPostcard { .. } => legacy_protocol::find_proc_macros(self, dylib_path),
}
}
pub(crate) fn send_task<Request, Response>(
pub(crate) fn send_task<Request, Response, Buf>(
&self,
serialize_req: impl FnOnce(
&mut dyn Write,
&mut dyn BufRead,
Request,
&mut String,
&mut Buf,
) -> Result<Option<Response>, ServerError>,
req: Request,
) -> Result<Response, ServerError> {
) -> Result<Response, ServerError>
where
Buf: Default,
{
let state = &mut *self.state.lock().unwrap();
let mut buf = String::new();
let mut buf = Buf::default();
serialize_req(&mut state.stdin, &mut state.stdout, req, &mut buf)
.and_then(|res| {
res.ok_or_else(|| {
@ -203,8 +242,9 @@ impl Process {
env: impl IntoIterator<
Item = (impl AsRef<std::ffi::OsStr>, &'a Option<impl 'a + AsRef<std::ffi::OsStr>>),
>,
format: Option<&str>,
) -> io::Result<Process> {
let child = JodChild(mk_child(path, env)?);
let child = JodChild(mk_child(path, env, format)?);
Ok(Process { child })
}
@ -224,6 +264,7 @@ fn mk_child<'a>(
extra_env: impl IntoIterator<
Item = (impl AsRef<std::ffi::OsStr>, &'a Option<impl 'a + AsRef<std::ffi::OsStr>>),
>,
format: Option<&str>,
) -> io::Result<Child> {
#[allow(clippy::disallowed_methods)]
let mut cmd = Command::new(path);
@ -233,6 +274,10 @@ fn mk_child<'a>(
(key, None) => cmd.env_remove(key),
};
}
if let Some(format) = format {
cmd.arg("--format");
cmd.arg(format);
}
cmd.env("RUST_ANALYZER_INTERNALS_DO_NOT_USE", "this is unstable")
.stdin(Stdio::piped())
.stdout(Stdio::piped())

View file

@ -14,14 +14,13 @@ publish = false
proc-macro-srv.workspace = true
proc-macro-api.workspace = true
tt.workspace = true
postcard.workspace = true
clap = {version = "4.5.42", default-features = false, features = ["std"]}
postcard = { version = "1.1.3", optional = true }
[features]
default = ["postcard"]
default = []
sysroot-abi = ["proc-macro-srv/sysroot-abi", "proc-macro-api/sysroot-abi"]
in-rust-tree = ["proc-macro-srv/in-rust-tree", "sysroot-abi"]
postcard = ["dep:postcard"]
[[bin]]

View file

@ -31,7 +31,7 @@ fn main() -> std::io::Result<()> {
clap::Arg::new("format")
.long("format")
.action(clap::ArgAction::Set)
.default_value("json")
.default_value("json-legacy")
.value_parser(clap::builder::EnumValueParser::<ProtocolFormat>::new()),
clap::Arg::new("version")
.long("version")
@ -50,28 +50,27 @@ fn main() -> std::io::Result<()> {
#[derive(Copy, Clone)]
enum ProtocolFormat {
Json,
#[cfg(feature = "postcard")]
Postcard,
JsonLegacy,
PostcardLegacy,
}
impl ValueEnum for ProtocolFormat {
fn value_variants<'a>() -> &'a [Self] {
&[ProtocolFormat::Json]
&[ProtocolFormat::JsonLegacy, ProtocolFormat::PostcardLegacy]
}
fn to_possible_value(&self) -> Option<clap::builder::PossibleValue> {
match self {
ProtocolFormat::Json => Some(clap::builder::PossibleValue::new("json")),
#[cfg(feature = "postcard")]
ProtocolFormat::Postcard => Some(clap::builder::PossibleValue::new("postcard")),
ProtocolFormat::JsonLegacy => Some(clap::builder::PossibleValue::new("json-legacy")),
ProtocolFormat::PostcardLegacy => {
Some(clap::builder::PossibleValue::new("postcard-legacy"))
}
}
}
fn from_str(input: &str, _ignore_case: bool) -> Result<Self, String> {
match input {
"json" => Ok(ProtocolFormat::Json),
#[cfg(feature = "postcard")]
"postcard" => Ok(ProtocolFormat::Postcard),
"json-legacy" => Ok(ProtocolFormat::JsonLegacy),
"postcard-legacy" => Ok(ProtocolFormat::PostcardLegacy),
_ => Err(format!("unknown protocol format: {input}")),
}
}

View file

@ -2,19 +2,20 @@
use std::io;
use proc_macro_api::{
Codec,
legacy_protocol::{
json::{read_json, write_json},
json::JsonProtocol,
msg::{
self, ExpandMacroData, ExpnGlobals, Message, SpanMode, SpanTransformer,
deserialize_span_data_index_map, serialize_span_data_index_map,
},
postcard::PostcardProtocol,
},
version::CURRENT_API_VERSION,
};
use proc_macro_srv::{EnvSnapshot, SpanId};
use crate::ProtocolFormat;
struct SpanTrans;
impl SpanTransformer for SpanTrans {
@ -36,13 +37,12 @@ impl SpanTransformer for SpanTrans {
pub(crate) fn run(format: ProtocolFormat) -> io::Result<()> {
match format {
ProtocolFormat::Json => run_json(),
#[cfg(feature = "postcard")]
ProtocolFormat::Postcard => unimplemented!(),
ProtocolFormat::JsonLegacy => run_::<JsonProtocol>(),
ProtocolFormat::PostcardLegacy => run_::<PostcardProtocol>(),
}
}
fn run_json() -> io::Result<()> {
fn run_<C: Codec>() -> io::Result<()> {
fn macro_kind_to_api(kind: proc_macro_srv::ProcMacroKind) -> proc_macro_api::ProcMacroKind {
match kind {
proc_macro_srv::ProcMacroKind::CustomDerive => {
@ -53,9 +53,9 @@ fn run_json() -> io::Result<()> {
}
}
let mut buf = String::new();
let mut read_request = || msg::Request::read(read_json, &mut io::stdin().lock(), &mut buf);
let write_response = |msg: msg::Response| msg.write(write_json, &mut io::stdout().lock());
let mut buf = C::Buf::default();
let mut read_request = || msg::Request::read::<_, C>(&mut io::stdin().lock(), &mut buf);
let write_response = |msg: msg::Response| msg.write::<_, C>(&mut io::stdout().lock());
let env = EnvSnapshot::default();
let srv = proc_macro_srv::ProcMacroSrv::new(&env);
@ -90,10 +90,10 @@ fn run_json() -> io::Result<()> {
let call_site = SpanId(call_site as u32);
let mixed_site = SpanId(mixed_site as u32);
let macro_body =
macro_body.to_tokenstream_unresolved::<SpanTrans>(CURRENT_API_VERSION);
let macro_body = macro_body
.to_tokenstream_unresolved::<SpanTrans>(CURRENT_API_VERSION, |_, b| b);
let attributes = attributes.map(|it| {
it.to_tokenstream_unresolved::<SpanTrans>(CURRENT_API_VERSION)
it.to_tokenstream_unresolved::<SpanTrans>(CURRENT_API_VERSION, |_, b| b)
});
srv.expand(
@ -124,10 +124,17 @@ fn run_json() -> io::Result<()> {
let call_site = span_data_table[call_site];
let mixed_site = span_data_table[mixed_site];
let macro_body = macro_body
.to_tokenstream_resolved(CURRENT_API_VERSION, &span_data_table);
let macro_body = macro_body.to_tokenstream_resolved(
CURRENT_API_VERSION,
&span_data_table,
|a, b| srv.join_spans(a, b).unwrap_or(b),
);
let attributes = attributes.map(|it| {
it.to_tokenstream_resolved(CURRENT_API_VERSION, &span_data_table)
it.to_tokenstream_resolved(
CURRENT_API_VERSION,
&span_data_table,
|a, b| srv.join_spans(a, b).unwrap_or(b),
)
});
srv.expand(
lib,

View file

@ -94,6 +94,11 @@ pub fn attr_error(args: TokenStream, item: TokenStream) -> TokenStream {
format!("compile_error!(\"#[attr_error({})] {}\");", args, item).parse().unwrap()
}
/// Test-fixture derive macro that re-emits its input item unchanged and
/// registers a `helper` attribute, so expansion tests can check that helper
/// attributes survive a round trip.
#[proc_macro_derive(DeriveReemit, attributes(helper))]
pub fn derive_reemit(item: TokenStream) -> TokenStream {
    item
}
#[proc_macro_derive(DeriveEmpty)]
pub fn derive_empty(_item: TokenStream) -> TokenStream {
TokenStream::default()

View file

@ -81,6 +81,35 @@ impl<'env> ProcMacroSrv<'env> {
temp_dir: TempDir::with_prefix("proc-macro-srv").unwrap(),
}
}
/// Joins two spans into one covering both, or picks the "more real" of the
/// two when a direct merge is not possible.
///
/// Returns `None` when the spans have different anchors, since joining
/// those would require resolving `AstId`s on the client side.
pub fn join_spans(&self, first: Span, second: Span) -> Option<Span> {
    // Fixup spans are meaningful to the fixup machinery and must not have
    // their ranges rewritten; prefer the other (non-fixup) span instead.
    let is_fixup = |s: &Span| s.anchor.ast_id == span::FIXUP_ERASED_FILE_AST_ID_MARKER;
    if is_fixup(&first) {
        return Some(second);
    }
    if is_fixup(&second) {
        return Some(first);
    }
    // FIXME: Once we can talk back to the client, implement a "long join"
    // request for anchors that differ in [AstId]s as joining those spans
    // requires resolving the AstIds.
    if first.anchor != second.anchor {
        return None;
    }
    // Differing hygiene contexts cannot truly be merged; prefer whichever
    // span is not at the root context.
    if first.ctx != second.ctx {
        if first.ctx.is_root() {
            return Some(second);
        }
        if second.ctx.is_root() {
            return Some(first);
        }
    }
    Some(Span {
        range: first.range.cover(second.range),
        anchor: second.anchor,
        ctx: second.ctx,
    })
}
}
const EXPANDER_STACK_SIZE: usize = 8 * 1024 * 1024;

View file

@ -52,6 +52,165 @@ fn test_derive_empty() {
);
}
#[test]
fn test_derive_reemit_helpers() {
assert_expand(
"DeriveReemit",
r#"
#[helper(build_fn(private, name = "partial_build"))]
pub struct Foo {
/// The domain where this federated instance is running
#[helper(setter(into))]
pub(crate) domain: String,
}
"#,
expect![[r#"
PUNCT 1 # [joint]
GROUP [] 1 1 1
IDENT 1 helper
GROUP () 1 1 1
IDENT 1 build_fn
GROUP () 1 1 1
IDENT 1 private
PUNCT 1 , [alone]
IDENT 1 name
PUNCT 1 = [alone]
LITER 1 Str partial_build
IDENT 1 pub
IDENT 1 struct
IDENT 1 Foo
GROUP {} 1 1 1
PUNCT 1 # [alone]
GROUP [] 1 1 1
IDENT 1 doc
PUNCT 1 = [alone]
LITER 1 Str / The domain where this federated instance is running
PUNCT 1 # [joint]
GROUP [] 1 1 1
IDENT 1 helper
GROUP () 1 1 1
IDENT 1 setter
GROUP () 1 1 1
IDENT 1 into
IDENT 1 pub
GROUP () 1 1 1
IDENT 1 crate
IDENT 1 domain
PUNCT 1 : [alone]
IDENT 1 String
PUNCT 1 , [alone]
PUNCT 1 # [joint]
GROUP [] 1 1 1
IDENT 1 helper
GROUP () 1 1 1
IDENT 1 build_fn
GROUP () 1 1 1
IDENT 1 private
PUNCT 1 , [alone]
IDENT 1 name
PUNCT 1 = [alone]
LITER 1 Str partial_build
IDENT 1 pub
IDENT 1 struct
IDENT 1 Foo
GROUP {} 1 1 1
PUNCT 1 # [alone]
GROUP [] 1 1 1
IDENT 1 doc
PUNCT 1 = [alone]
LITER 1 Str / The domain where this federated instance is running
PUNCT 1 # [joint]
GROUP [] 1 1 1
IDENT 1 helper
GROUP () 1 1 1
IDENT 1 setter
GROUP () 1 1 1
IDENT 1 into
IDENT 1 pub
GROUP () 1 1 1
IDENT 1 crate
IDENT 1 domain
PUNCT 1 : [alone]
IDENT 1 String
PUNCT 1 , [alone]
"#]],
expect![[r#"
PUNCT 42:Root[0000, 0]@1..2#ROOT2024 # [joint]
GROUP [] 42:Root[0000, 0]@2..3#ROOT2024 42:Root[0000, 0]@52..53#ROOT2024 42:Root[0000, 0]@2..53#ROOT2024
IDENT 42:Root[0000, 0]@3..9#ROOT2024 helper
GROUP () 42:Root[0000, 0]@9..10#ROOT2024 42:Root[0000, 0]@51..52#ROOT2024 42:Root[0000, 0]@9..52#ROOT2024
IDENT 42:Root[0000, 0]@10..18#ROOT2024 build_fn
GROUP () 42:Root[0000, 0]@18..19#ROOT2024 42:Root[0000, 0]@50..51#ROOT2024 42:Root[0000, 0]@18..51#ROOT2024
IDENT 42:Root[0000, 0]@19..26#ROOT2024 private
PUNCT 42:Root[0000, 0]@26..27#ROOT2024 , [alone]
IDENT 42:Root[0000, 0]@28..32#ROOT2024 name
PUNCT 42:Root[0000, 0]@33..34#ROOT2024 = [alone]
LITER 42:Root[0000, 0]@35..50#ROOT2024 Str partial_build
IDENT 42:Root[0000, 0]@54..57#ROOT2024 pub
IDENT 42:Root[0000, 0]@58..64#ROOT2024 struct
IDENT 42:Root[0000, 0]@65..68#ROOT2024 Foo
GROUP {} 42:Root[0000, 0]@69..70#ROOT2024 42:Root[0000, 0]@190..191#ROOT2024 42:Root[0000, 0]@69..191#ROOT2024
PUNCT 42:Root[0000, 0]@0..0#ROOT2024 # [alone]
GROUP [] 42:Root[0000, 0]@0..0#ROOT2024 42:Root[0000, 0]@0..0#ROOT2024 42:Root[0000, 0]@0..0#ROOT2024
IDENT 42:Root[0000, 0]@0..0#ROOT2024 doc
PUNCT 42:Root[0000, 0]@0..0#ROOT2024 = [alone]
LITER 42:Root[0000, 0]@75..130#ROOT2024 Str / The domain where this federated instance is running
PUNCT 42:Root[0000, 0]@135..136#ROOT2024 # [joint]
GROUP [] 42:Root[0000, 0]@136..137#ROOT2024 42:Root[0000, 0]@157..158#ROOT2024 42:Root[0000, 0]@136..158#ROOT2024
IDENT 42:Root[0000, 0]@137..143#ROOT2024 helper
GROUP () 42:Root[0000, 0]@143..144#ROOT2024 42:Root[0000, 0]@156..157#ROOT2024 42:Root[0000, 0]@143..157#ROOT2024
IDENT 42:Root[0000, 0]@144..150#ROOT2024 setter
GROUP () 42:Root[0000, 0]@150..151#ROOT2024 42:Root[0000, 0]@155..156#ROOT2024 42:Root[0000, 0]@150..156#ROOT2024
IDENT 42:Root[0000, 0]@151..155#ROOT2024 into
IDENT 42:Root[0000, 0]@163..166#ROOT2024 pub
GROUP () 42:Root[0000, 0]@166..167#ROOT2024 42:Root[0000, 0]@172..173#ROOT2024 42:Root[0000, 0]@166..173#ROOT2024
IDENT 42:Root[0000, 0]@167..172#ROOT2024 crate
IDENT 42:Root[0000, 0]@174..180#ROOT2024 domain
PUNCT 42:Root[0000, 0]@180..181#ROOT2024 : [alone]
IDENT 42:Root[0000, 0]@182..188#ROOT2024 String
PUNCT 42:Root[0000, 0]@188..189#ROOT2024 , [alone]
PUNCT 42:Root[0000, 0]@1..2#ROOT2024 # [joint]
GROUP [] 42:Root[0000, 0]@2..3#ROOT2024 42:Root[0000, 0]@52..53#ROOT2024 42:Root[0000, 0]@2..53#ROOT2024
IDENT 42:Root[0000, 0]@3..9#ROOT2024 helper
GROUP () 42:Root[0000, 0]@9..10#ROOT2024 42:Root[0000, 0]@51..52#ROOT2024 42:Root[0000, 0]@9..52#ROOT2024
IDENT 42:Root[0000, 0]@10..18#ROOT2024 build_fn
GROUP () 42:Root[0000, 0]@18..19#ROOT2024 42:Root[0000, 0]@50..51#ROOT2024 42:Root[0000, 0]@18..51#ROOT2024
IDENT 42:Root[0000, 0]@19..26#ROOT2024 private
PUNCT 42:Root[0000, 0]@26..27#ROOT2024 , [alone]
IDENT 42:Root[0000, 0]@28..32#ROOT2024 name
PUNCT 42:Root[0000, 0]@33..34#ROOT2024 = [alone]
LITER 42:Root[0000, 0]@35..50#ROOT2024 Str partial_build
IDENT 42:Root[0000, 0]@54..57#ROOT2024 pub
IDENT 42:Root[0000, 0]@58..64#ROOT2024 struct
IDENT 42:Root[0000, 0]@65..68#ROOT2024 Foo
GROUP {} 42:Root[0000, 0]@69..70#ROOT2024 42:Root[0000, 0]@190..191#ROOT2024 42:Root[0000, 0]@69..191#ROOT2024
PUNCT 42:Root[0000, 0]@0..0#ROOT2024 # [alone]
GROUP [] 42:Root[0000, 0]@0..0#ROOT2024 42:Root[0000, 0]@0..0#ROOT2024 42:Root[0000, 0]@0..0#ROOT2024
IDENT 42:Root[0000, 0]@0..0#ROOT2024 doc
PUNCT 42:Root[0000, 0]@0..0#ROOT2024 = [alone]
LITER 42:Root[0000, 0]@75..130#ROOT2024 Str / The domain where this federated instance is running
PUNCT 42:Root[0000, 0]@135..136#ROOT2024 # [joint]
GROUP [] 42:Root[0000, 0]@136..137#ROOT2024 42:Root[0000, 0]@157..158#ROOT2024 42:Root[0000, 0]@136..158#ROOT2024
IDENT 42:Root[0000, 0]@137..143#ROOT2024 helper
GROUP () 42:Root[0000, 0]@143..144#ROOT2024 42:Root[0000, 0]@156..157#ROOT2024 42:Root[0000, 0]@143..157#ROOT2024
IDENT 42:Root[0000, 0]@144..150#ROOT2024 setter
GROUP () 42:Root[0000, 0]@150..151#ROOT2024 42:Root[0000, 0]@155..156#ROOT2024 42:Root[0000, 0]@150..156#ROOT2024
IDENT 42:Root[0000, 0]@151..155#ROOT2024 into
IDENT 42:Root[0000, 0]@163..166#ROOT2024 pub
GROUP () 42:Root[0000, 0]@166..167#ROOT2024 42:Root[0000, 0]@172..173#ROOT2024 42:Root[0000, 0]@166..173#ROOT2024
IDENT 42:Root[0000, 0]@167..172#ROOT2024 crate
IDENT 42:Root[0000, 0]@174..180#ROOT2024 domain
PUNCT 42:Root[0000, 0]@180..181#ROOT2024 : [alone]
IDENT 42:Root[0000, 0]@182..188#ROOT2024 String
PUNCT 42:Root[0000, 0]@188..189#ROOT2024 , [alone]
"#]],
);
}
#[test]
fn test_derive_error() {
assert_expand(
@ -69,7 +228,7 @@ fn test_derive_error() {
IDENT 1 compile_error
PUNCT 1 ! [joint]
GROUP () 1 1 1
LITER 1 Str #[derive(DeriveError)] struct S {field 58 u32 }
LITER 1 Str #[derive(DeriveError)] struct S {field : u32}
PUNCT 1 ; [alone]
"#]],
expect![[r#"
@ -83,9 +242,9 @@ fn test_derive_error() {
IDENT 42:Root[0000, 0]@0..13#ROOT2024 compile_error
PUNCT 42:Root[0000, 0]@13..14#ROOT2024 ! [joint]
GROUP () 42:Root[0000, 0]@14..15#ROOT2024 42:Root[0000, 0]@64..65#ROOT2024 42:Root[0000, 0]@14..65#ROOT2024
LITER 42:Root[0000, 0]@15..64#ROOT2024 Str #[derive(DeriveError)] struct S {field 58 u32 }
PUNCT 42:Root[0000, 0]@65..66#ROOT2024 ; [alone]
GROUP () 42:Root[0000, 0]@14..15#ROOT2024 42:Root[0000, 0]@62..63#ROOT2024 42:Root[0000, 0]@14..63#ROOT2024
LITER 42:Root[0000, 0]@15..62#ROOT2024 Str #[derive(DeriveError)] struct S {field : u32}
PUNCT 42:Root[0000, 0]@63..64#ROOT2024 ; [alone]
"#]],
);
}
@ -472,7 +631,7 @@ fn test_attr_macro() {
IDENT 1 compile_error
PUNCT 1 ! [joint]
GROUP () 1 1 1
LITER 1 Str #[attr_error(some arguments )] mod m {}
LITER 1 Str #[attr_error(some arguments)] mod m {}
PUNCT 1 ; [alone]
"#]],
expect![[r#"
@ -487,9 +646,9 @@ fn test_attr_macro() {
IDENT 42:Root[0000, 0]@0..13#ROOT2024 compile_error
PUNCT 42:Root[0000, 0]@13..14#ROOT2024 ! [joint]
GROUP () 42:Root[0000, 0]@14..15#ROOT2024 42:Root[0000, 0]@56..57#ROOT2024 42:Root[0000, 0]@14..57#ROOT2024
LITER 42:Root[0000, 0]@15..56#ROOT2024 Str #[attr_error(some arguments )] mod m {}
PUNCT 42:Root[0000, 0]@57..58#ROOT2024 ; [alone]
GROUP () 42:Root[0000, 0]@14..15#ROOT2024 42:Root[0000, 0]@55..56#ROOT2024 42:Root[0000, 0]@14..56#ROOT2024
LITER 42:Root[0000, 0]@15..55#ROOT2024 Str #[attr_error(some arguments)] mod m {}
PUNCT 42:Root[0000, 0]@56..57#ROOT2024 ; [alone]
"#]],
);
}
@ -535,6 +694,7 @@ fn list_test_macros() {
attr_noop [Attr]
attr_panic [Attr]
attr_error [Attr]
DeriveReemit [CustomDerive]
DeriveEmpty [CustomDerive]
DerivePanic [CustomDerive]
DeriveError [CustomDerive]"#]]

View file

@ -1,7 +1,7 @@
//! The proc-macro server token stream implementation.
use core::fmt;
use std::sync::Arc;
use std::{mem, sync::Arc};
use intern::Symbol;
use proc_macro::Delimiter;
@ -431,14 +431,22 @@ impl<S> TokenStream<S> {
impl<S> fmt::Display for TokenStream<S> {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
let mut emit_whitespace = false;
for tt in self.0.iter() {
display_token_tree(tt, f)?;
display_token_tree(tt, &mut emit_whitespace, f)?;
}
Ok(())
}
}
fn display_token_tree<S>(tt: &TokenTree<S>, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
fn display_token_tree<S>(
tt: &TokenTree<S>,
emit_whitespace: &mut bool,
f: &mut std::fmt::Formatter<'_>,
) -> std::fmt::Result {
if mem::take(emit_whitespace) {
write!(f, " ")?;
}
match tt {
TokenTree::Group(Group { delimiter, stream, span: _ }) => {
write!(
@ -466,13 +474,15 @@ fn display_token_tree<S>(tt: &TokenTree<S>, f: &mut std::fmt::Formatter<'_>) ->
)?;
}
TokenTree::Punct(Punct { ch, joint, span: _ }) => {
write!(f, "{ch}{}", if *joint { "" } else { " " })?
*emit_whitespace = !*joint;
write!(f, "{}", *ch as char)?;
}
TokenTree::Ident(Ident { sym, is_raw, span: _ }) => {
if *is_raw {
write!(f, "r#")?;
}
write!(f, "{sym} ")?;
write!(f, "{sym}")?;
*emit_whitespace = true;
}
TokenTree::Literal(lit) => {
display_fmt_literal(lit, f)?;
@ -485,9 +495,7 @@ fn display_token_tree<S>(tt: &TokenTree<S>, f: &mut std::fmt::Formatter<'_>) ->
| LitKind::CStrRaw(_) => true,
_ => false,
};
if !joint {
write!(f, " ")?;
}
*emit_whitespace = !joint;
}
}
Ok(())
@ -737,9 +745,10 @@ mod tests {
use super::*;
#[test]
fn roundtrip() {
let token_stream = TokenStream::from_str("struct T {\"string\"}", ()).unwrap();
token_stream.to_string();
assert_eq!(token_stream.to_string(), "struct T {\"string\"}");
fn ts_to_string() {
let token_stream =
TokenStream::from_str("{} () [] <> ;/., \"gfhdgfuiofghd\" 0f32 r#\"dff\"# 'r#lt", ())
.unwrap();
assert_eq!(token_stream.to_string(), "{}()[]<> ;/., \"gfhdgfuiofghd\"0f32 r#\"dff\"#'r#lt");
}
}

View file

@ -700,7 +700,7 @@ impl GlobalState {
};
info!("Using proc-macro server at {path}");
Some(ProcMacroClient::spawn(&path, &env).map_err(|err| {
Some(ProcMacroClient::spawn(&path, &env, ws.toolchain.as_ref()).map_err(|err| {
tracing::error!(
"Failed to run proc-macro server from path {path}, error: {err:?}",
);

View file

@ -1 +1 @@
6159a44067ebce42b38f062cc7df267a1348e092
1be6b13be73dc12e98e51b403add4c41a0b77759

View file

@ -134,8 +134,19 @@ fn dist_server(
};
let mut cmd = build_command(sh, command, &target_name, features, dev_rel);
let mut rustflags = Vec::new();
if let Some(profile) = pgo_profile {
cmd = cmd.env("RUSTFLAGS", format!("-Cprofile-use={}", profile.to_str().unwrap()));
rustflags.push(format!("-Cprofile-use={}", profile.to_str().unwrap()));
}
if target_name.ends_with("-windows-msvc") {
// https://github.com/rust-lang/rust-analyzer/issues/20970
rustflags.push("-Ctarget-feature=+crt-static".to_owned());
}
if !rustflags.is_empty() {
cmd = cmd.env("RUSTFLAGS", rustflags.join(" "));
}
cmd.run().context("cannot build Rust Analyzer")?;

View file

@ -26,20 +26,6 @@ error[E0463]: can't find crate for `wloop`
LL | extern crate wloop;
| ^^^^^^^^^^^^^^^^^^^ can't find crate
error: malformed `windows_subsystem` attribute input
--> $DIR/malformed-attrs.rs:26:1
|
LL | #![windows_subsystem]
| ^^^^^^^^^^^^^^^^^^^^^
|
= note: for more information, visit <https://doc.rust-lang.org/reference/runtime.html#the-windows_subsystem-attribute>
help: the following are the possible correct uses
|
LL | #![windows_subsystem = "console"]
| +++++++++++
LL | #![windows_subsystem = "windows"]
| +++++++++++
error: malformed `instruction_set` attribute input
--> $DIR/malformed-attrs.rs:112:1
|
@ -217,6 +203,22 @@ LL | #[doc]
= note: for more information, see issue #57571 <https://github.com/rust-lang/rust/issues/57571>
= note: for more information, visit <https://doc.rust-lang.org/rustdoc/write-documentation/the-doc-attribute.html>
error[E0539]: malformed `windows_subsystem` attribute input
--> $DIR/malformed-attrs.rs:26:1
|
LL | #![windows_subsystem]
| ^^^-----------------^
| |
| expected this to be of the form `windows_subsystem = "..."`
|
= note: for more information, visit <https://doc.rust-lang.org/reference/runtime.html#the-windows_subsystem-attribute>
help: try changing it to one of the following valid forms of the attribute
|
LL | #![windows_subsystem = "console"]
| +++++++++++
LL | #![windows_subsystem = "windows"]
| +++++++++++
error[E0539]: malformed `export_name` attribute input
--> $DIR/malformed-attrs.rs:29:1
|

View file

@ -0,0 +1,32 @@
error[E0308]: mismatched types
--> $DIR/fn-ret-trait-object-propagated-to-inputs-issue-149379-1.rs:16:33
|
LL | let _: Box<dyn Send> = foo(((), ()));
| ^^ expected trait object, found `()`
|
= note: expected trait object `dyn Send`
found unit type `()`
error[E0277]: the size for values of type `dyn Send` cannot be known at compilation time
--> $DIR/fn-ret-trait-object-propagated-to-inputs-issue-149379-1.rs:16:32
|
LL | let _: Box<dyn Send> = foo(((), ()));
| --- ^^^^^^^^ doesn't have a size known at compile-time
| |
| required by a bound introduced by this call
|
= help: the trait `Sized` is not implemented for `dyn Send`
note: required by an implicit `Sized` bound in `foo`
--> $DIR/fn-ret-trait-object-propagated-to-inputs-issue-149379-1.rs:10:8
|
LL | fn foo<T>(x: (T, ())) -> Box<T> {
| ^ required by the implicit `Sized` requirement on this type parameter in `foo`
help: consider relaxing the implicit `Sized` restriction
|
LL | fn foo<T: ?Sized>(x: (T, ())) -> Box<T> {
| ++++++++
error: aborting due to 2 previous errors
Some errors have detailed explanations: E0277, E0308.
For more information about an error, try `rustc --explain E0277`.

View file

@ -0,0 +1,32 @@
error[E0308]: mismatched types
--> $DIR/fn-ret-trait-object-propagated-to-inputs-issue-149379-1.rs:16:33
|
LL | let _: Box<dyn Send> = foo(((), ()));
| ^^ expected trait object, found `()`
|
= note: expected trait object `dyn Send`
found unit type `()`
error[E0277]: the size for values of type `dyn Send` cannot be known at compilation time
--> $DIR/fn-ret-trait-object-propagated-to-inputs-issue-149379-1.rs:16:32
|
LL | let _: Box<dyn Send> = foo(((), ()));
| --- ^^^^^^^^ doesn't have a size known at compile-time
| |
| required by a bound introduced by this call
|
= help: the trait `Sized` is not implemented for `dyn Send`
note: required by an implicit `Sized` bound in `foo`
--> $DIR/fn-ret-trait-object-propagated-to-inputs-issue-149379-1.rs:10:8
|
LL | fn foo<T>(x: (T, ())) -> Box<T> {
| ^ required by the implicit `Sized` requirement on this type parameter in `foo`
help: consider relaxing the implicit `Sized` restriction
|
LL | fn foo<T: ?Sized>(x: (T, ())) -> Box<T> {
| ++++++++
error: aborting due to 2 previous errors
Some errors have detailed explanations: E0277, E0308.
For more information about an error, try `rustc --explain E0277`.

View file

@ -0,0 +1,19 @@
//@ revisions: current next
//@ ignore-compare-mode-next-solver (explicit revisions)
//@[next] compile-flags: -Znext-solver
// FIXME(#149379): This should pass, but fails due to fudged expectation
// types which are potentially not well-formed or for whom the function
// where-bounds don't actually hold. And this results in weird bugs when
// later treating these expectations as if they were actually correct.
fn foo<T>(x: (T, ())) -> Box<T> {
Box::new(x.0)
}
fn main() {
// Uses expectation as its struct tail is sized, resulting in `(dyn Send, ())`
let _: Box<dyn Send> = foo(((), ()));
//~^ ERROR mismatched types
//~| ERROR the size for values of type `dyn Send` cannot be known at compilation time
}

View file

@ -0,0 +1,22 @@
error[E0277]: the size for values of type `dyn Send` cannot be known at compilation time
--> $DIR/fn-ret-trait-object-propagated-to-inputs-issue-149379-2.rs:15:38
|
LL | let _: Box<dyn Send> = sized_box(Box::new(1));
| --------- ^^^^^^^^^^^ doesn't have a size known at compile-time
| |
| required by a bound introduced by this call
|
= help: the trait `Sized` is not implemented for `dyn Send`
note: required by an implicit `Sized` bound in `sized_box`
--> $DIR/fn-ret-trait-object-propagated-to-inputs-issue-149379-2.rs:10:14
|
LL | fn sized_box<T>(x: Box<T>) -> Box<T> {
| ^ required by the implicit `Sized` requirement on this type parameter in `sized_box`
help: consider relaxing the implicit `Sized` restriction
|
LL | fn sized_box<T: ?Sized>(x: Box<T>) -> Box<T> {
| ++++++++
error: aborting due to 1 previous error
For more information about this error, try `rustc --explain E0277`.

View file

@ -0,0 +1,22 @@
error[E0277]: the size for values of type `dyn Send` cannot be known at compilation time
--> $DIR/fn-ret-trait-object-propagated-to-inputs-issue-149379-2.rs:15:38
|
LL | let _: Box<dyn Send> = sized_box(Box::new(1));
| --------- ^^^^^^^^^^^ doesn't have a size known at compile-time
| |
| required by a bound introduced by this call
|
= help: the trait `Sized` is not implemented for `dyn Send`
note: required by an implicit `Sized` bound in `sized_box`
--> $DIR/fn-ret-trait-object-propagated-to-inputs-issue-149379-2.rs:10:14
|
LL | fn sized_box<T>(x: Box<T>) -> Box<T> {
| ^ required by the implicit `Sized` requirement on this type parameter in `sized_box`
help: consider relaxing the implicit `Sized` restriction
|
LL | fn sized_box<T: ?Sized>(x: Box<T>) -> Box<T> {
| ++++++++
error: aborting due to 1 previous error
For more information about this error, try `rustc --explain E0277`.

View file

@ -0,0 +1,17 @@
//@ revisions: current next
//@ ignore-compare-mode-next-solver (explicit revisions)
//@[next] compile-flags: -Znext-solver
// FIXME(#149379): This should pass, but fails due to fudged expectation
// types which are potentially not well-formed or for whom the function
// where-bounds don't actually hold. And this results in weird bugs when
// later treating these expectations as if they were actually correct.
fn sized_box<T>(x: Box<T>) -> Box<T> {
x
}
fn main() {
let _: Box<dyn Send> = sized_box(Box::new(1));
//~^ ERROR the size for values of type `dyn Send` cannot be known at compilation time
}

View file

@ -0,0 +1,87 @@
error[E0277]: the size for values of type `dyn Send` cannot be known at compilation time
--> $DIR/fn-ret-trait-object-propagated-to-inputs-issue-149379-3.rs:32:55
|
LL | let _: Box<dyn Send> = field_to_box1(Foo { field: 1, tail: () });
| ^ doesn't have a size known at compile-time
|
= help: the trait `Sized` is not implemented for `dyn Send`
note: required by an implicit `Sized` bound in `Foo`
--> $DIR/fn-ret-trait-object-propagated-to-inputs-issue-149379-3.rs:10:12
|
LL | struct Foo<T> {
| ^ required by the implicit `Sized` requirement on this type parameter in `Foo`
help: you could relax the implicit `Sized` bound on `T` if it were used through indirection like `&T` or `Box<T>`
--> $DIR/fn-ret-trait-object-propagated-to-inputs-issue-149379-3.rs:10:12
|
LL | struct Foo<T> {
| ^ this could be changed to `T: ?Sized`...
LL | field: T,
| - ...if indirection were used here: `Box<T>`
error[E0308]: mismatched types
--> $DIR/fn-ret-trait-object-propagated-to-inputs-issue-149379-3.rs:32:55
|
LL | let _: Box<dyn Send> = field_to_box1(Foo { field: 1, tail: () });
| ^ expected trait object, found integer
|
= note: expected trait object `dyn Send`
found type `{integer}`
error[E0277]: the size for values of type `dyn Send` cannot be known at compilation time
--> $DIR/fn-ret-trait-object-propagated-to-inputs-issue-149379-3.rs:32:42
|
LL | let _: Box<dyn Send> = field_to_box1(Foo { field: 1, tail: () });
| ------------- ^^^^^^^^^^^^^^^^^^^^^^^^^^ doesn't have a size known at compile-time
| |
| required by a bound introduced by this call
|
= help: the trait `Sized` is not implemented for `dyn Send`
note: required by an implicit `Sized` bound in `field_to_box1`
--> $DIR/fn-ret-trait-object-propagated-to-inputs-issue-149379-3.rs:19:18
|
LL | fn field_to_box1<T>(x: Foo<T>) -> Box<T> {
| ^ required by the implicit `Sized` requirement on this type parameter in `field_to_box1`
help: consider relaxing the implicit `Sized` restriction
|
LL | fn field_to_box1<T: ?Sized>(x: Foo<T>) -> Box<T> {
| ++++++++
error[E0277]: the size for values of type `dyn Send` cannot be known at compilation time
--> $DIR/fn-ret-trait-object-propagated-to-inputs-issue-149379-3.rs:36:38
|
LL | let _: &dyn Send = field_to_box2(&Bar { field: 1 });
| ------------- ^^^^^^^^^^^^^^^^^ doesn't have a size known at compile-time
| |
| required by a bound introduced by this call
|
= help: the trait `Sized` is not implemented for `dyn Send`
note: required by an implicit `Sized` bound in `field_to_box2`
--> $DIR/fn-ret-trait-object-propagated-to-inputs-issue-149379-3.rs:23:18
|
LL | fn field_to_box2<T>(x: &Bar<T>) -> &T {
| ^ required by the implicit `Sized` requirement on this type parameter in `field_to_box2`
help: consider relaxing the implicit `Sized` restriction
|
LL | fn field_to_box2<T: ?Sized>(x: &Bar<T>) -> &T {
| ++++++++
error[E0308]: mismatched types
--> $DIR/fn-ret-trait-object-propagated-to-inputs-issue-149379-3.rs:38:38
|
LL | let _: &dyn Send = field_to_box3(&(1,));
| ------------- ^^^^^ expected `&(dyn Send,)`, found `&({integer},)`
| |
| arguments to this function are incorrect
|
= note: expected reference `&(dyn Send,)`
found reference `&({integer},)`
note: function defined here
--> $DIR/fn-ret-trait-object-propagated-to-inputs-issue-149379-3.rs:27:4
|
LL | fn field_to_box3<T>(x: &(T,)) -> &T {
| ^^^^^^^^^^^^^ --------
error: aborting due to 5 previous errors
Some errors have detailed explanations: E0277, E0308.
For more information about an error, try `rustc --explain E0277`.

Some files were not shown because too many files have changed in this diff Show more